Merge isolates to bleeding_edge.
Author:    vitalyr@chromium.org <vitalyr@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date:      Fri, 18 Mar 2011 18:49:56 +0000 (18:49 +0000)
Committer: vitalyr@chromium.org <vitalyr@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date:      Fri, 18 Mar 2011 18:49:56 +0000 (18:49 +0000)
Review URL: http://codereview.chromium.org/6685088

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7268 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

321 files changed:
AUTHORS
SConstruct
include/v8-debug.h
include/v8.h
samples/shell.cc
src/SConscript
src/accessors.cc
src/allocation-inl.h [new file with mode: 0644]
src/allocation.cc
src/allocation.h
src/api.cc
src/api.h
src/apiutils.h
src/arguments.h
src/arm/assembler-arm-inl.h
src/arm/assembler-arm.cc
src/arm/assembler-arm.h
src/arm/builtins-arm.cc
src/arm/code-stubs-arm.cc
src/arm/code-stubs-arm.h
src/arm/codegen-arm.cc
src/arm/codegen-arm.h
src/arm/cpu-arm.cc
src/arm/debug-arm.cc
src/arm/deoptimizer-arm.cc
src/arm/disasm-arm.cc
src/arm/frames-arm.h
src/arm/full-codegen-arm.cc
src/arm/ic-arm.cc
src/arm/lithium-arm.cc
src/arm/lithium-arm.h
src/arm/lithium-codegen-arm.cc
src/arm/lithium-codegen-arm.h
src/arm/lithium-gap-resolver-arm.cc
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/arm/regexp-macro-assembler-arm.cc
src/arm/regexp-macro-assembler-arm.h
src/arm/simulator-arm.cc
src/arm/simulator-arm.h
src/arm/stub-cache-arm.cc
src/arm/virtual-frame-arm.cc
src/arm/virtual-frame-arm.h
src/assembler.cc
src/assembler.h
src/ast.cc
src/ast.h
src/bootstrapper.cc
src/bootstrapper.h
src/builtins.cc
src/builtins.h
src/checks.cc
src/code-stubs.cc
src/codegen.cc
src/codegen.h
src/compilation-cache.cc
src/compilation-cache.h
src/compiler.cc
src/compiler.h
src/contexts.cc
src/contexts.h
src/conversions.cc
src/counters.cc
src/counters.h
src/cpu-profiler.cc
src/cpu-profiler.h
src/d8-debug.cc
src/d8-debug.h
src/d8.cc
src/data-flow.h
src/dateparser.h
src/debug-agent.cc
src/debug-agent.h
src/debug.cc
src/debug.h
src/deoptimizer.cc
src/deoptimizer.h
src/disasm.h
src/disassembler.cc
src/execution.cc
src/execution.h
src/extensions/externalize-string-extension.cc
src/extensions/gc-extension.cc
src/factory.cc
src/factory.h
src/flag-definitions.h
src/frame-element.cc
src/frame-element.h
src/frames-inl.h
src/frames.cc
src/frames.h
src/full-codegen.cc
src/full-codegen.h
src/func-name-inferrer.cc
src/func-name-inferrer.h
src/gdb-jit.cc
src/global-handles.cc
src/global-handles.h
src/handles-inl.h
src/handles.cc
src/handles.h
src/hashmap.h
src/heap-inl.h
src/heap-profiler.cc
src/heap-profiler.h
src/heap.cc
src/heap.h
src/hydrogen-instructions.cc
src/hydrogen-instructions.h
src/hydrogen.cc
src/ia32/assembler-ia32-inl.h
src/ia32/assembler-ia32.cc
src/ia32/assembler-ia32.h
src/ia32/builtins-ia32.cc
src/ia32/code-stubs-ia32.cc
src/ia32/code-stubs-ia32.h
src/ia32/codegen-ia32.cc
src/ia32/codegen-ia32.h
src/ia32/cpu-ia32.cc
src/ia32/debug-ia32.cc
src/ia32/deoptimizer-ia32.cc
src/ia32/disasm-ia32.cc
src/ia32/frames-ia32.h
src/ia32/full-codegen-ia32.cc
src/ia32/ic-ia32.cc
src/ia32/lithium-codegen-ia32.cc
src/ia32/lithium-codegen-ia32.h
src/ia32/lithium-gap-resolver-ia32.cc
src/ia32/lithium-ia32.cc
src/ia32/lithium-ia32.h
src/ia32/macro-assembler-ia32.cc
src/ia32/macro-assembler-ia32.h
src/ia32/regexp-macro-assembler-ia32.cc
src/ia32/regexp-macro-assembler-ia32.h
src/ia32/register-allocator-ia32.cc
src/ia32/simulator-ia32.h
src/ia32/stub-cache-ia32.cc
src/ia32/virtual-frame-ia32.cc
src/ia32/virtual-frame-ia32.h
src/ic-inl.h
src/ic.cc
src/ic.h
src/interpreter-irregexp.cc
src/interpreter-irregexp.h
src/isolate.cc [new file with mode: 0644]
src/isolate.h [new file with mode: 0644]
src/jsregexp.cc
src/jsregexp.h
src/jump-target-heavy.cc
src/jump-target-heavy.h
src/jump-target-inl.h
src/jump-target-light.cc
src/lithium-allocator.cc
src/lithium-allocator.h
src/lithium.cc
src/liveedit.cc
src/liveedit.h
src/liveobjectlist.h
src/log-utils.cc
src/log-utils.h
src/log.cc
src/log.h
src/mark-compact.cc
src/mark-compact.h
src/messages.cc
src/messages.js
src/mips/codegen-mips.cc
src/mips/disasm-mips.cc
src/mips/fast-codegen-mips.cc [deleted file]
src/mips/macro-assembler-mips.cc
src/mksnapshot.cc
src/objects-debug.cc
src/objects-inl.h
src/objects-visiting.h
src/objects.cc
src/objects.h
src/parser.cc
src/parser.h
src/platform-cygwin.cc
src/platform-freebsd.cc
src/platform-linux.cc
src/platform-macos.cc
src/platform-nullos.cc
src/platform-openbsd.cc
src/platform-posix.cc
src/platform-solaris.cc
src/platform-win32.cc
src/platform.h
src/preparse-data.cc
src/preparser-api.cc
src/preparser.cc
src/prettyprinter.cc
src/prettyprinter.h
src/profile-generator.cc
src/profile-generator.h
src/property.h
src/regexp-macro-assembler-irregexp.cc
src/regexp-macro-assembler.cc
src/regexp-macro-assembler.h
src/regexp-stack.cc
src/regexp-stack.h
src/register-allocator-inl.h
src/register-allocator.cc
src/register-allocator.h
src/rewriter.cc
src/runtime-profiler.cc
src/runtime-profiler.h
src/runtime.cc
src/runtime.h
src/safepoint-table.cc
src/safepoint-table.h
src/scanner-base.cc
src/scanner-base.h
src/scanner.cc
src/scanner.h
src/scopeinfo.cc
src/scopeinfo.h
src/scopes.cc
src/scopes.h
src/serialize.cc
src/serialize.h
src/snapshot-common.cc
src/snapshot.h
src/spaces-inl.h
src/spaces.cc
src/spaces.h
src/string-search.cc
src/string-search.h
src/string-stream.cc
src/stub-cache.cc
src/stub-cache.h
src/token.cc
src/token.h
src/top.cc
src/top.h [deleted file]
src/type-info.cc
src/unicode.cc
src/unicode.h
src/v8-counters.cc
src/v8-counters.h
src/v8.cc
src/v8.h
src/v8globals.h
src/v8threads.cc
src/v8threads.h
src/variables.h
src/version.cc
src/version.h
src/virtual-frame-light-inl.h
src/vm-state-inl.h
src/vm-state.h
src/x64/assembler-x64-inl.h
src/x64/assembler-x64.cc
src/x64/assembler-x64.h
src/x64/builtins-x64.cc
src/x64/code-stubs-x64.cc
src/x64/codegen-x64.cc
src/x64/codegen-x64.h
src/x64/cpu-x64.cc
src/x64/debug-x64.cc
src/x64/deoptimizer-x64.cc
src/x64/disasm-x64.cc
src/x64/frames-x64.h
src/x64/full-codegen-x64.cc
src/x64/ic-x64.cc
src/x64/lithium-codegen-x64.cc
src/x64/lithium-codegen-x64.h
src/x64/lithium-x64.cc
src/x64/lithium-x64.h
src/x64/macro-assembler-x64.cc
src/x64/macro-assembler-x64.h
src/x64/regexp-macro-assembler-x64.cc
src/x64/regexp-macro-assembler-x64.h
src/x64/register-allocator-x64.cc
src/x64/simulator-x64.h
src/x64/stub-cache-x64.cc
src/x64/virtual-frame-x64.cc
src/x64/virtual-frame-x64.h
src/zone-inl.h
src/zone.cc
src/zone.h
test/cctest/cctest.h
test/cctest/test-accessors.cc
test/cctest/test-alloc.cc
test/cctest/test-api.cc
test/cctest/test-assembler-arm.cc
test/cctest/test-assembler-ia32.cc
test/cctest/test-assembler-x64.cc
test/cctest/test-ast.cc
test/cctest/test-circular-queue.cc
test/cctest/test-compiler.cc
test/cctest/test-cpu-profiler.cc
test/cctest/test-dataflow.cc
test/cctest/test-debug.cc
test/cctest/test-decls.cc
test/cctest/test-deoptimization.cc
test/cctest/test-disasm-arm.cc
test/cctest/test-disasm-ia32.cc
test/cctest/test-func-name-inference.cc
test/cctest/test-heap-profiler.cc
test/cctest/test-heap.cc
test/cctest/test-liveedit.cc
test/cctest/test-log-stack-tracer.cc
test/cctest/test-log.cc
test/cctest/test-macro-assembler-x64.cc
test/cctest/test-mark-compact.cc
test/cctest/test-parsing.cc
test/cctest/test-platform-linux.cc
test/cctest/test-platform-win32.cc
test/cctest/test-profile-generator.cc
test/cctest/test-regexp.cc
test/cctest/test-serialize.cc
test/cctest/test-sockets.cc
test/cctest/test-spaces.cc
test/cctest/test-strings.cc
test/cctest/test-thread-termination.cc
test/cctest/test-threads.cc
test/cctest/test-utils.cc
test/mjsunit/testcfg.py
tools/gyp/v8.gyp
tools/test.py

diff --git a/AUTHORS b/AUTHORS
index 92b69cb686b3ba63499ca732becc572fc1f0dd22..99b5ffc5fad2018ba32d7e9bb03e78e5386eb01b 100644 (file)
--- a/AUTHORS
+++ b/AUTHORS
@@ -24,8 +24,9 @@ Jay Freeman <saurik@saurik.com>
 Joel Stanley <joel.stan@gmail.com>
 John Jozwiak <jjozwiak@codeaurora.org>
 Kun Zhang <zhangk@codeaurora.org>
-Matt Hanselman <mjhanselman@gmail.com>
 Martyn Capewell <martyn.capewell@arm.com>
+Matt Hanselman <mjhanselman@gmail.com>
+Maxim Mossienko <maxim.mossienko@gmail.com>
 Michael Smith <mike@w3.org>
 Mike Gilbert <floppymaster@gmail.com>
 Paolo Giarrusso <p.giarrusso@gmail.com>
index 8b09ca160757c47599e50a178269b37f0661f7b7..a6de2b40767fea72473e6cf86118d20809d1adf6 100644 (file)
@@ -578,7 +578,7 @@ SAMPLE_FLAGS = {
       }
     },
     'arch:ia32': {
-      'CPPDEFINES': ['V8_TARGET_ARCH_IA32'],
+      'CPPDEFINES': ['V8_TARGET_ARCH_IA32', 'WIN32'],
       'LINKFLAGS': ['/MACHINE:X86']
     },
     'arch:x64': {
index f17b848550d0f0736ebda3b7dfa3cd506227f4c4..10e8bef83be03576293a482d934c92f6115d9110 100755 (executable)
@@ -227,7 +227,7 @@ class EXPORT Debug {
    * Debug message callback function.
    *
    * \param message the debug message handler message object
-
+   *
    * A MessageHandler does not take possession of the message data,
    * and must not rely on the data persisting after the handler returns.
    */
@@ -253,17 +253,23 @@ class EXPORT Debug {
   static bool SetDebugEventListener(v8::Handle<v8::Object> that,
                                     Handle<Value> data = Handle<Value>());
 
-  // Schedule a debugger break to happen when JavaScript code is run.
-  static void DebugBreak();
-
-  // Remove scheduled debugger break if it has not happened yet.
-  static void CancelDebugBreak();
-
-  // Break execution of JavaScript (this method can be invoked from a
-  // non-VM thread) for further client command execution on a VM
-  // thread. Client data is then passed in EventDetails to
-  // EventCallback at the moment when the VM actually stops.
-  static void DebugBreakForCommand(ClientData* data = NULL);
+  // Schedule a debugger break to happen when JavaScript code is run
+  // in the given isolate. If no isolate is provided the default
+  // isolate is used.
+  static void DebugBreak(Isolate* isolate = NULL);
+
+  // Remove scheduled debugger break in given isolate if it has not
+  // happened yet. If no isolate is provided the default isolate is
+  // used.
+  static void CancelDebugBreak(Isolate* isolate = NULL);
+
+  // Break execution of JavaScript in the given isolate (this method
+  // can be invoked from a non-VM thread) for further client command
+  // execution on a VM thread. Client data is then passed in
+  // EventDetails to EventCallback at the moment when the VM actually
+  // stops. If no isolate is provided the default isolate is used.
+  static void DebugBreakForCommand(ClientData* data = NULL,
+                                   Isolate* isolate = NULL);
 
   // Message based interface. The message protocol is JSON. NOTE the message
   // handler thread is not supported any more parameter must be false.
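Because DebugBreak, CancelDebugBreak and DebugBreakForCommand now take an optional Isolate*, a debugger front end running on its own thread can direct the break at a specific isolate instead of the process-wide default. A minimal sketch of that usage (the surrounding debugger plumbing is illustrative and assumes a build with debugger support; only the two v8::Debug calls come from this change):

// Ask the given isolate to stop at the next JavaScript statement; passing
// NULL keeps the old behaviour of breaking in the default isolate.
void ScheduleBreak(v8::Isolate* isolate) {
  v8::Debug::DebugBreak(isolate);
}

// Withdraw the request if the isolate never ran JavaScript in the meantime.
void WithdrawBreak(v8::Isolate* isolate) {
  v8::Debug::CancelDebugBreak(isolate);
}
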
index 7875cfaf205e453b56a9d494c11351e159597267..62d1085c20e6c4a7665c6e4559a8198816b56812 100644 (file)
@@ -110,7 +110,8 @@ namespace internal {
 class Arguments;
 class Object;
 class Heap;
-class Top;
+class HeapObject;
+class Isolate;
 }
 
 
@@ -443,6 +444,8 @@ class V8EXPORT HandleScope {
    * Creates a new handle with the given value.
    */
   static internal::Object** CreateHandle(internal::Object* value);
+  // Faster version, uses HeapObject to obtain the current Isolate.
+  static internal::Object** CreateHandle(internal::HeapObject* value);
 
  private:
   // Make it impossible to create heap-allocated or illegal handle
@@ -459,7 +462,6 @@ class V8EXPORT HandleScope {
     internal::Object** next;
     internal::Object** limit;
     int level;
-
     inline void Initialize() {
       next = limit = NULL;
       level = 0;
@@ -468,6 +470,7 @@ class V8EXPORT HandleScope {
 
   void Leave();
 
+  internal::Isolate* isolate_;
   internal::Object** prev_next_;
   internal::Object** prev_limit_;
 
@@ -2547,6 +2550,90 @@ class V8EXPORT HeapStatistics {
 
 class RetainedObjectInfo;
 
+/**
+ * Isolate represents an isolated instance of the V8 engine.  V8
+ * isolates have completely separate states.  Objects from one isolate
+ * must not be used in other isolates.  When V8 is initialized a
+ * default isolate is implicitly created and entered.  The embedder
+ * can create additional isolates and use them in parallel in multiple
+ * threads.  An isolate can be entered by at most one thread at any
+ * given time.  The Locker/Unlocker API can be used to synchronize.
+ */
+class V8EXPORT Isolate {
+ public:
+  /**
+   * Stack-allocated class which sets the isolate for all operations
+   * executed within a local scope.
+   */
+  class V8EXPORT Scope {
+   public:
+    explicit Scope(Isolate* isolate) : isolate_(isolate) {
+      isolate->Enter();
+    }
+
+    ~Scope() { isolate_->Exit(); }
+
+   private:
+    Isolate* const isolate_;
+
+    // Prevent copying of Scope objects.
+    Scope(const Scope&);
+    Scope& operator=(const Scope&);
+  };
+
+  /**
+   * Creates a new isolate.  Does not change the currently entered
+   * isolate.
+   *
+   * When an isolate is no longer used its resources should be freed
+   * by calling Dispose().  Using the delete operator is not allowed.
+   */
+  static Isolate* New();
+
+  /**
+   * Returns the entered isolate for the current thread or NULL in
+   * case there is no current isolate.
+   */
+  static Isolate* GetCurrent();
+
+  /**
+   * Methods below this point require holding a lock (using Locker) in
+   * a multi-threaded environment.
+   */
+
+  /**
+   * Sets this isolate as the entered one for the current thread.
+   * Saves the previously entered one (if any), so that it can be
+   * restored when exiting.  Re-entering an isolate is allowed.
+   */
+  void Enter();
+
+  /**
+   * Exits this isolate by restoring the previously entered one in the
+   * current thread.  The isolate may still stay the same, if it was
+   * entered more than once.
+   *
+   * Requires: this == Isolate::GetCurrent().
+   */
+  void Exit();
+
+  /**
+   * Disposes the isolate.  The isolate must not be entered by any
+   * thread to be disposable.
+   */
+  void Dispose();
+
+ private:
+
+  Isolate();
+  Isolate(const Isolate&);
+  ~Isolate();
+  Isolate& operator=(const Isolate&);
+  void* operator new(size_t size);
+  void operator delete(void*, size_t);
+};
+
+
 /**
  * Container class for static utility functions.
  */
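Taken together, the new class gives embedders an explicit lifecycle: create an isolate with New(), enter it (directly or through a stack-allocated Scope), do all V8 work while entered, and release it with Dispose() once no thread is inside. A minimal sketch of that lifecycle, assuming single-threaded use so no Locker is needed (the script being run is just an example):

#include <v8.h>

void RunInFreshIsolate() {
  v8::Isolate* isolate = v8::Isolate::New();    // does not enter the isolate
  {
    v8::Isolate::Scope isolate_scope(isolate);  // Enter() now, Exit() at scope end
    v8::HandleScope handle_scope;
    v8::Persistent<v8::Context> context = v8::Context::New();
    {
      v8::Context::Scope context_scope(context);
      v8::Handle<v8::Script> script =
          v8::Script::Compile(v8::String::New("6 * 7"));
      script->Run();                            // runs entirely inside `isolate`
    }
    context.Dispose();
  }
  isolate->Dispose();                           // legal only once no thread is entered
}
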
@@ -2872,12 +2959,16 @@ class V8EXPORT V8 {
   static void TerminateExecution(int thread_id);
 
   /**
-   * Forcefully terminate the current thread of JavaScript execution.
+   * Forcefully terminate the current thread of JavaScript execution
+   * in the given isolate. If no isolate is provided, the default
+   * isolate is used.
    *
    * This method can be used by any thread even if that thread has not
    * acquired the V8 lock with a Locker object.
+   *
+   * \param isolate The isolate in which to terminate the current JS execution.
    */
-  static void TerminateExecution();
+  static void TerminateExecution(Isolate* isolate = NULL);
 
   /**
    * Is V8 terminating JavaScript execution.
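With the isolate named explicitly, a watchdog thread that has never entered V8 can still abort runaway script in one particular isolate. A rough sketch, with the timeout mechanism left out (only the TerminateExecution call is part of this API):

// Called from a watchdog/timer thread; it does not need to hold the Locker
// or to have entered the isolate it is terminating.
void AbortLongRunningScript(v8::Isolate* isolate) {
  v8::V8::TerminateExecution(isolate);  // NULL would target the default isolate
}
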
@@ -3055,7 +3146,7 @@ class V8EXPORT TryCatch {
   bool capture_message_ : 1;
   bool rethrow_ : 1;
 
-  friend class v8::internal::Top;
+  friend class v8::internal::Isolate;
 };
 
 
@@ -3218,15 +3309,26 @@ class V8EXPORT Context {
 
 /**
  * Multiple threads in V8 are allowed, but only one thread at a time
- * is allowed to use V8.  The definition of 'using V8' includes
- * accessing handles or holding onto object pointers obtained from V8
- * handles.  It is up to the user of V8 to ensure (perhaps with
- * locking) that this constraint is not violated.
+ * is allowed to use any given V8 isolate. See Isolate class
+ * comments. The definition of 'using V8 isolate' includes
+ * accessing handles or holding onto object pointers obtained
+ * from V8 handles while in the particular V8 isolate.  It is up
+ * to the user of V8 to ensure (perhaps with locking) that this
+ * constraint is not violated.
+ *
+ * More than one thread and multiple V8 isolates can be used
+ * without any locking if each isolate is created and accessed
+ * by a single thread only. For example, one thread can use
+ * multiple isolates or multiple threads can each create and run
+ * their own isolate.
  *
- * If you wish to start using V8 in a thread you can do this by constructing
- * a v8::Locker object.  After the code using V8 has completed for the
- * current thread you can call the destructor.  This can be combined
- * with C++ scope-based construction as follows:
+ * If you wish to start using a V8 isolate in more than one thread
+ * you can do this by constructing a v8::Locker object to guard
+ * access to the isolate. After the code using V8 has completed
+ * for the current thread you can call the destructor.  This can
+ * be combined with C++ scope-based construction as follows
+ * (assumes the default isolate that is used if not specified as
+ * a parameter for the Locker):
  *
  * \code
  * ...
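The updated comment distinguishes two patterns: no Locker is needed when each isolate is confined to a single thread, and a single thread may also move between several isolates as long as it enters and exits them properly. A small sketch of the second pattern (the work done inside each isolate is illustrative):

// One thread, two isolates: each Scope enters one isolate, works in it and
// exits again, so no Locker is required anywhere.
void UseTwoIsolatesFromOneThread(v8::Isolate* a, v8::Isolate* b) {
  {
    v8::Isolate::Scope scope_a(a);
    v8::HandleScope handles;
    // ... create contexts / run scripts that belong to isolate `a` ...
  }
  {
    v8::Isolate::Scope scope_b(b);
    v8::HandleScope handles;
    // ... objects created here must never be mixed with those from `a` ...
  }
}
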
@@ -3468,7 +3570,7 @@ class Internals {
   // These values match non-compiler-dependent values defined within
   // the implementation of v8.
   static const int kHeapObjectMapOffset = 0;
-  static const int kMapInstanceTypeOffset = kApiPointerSize + kApiIntSize;
+  static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize;
   static const int kStringResourceOffset =
       InternalConstants<kApiPointerSize>::kStringResourceOffset;
 
@@ -3525,6 +3627,14 @@ class Internals {
     uint8_t* addr = reinterpret_cast<uint8_t*>(ptr) + offset - kHeapObjectTag;
     return *reinterpret_cast<T*>(addr);
   }
+
+  static inline bool CanCastToHeapObject(void* o) { return false; }
+  static inline bool CanCastToHeapObject(Context* o) { return true; }
+  static inline bool CanCastToHeapObject(String* o) { return true; }
+  static inline bool CanCastToHeapObject(Object* o) { return true; }
+  static inline bool CanCastToHeapObject(Message* o) { return true; }
+  static inline bool CanCastToHeapObject(StackTrace* o) { return true; }
+  static inline bool CanCastToHeapObject(StackFrame* o) { return true; }
 };
 
 }  // namespace internal
@@ -3541,7 +3651,12 @@ Local<T>::Local() : Handle<T>() { }
 template <class T>
 Local<T> Local<T>::New(Handle<T> that) {
   if (that.IsEmpty()) return Local<T>();
-  internal::Object** p = reinterpret_cast<internal::Object**>(*that);
+  T* that_ptr = *that;
+  internal::Object** p = reinterpret_cast<internal::Object**>(that_ptr);
+  if (internal::Internals::CanCastToHeapObject(that_ptr)) {
+    return Local<T>(reinterpret_cast<T*>(HandleScope::CreateHandle(
+        reinterpret_cast<internal::HeapObject*>(*p))));
+  }
   return Local<T>(reinterpret_cast<T*>(HandleScope::CreateHandle(*p)));
 }
 
index fc3d76ce782fe0650b99b54e115fcc11098d45f9..d4aad9170cf67cfd6bd47ba94500308e61f2a6b3 100644 (file)
 #include <stdio.h>
 #include <stdlib.h>
 
+#include "../src/v8.h"
 
+// TODO(isolates):
+//   o Either use V8 internal platform stuff for every platform or
+//     re-implement it.
+//   o Do not assume not WIN32 implies pthreads.
+#ifndef WIN32
+#include <pthread.h>  // NOLINT
+#include <unistd.h>  // NOLINT
+#endif
+
+static void ExitShell(int exit_code) {
+  // Use _exit instead of exit to avoid races between isolate
+  // threads and static destructors.
+  fflush(stdout);
+  fflush(stderr);
+  _exit(exit_code);
+}
+
+v8::Persistent<v8::Context> CreateShellContext();
 void RunShell(v8::Handle<v8::Context> context);
 bool ExecuteString(v8::Handle<v8::String> source,
                    v8::Handle<v8::Value> name,
@@ -48,63 +67,193 @@ v8::Handle<v8::String> ReadFile(const char* name);
 void ReportException(v8::TryCatch* handler);
 
 
+#ifndef WIN32
+void* IsolateThreadEntry(void* arg);
+#endif
+
+static bool last_run = true;
+
+class SourceGroup {
+ public:
+  SourceGroup() : argv_(NULL),
+                  begin_offset_(0),
+                  end_offset_(0),
+                  next_semaphore_(NULL),
+                  done_semaphore_(NULL) {
+#ifndef WIN32
+    next_semaphore_ = v8::internal::OS::CreateSemaphore(0);
+    done_semaphore_ = v8::internal::OS::CreateSemaphore(0);
+    thread_ = 0;
+#endif
+  }
+
+  void Begin(char** argv, int offset) {
+    argv_ = const_cast<const char**>(argv);
+    begin_offset_ = offset;
+  }
+
+  void End(int offset) { end_offset_ = offset; }
+
+  void Execute() {
+    for (int i = begin_offset_; i < end_offset_; ++i) {
+      const char* arg = argv_[i];
+      if (strcmp(arg, "-e") == 0 && i + 1 < end_offset_) {
+        // Execute argument given to -e option directly.
+        v8::HandleScope handle_scope;
+        v8::Handle<v8::String> file_name = v8::String::New("unnamed");
+        v8::Handle<v8::String> source = v8::String::New(argv_[i + 1]);
+        if (!ExecuteString(source, file_name, false, true)) {
+          ExitShell(1);
+          return;
+        }
+        ++i;
+      } else if (arg[0] == '-') {
+        // Ignore other options. They have been parsed already.
+      } else {
+        // Use all other arguments as names of files to load and run.
+        v8::HandleScope handle_scope;
+        v8::Handle<v8::String> file_name = v8::String::New(arg);
+        v8::Handle<v8::String> source = ReadFile(arg);
+        if (source.IsEmpty()) {
+          printf("Error reading '%s'\n", arg);
+        }
+        if (!ExecuteString(source, file_name, false, true)) {
+          ExitShell(1);
+          return;
+        }
+      }
+    }
+  }
+
+#ifdef WIN32
+  void StartExecuteInThread() { ExecuteInThread(); }
+  void WaitForThread() {}
+
+#else
+  void StartExecuteInThread() {
+    if (thread_ == 0) {
+      pthread_attr_t attr;
+      // On some systems (OSX 10.6) the stack size default is 0.5Mb or less
+      // which is not enough to parse the big literal expressions used in tests.
+      // The stack size should be at least StackGuard::kLimitSize + some
+      // OS-specific padding for thread startup code.
+      size_t stacksize = 2 << 20;  // 2 Mb seems to be enough
+      pthread_attr_init(&attr);
+      pthread_attr_setstacksize(&attr, stacksize);
+      int error = pthread_create(&thread_, &attr, &IsolateThreadEntry, this);
+      if (error != 0) {
+        fprintf(stderr, "Error creating isolate thread.\n");
+        ExitShell(1);
+      }
+    }
+    next_semaphore_->Signal();
+  }
+
+  void WaitForThread() {
+    if (thread_ == 0) return;
+    if (last_run) {
+      pthread_join(thread_, NULL);
+      thread_ = 0;
+    } else {
+      done_semaphore_->Wait();
+    }
+  }
+#endif  // WIN32
+
+ private:
+  void ExecuteInThread() {
+    v8::Isolate* isolate = v8::Isolate::New();
+    do {
+      if (next_semaphore_ != NULL) next_semaphore_->Wait();
+      {
+        v8::Isolate::Scope iscope(isolate);
+        v8::HandleScope scope;
+        v8::Persistent<v8::Context> context = CreateShellContext();
+        {
+          v8::Context::Scope cscope(context);
+          Execute();
+        }
+        context.Dispose();
+      }
+      if (done_semaphore_ != NULL) done_semaphore_->Signal();
+    } while (!last_run);
+    isolate->Dispose();
+  }
+
+  const char** argv_;
+  int begin_offset_;
+  int end_offset_;
+  v8::internal::Semaphore* next_semaphore_;
+  v8::internal::Semaphore* done_semaphore_;
+#ifndef WIN32
+  pthread_t thread_;
+#endif
+
+  friend void* IsolateThreadEntry(void* arg);
+};
+
+#ifndef WIN32
+void* IsolateThreadEntry(void* arg) {
+  reinterpret_cast<SourceGroup*>(arg)->ExecuteInThread();
+  return NULL;
+}
+#endif
+
+
+static SourceGroup* isolate_sources = NULL;
+
+
 int RunMain(int argc, char* argv[]) {
+  v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
   v8::HandleScope handle_scope;
-  // Create a template for the global object.
-  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New();
-  // Bind the global 'print' function to the C++ Print callback.
-  global->Set(v8::String::New("print"), v8::FunctionTemplate::New(Print));
-  // Bind the global 'read' function to the C++ Read callback.
-  global->Set(v8::String::New("read"), v8::FunctionTemplate::New(Read));
-  // Bind the global 'load' function to the C++ Load callback.
-  global->Set(v8::String::New("load"), v8::FunctionTemplate::New(Load));
-  // Bind the 'quit' function
-  global->Set(v8::String::New("quit"), v8::FunctionTemplate::New(Quit));
-  // Bind the 'version' function
-  global->Set(v8::String::New("version"), v8::FunctionTemplate::New(Version));
-  // Create a new execution environment containing the built-in
-  // functions
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Persistent<v8::Context> context = CreateShellContext();
+  // Enter the newly created execution environment.
+  context->Enter();
   if (context.IsEmpty()) {
     printf("Error creating context\n");
     return 1;
   }
 
   bool run_shell = (argc == 1);
+  int num_isolates = 1;
   for (int i = 1; i < argc; i++) {
-    // Enter the execution environment before evaluating any code.
-    v8::Context::Scope context_scope(context);
-    const char* str = argv[i];
-    if (strcmp(str, "--shell") == 0) {
-      run_shell = true;
-    } else if (strcmp(str, "-f") == 0) {
-      // Ignore any -f flags for compatibility with the other stand-
-      // alone JavaScript engines.
-      continue;
-    } else if (strncmp(str, "--", 2) == 0) {
-      printf("Warning: unknown flag %s.\nTry --help for options\n", str);
-    } else if (strcmp(str, "-e") == 0 && i + 1 < argc) {
-      // Execute argument given to -e option directly
-      v8::HandleScope handle_scope;
-      v8::Handle<v8::String> file_name = v8::String::New("unnamed");
-      v8::Handle<v8::String> source = v8::String::New(argv[i + 1]);
-      if (!ExecuteString(source, file_name, false, true))
-        return 1;
-      i++;
-    } else {
-      // Use all other arguments as names of files to load and run.
-      v8::HandleScope handle_scope;
-      v8::Handle<v8::String> file_name = v8::String::New(str);
-      v8::Handle<v8::String> source = ReadFile(str);
-      if (source.IsEmpty()) {
-        printf("Error reading '%s'\n", str);
-        return 1;
+    if (strcmp(argv[i], "--isolate") == 0) ++num_isolates;
+  }
+  if (isolate_sources == NULL) {
+    isolate_sources = new SourceGroup[num_isolates];
+    SourceGroup* current = isolate_sources;
+    current->Begin(argv, 1);
+    for (int i = 1; i < argc; i++) {
+      const char* str = argv[i];
+      if (strcmp(str, "--isolate") == 0) {
+        current->End(i);
+        current++;
+        current->Begin(argv, i + 1);
+      } else if (strcmp(str, "--shell") == 0) {
+        run_shell = true;
+      } else if (strcmp(str, "-f") == 0) {
+        // Ignore any -f flags for compatibility with the other stand-
+        // alone JavaScript engines.
+        continue;
+      } else if (strncmp(str, "--", 2) == 0) {
+        printf("Warning: unknown flag %s.\nTry --help for options\n", str);
       }
-      if (!ExecuteString(source, file_name, false, true))
-        return 1;
     }
+    current->End(argc);
   }
+  for (int i = 1; i < num_isolates; ++i) {
+    isolate_sources[i].StartExecuteInThread();
+  }
+  isolate_sources[0].Execute();
   if (run_shell) RunShell(context);
+  for (int i = 1; i < num_isolates; ++i) {
+    isolate_sources[i].WaitForThread();
+  }
+  if (last_run) {
+    delete[] isolate_sources;
+    isolate_sources = NULL;
+  }
+  context->Exit();
   context.Dispose();
   return 0;
 }
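The rewritten RunMain splits the command line into SourceGroups at each --isolate flag: group 0 runs on the main thread, and every further group gets its own thread and its own v8::Isolate. So an invocation along these lines (file names are placeholders) runs the two scripts in parallel in separate isolates:

  ./shell first.js --isolate second.js
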
@@ -142,6 +291,7 @@ int main(int argc, char* argv[]) {
       printf("============ Stress %d/%d ============\n",
              i + 1, stress_runs);
       v8::Testing::PrepareStressRun(i);
+      last_run = (i == stress_runs - 1);
       result = RunMain(argc, argv);
     }
     printf("======== Full Deoptimization =======\n");
@@ -160,6 +310,25 @@ const char* ToCString(const v8::String::Utf8Value& value) {
 }
 
 
+// Creates a new execution environment containing the built-in
+// functions.
+v8::Persistent<v8::Context> CreateShellContext() {
+  // Create a template for the global object.
+  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New();
+  // Bind the global 'print' function to the C++ Print callback.
+  global->Set(v8::String::New("print"), v8::FunctionTemplate::New(Print));
+  // Bind the global 'read' function to the C++ Read callback.
+  global->Set(v8::String::New("read"), v8::FunctionTemplate::New(Read));
+  // Bind the global 'load' function to the C++ Load callback.
+  global->Set(v8::String::New("load"), v8::FunctionTemplate::New(Load));
+  // Bind the 'quit' function
+  global->Set(v8::String::New("quit"), v8::FunctionTemplate::New(Quit));
+  // Bind the 'version' function
+  global->Set(v8::String::New("version"), v8::FunctionTemplate::New(Version));
+  return v8::Context::New(NULL, global);
+}
+
+
 // The callback that is invoked by v8 whenever the JavaScript 'print'
 // function is called.  Prints its arguments on stdout separated by
 // spaces and ending with a newline.
@@ -229,7 +398,7 @@ v8::Handle<v8::Value> Quit(const v8::Arguments& args) {
   // If not arguments are given args[0] will yield undefined which
   // converts to the integer value 0.
   int exit_code = args[0]->Int32Value();
-  exit(exit_code);
+  ExitShell(exit_code);
   return v8::Undefined();
 }
 
index 34ca91ca65b20b3cc96b50ccbdfa2518b8e0d4ba..3a01dd2ec44214323a53700ef0abdd47008d3bc8 100755 (executable)
@@ -84,6 +84,7 @@ SOURCES = {
     ic.cc
     inspector.cc
     interpreter-irregexp.cc
+    isolate.cc
     jsregexp.cc
     jump-target.cc
     lithium-allocator.cc
index ccd9da995c70c36f27b3062c7c9932ec2e655aed..7326aed4ca8fa2777be46717c760c38d45a31d17 100644 (file)
@@ -34,7 +34,6 @@
 #include "factory.h"
 #include "safepoint-table.h"
 #include "scopeinfo.h"
-#include "top.h"
 
 namespace v8 {
 namespace internal {
@@ -43,8 +42,9 @@ namespace internal {
 template <class C>
 static C* FindInPrototypeChain(Object* obj, bool* found_it) {
   ASSERT(!*found_it);
+  Heap* heap = HEAP;
   while (!Is<C>(obj)) {
-    if (obj == Heap::null_value()) return NULL;
+    if (obj == heap->null_value()) return NULL;
     obj = obj->GetPrototype();
   }
   *found_it = true;
@@ -90,10 +90,10 @@ MaybeObject* Accessors::ArrayGetLength(Object* object, void*) {
 Object* Accessors::FlattenNumber(Object* value) {
   if (value->IsNumber() || !value->IsJSValue()) return value;
   JSValue* wrapper = JSValue::cast(value);
-  ASSERT(
-      Top::context()->global_context()->number_function()->has_initial_map());
-  Map* number_map =
-      Top::context()->global_context()->number_function()->initial_map();
+  ASSERT(Isolate::Current()->context()->global_context()->number_function()->
+      has_initial_map());
+  Map* number_map = Isolate::Current()->context()->global_context()->
+      number_function()->initial_map();
   if (wrapper->map() == number_map) return wrapper->value();
   return value;
 }
@@ -126,12 +126,13 @@ MaybeObject* Accessors::ArraySetLength(JSObject* object, Object* value, void*) {
       // This means one of the object's prototypes is a JSArray and
       // the object does not have a 'length' property.
       // Calling SetProperty causes an infinite loop.
-      return object->SetLocalPropertyIgnoreAttributes(Heap::length_symbol(),
+      return object->SetLocalPropertyIgnoreAttributes(HEAP->length_symbol(),
                                                       value, NONE);
     }
   }
-  return Top::Throw(*Factory::NewRangeError("invalid_array_length",
-                                            HandleVector<Object>(NULL, 0)));
+  return Isolate::Current()->Throw(
+      *FACTORY->NewRangeError("invalid_array_length",
+          HandleVector<Object>(NULL, 0)));
 }
 
 
@@ -320,9 +321,9 @@ MaybeObject* Accessors::ScriptGetLineEnds(Object* object, void*) {
   ASSERT(script->line_ends()->IsFixedArray());
   Handle<FixedArray> line_ends(FixedArray::cast(script->line_ends()));
   // We do not want anyone to modify this array from JS.
-  ASSERT(*line_ends == Heap::empty_fixed_array() ||
-         line_ends->map() == Heap::fixed_cow_array_map());
-  Handle<JSArray> js_array = Factory::NewJSArrayWithElements(line_ends);
+  ASSERT(*line_ends == HEAP->empty_fixed_array() ||
+         line_ends->map() == HEAP->fixed_cow_array_map());
+  Handle<JSArray> js_array = FACTORY->NewJSArrayWithElements(line_ends);
   return *js_array;
 }
 
@@ -368,7 +369,7 @@ MaybeObject* Accessors::ScriptGetEvalFromScript(Object* object, void*) {
       return *GetScriptWrapper(eval_from_script);
     }
   }
-  return Heap::undefined_value();
+  return HEAP->undefined_value();
 }
 
 
@@ -391,7 +392,7 @@ MaybeObject* Accessors::ScriptGetEvalFromScriptPosition(Object* object, void*) {
   // If this is not a script compiled through eval there is no eval position.
   int compilation_type = Smi::cast(script->compilation_type())->value();
   if (compilation_type != Script::COMPILATION_TYPE_EVAL) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   // Get the function from where eval was called and find the source position
@@ -445,7 +446,7 @@ const AccessorDescriptor Accessors::ScriptEvalFromFunctionName = {
 MaybeObject* Accessors::FunctionGetPrototype(Object* object, void*) {
   bool found_it = false;
   JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
-  if (!found_it) return Heap::undefined_value();
+  if (!found_it) return HEAP->undefined_value();
   while (!function->should_have_prototype()) {
     found_it = false;
     function = FindInPrototypeChain<JSFunction>(object->GetPrototype(),
@@ -456,7 +457,7 @@ MaybeObject* Accessors::FunctionGetPrototype(Object* object, void*) {
 
   if (!function->has_prototype()) {
     Object* prototype;
-    { MaybeObject* maybe_prototype = Heap::AllocateFunctionPrototype(function);
+    { MaybeObject* maybe_prototype = HEAP->AllocateFunctionPrototype(function);
       if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
     }
     Object* result;
@@ -473,10 +474,10 @@ MaybeObject* Accessors::FunctionSetPrototype(JSObject* object,
                                              void*) {
   bool found_it = false;
   JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
-  if (!found_it) return Heap::undefined_value();
+  if (!found_it) return HEAP->undefined_value();
   if (!function->should_have_prototype()) {
     // Since we hit this accessor, object will have no prototype property.
-    return object->SetLocalPropertyIgnoreAttributes(Heap::prototype_symbol(),
+    return object->SetLocalPropertyIgnoreAttributes(HEAP->prototype_symbol(),
                                                     value,
                                                     NONE);
   }
@@ -545,7 +546,7 @@ const AccessorDescriptor Accessors::FunctionLength = {
 MaybeObject* Accessors::FunctionGetName(Object* object, void*) {
   bool found_it = false;
   JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
-  if (!found_it) return Heap::undefined_value();
+  if (!found_it) return HEAP->undefined_value();
   return holder->shared()->name();
 }
 
@@ -604,13 +605,13 @@ class SlotRef BASE_EMBEDDED {
         if (Smi::IsValid(value)) {
           return Handle<Object>(Smi::FromInt(value));
         } else {
-          return Factory::NewNumberFromInt(value);
+          return Isolate::Current()->factory()->NewNumberFromInt(value);
         }
       }
 
       case DOUBLE: {
         double value = Memory::double_at(addr_);
-        return Factory::NewNumber(value);
+        return Isolate::Current()->factory()->NewNumber(value);
       }
 
       case LITERAL:
@@ -732,12 +733,13 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction(
     JavaScriptFrame* frame,
     Handle<JSFunction> inlined_function,
     int inlined_frame_index) {
+  Factory* factory = Isolate::Current()->factory();
   int args_count = inlined_function->shared()->formal_parameter_count();
   ScopedVector<SlotRef> args_slots(args_count);
   ComputeSlotMappingForArguments(frame, inlined_frame_index, &args_slots);
   Handle<JSObject> arguments =
-      Factory::NewArgumentsObject(inlined_function, args_count);
-  Handle<FixedArray> array = Factory::NewFixedArray(args_count);
+      factory->NewArgumentsObject(inlined_function, args_count);
+  Handle<FixedArray> array = factory->NewFixedArray(args_count);
   for (int i = 0; i < args_count; ++i) {
     Handle<Object> value = args_slots[i].GetValue();
     array->set(i, *value);
@@ -750,11 +752,12 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction(
 
 
 MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
-  HandleScope scope;
+  Isolate* isolate = Isolate::Current();
+  HandleScope scope(isolate);
   bool found_it = false;
   JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
-  if (!found_it) return Heap::undefined_value();
-  Handle<JSFunction> function(holder);
+  if (!found_it) return isolate->heap()->undefined_value();
+  Handle<JSFunction> function(holder, isolate);
 
   // Find the top invocation of the function by traversing frames.
   List<JSFunction*> functions(2);
@@ -776,9 +779,9 @@ MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
       if (!frame->is_optimized()) {
         // If there is an arguments variable in the stack, we return that.
         Handle<SerializedScopeInfo> info(function->shared()->scope_info());
-        int index = info->StackSlotIndex(Heap::arguments_symbol());
+        int index = info->StackSlotIndex(isolate->heap()->arguments_symbol());
         if (index >= 0) {
-          Handle<Object> arguments(frame->GetExpression(index));
+          Handle<Object> arguments(frame->GetExpression(index), isolate);
           if (!arguments->IsArgumentsMarker()) return *arguments;
         }
       }
@@ -792,9 +795,9 @@ MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
       // Get the number of arguments and construct an arguments object
       // mirror for the right frame.
       const int length = frame->ComputeParametersCount();
-      Handle<JSObject> arguments = Factory::NewArgumentsObject(function,
-                                                               length);
-      Handle<FixedArray> array = Factory::NewFixedArray(length);
+      Handle<JSObject> arguments = isolate->factory()->NewArgumentsObject(
+          function, length);
+      Handle<FixedArray> array = isolate->factory()->NewFixedArray(length);
 
       // Copy the parameters to the arguments object.
       ASSERT(array->length() == length);
@@ -808,7 +811,7 @@ MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
   }
 
   // No frame corresponding to the given function found. Return null.
-  return Heap::null_value();
+  return isolate->heap()->null_value();
 }
 
 
@@ -825,12 +828,13 @@ const AccessorDescriptor Accessors::FunctionArguments = {
 
 
 MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
-  HandleScope scope;
+  Isolate* isolate = Isolate::Current();
+  HandleScope scope(isolate);
   AssertNoAllocation no_alloc;
   bool found_it = false;
   JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
-  if (!found_it) return Heap::undefined_value();
-  Handle<JSFunction> function(holder);
+  if (!found_it) return isolate->heap()->undefined_value();
+  Handle<JSFunction> function(holder, isolate);
 
   List<JSFunction*> functions(2);
   for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
@@ -854,7 +858,7 @@ MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
             }
             ASSERT(functions.length() == 1);
           }
-          if (it.done()) return Heap::null_value();
+          if (it.done()) return isolate->heap()->null_value();
           break;
         }
       }
@@ -863,7 +867,7 @@ MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
   }
 
   // No frame corresponding to the given function found. Return null.
-  return Heap::null_value();
+  return isolate->heap()->null_value();
 }
 
 
diff --git a/src/allocation-inl.h b/src/allocation-inl.h
new file mode 100644 (file)
index 0000000..04a3fe6
--- /dev/null
@@ -0,0 +1,49 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ALLOCATION_INL_H_
+#define V8_ALLOCATION_INL_H_
+
+#include "allocation.h"
+
+namespace v8 {
+namespace internal {
+
+
+void* PreallocatedStorage::New(size_t size) {
+  return Isolate::Current()->PreallocatedStorageNew(size);
+}
+
+
+void PreallocatedStorage::Delete(void* p) {
+  return Isolate::Current()->PreallocatedStorageDelete(p);
+}
+
+
+} }  // namespace v8::internal
+
+#endif  // V8_ALLOCATION_INL_H_
index d74c37cd799499b32fe053fa5ceb2b1476547983..27415c65251f64b0515811bc3344b0c198931fce 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-#include <stdlib.h>
+#include "v8.h"
+#include "isolate.h"
+#include "allocation.h"
 
+/* TODO(isolates): this is what's included in bleeding_edge
+   including of v8.h was replaced with these in
+   http://codereview.chromium.org/5005001/
+   we need Isolate and Isolate needs a lot more so I'm including v8.h back.
 #include "../include/v8stdint.h"
 #include "globals.h"
 #include "checks.h"
 #include "allocation.h"
 #include "utils.h"
+*/
 
 namespace v8 {
 namespace internal {
 
+#ifdef DEBUG
+
+NativeAllocationChecker::NativeAllocationChecker(
+    NativeAllocationChecker::NativeAllocationAllowed allowed)
+    : allowed_(allowed) {
+  if (allowed == DISALLOW) {
+    Isolate* isolate = Isolate::Current();
+    isolate->set_allocation_disallowed(isolate->allocation_disallowed() + 1);
+  }
+}
+
+
+NativeAllocationChecker::~NativeAllocationChecker() {
+  Isolate* isolate = Isolate::Current();
+  if (allowed_ == DISALLOW) {
+    isolate->set_allocation_disallowed(isolate->allocation_disallowed() - 1);
+  }
+  ASSERT(isolate->allocation_disallowed() >= 0);
+}
+
+
+bool NativeAllocationChecker::allocation_allowed() {
+  // TODO(isolates): either find a way to make this work that doesn't
+  // require initializing an isolate before we can use malloc or drop
+  // it completely.
+  return true;
+  // return Isolate::Current()->allocation_disallowed() == 0;
+}
+
+#endif  // DEBUG
+
+
 void* Malloced::New(size_t size) {
   ASSERT(NativeAllocationChecker::allocation_allowed());
   void* result = malloc(size);
@@ -103,15 +142,7 @@ char* StrNDup(const char* str, int n) {
 }
 
 
-int NativeAllocationChecker::allocation_disallowed_ = 0;
-
-
-PreallocatedStorage PreallocatedStorage::in_use_list_(0);
-PreallocatedStorage PreallocatedStorage::free_list_(0);
-bool PreallocatedStorage::preallocated_ = false;
-
-
-void PreallocatedStorage::Init(size_t size) {
+void Isolate::PreallocatedStorageInit(size_t size) {
   ASSERT(free_list_.next_ == &free_list_);
   ASSERT(free_list_.previous_ == &free_list_);
   PreallocatedStorage* free_chunk =
@@ -119,12 +150,12 @@ void PreallocatedStorage::Init(size_t size) {
   free_list_.next_ = free_list_.previous_ = free_chunk;
   free_chunk->next_ = free_chunk->previous_ = &free_list_;
   free_chunk->size_ = size - sizeof(PreallocatedStorage);
-  preallocated_ = true;
+  preallocated_storage_preallocated_ = true;
 }
 
 
-void* PreallocatedStorage::New(size_t size) {
-  if (!preallocated_) {
+void* Isolate::PreallocatedStorageNew(size_t size) {
+  if (!preallocated_storage_preallocated_) {
     return FreeStoreAllocationPolicy::New(size);
   }
   ASSERT(free_list_.next_ != &free_list_);
@@ -166,11 +197,11 @@ void* PreallocatedStorage::New(size_t size) {
 
 
 // We don't attempt to coalesce.
-void PreallocatedStorage::Delete(void* p) {
+void Isolate::PreallocatedStorageDelete(void* p) {
   if (p == NULL) {
     return;
   }
-  if (!preallocated_) {
+  if (!preallocated_storage_preallocated_) {
     FreeStoreAllocationPolicy::Delete(p);
     return;
   }
index 394366ea4dc454f54935b48384707e3b767232da..d7bbbb8769cacd530e7567298a127e42f0fc2c51 100644 (file)
@@ -43,31 +43,18 @@ void FatalProcessOutOfMemory(const char* message);
 // the C++ heap only!
 class NativeAllocationChecker {
  public:
-  typedef enum { ALLOW, DISALLOW } NativeAllocationAllowed;
-  explicit inline NativeAllocationChecker(NativeAllocationAllowed allowed)
-      : allowed_(allowed) {
+  enum NativeAllocationAllowed { ALLOW, DISALLOW };
 #ifdef DEBUG
-    if (allowed == DISALLOW) {
-      allocation_disallowed_++;
-    }
-#endif
-  }
-  ~NativeAllocationChecker() {
-#ifdef DEBUG
-    if (allowed_ == DISALLOW) {
-      allocation_disallowed_--;
-    }
-#endif
-    ASSERT(allocation_disallowed_ >= 0);
-  }
-  static inline bool allocation_allowed() {
-    return allocation_disallowed_ == 0;
-  }
+  explicit NativeAllocationChecker(NativeAllocationAllowed allowed);
+  ~NativeAllocationChecker();
+  static bool allocation_allowed();
  private:
-  // This static counter ensures that NativeAllocationCheckers can be nested.
-  static int allocation_disallowed_;
   // This flag applies to this particular instance.
   NativeAllocationAllowed allowed_;
+#else
+  explicit inline NativeAllocationChecker(NativeAllocationAllowed allowed) {}
+  static inline bool allocation_allowed() { return true; }
+#endif
 };
 
 
@@ -146,27 +133,27 @@ class FreeStoreAllocationPolicy {
 // Allocation policy for allocating in preallocated space.
 // Used as an allocation policy for ScopeInfo when generating
 // stack traces.
-class PreallocatedStorage : public AllStatic {
+class PreallocatedStorage {
  public:
   explicit PreallocatedStorage(size_t size);
   size_t size() { return size_; }
-  static void* New(size_t size);
-  static void Delete(void* p);
 
-  // Preallocate a set number of bytes.
-  static void Init(size_t size);
+  // TODO(isolates): Get rid of these-- we'll have to change the allocator
+  //                 interface to include a pointer to an isolate to do this
+  //                 efficiently.
+  static inline void* New(size_t size);
+  static inline void Delete(void* p);
 
  private:
   size_t size_;
   PreallocatedStorage* previous_;
   PreallocatedStorage* next_;
-  static bool preallocated_;
-
-  static PreallocatedStorage in_use_list_;
-  static PreallocatedStorage free_list_;
 
   void LinkTo(PreallocatedStorage* other);
   void Unlink();
+
+  friend class Isolate;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(PreallocatedStorage);
 };
 
index 8a8640d33d28aa6565daaccc5a86b945f94036cd..667aa60b7ef0051d1ebb77913693e84e9233bd7e 100644 (file)
@@ -44,7 +44,6 @@
 #include "runtime-profiler.h"
 #include "serialize.h"
 #include "snapshot.h"
-#include "top.h"
 #include "v8threads.h"
 #include "version.h"
 #include "vm-state-inl.h"
 #include "../include/v8-profiler.h"
 #include "../include/v8-testing.h"
 
-#define LOG_API(expr) LOG(ApiEntryCall(expr))
+#define LOG_API(isolate, expr) LOG(isolate, ApiEntryCall(expr))
 
+// TODO(isolates): avoid repeated TLS reads in function prologues.
 #ifdef ENABLE_VMSTATE_TRACKING
-#define ENTER_V8 ASSERT(i::V8::IsRunning()); i::VMState __state__(i::OTHER)
-#define LEAVE_V8 i::VMState __state__(i::EXTERNAL)
+#define ENTER_V8                                        \
+  ASSERT(i::Isolate::Current()->IsInitialized());                           \
+  i::VMState __state__(i::Isolate::Current(), i::OTHER)
+#define LEAVE_V8 \
+  i::VMState __state__(i::Isolate::Current(), i::EXTERNAL)
 #else
 #define ENTER_V8 ((void) 0)
 #define LEAVE_V8 ((void) 0)
 
 namespace v8 {
 
-#define ON_BAILOUT(location, code)                                 \
-  if (IsDeadCheck(location) || v8::V8::IsExecutionTerminating()) { \
+#define ON_BAILOUT(isolate, location, code)                        \
+  if (IsDeadCheck(isolate, location) ||                            \
+            v8::V8::IsExecutionTerminating()) {                    \
     code;                                                          \
     UNREACHABLE();                                                 \
   }
 
 
-#define EXCEPTION_PREAMBLE()                                      \
-  thread_local.IncrementCallDepth();                              \
-  ASSERT(!i::Top::external_caught_exception());                   \
+#define EXCEPTION_PREAMBLE()                                                \
+  i::Isolate::Current()->handle_scope_implementer()->IncrementCallDepth();  \
+  ASSERT(!i::Isolate::Current()->external_caught_exception());              \
   bool has_pending_exception = false
 
 
 #define EXCEPTION_BAILOUT_CHECK(value)                                         \
   do {                                                                         \
-    thread_local.DecrementCallDepth();                                         \
+    i::HandleScopeImplementer* handle_scope_implementer =                      \
+        isolate->handle_scope_implementer();                                   \
+    handle_scope_implementer->DecrementCallDepth();                            \
     if (has_pending_exception) {                                               \
-      if (thread_local.CallDepthIsZero() && i::Top::is_out_of_memory()) {      \
-        if (!thread_local.ignore_out_of_memory())                              \
+      if (handle_scope_implementer->CallDepthIsZero() &&                       \
+          i::Isolate::Current()->is_out_of_memory()) {                         \
+        if (!handle_scope_implementer->ignore_out_of_memory())                 \
           i::V8::FatalProcessOutOfMemory(NULL);                                \
       }                                                                        \
-      bool call_depth_is_zero = thread_local.CallDepthIsZero();                \
-      i::Top::OptionalRescheduleException(call_depth_is_zero);                 \
+      bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero();   \
+      i::Isolate::Current()->OptionalRescheduleException(call_depth_is_zero);  \
       return value;                                                            \
     }                                                                          \
   } while (false)
 
+// TODO(isolates): Add a parameter to this macro for an isolate.
 
 #define API_ENTRY_CHECK(msg)                                                   \
   do {                                                                         \
     if (v8::Locker::IsActive()) {                                              \
-      ApiCheck(i::ThreadManager::IsLockedByCurrentThread(),                    \
+      ApiCheck(i::Isolate::Current()->thread_manager()->                       \
+                  IsLockedByCurrentThread(),                                   \
                msg,                                                            \
                "Entering the V8 API without proper locking in place");         \
     }                                                                          \
   } while (false)
 
 
-// --- D a t a   t h a t   i s   s p e c i f i c   t o   a   t h r e a d ---
-
-
-static i::HandleScopeImplementer thread_local;
-
-
 // --- E x c e p t i o n   B e h a v i o r ---
 
 
-static FatalErrorCallback exception_behavior = NULL;
-
 static void DefaultFatalErrorHandler(const char* location,
                                      const char* message) {
 #ifdef ENABLE_VMSTATE_TRACKING
-  i::VMState __state__(i::OTHER);
+  i::VMState __state__(i::Isolate::Current(), i::OTHER);
 #endif
   API_Fatal(location, message);
 }
 
 
-static FatalErrorCallback& GetFatalErrorHandler() {
-  if (exception_behavior == NULL) {
-    exception_behavior = DefaultFatalErrorHandler;
+static FatalErrorCallback GetFatalErrorHandler() {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (isolate->exception_behavior() == NULL) {
+    isolate->set_exception_behavior(DefaultFatalErrorHandler);
   }
-  return exception_behavior;
+  return isolate->exception_behavior();
 }
 
 
@@ -189,7 +191,7 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
   heap_stats.os_error = &os_error;
   int end_marker;
   heap_stats.end_marker = &end_marker;
-  i::Heap::RecordStats(&heap_stats, take_snapshot);
+  HEAP->RecordStats(&heap_stats, take_snapshot);
   i::V8::SetFatalError();
   FatalErrorCallback callback = GetFatalErrorHandler();
   {
@@ -201,11 +203,6 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
 }
 
 
-void V8::SetFatalErrorHandler(FatalErrorCallback that) {
-  exception_behavior = that;
-}
-
-
 bool Utils::ReportApiFailure(const char* location, const char* message) {
   FatalErrorCallback callback = GetFatalErrorHandler();
   callback(location, message);
@@ -252,8 +249,8 @@ static bool ReportEmptyHandle(const char* location) {
  * advantage over ON_BAILOUT that it actually initializes the VM if this has not
  * yet been done.
  */
-static inline bool IsDeadCheck(const char* location) {
-  return !i::V8::IsRunning()
+static inline bool IsDeadCheck(i::Isolate* isolate, const char* location) {
+  return !isolate->IsInitialized()
       && i::V8::IsDead() ? ReportV8Dead(location) : false;
 }
 
@@ -270,23 +267,47 @@ static inline bool EmptyCheck(const char* location, const v8::Data* obj) {
 // --- S t a t i c s ---
 
 
-static i::StringInputBuffer write_input_buffer;
+static bool InitializeHelper() {
+  if (i::Snapshot::Initialize()) return true;
+  return i::V8::Initialize(NULL);
+}
 
 
-static inline bool EnsureInitialized(const char* location) {
-  if (i::V8::IsRunning()) {
-    return true;
+static inline bool EnsureInitializedForIsolate(i::Isolate* isolate,
+                                               const char* location) {
+  if (IsDeadCheck(isolate, location)) return false;
+  if (isolate != NULL) {
+    if (isolate->IsInitialized()) return true;
   }
-  if (IsDeadCheck(location)) {
-    return false;
-  }
-  return ApiCheck(v8::V8::Initialize(), location, "Error initializing V8");
+  return ApiCheck(InitializeHelper(), location, "Error initializing V8");
 }
 
+static inline bool EnsureInitialized(const char* location) {
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  return EnsureInitializedForIsolate(isolate, location);
+}
 
-ImplementationUtilities::HandleScopeData*
-    ImplementationUtilities::CurrentHandleScope() {
-  return &i::HandleScope::current_;
+// Some initializing API functions are called early and may be
+// called on a thread different from static initializer thread.
+// If Isolate API is used, Isolate::Enter() will initialize TLS so
+// Isolate::Current() works. If it's a legacy case, then the thread
+// may not have TLS initialized yet. However, in initializing APIs it
+// may be too early to call EnsureInitialized() - some pre-init
+// parameters still have to be configured.
+static inline i::Isolate* EnterIsolateIfNeeded() {
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  if (isolate != NULL)
+    return isolate;
+
+  i::Isolate::EnterDefaultIsolate();
+  isolate = i::Isolate::Current();
+  return isolate;
+}
+
+
+void V8::SetFatalErrorHandler(FatalErrorCallback that) {
+  i::Isolate* isolate = EnterIsolateIfNeeded();
+  isolate->set_exception_behavior(that);
 }
 
 
@@ -300,28 +321,30 @@ void ImplementationUtilities::ZapHandleRange(i::Object** begin,
 
 v8::Handle<v8::Primitive> ImplementationUtilities::Undefined() {
   if (!EnsureInitialized("v8::Undefined()")) return v8::Handle<v8::Primitive>();
-  return v8::Handle<Primitive>(ToApi<Primitive>(i::Factory::undefined_value()));
+  return v8::Handle<Primitive>(ToApi<Primitive>(FACTORY->undefined_value()));
 }
 
 
 v8::Handle<v8::Primitive> ImplementationUtilities::Null() {
-  if (!EnsureInitialized("v8::Null()")) return v8::Handle<v8::Primitive>();
-  return v8::Handle<Primitive>(ToApi<Primitive>(i::Factory::null_value()));
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  if (!EnsureInitializedForIsolate(isolate, "v8::Null()")) {
+    return v8::Handle<v8::Primitive>();
+  }
+  return v8::Handle<Primitive>(
+      ToApi<Primitive>(isolate->factory()->null_value()));
 }
 
 
 v8::Handle<v8::Boolean> ImplementationUtilities::True() {
   if (!EnsureInitialized("v8::True()")) return v8::Handle<v8::Boolean>();
-  return v8::Handle<v8::Boolean>(ToApi<Boolean>(i::Factory::true_value()));
+  return v8::Handle<v8::Boolean>(ToApi<Boolean>(FACTORY->true_value()));
 }
 
 
 v8::Handle<v8::Boolean> ImplementationUtilities::False() {
   if (!EnsureInitialized("v8::False()")) return v8::Handle<v8::Boolean>();
-  return v8::Handle<v8::Boolean>(ToApi<Boolean>(i::Factory::false_value()));
+  return v8::Handle<v8::Boolean>(ToApi<Boolean>(FACTORY->false_value()));
 }
 
-
 void V8::SetFlagsFromString(const char* str, int length) {
   i::FlagList::SetFlagsFromString(str, length);
 }
@@ -333,14 +356,17 @@ void V8::SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags) {
 
 
 v8::Handle<Value> ThrowException(v8::Handle<v8::Value> value) {
-  if (IsDeadCheck("v8::ThrowException()")) return v8::Handle<Value>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ThrowException()")) {
+    return v8::Handle<Value>();
+  }
   ENTER_V8;
   // If we're passed an empty handle, we throw an undefined exception
   // to deal more gracefully with out of memory situations.
   if (value.IsEmpty()) {
-    i::Top::ScheduleThrow(i::Heap::undefined_value());
+    isolate->ScheduleThrow(HEAP->undefined_value());
   } else {
-    i::Top::ScheduleThrow(*Utils::OpenHandle(*value));
+    isolate->ScheduleThrow(*Utils::OpenHandle(*value));
   }
   return v8::Undefined();
 }
@@ -354,8 +380,8 @@ RegisteredExtension::RegisteredExtension(Extension* extension)
 
 
 void RegisteredExtension::Register(RegisteredExtension* that) {
-  that->next_ = RegisteredExtension::first_extension_;
-  RegisteredExtension::first_extension_ = that;
+  that->next_ = first_extension_;
+  first_extension_ = that;
 }
 
 
@@ -377,25 +403,29 @@ Extension::Extension(const char* name,
 
 
 v8::Handle<Primitive> Undefined() {
-  LOG_API("Undefined");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "Undefined");
   return ImplementationUtilities::Undefined();
 }
 
 
 v8::Handle<Primitive> Null() {
-  LOG_API("Null");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "Null");
   return ImplementationUtilities::Null();
 }
 
 
 v8::Handle<Boolean> True() {
-  LOG_API("True");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "True");
   return ImplementationUtilities::True();
 }
 
 
 v8::Handle<Boolean> False() {
-  LOG_API("False");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "False");
   return ImplementationUtilities::False();
 }
 
@@ -408,74 +438,89 @@ ResourceConstraints::ResourceConstraints()
 
 
 bool SetResourceConstraints(ResourceConstraints* constraints) {
+  i::Isolate* isolate = EnterIsolateIfNeeded();
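+  // Heap constraints can only be applied before the isolate's heap is
+  // set up, so the default isolate is entered if none is active yet.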
+
   int young_space_size = constraints->max_young_space_size();
   int old_gen_size = constraints->max_old_space_size();
   int max_executable_size = constraints->max_executable_size();
   if (young_space_size != 0 || old_gen_size != 0 || max_executable_size != 0) {
-    bool result = i::Heap::ConfigureHeap(young_space_size / 2,
-                                         old_gen_size,
-                                         max_executable_size);
+    // After initialization it's too late to change Heap constraints.
+    ASSERT(!isolate->IsInitialized());
+    bool result = isolate->heap()->ConfigureHeap(young_space_size / 2,
+                                                 old_gen_size,
+                                                 max_executable_size);
     if (!result) return false;
   }
   if (constraints->stack_limit() != NULL) {
     uintptr_t limit = reinterpret_cast<uintptr_t>(constraints->stack_limit());
-    i::StackGuard::SetStackLimit(limit);
+    isolate->stack_guard()->SetStackLimit(limit);
   }
   return true;
 }
 
 
 i::Object** V8::GlobalizeReference(i::Object** obj) {
-  if (IsDeadCheck("V8::Persistent::New")) return NULL;
-  LOG_API("Persistent::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "V8::Persistent::New")) return NULL;
+  LOG_API(isolate, "Persistent::New");
   i::Handle<i::Object> result =
-      i::GlobalHandles::Create(*obj);
+      isolate->global_handles()->Create(*obj);
   return result.location();
 }
 
 
 void V8::MakeWeak(i::Object** object, void* parameters,
                   WeakReferenceCallback callback) {
-  LOG_API("MakeWeak");
-  i::GlobalHandles::MakeWeak(object, parameters, callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "MakeWeak");
+  isolate->global_handles()->MakeWeak(object, parameters, callback);
 }
 
 
 void V8::ClearWeak(i::Object** obj) {
-  LOG_API("ClearWeak");
-  i::GlobalHandles::ClearWeakness(obj);
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "ClearWeak");
+  isolate->global_handles()->ClearWeakness(obj);
 }
 
 
 bool V8::IsGlobalNearDeath(i::Object** obj) {
-  LOG_API("IsGlobalNearDeath");
-  if (!i::V8::IsRunning()) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "IsGlobalNearDeath");
+  if (!isolate->IsInitialized()) return false;
   return i::GlobalHandles::IsNearDeath(obj);
 }
 
 
 bool V8::IsGlobalWeak(i::Object** obj) {
-  LOG_API("IsGlobalWeak");
-  if (!i::V8::IsRunning()) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "IsGlobalWeak");
+  if (!isolate->IsInitialized()) return false;
   return i::GlobalHandles::IsWeak(obj);
 }
 
 
 void V8::DisposeGlobal(i::Object** obj) {
-  LOG_API("DisposeGlobal");
-  if (!i::V8::IsRunning()) return;
-  i::GlobalHandles::Destroy(obj);
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "DisposeGlobal");
+  if (!isolate->IsInitialized()) return;
+  isolate->global_handles()->Destroy(obj);
 }
 
 // --- H a n d l e s ---
 
 
-HandleScope::HandleScope()
-    : prev_next_(i::HandleScope::current_.next),
-      prev_limit_(i::HandleScope::current_.limit),
-      is_closed_(false) {
+HandleScope::HandleScope() {
   API_ENTRY_CHECK("HandleScope::HandleScope");
-  i::HandleScope::current_.level++;
+  i::Isolate* isolate = i::Isolate::Current();
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
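+  // Record the current scope boundaries so Leave() can restore them.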
+  isolate_ = isolate;
+  prev_next_ = current->next;
+  prev_limit_ = current->limit;
+  is_closed_ = false;
+  current->level++;
 }
 
 
@@ -487,12 +532,15 @@ HandleScope::~HandleScope() {
 
 
 void HandleScope::Leave() {
-  i::HandleScope::current_.level--;
-  ASSERT(i::HandleScope::current_.level >= 0);
-  i::HandleScope::current_.next = prev_next_;
-  if (i::HandleScope::current_.limit != prev_limit_) {
-    i::HandleScope::current_.limit = prev_limit_;
-    i::HandleScope::DeleteExtensions();
+  ASSERT(isolate_ == i::Isolate::Current());
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate_->handle_scope_data();
+  current->level--;
+  ASSERT(current->level >= 0);
+  current->next = prev_next_;
+  if (current->limit != prev_limit_) {
+    current->limit = prev_limit_;
+    i::HandleScope::DeleteExtensions(isolate_);
   }
 
 #ifdef DEBUG
@@ -502,45 +550,61 @@ void HandleScope::Leave() {
 
 
 int HandleScope::NumberOfHandles() {
+  EnsureInitialized("HandleScope::NumberOfHandles");
   return i::HandleScope::NumberOfHandles();
 }
 
 
-i::Object** v8::HandleScope::CreateHandle(i::Object* value) {
-  return i::HandleScope::CreateHandle(value);
+i::Object** HandleScope::CreateHandle(i::Object* value) {
+  return i::HandleScope::CreateHandle(value, i::Isolate::Current());
+}
+
+
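+// For heap objects the owning isolate is obtained from the object itself.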
+i::Object** HandleScope::CreateHandle(i::HeapObject* value) {
+  ASSERT(value->IsHeapObject());
+  return reinterpret_cast<i::Object**>(
+      i::HandleScope::CreateHandle(value, value->GetIsolate()));
 }
 
 
 void Context::Enter() {
-  if (IsDeadCheck("v8::Context::Enter()")) return;
+  // TODO(isolates): Context should have a pointer to isolate.
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Context::Enter()")) return;
   ENTER_V8;
   i::Handle<i::Context> env = Utils::OpenHandle(this);
-  thread_local.EnterContext(env);
+  isolate->handle_scope_implementer()->EnterContext(env);
 
-  thread_local.SaveContext(i::Top::context());
-  i::Top::set_context(*env);
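+  // Save the previous context on the isolate so Exit() can restore it.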
+  isolate->handle_scope_implementer()->SaveContext(isolate->context());
+  isolate->set_context(*env);
 }
 
 
 void Context::Exit() {
-  if (!i::V8::IsRunning()) return;
-  if (!ApiCheck(thread_local.LeaveLastContext(),
+  // TODO(isolates): Context should have a pointer to isolate.
+  i::Isolate* isolate = i::Isolate::Current();
+  if (!isolate->IsInitialized()) return;
+
+  if (!ApiCheck(isolate->handle_scope_implementer()->LeaveLastContext(),
                 "v8::Context::Exit()",
                 "Cannot exit non-entered context")) {
     return;
   }
 
   // Content of 'last_context' could be NULL.
-  i::Context* last_context = thread_local.RestoreContext();
-  i::Top::set_context(last_context);
+  i::Context* last_context =
+      isolate->handle_scope_implementer()->RestoreContext();
+  isolate->set_context(last_context);
 }
 
 
 void Context::SetData(v8::Handle<String> data) {
-  if (IsDeadCheck("v8::Context::SetData()")) return;
+  // TODO(isolates): Context should have a pointer to isolate.
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Context::SetData()")) return;
   ENTER_V8;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::Context> env = Utils::OpenHandle(this);
     i::Handle<i::Object> raw_data = Utils::OpenHandle(*data);
     ASSERT(env->IsGlobalContext());
@@ -552,11 +616,15 @@ void Context::SetData(v8::Handle<String> data) {
 
 
 v8::Local<v8::Value> Context::GetData() {
-  if (IsDeadCheck("v8::Context::GetData()")) return v8::Local<Value>();
+  // TODO(isolates): Context should have a pointer to isolate.
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Context::GetData()")) {
+    return v8::Local<Value>();
+  }
   ENTER_V8;
   i::Object* raw_result = NULL;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::Context> env = Utils::OpenHandle(this);
     ASSERT(env->IsGlobalContext());
     if (env->IsGlobalContext()) {
@@ -576,7 +644,7 @@ i::Object** v8::HandleScope::RawClose(i::Object** value) {
                 "Local scope has already been closed")) {
     return 0;
   }
-  LOG_API("CloseHandleScope");
+  LOG_API(isolate_, "CloseHandleScope");
 
   // Read the result before popping the handle block.
   i::Object* result = NULL;
@@ -607,8 +675,8 @@ i::Object** v8::HandleScope::RawClose(i::Object** value) {
 NeanderObject::NeanderObject(int size) {
   EnsureInitialized("v8::Nowhere");
   ENTER_V8;
-  value_ = i::Factory::NewNeanderObject();
-  i::Handle<i::FixedArray> elements = i::Factory::NewFixedArray(size);
+  value_ = FACTORY->NewNeanderObject();
+  i::Handle<i::FixedArray> elements = FACTORY->NewFixedArray(size);
   value_->set_elements(*elements);
 }
 
@@ -644,7 +712,7 @@ void NeanderArray::add(i::Handle<i::Object> value) {
   int length = this->length();
   int size = obj_.size();
   if (length == size - 1) {
-    i::Handle<i::FixedArray> new_elms = i::Factory::NewFixedArray(2 * size);
+    i::Handle<i::FixedArray> new_elms = FACTORY->NewFixedArray(2 * size);
     for (int i = 0; i < length; i++)
       new_elms->set(i + 1, get(i));
     obj_.value()->set_elements(*new_elms);
@@ -670,9 +738,10 @@ static void InitializeTemplate(i::Handle<i::TemplateInfo> that, int type) {
 
 void Template::Set(v8::Handle<String> name, v8::Handle<Data> value,
                    v8::PropertyAttribute attribute) {
-  if (IsDeadCheck("v8::Template::Set()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Template::Set()")) return;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Object> list(Utils::OpenHandle(this)->property_list());
   if (list->IsUndefined()) {
     list = NeanderArray().value();
@@ -694,7 +763,8 @@ static void InitializeFunctionTemplate(
 
 
 Local<ObjectTemplate> FunctionTemplate::PrototypeTemplate() {
-  if (IsDeadCheck("v8::FunctionTemplate::PrototypeTemplate()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::PrototypeTemplate()")) {
     return Local<ObjectTemplate>();
   }
   ENTER_V8;
@@ -708,28 +778,27 @@ Local<ObjectTemplate> FunctionTemplate::PrototypeTemplate() {
 
 
 void FunctionTemplate::Inherit(v8::Handle<FunctionTemplate> value) {
-  if (IsDeadCheck("v8::FunctionTemplate::Inherit()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::Inherit()")) return;
   ENTER_V8;
   Utils::OpenHandle(this)->set_parent_template(*Utils::OpenHandle(*value));
 }
 
 
-// To distinguish the function templates, so that we can find them in the
-// function cache of the global context.
-static int next_serial_number = 0;
-
-
 Local<FunctionTemplate> FunctionTemplate::New(InvocationCallback callback,
     v8::Handle<Value> data, v8::Handle<Signature> signature) {
-  EnsureInitialized("v8::FunctionTemplate::New()");
-  LOG_API("FunctionTemplate::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::FunctionTemplate::New()");
+  LOG_API(isolate, "FunctionTemplate::New");
   ENTER_V8;
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::FUNCTION_TEMPLATE_INFO_TYPE);
+      FACTORY->NewStruct(i::FUNCTION_TEMPLATE_INFO_TYPE);
   i::Handle<i::FunctionTemplateInfo> obj =
       i::Handle<i::FunctionTemplateInfo>::cast(struct_obj);
   InitializeFunctionTemplate(obj);
-  obj->set_serial_number(i::Smi::FromInt(next_serial_number++));
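+  // Serial numbers distinguish function templates in the function cache
+  // of the global context; the counter is kept on the isolate.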
+  int next_serial_number = isolate->next_serial_number();
+  isolate->set_next_serial_number(next_serial_number + 1);
+  obj->set_serial_number(i::Smi::FromInt(next_serial_number));
   if (callback != 0) {
     if (data.IsEmpty()) data = v8::Undefined();
     Utils::ToLocal(obj)->SetCallHandler(callback, data);
@@ -745,16 +814,17 @@ Local<FunctionTemplate> FunctionTemplate::New(InvocationCallback callback,
 
 Local<Signature> Signature::New(Handle<FunctionTemplate> receiver,
       int argc, Handle<FunctionTemplate> argv[]) {
-  EnsureInitialized("v8::Signature::New()");
-  LOG_API("Signature::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Signature::New()");
+  LOG_API(isolate, "Signature::New");
   ENTER_V8;
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::SIGNATURE_INFO_TYPE);
+      FACTORY->NewStruct(i::SIGNATURE_INFO_TYPE);
   i::Handle<i::SignatureInfo> obj =
       i::Handle<i::SignatureInfo>::cast(struct_obj);
   if (!receiver.IsEmpty()) obj->set_receiver(*Utils::OpenHandle(*receiver));
   if (argc > 0) {
-    i::Handle<i::FixedArray> args = i::Factory::NewFixedArray(argc);
+    i::Handle<i::FixedArray> args = FACTORY->NewFixedArray(argc);
     for (int i = 0; i < argc; i++) {
       if (!argv[i].IsEmpty())
         args->set(i, *Utils::OpenHandle(*argv[i]));
@@ -772,14 +842,15 @@ Local<TypeSwitch> TypeSwitch::New(Handle<FunctionTemplate> type) {
 
 
 Local<TypeSwitch> TypeSwitch::New(int argc, Handle<FunctionTemplate> types[]) {
-  EnsureInitialized("v8::TypeSwitch::New()");
-  LOG_API("TypeSwitch::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::TypeSwitch::New()");
+  LOG_API(isolate, "TypeSwitch::New");
   ENTER_V8;
-  i::Handle<i::FixedArray> vector = i::Factory::NewFixedArray(argc);
+  i::Handle<i::FixedArray> vector = isolate->factory()->NewFixedArray(argc);
   for (int i = 0; i < argc; i++)
     vector->set(i, *Utils::OpenHandle(*types[i]));
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::TYPE_SWITCH_INFO_TYPE);
+      isolate->factory()->NewStruct(i::TYPE_SWITCH_INFO_TYPE);
   i::Handle<i::TypeSwitchInfo> obj =
       i::Handle<i::TypeSwitchInfo>::cast(struct_obj);
   obj->set_types(*vector);
@@ -788,7 +859,8 @@ Local<TypeSwitch> TypeSwitch::New(int argc, Handle<FunctionTemplate> types[]) {
 
 
 int TypeSwitch::match(v8::Handle<Value> value) {
-  LOG_API("TypeSwitch::match");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "TypeSwitch::match");
   i::Handle<i::Object> obj = Utils::OpenHandle(*value);
   i::Handle<i::TypeSwitchInfo> info = Utils::OpenHandle(this);
   i::FixedArray* types = i::FixedArray::cast(info->types());
@@ -808,11 +880,12 @@ int TypeSwitch::match(v8::Handle<Value> value) {
 
 void FunctionTemplate::SetCallHandler(InvocationCallback callback,
                                       v8::Handle<Value> data) {
-  if (IsDeadCheck("v8::FunctionTemplate::SetCallHandler()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetCallHandler()")) return;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::CALL_HANDLER_INFO_TYPE);
+      FACTORY->NewStruct(i::CALL_HANDLER_INFO_TYPE);
   i::Handle<i::CallHandlerInfo> obj =
       i::Handle<i::CallHandlerInfo>::cast(struct_obj);
   SET_FIELD_WRAPPED(obj, set_callback, callback);
@@ -829,7 +902,7 @@ static i::Handle<i::AccessorInfo> MakeAccessorInfo(
       v8::Handle<Value> data,
       v8::AccessControl settings,
       v8::PropertyAttribute attributes) {
-  i::Handle<i::AccessorInfo> obj = i::Factory::NewAccessorInfo();
+  i::Handle<i::AccessorInfo> obj = FACTORY->NewAccessorInfo();
   ASSERT(getter != NULL);
   SET_FIELD_WRAPPED(obj, set_getter, getter);
   SET_FIELD_WRAPPED(obj, set_setter, setter);
@@ -851,11 +924,13 @@ void FunctionTemplate::AddInstancePropertyAccessor(
       v8::Handle<Value> data,
       v8::AccessControl settings,
       v8::PropertyAttribute attributes) {
-  if (IsDeadCheck("v8::FunctionTemplate::AddInstancePropertyAccessor()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate,
+                  "v8::FunctionTemplate::AddInstancePropertyAccessor()")) {
     return;
   }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
 
   i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name,
                                                     getter, setter, data,
@@ -871,7 +946,8 @@ void FunctionTemplate::AddInstancePropertyAccessor(
 
 
 Local<ObjectTemplate> FunctionTemplate::InstanceTemplate() {
-  if (IsDeadCheck("v8::FunctionTemplate::InstanceTemplate()")
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::InstanceTemplate()")
       || EmptyCheck("v8::FunctionTemplate::InstanceTemplate()", this))
     return Local<ObjectTemplate>();
   ENTER_V8;
@@ -887,14 +963,18 @@ Local<ObjectTemplate> FunctionTemplate::InstanceTemplate() {
 
 
 void FunctionTemplate::SetClassName(Handle<String> name) {
-  if (IsDeadCheck("v8::FunctionTemplate::SetClassName()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetClassName()")) return;
   ENTER_V8;
   Utils::OpenHandle(this)->set_class_name(*Utils::OpenHandle(*name));
 }
 
 
 void FunctionTemplate::SetHiddenPrototype(bool value) {
-  if (IsDeadCheck("v8::FunctionTemplate::SetHiddenPrototype()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetHiddenPrototype()")) {
+    return;
+  }
   ENTER_V8;
   Utils::OpenHandle(this)->set_hidden_prototype(value);
 }
@@ -907,13 +987,15 @@ void FunctionTemplate::SetNamedInstancePropertyHandler(
       NamedPropertyDeleter remover,
       NamedPropertyEnumerator enumerator,
       Handle<Value> data) {
-  if (IsDeadCheck("v8::FunctionTemplate::SetNamedInstancePropertyHandler()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate,
+                  "v8::FunctionTemplate::SetNamedInstancePropertyHandler()")) {
     return;
   }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::INTERCEPTOR_INFO_TYPE);
+      isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE);
   i::Handle<i::InterceptorInfo> obj =
       i::Handle<i::InterceptorInfo>::cast(struct_obj);
 
@@ -936,14 +1018,15 @@ void FunctionTemplate::SetIndexedInstancePropertyHandler(
       IndexedPropertyDeleter remover,
       IndexedPropertyEnumerator enumerator,
       Handle<Value> data) {
-  if (IsDeadCheck(
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate,
         "v8::FunctionTemplate::SetIndexedInstancePropertyHandler()")) {
     return;
   }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::INTERCEPTOR_INFO_TYPE);
+      isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE);
   i::Handle<i::InterceptorInfo> obj =
       i::Handle<i::InterceptorInfo>::cast(struct_obj);
 
@@ -962,13 +1045,15 @@ void FunctionTemplate::SetIndexedInstancePropertyHandler(
 void FunctionTemplate::SetInstanceCallAsFunctionHandler(
       InvocationCallback callback,
       Handle<Value> data) {
-  if (IsDeadCheck("v8::FunctionTemplate::SetInstanceCallAsFunctionHandler()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate,
+                  "v8::FunctionTemplate::SetInstanceCallAsFunctionHandler()")) {
     return;
   }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::CALL_HANDLER_INFO_TYPE);
+      isolate->factory()->NewStruct(i::CALL_HANDLER_INFO_TYPE);
   i::Handle<i::CallHandlerInfo> obj =
       i::Handle<i::CallHandlerInfo>::cast(struct_obj);
   SET_FIELD_WRAPPED(obj, set_callback, callback);
@@ -988,12 +1073,15 @@ Local<ObjectTemplate> ObjectTemplate::New() {
 
 Local<ObjectTemplate> ObjectTemplate::New(
       v8::Handle<FunctionTemplate> constructor) {
-  if (IsDeadCheck("v8::ObjectTemplate::New()")) return Local<ObjectTemplate>();
-  EnsureInitialized("v8::ObjectTemplate::New()");
-  LOG_API("ObjectTemplate::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::New()")) {
+    return Local<ObjectTemplate>();
+  }
+  EnsureInitializedForIsolate(isolate, "v8::ObjectTemplate::New()");
+  LOG_API(isolate, "ObjectTemplate::New");
   ENTER_V8;
   i::Handle<i::Struct> struct_obj =
-      i::Factory::NewStruct(i::OBJECT_TEMPLATE_INFO_TYPE);
+      FACTORY->NewStruct(i::OBJECT_TEMPLATE_INFO_TYPE);
   i::Handle<i::ObjectTemplateInfo> obj =
       i::Handle<i::ObjectTemplateInfo>::cast(struct_obj);
   InitializeTemplate(obj, Consts::OBJECT_TEMPLATE);
@@ -1022,9 +1110,10 @@ void ObjectTemplate::SetAccessor(v8::Handle<String> name,
                                  v8::Handle<Value> data,
                                  AccessControl settings,
                                  PropertyAttribute attribute) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetAccessor()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   EnsureConstructor(this);
   i::FunctionTemplateInfo* constructor =
       i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
@@ -1044,7 +1133,10 @@ void ObjectTemplate::SetNamedPropertyHandler(NamedPropertyGetter getter,
                                              NamedPropertyDeleter remover,
                                              NamedPropertyEnumerator enumerator,
                                              Handle<Value> data) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetNamedPropertyHandler()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetNamedPropertyHandler()")) {
+    return;
+  }
   ENTER_V8;
   HandleScope scope;
   EnsureConstructor(this);
@@ -1061,9 +1153,10 @@ void ObjectTemplate::SetNamedPropertyHandler(NamedPropertyGetter getter,
 
 
 void ObjectTemplate::MarkAsUndetectable() {
-  if (IsDeadCheck("v8::ObjectTemplate::MarkAsUndetectable()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::MarkAsUndetectable()")) return;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   EnsureConstructor(this);
   i::FunctionTemplateInfo* constructor =
       i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
@@ -1077,13 +1170,16 @@ void ObjectTemplate::SetAccessCheckCallbacks(
       IndexedSecurityCallback indexed_callback,
       Handle<Value> data,
       bool turned_on_by_default) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetAccessCheckCallbacks()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessCheckCallbacks()")) {
+    return;
+  }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   EnsureConstructor(this);
 
   i::Handle<i::Struct> struct_info =
-      i::Factory::NewStruct(i::ACCESS_CHECK_INFO_TYPE);
+      FACTORY->NewStruct(i::ACCESS_CHECK_INFO_TYPE);
   i::Handle<i::AccessCheckInfo> info =
       i::Handle<i::AccessCheckInfo>::cast(struct_info);
 
@@ -1108,9 +1204,12 @@ void ObjectTemplate::SetIndexedPropertyHandler(
       IndexedPropertyDeleter remover,
       IndexedPropertyEnumerator enumerator,
       Handle<Value> data) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetIndexedPropertyHandler()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetIndexedPropertyHandler()")) {
+    return;
+  }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   EnsureConstructor(this);
   i::FunctionTemplateInfo* constructor =
       i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
@@ -1126,9 +1225,13 @@ void ObjectTemplate::SetIndexedPropertyHandler(
 
 void ObjectTemplate::SetCallAsFunctionHandler(InvocationCallback callback,
                                               Handle<Value> data) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetCallAsFunctionHandler()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate,
+                  "v8::ObjectTemplate::SetCallAsFunctionHandler()")) {
+    return;
+  }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   EnsureConstructor(this);
   i::FunctionTemplateInfo* constructor =
       i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
@@ -1138,7 +1241,8 @@ void ObjectTemplate::SetCallAsFunctionHandler(InvocationCallback callback,
 
 
 int ObjectTemplate::InternalFieldCount() {
-  if (IsDeadCheck("v8::ObjectTemplate::InternalFieldCount()")) {
+  if (IsDeadCheck(i::Isolate::Current(),
+                  "v8::ObjectTemplate::InternalFieldCount()")) {
     return 0;
   }
   return i::Smi::cast(Utils::OpenHandle(this)->internal_field_count())->value();
@@ -1146,7 +1250,10 @@ int ObjectTemplate::InternalFieldCount() {
 
 
 void ObjectTemplate::SetInternalFieldCount(int value) {
-  if (IsDeadCheck("v8::ObjectTemplate::SetInternalFieldCount()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetInternalFieldCount()")) {
+    return;
+  }
   if (!ApiCheck(i::Smi::IsValid(value),
                 "v8::ObjectTemplate::SetInternalFieldCount()",
                 "Invalid internal field count")) {
@@ -1214,8 +1321,9 @@ Local<Script> Script::New(v8::Handle<String> source,
                           v8::ScriptOrigin* origin,
                           v8::ScriptData* pre_data,
                           v8::Handle<String> script_data) {
-  ON_BAILOUT("v8::Script::New()", return Local<Script>());
-  LOG_API("Script::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Script::New()", return Local<Script>());
+  LOG_API(isolate, "Script::New");
   ENTER_V8;
   i::Handle<i::String> str = Utils::OpenHandle(*source);
   i::Handle<i::Object> name_obj;
@@ -1267,8 +1375,9 @@ Local<Script> Script::Compile(v8::Handle<String> source,
                               v8::ScriptOrigin* origin,
                               v8::ScriptData* pre_data,
                               v8::Handle<String> script_data) {
-  ON_BAILOUT("v8::Script::Compile()", return Local<Script>());
-  LOG_API("Script::Compile");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Script::Compile()", return Local<Script>());
+  LOG_API(isolate, "Script::Compile");
   ENTER_V8;
   Local<Script> generic = New(source, origin, pre_data, script_data);
   if (generic.IsEmpty())
@@ -1277,8 +1386,9 @@ Local<Script> Script::Compile(v8::Handle<String> source,
   i::Handle<i::SharedFunctionInfo> function =
       i::Handle<i::SharedFunctionInfo>(i::SharedFunctionInfo::cast(*obj));
   i::Handle<i::JSFunction> result =
-      i::Factory::NewFunctionFromSharedFunctionInfo(function,
-                                                    i::Top::global_context());
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(
+          function,
+          isolate->global_context());
   return Local<Script>(ToApi<Script>(result));
 }
 
@@ -1292,8 +1402,9 @@ Local<Script> Script::Compile(v8::Handle<String> source,
 
 
 Local<Value> Script::Run() {
-  ON_BAILOUT("v8::Script::Run()", return Local<Value>());
-  LOG_API("Script::Run");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Script::Run()", return Local<Value>());
+  LOG_API(isolate, "Script::Run");
   ENTER_V8;
   i::Object* raw_result = NULL;
   {
@@ -1303,13 +1414,14 @@ Local<Value> Script::Run() {
     if (obj->IsSharedFunctionInfo()) {
       i::Handle<i::SharedFunctionInfo>
           function_info(i::SharedFunctionInfo::cast(*obj));
-      fun = i::Factory::NewFunctionFromSharedFunctionInfo(
-          function_info, i::Top::global_context());
+      fun = isolate->factory()->NewFunctionFromSharedFunctionInfo(
+          function_info, isolate->global_context());
     } else {
       fun = i::Handle<i::JSFunction>(i::JSFunction::cast(*obj));
     }
     EXCEPTION_PREAMBLE();
-    i::Handle<i::Object> receiver(i::Top::context()->global_proxy());
+    i::Handle<i::Object> receiver(isolate->context()->global_proxy());
     i::Handle<i::Object> result =
         i::Execution::Call(fun, receiver, 0, NULL, &has_pending_exception);
     EXCEPTION_BAILOUT_CHECK(Local<Value>());
@@ -1335,11 +1447,12 @@ static i::Handle<i::SharedFunctionInfo> OpenScript(Script* script) {
 
 
 Local<Value> Script::Id() {
-  ON_BAILOUT("v8::Script::Id()", return Local<Value>());
-  LOG_API("Script::Id");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Script::Id()", return Local<Value>());
+  LOG_API(isolate, "Script::Id");
   i::Object* raw_id = NULL;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::SharedFunctionInfo> function_info = OpenScript(this);
     i::Handle<i::Script> script(i::Script::cast(function_info->script()));
     i::Handle<i::Object> id(script->id());
@@ -1351,10 +1464,11 @@ Local<Value> Script::Id() {
 
 
 void Script::SetData(v8::Handle<String> data) {
-  ON_BAILOUT("v8::Script::SetData()", return);
-  LOG_API("Script::SetData");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Script::SetData()", return);
+  LOG_API(isolate, "Script::SetData");
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::SharedFunctionInfo> function_info = OpenScript(this);
     i::Handle<i::Object> raw_data = Utils::OpenHandle(*data);
     i::Handle<i::Script> script(i::Script::cast(function_info->script()));
@@ -1367,14 +1481,14 @@ void Script::SetData(v8::Handle<String> data) {
 
 
 v8::TryCatch::TryCatch()
-    : next_(i::Top::try_catch_handler_address()),
-      exception_(i::Heap::the_hole_value()),
+    : next_(i::Isolate::Current()->try_catch_handler_address()),
+      exception_(HEAP->the_hole_value()),
       message_(i::Smi::FromInt(0)),
       is_verbose_(false),
       can_continue_(true),
       capture_message_(true),
       rethrow_(false) {
-  i::Top::RegisterTryCatchHandler(this);
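+  // TryCatch handlers are chained per isolate.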
+  i::Isolate::Current()->RegisterTryCatchHandler(this);
 }
 
 
@@ -1382,10 +1496,10 @@ v8::TryCatch::~TryCatch() {
   if (rethrow_) {
     v8::HandleScope scope;
     v8::Local<v8::Value> exc = v8::Local<v8::Value>::New(Exception());
-    i::Top::UnregisterTryCatchHandler(this);
+    i::Isolate::Current()->UnregisterTryCatchHandler(this);
     v8::ThrowException(exc);
   } else {
-    i::Top::UnregisterTryCatchHandler(this);
+    i::Isolate::Current()->UnregisterTryCatchHandler(this);
   }
 }
 
@@ -1424,7 +1538,7 @@ v8::Local<Value> v8::TryCatch::StackTrace() const {
     if (!raw_obj->IsJSObject()) return v8::Local<Value>();
     v8::HandleScope scope;
     i::Handle<i::JSObject> obj(i::JSObject::cast(raw_obj));
-    i::Handle<i::String> name = i::Factory::LookupAsciiSymbol("stack");
+    i::Handle<i::String> name = FACTORY->LookupAsciiSymbol("stack");
     if (!obj->HasProperty(*name))
       return v8::Local<Value>();
     return scope.Close(v8::Utils::ToLocal(i::GetProperty(obj, name)));
@@ -1445,7 +1559,7 @@ v8::Local<v8::Message> v8::TryCatch::Message() const {
 
 
 void v8::TryCatch::Reset() {
-  exception_ = i::Heap::the_hole_value();
+  exception_ = HEAP->the_hole_value();
   message_ = i::Smi::FromInt(0);
 }
 
@@ -1464,7 +1578,8 @@ void v8::TryCatch::SetCaptureMessage(bool value) {
 
 
 Local<String> Message::Get() const {
-  ON_BAILOUT("v8::Message::Get()", return Local<String>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Message::Get()", return Local<String>());
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
@@ -1475,7 +1590,8 @@ Local<String> Message::Get() const {
 
 
 v8::Handle<Value> Message::GetScriptResourceName() const {
-  if (IsDeadCheck("v8::Message::GetScriptResourceName()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetScriptResourceName()")) {
     return Local<String>();
   }
   ENTER_V8;
@@ -1491,7 +1607,8 @@ v8::Handle<Value> Message::GetScriptResourceName() const {
 
 
 v8::Handle<Value> Message::GetScriptData() const {
-  if (IsDeadCheck("v8::Message::GetScriptResourceData()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetScriptResourceData()")) {
     return Local<Value>();
   }
   ENTER_V8;
@@ -1507,7 +1624,8 @@ v8::Handle<Value> Message::GetScriptData() const {
 
 
 v8::Handle<v8::StackTrace> Message::GetStackTrace() const {
-  if (IsDeadCheck("v8::Message::GetStackTrace()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetStackTrace()")) {
     return Local<v8::StackTrace>();
   }
   ENTER_V8;
@@ -1527,9 +1645,10 @@ static i::Handle<i::Object> CallV8HeapFunction(const char* name,
                                                int argc,
                                                i::Object** argv[],
                                                bool* has_pending_exception) {
-  i::Handle<i::String> fmt_str = i::Factory::LookupAsciiSymbol(name);
+  i::Isolate* isolate = i::Isolate::Current();
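+  // Looks up the named helper on the isolate's JS builtins object.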
+  i::Handle<i::String> fmt_str = isolate->factory()->LookupAsciiSymbol(name);
   i::Object* object_fun =
-      i::Top::builtins()->GetPropertyNoExceptionThrown(*fmt_str);
+      isolate->js_builtins_object()->GetPropertyNoExceptionThrown(*fmt_str);
   i::Handle<i::JSFunction> fun =
       i::Handle<i::JSFunction>(i::JSFunction::cast(object_fun));
   i::Handle<i::Object> value =
@@ -1543,7 +1662,7 @@ static i::Handle<i::Object> CallV8HeapFunction(const char* name,
                                                bool* has_pending_exception) {
   i::Object** argv[1] = { data.location() };
   return CallV8HeapFunction(name,
-                            i::Top::builtins(),
+                            i::Isolate::Current()->js_builtins_object(),
                             1,
                             argv,
                             has_pending_exception);
@@ -1551,9 +1670,10 @@ static i::Handle<i::Object> CallV8HeapFunction(const char* name,
 
 
 int Message::GetLineNumber() const {
-  ON_BAILOUT("v8::Message::GetLineNumber()", return kNoLineNumberInfo);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Message::GetLineNumber()", return kNoLineNumberInfo);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
 
   EXCEPTION_PREAMBLE();
   i::Handle<i::Object> result = CallV8HeapFunction("GetLineNumber",
@@ -1565,9 +1685,10 @@ int Message::GetLineNumber() const {
 
 
 int Message::GetStartPosition() const {
-  if (IsDeadCheck("v8::Message::GetStartPosition()")) return 0;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetStartPosition()")) return 0;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSMessageObject> message =
       i::Handle<i::JSMessageObject>::cast(Utils::OpenHandle(this));
   return message->start_position();
@@ -1575,9 +1696,10 @@ int Message::GetStartPosition() const {
 
 
 int Message::GetEndPosition() const {
-  if (IsDeadCheck("v8::Message::GetEndPosition()")) return 0;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetEndPosition()")) return 0;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSMessageObject> message =
       i::Handle<i::JSMessageObject>::cast(Utils::OpenHandle(this));
   return message->end_position();
@@ -1585,9 +1707,12 @@ int Message::GetEndPosition() const {
 
 
 int Message::GetStartColumn() const {
-  if (IsDeadCheck("v8::Message::GetStartColumn()")) return kNoColumnInfo;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetStartColumn()")) {
+    return kNoColumnInfo;
+  }
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> data_obj = Utils::OpenHandle(this);
   EXCEPTION_PREAMBLE();
   i::Handle<i::Object> start_col_obj = CallV8HeapFunction(
@@ -1600,9 +1725,10 @@ int Message::GetStartColumn() const {
 
 
 int Message::GetEndColumn() const {
-  if (IsDeadCheck("v8::Message::GetEndColumn()")) return kNoColumnInfo;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::GetEndColumn()")) return kNoColumnInfo;
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> data_obj = Utils::OpenHandle(this);
   EXCEPTION_PREAMBLE();
   i::Handle<i::Object> start_col_obj = CallV8HeapFunction(
@@ -1619,7 +1745,8 @@ int Message::GetEndColumn() const {
 
 
 Local<String> Message::GetSourceLine() const {
-  ON_BAILOUT("v8::Message::GetSourceLine()", return Local<String>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Message::GetSourceLine()", return Local<String>());
   ENTER_V8;
   HandleScope scope;
   EXCEPTION_PREAMBLE();
@@ -1636,16 +1763,20 @@ Local<String> Message::GetSourceLine() const {
 
 
 void Message::PrintCurrentStackTrace(FILE* out) {
-  if (IsDeadCheck("v8::Message::PrintCurrentStackTrace()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Message::PrintCurrentStackTrace()")) return;
   ENTER_V8;
-  i::Top::PrintCurrentStackTrace(out);
+  isolate->PrintCurrentStackTrace(out);
 }
 
 
 // --- S t a c k T r a c e ---
 
 Local<StackFrame> StackTrace::GetFrame(uint32_t index) const {
-  if (IsDeadCheck("v8::StackTrace::GetFrame()")) return Local<StackFrame>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackTrace::GetFrame()")) {
+    return Local<StackFrame>();
+  }
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::JSArray> self = Utils::OpenHandle(this);
@@ -1656,14 +1787,16 @@ Local<StackFrame> StackTrace::GetFrame(uint32_t index) const {
 
 
 int StackTrace::GetFrameCount() const {
-  if (IsDeadCheck("v8::StackTrace::GetFrameCount()")) return -1;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackTrace::GetFrameCount()")) return -1;
   ENTER_V8;
   return i::Smi::cast(Utils::OpenHandle(this)->length())->value();
 }
 
 
 Local<Array> StackTrace::AsArray() {
-  if (IsDeadCheck("v8::StackTrace::AsArray()")) Local<Array>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackTrace::AsArray()")) Local<Array>();
   ENTER_V8;
   return Utils::ToLocal(Utils::OpenHandle(this));
 }
@@ -1671,10 +1804,13 @@ Local<Array> StackTrace::AsArray() {
 
 Local<StackTrace> StackTrace::CurrentStackTrace(int frame_limit,
     StackTraceOptions options) {
-  if (IsDeadCheck("v8::StackTrace::CurrentStackTrace()")) Local<StackTrace>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackTrace::CurrentStackTrace()")) {
+    return Local<StackTrace>();
+  }
   ENTER_V8;
   i::Handle<i::JSArray> stackTrace =
-      i::Top::CaptureCurrentStackTrace(frame_limit, options);
+      isolate->CaptureCurrentStackTrace(frame_limit, options);
   return Utils::StackTraceToLocal(stackTrace);
 }
 
@@ -1682,11 +1818,12 @@ Local<StackTrace> StackTrace::CurrentStackTrace(int frame_limit,
 // --- S t a c k F r a m e ---
 
 int StackFrame::GetLineNumber() const {
-  if (IsDeadCheck("v8::StackFrame::GetLineNumber()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::GetLineNumber()")) {
     return Message::kNoLineNumberInfo;
   }
   ENTER_V8;
-  i::HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> line = GetProperty(self, "lineNumber");
   if (!line->IsSmi()) {
@@ -1697,11 +1834,12 @@ int StackFrame::GetLineNumber() const {
 
 
 int StackFrame::GetColumn() const {
-  if (IsDeadCheck("v8::StackFrame::GetColumn()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::GetColumn()")) {
     return Message::kNoColumnInfo;
   }
   ENTER_V8;
-  i::HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> column = GetProperty(self, "column");
   if (!column->IsSmi()) {
@@ -1712,7 +1850,10 @@ int StackFrame::GetColumn() const {
 
 
 Local<String> StackFrame::GetScriptName() const {
-  if (IsDeadCheck("v8::StackFrame::GetScriptName()")) return Local<String>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::GetScriptName()")) {
+    return Local<String>();
+  }
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
@@ -1725,7 +1866,8 @@ Local<String> StackFrame::GetScriptName() const {
 
 
 Local<String> StackFrame::GetScriptNameOrSourceURL() const {
-  if (IsDeadCheck("v8::StackFrame::GetScriptNameOrSourceURL()")) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::GetScriptNameOrSourceURL()")) {
     return Local<String>();
   }
   ENTER_V8;
@@ -1740,7 +1882,10 @@ Local<String> StackFrame::GetScriptNameOrSourceURL() const {
 
 
 Local<String> StackFrame::GetFunctionName() const {
-  if (IsDeadCheck("v8::StackFrame::GetFunctionName()")) return Local<String>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::GetFunctionName()")) {
+    return Local<String>();
+  }
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
@@ -1753,9 +1898,10 @@ Local<String> StackFrame::GetFunctionName() const {
 
 
 bool StackFrame::IsEval() const {
-  if (IsDeadCheck("v8::StackFrame::IsEval()")) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::IsEval()")) return false;
   ENTER_V8;
-  i::HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> is_eval = GetProperty(self, "isEval");
   return is_eval->IsTrue();
@@ -1763,9 +1909,10 @@ bool StackFrame::IsEval() const {
 
 
 bool StackFrame::IsConstructor() const {
-  if (IsDeadCheck("v8::StackFrame::IsConstructor()")) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::StackFrame::IsConstructor()")) return false;
   ENTER_V8;
-  i::HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> is_constructor = GetProperty(self, "isConstructor");
   return is_constructor->IsTrue();
@@ -1775,37 +1922,41 @@ bool StackFrame::IsConstructor() const {
 // --- D a t a ---
 
 bool Value::IsUndefined() const {
-  if (IsDeadCheck("v8::Value::IsUndefined()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsUndefined()")) {
+    return false;
+  }
   return Utils::OpenHandle(this)->IsUndefined();
 }
 
 
 bool Value::IsNull() const {
-  if (IsDeadCheck("v8::Value::IsNull()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsNull()")) return false;
   return Utils::OpenHandle(this)->IsNull();
 }
 
 
 bool Value::IsTrue() const {
-  if (IsDeadCheck("v8::Value::IsTrue()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsTrue()")) return false;
   return Utils::OpenHandle(this)->IsTrue();
 }
 
 
 bool Value::IsFalse() const {
-  if (IsDeadCheck("v8::Value::IsFalse()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsFalse()")) return false;
   return Utils::OpenHandle(this)->IsFalse();
 }
 
 
 bool Value::IsFunction() const {
-  if (IsDeadCheck("v8::Value::IsFunction()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsFunction()")) {
+    return false;
+  }
   return Utils::OpenHandle(this)->IsJSFunction();
 }
 
 
 bool Value::FullIsString() const {
-  if (IsDeadCheck("v8::Value::IsString()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsString()")) return false;
   bool result = Utils::OpenHandle(this)->IsString();
   ASSERT_EQ(result, QuickIsString());
   return result;
@@ -1813,37 +1964,41 @@ bool Value::FullIsString() const {
 
 
 bool Value::IsArray() const {
-  if (IsDeadCheck("v8::Value::IsArray()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsArray()")) return false;
   return Utils::OpenHandle(this)->IsJSArray();
 }
 
 
 bool Value::IsObject() const {
-  if (IsDeadCheck("v8::Value::IsObject()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsObject()")) return false;
   return Utils::OpenHandle(this)->IsJSObject();
 }
 
 
 bool Value::IsNumber() const {
-  if (IsDeadCheck("v8::Value::IsNumber()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsNumber()")) return false;
   return Utils::OpenHandle(this)->IsNumber();
 }
 
 
 bool Value::IsBoolean() const {
-  if (IsDeadCheck("v8::Value::IsBoolean()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsBoolean()")) {
+    return false;
+  }
   return Utils::OpenHandle(this)->IsBoolean();
 }
 
 
 bool Value::IsExternal() const {
-  if (IsDeadCheck("v8::Value::IsExternal()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsExternal()")) {
+    return false;
+  }
   return Utils::OpenHandle(this)->IsProxy();
 }
 
 
 bool Value::IsInt32() const {
-  if (IsDeadCheck("v8::Value::IsInt32()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsInt32()")) return false;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) return true;
   if (obj->IsNumber()) {
@@ -1855,7 +2010,7 @@ bool Value::IsInt32() const {
 
 
 bool Value::IsUint32() const {
-  if (IsDeadCheck("v8::Value::IsUint32()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsUint32()")) return false;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) return i::Smi::cast(*obj)->value() >= 0;
   if (obj->IsNumber()) {
@@ -1867,22 +2022,25 @@ bool Value::IsUint32() const {
 
 
 bool Value::IsDate() const {
-  if (IsDeadCheck("v8::Value::IsDate()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsDate()")) return false;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
-  return obj->HasSpecificClassOf(i::Heap::Date_symbol());
+  return obj->HasSpecificClassOf(HEAP->Date_symbol());
 }
 
 
 bool Value::IsRegExp() const {
-  if (IsDeadCheck("v8::Value::IsRegExp()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsRegExp()")) return false;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   return obj->IsJSRegExp();
 }
 
 
 Local<String> Value::ToString() const {
-  if (IsDeadCheck("v8::Value::ToString()")) return Local<String>();
-  LOG_API("ToString");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::ToString()")) {
+    return Local<String>();
+  }
+  LOG_API(isolate, "ToString");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> str;
   if (obj->IsString()) {
@@ -1898,8 +2056,11 @@ Local<String> Value::ToString() const {
 
 
 Local<String> Value::ToDetailString() const {
-  if (IsDeadCheck("v8::Value::ToDetailString()")) return Local<String>();
-  LOG_API("ToDetailString");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToDetailString()")) {
+    return Local<String>();
+  }
+  LOG_API(isolate, "ToDetailString");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> str;
   if (obj->IsString()) {
@@ -1915,8 +2076,9 @@ Local<String> Value::ToDetailString() const {
 
 
 Local<v8::Object> Value::ToObject() const {
-  if (IsDeadCheck("v8::Value::ToObject()")) return Local<v8::Object>();
-  LOG_API("ToObject");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToObject()")) return Local<v8::Object>();
+  LOG_API(isolate, "ToObject");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> val;
   if (obj->IsJSObject()) {
@@ -1932,8 +2094,11 @@ Local<v8::Object> Value::ToObject() const {
 
 
 Local<Boolean> Value::ToBoolean() const {
-  if (IsDeadCheck("v8::Value::ToBoolean()")) return Local<Boolean>();
-  LOG_API("ToBoolean");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::ToBoolean()")) {
+    return Local<Boolean>();
+  }
+  LOG_API(isolate, "ToBoolean");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsBoolean()) {
     return Local<Boolean>(ToApi<Boolean>(obj));
@@ -1946,8 +2111,9 @@ Local<Boolean> Value::ToBoolean() const {
 
 
 Local<Number> Value::ToNumber() const {
-  if (IsDeadCheck("v8::Value::ToNumber()")) return Local<Number>();
-  LOG_API("ToNumber");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToNumber()")) return Local<Number>();
+  LOG_API(isolate, "ToNumber");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsNumber()) {
@@ -1963,8 +2129,9 @@ Local<Number> Value::ToNumber() const {
 
 
 Local<Integer> Value::ToInteger() const {
-  if (IsDeadCheck("v8::Value::ToInteger()")) return Local<Integer>();
-  LOG_API("ToInteger");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToInteger()")) return Local<Integer>();
+  LOG_API(isolate, "ToInteger");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsSmi()) {
@@ -1980,7 +2147,7 @@ Local<Integer> Value::ToInteger() const {
 
 
 void External::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::External::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::External::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsProxy(),
            "v8::External::Cast()",
@@ -1989,7 +2156,7 @@ void External::CheckCast(v8::Value* that) {
 
 
 void v8::Object::CheckCast(Value* that) {
-  if (IsDeadCheck("v8::Object::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Object::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsJSObject(),
            "v8::Object::Cast()",
@@ -1998,7 +2165,7 @@ void v8::Object::CheckCast(Value* that) {
 
 
 void v8::Function::CheckCast(Value* that) {
-  if (IsDeadCheck("v8::Function::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Function::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsJSFunction(),
            "v8::Function::Cast()",
@@ -2007,7 +2174,7 @@ void v8::Function::CheckCast(Value* that) {
 
 
 void v8::String::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::String::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::String::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsString(),
            "v8::String::Cast()",
@@ -2016,7 +2183,7 @@ void v8::String::CheckCast(v8::Value* that) {
 
 
 void v8::Number::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::Number::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Number::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsNumber(),
            "v8::Number::Cast()",
@@ -2025,7 +2192,7 @@ void v8::Number::CheckCast(v8::Value* that) {
 
 
 void v8::Integer::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::Integer::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Integer::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsNumber(),
            "v8::Integer::Cast()",
@@ -2034,7 +2201,7 @@ void v8::Integer::CheckCast(v8::Value* that) {
 
 
 void v8::Array::CheckCast(Value* that) {
-  if (IsDeadCheck("v8::Array::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Array::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsJSArray(),
            "v8::Array::Cast()",
@@ -2043,16 +2210,16 @@ void v8::Array::CheckCast(Value* that) {
 
 
 void v8::Date::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::Date::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Date::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
-  ApiCheck(obj->HasSpecificClassOf(i::Heap::Date_symbol()),
+  ApiCheck(obj->HasSpecificClassOf(HEAP->Date_symbol()),
            "v8::Date::Cast()",
            "Could not convert to date");
 }
 
 
 void v8::RegExp::CheckCast(v8::Value* that) {
-  if (IsDeadCheck("v8::RegExp::Cast()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::RegExp::Cast()")) return;
   i::Handle<i::Object> obj = Utils::OpenHandle(that);
   ApiCheck(obj->IsJSRegExp(),
            "v8::RegExp::Cast()",
@@ -2061,8 +2228,9 @@ void v8::RegExp::CheckCast(v8::Value* that) {
 
 
 bool Value::BooleanValue() const {
-  if (IsDeadCheck("v8::Value::BooleanValue()")) return false;
-  LOG_API("BooleanValue");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::BooleanValue()")) return false;
+  LOG_API(isolate, "BooleanValue");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsBoolean()) {
     return obj->IsTrue();
@@ -2075,8 +2243,11 @@ bool Value::BooleanValue() const {
 
 
 double Value::NumberValue() const {
-  if (IsDeadCheck("v8::Value::NumberValue()")) return i::OS::nan_value();
-  LOG_API("NumberValue");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::NumberValue()")) {
+    return i::OS::nan_value();
+  }
+  LOG_API(isolate, "NumberValue");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsNumber()) {
@@ -2092,8 +2263,9 @@ double Value::NumberValue() const {
 
 
 int64_t Value::IntegerValue() const {
-  if (IsDeadCheck("v8::Value::IntegerValue()")) return 0;
-  LOG_API("IntegerValue");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::IntegerValue()")) return 0;
+  LOG_API(isolate, "IntegerValue");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsNumber()) {
@@ -2113,8 +2285,9 @@ int64_t Value::IntegerValue() const {
 
 
 Local<Int32> Value::ToInt32() const {
-  if (IsDeadCheck("v8::Value::ToInt32()")) return Local<Int32>();
-  LOG_API("ToInt32");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToInt32()")) return Local<Int32>();
+  LOG_API(isolate, "ToInt32");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsSmi()) {
@@ -2130,8 +2303,9 @@ Local<Int32> Value::ToInt32() const {
 
 
 Local<Uint32> Value::ToUint32() const {
-  if (IsDeadCheck("v8::Value::ToUint32()")) return Local<Uint32>();
-  LOG_API("ToUInt32");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToUint32()")) return Local<Uint32>();
+  LOG_API(isolate, "ToUInt32");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> num;
   if (obj->IsSmi()) {
@@ -2147,8 +2321,9 @@ Local<Uint32> Value::ToUint32() const {
 
 
 Local<Uint32> Value::ToArrayIndex() const {
-  if (IsDeadCheck("v8::Value::ToArrayIndex()")) return Local<Uint32>();
-  LOG_API("ToArrayIndex");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::ToArrayIndex()")) return Local<Uint32>();
+  LOG_API(isolate, "ToArrayIndex");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     if (i::Smi::cast(*obj)->value() >= 0) return Utils::Uint32ToLocal(obj);
@@ -2166,7 +2341,7 @@ Local<Uint32> Value::ToArrayIndex() const {
     if (index <= static_cast<uint32_t>(i::Smi::kMaxValue)) {
       value = i::Handle<i::Object>(i::Smi::FromInt(index));
     } else {
-      value = i::Factory::NewNumber(index);
+      value = FACTORY->NewNumber(index);
     }
     return Utils::Uint32ToLocal(value);
   }
@@ -2175,13 +2350,14 @@ Local<Uint32> Value::ToArrayIndex() const {
 
 
 int32_t Value::Int32Value() const {
-  if (IsDeadCheck("v8::Value::Int32Value()")) return 0;
-  LOG_API("Int32Value");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::Int32Value()")) return 0;
+  LOG_API(isolate, "Int32Value");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     return i::Smi::cast(*obj)->value();
   } else {
-    LOG_API("Int32Value (slow)");
+    LOG_API(isolate, "Int32Value (slow)");
     ENTER_V8;
     EXCEPTION_PREAMBLE();
     i::Handle<i::Object> num =
@@ -2197,12 +2373,13 @@ int32_t Value::Int32Value() const {
 
 
 bool Value::Equals(Handle<Value> that) const {
-  if (IsDeadCheck("v8::Value::Equals()")
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::Equals()")
       || EmptyCheck("v8::Value::Equals()", this)
       || EmptyCheck("v8::Value::Equals()", that)) {
     return false;
   }
-  LOG_API("Equals");
+  LOG_API(isolate, "Equals");
   ENTER_V8;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> other = Utils::OpenHandle(*that);
@@ -2222,12 +2399,13 @@ bool Value::Equals(Handle<Value> that) const {
 
 
 bool Value::StrictEquals(Handle<Value> that) const {
-  if (IsDeadCheck("v8::Value::StrictEquals()")
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::StrictEquals()")
       || EmptyCheck("v8::Value::StrictEquals()", this)
       || EmptyCheck("v8::Value::StrictEquals()", that)) {
     return false;
   }
-  LOG_API("StrictEquals");
+  LOG_API(isolate, "StrictEquals");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::Object> other = Utils::OpenHandle(*that);
   // Must check HeapNumber first, since NaN !== NaN.
@@ -2253,8 +2431,9 @@ bool Value::StrictEquals(Handle<Value> that) const {
 
 
 uint32_t Value::Uint32Value() const {
-  if (IsDeadCheck("v8::Value::Uint32Value()")) return 0;
-  LOG_API("Uint32Value");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Value::Uint32Value()")) return 0;
+  LOG_API(isolate, "Uint32Value");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     return i::Smi::cast(*obj)->value();
@@ -2275,9 +2454,10 @@ uint32_t Value::Uint32Value() const {
 
 bool v8::Object::Set(v8::Handle<Value> key, v8::Handle<Value> value,
                      v8::PropertyAttribute attribs) {
-  ON_BAILOUT("v8::Object::Set()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Set()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::Object> self = Utils::OpenHandle(this);
   i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
   i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
@@ -2295,9 +2475,10 @@ bool v8::Object::Set(v8::Handle<Value> key, v8::Handle<Value> value,
 
 
 bool v8::Object::Set(uint32_t index, v8::Handle<Value> value) {
-  ON_BAILOUT("v8::Object::Set()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Set()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
   EXCEPTION_PREAMBLE();
@@ -2315,9 +2496,10 @@ bool v8::Object::Set(uint32_t index, v8::Handle<Value> value) {
 bool v8::Object::ForceSet(v8::Handle<Value> key,
                           v8::Handle<Value> value,
                           v8::PropertyAttribute attribs) {
-  ON_BAILOUT("v8::Object::ForceSet()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::ForceSet()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
   i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
@@ -2334,9 +2516,10 @@ bool v8::Object::ForceSet(v8::Handle<Value> key,
 
 
 bool v8::Object::ForceDelete(v8::Handle<Value> key) {
-  ON_BAILOUT("v8::Object::ForceDelete()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::ForceDelete()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
 
@@ -2353,7 +2536,8 @@ bool v8::Object::ForceDelete(v8::Handle<Value> key) {
 
 
 Local<Value> v8::Object::Get(v8::Handle<Value> key) {
-  ON_BAILOUT("v8::Object::Get()", return Local<v8::Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Get()", return Local<v8::Value>());
   ENTER_V8;
   i::Handle<i::Object> self = Utils::OpenHandle(this);
   i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
@@ -2366,7 +2550,8 @@ Local<Value> v8::Object::Get(v8::Handle<Value> key) {
 
 
 Local<Value> v8::Object::Get(uint32_t index) {
-  ON_BAILOUT("v8::Object::Get()", return Local<v8::Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Get()", return Local<v8::Value>());
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   EXCEPTION_PREAMBLE();
@@ -2378,7 +2563,8 @@ Local<Value> v8::Object::Get(uint32_t index) {
 
 
 Local<Value> v8::Object::GetPrototype() {
-  ON_BAILOUT("v8::Object::GetPrototype()", return Local<v8::Value>());
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::GetPrototype()",
+             return Local<v8::Value>());
   ENTER_V8;
   i::Handle<i::Object> self = Utils::OpenHandle(this);
   i::Handle<i::Object> result = i::GetPrototype(self);
@@ -2387,7 +2573,8 @@ Local<Value> v8::Object::GetPrototype() {
 
 
 bool v8::Object::SetPrototype(Handle<Value> value) {
-  ON_BAILOUT("v8::Object::SetPrototype()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::SetPrototype()", return false);
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
@@ -2401,7 +2588,8 @@ bool v8::Object::SetPrototype(Handle<Value> value) {
 
 Local<Object> v8::Object::FindInstanceInPrototypeChain(
     v8::Handle<FunctionTemplate> tmpl) {
-  ON_BAILOUT("v8::Object::FindInstanceInPrototypeChain()",
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::Object::FindInstanceInPrototypeChain()",
              return Local<v8::Object>());
   ENTER_V8;
   i::JSObject* object = *Utils::OpenHandle(this);
@@ -2416,7 +2604,8 @@ Local<Object> v8::Object::FindInstanceInPrototypeChain(
 
 
 Local<Array> v8::Object::GetPropertyNames() {
-  ON_BAILOUT("v8::Object::GetPropertyNames()", return Local<v8::Array>());
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::GetPropertyNames()",
+             return Local<v8::Array>());
   ENTER_V8;
   v8::HandleScope scope;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
@@ -2425,14 +2614,15 @@ Local<Array> v8::Object::GetPropertyNames() {
   // Because we use caching to speed up enumeration it is important
   // to never change the result of the basic enumeration function so
   // we clone the result.
-  i::Handle<i::FixedArray> elms = i::Factory::CopyFixedArray(value);
-  i::Handle<i::JSArray> result = i::Factory::NewJSArrayWithElements(elms);
+  i::Handle<i::FixedArray> elms = FACTORY->CopyFixedArray(value);
+  i::Handle<i::JSArray> result = FACTORY->NewJSArrayWithElements(elms);
   return scope.Close(Utils::ToLocal(result));
 }
 
 
 Local<String> v8::Object::ObjectProtoToString() {
-  ON_BAILOUT("v8::Object::ObjectProtoToString()", return Local<v8::String>());
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::ObjectProtoToString()",
+             return Local<v8::String>());
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
 
@@ -2484,7 +2674,9 @@ Local<String> v8::Object::ObjectProtoToString() {
 
 
 Local<String> v8::Object::GetConstructorName() {
-  ON_BAILOUT("v8::Object::GetConstructorName()", return Local<v8::String>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::GetConstructorName()",
+             return Local<v8::String>());
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::String> name(self->constructor_name());
@@ -2493,9 +2685,10 @@ Local<String> v8::Object::GetConstructorName() {
 
 
 bool v8::Object::Delete(v8::Handle<String> key) {
-  ON_BAILOUT("v8::Object::Delete()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Delete()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
   return i::DeleteProperty(self, key_obj)->IsTrue();
@@ -2503,7 +2696,7 @@ bool v8::Object::Delete(v8::Handle<String> key) {
 
 
 bool v8::Object::Has(v8::Handle<String> key) {
-  ON_BAILOUT("v8::Object::Has()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::Has()", return false);
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
@@ -2512,7 +2705,8 @@ bool v8::Object::Has(v8::Handle<String> key) {
 
 
 bool v8::Object::Delete(uint32_t index) {
-  ON_BAILOUT("v8::Object::DeleteProperty()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::DeleteProperty()",
+             return false);
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
@@ -2521,7 +2715,7 @@ bool v8::Object::Delete(uint32_t index) {
 
 
 bool v8::Object::Has(uint32_t index) {
-  ON_BAILOUT("v8::Object::HasProperty()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::HasProperty()", return false);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   return self->HasElement(index);
 }
@@ -2533,9 +2727,10 @@ bool Object::SetAccessor(Handle<String> name,
                          v8::Handle<Value> data,
                          AccessControl settings,
                          PropertyAttribute attributes) {
-  ON_BAILOUT("v8::Object::SetAccessor()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name,
                                                      getter, setter, data,
                                                      settings, attributes);
@@ -2545,20 +2740,24 @@ bool Object::SetAccessor(Handle<String> name,
 
 
 bool v8::Object::HasRealNamedProperty(Handle<String> key) {
-  ON_BAILOUT("v8::Object::HasRealNamedProperty()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::HasRealNamedProperty()",
+             return false);
   return Utils::OpenHandle(this)->HasRealNamedProperty(
       *Utils::OpenHandle(*key));
 }
 
 
 bool v8::Object::HasRealIndexedProperty(uint32_t index) {
-  ON_BAILOUT("v8::Object::HasRealIndexedProperty()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::HasRealIndexedProperty()",
+             return false);
   return Utils::OpenHandle(this)->HasRealElementProperty(index);
 }
 
 
 bool v8::Object::HasRealNamedCallbackProperty(Handle<String> key) {
-  ON_BAILOUT("v8::Object::HasRealNamedCallbackProperty()", return false);
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::Object::HasRealNamedCallbackProperty()",
+             return false);
   ENTER_V8;
   return Utils::OpenHandle(this)->HasRealNamedCallbackProperty(
       *Utils::OpenHandle(*key));
@@ -2566,20 +2765,23 @@ bool v8::Object::HasRealNamedCallbackProperty(Handle<String> key) {
 
 
 bool v8::Object::HasNamedLookupInterceptor() {
-  ON_BAILOUT("v8::Object::HasNamedLookupInterceptor()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::HasNamedLookupInterceptor()",
+             return false);
   return Utils::OpenHandle(this)->HasNamedInterceptor();
 }
 
 
 bool v8::Object::HasIndexedLookupInterceptor() {
-  ON_BAILOUT("v8::Object::HasIndexedLookupInterceptor()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::HasIndexedLookupInterceptor()",
+             return false);
   return Utils::OpenHandle(this)->HasIndexedInterceptor();
 }
 
 
 Local<Value> v8::Object::GetRealNamedPropertyInPrototypeChain(
       Handle<String> key) {
-  ON_BAILOUT("v8::Object::GetRealNamedPropertyInPrototypeChain()",
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::Object::GetRealNamedPropertyInPrototypeChain()",
              return Local<Value>());
   ENTER_V8;
   i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this);
@@ -2601,7 +2803,8 @@ Local<Value> v8::Object::GetRealNamedPropertyInPrototypeChain(
 
 
 Local<Value> v8::Object::GetRealNamedProperty(Handle<String> key) {
-  ON_BAILOUT("v8::Object::GetRealNamedProperty()", return Local<Value>());
+  ON_BAILOUT(i::Isolate::Current(), "v8::Object::GetRealNamedProperty()",
+             return Local<Value>());
   ENTER_V8;
   i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this);
   i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
@@ -2625,9 +2828,10 @@ Local<Value> v8::Object::GetRealNamedProperty(Handle<String> key) {
 // Because the object gets a new map, existing inline cache caching
 // the old map of this object will fail.
 void v8::Object::TurnOnAccessCheck() {
-  ON_BAILOUT("v8::Object::TurnOnAccessCheck()", return);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::TurnOnAccessCheck()", return);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
 
   // When turning on access checks for a global object deoptimize all functions
@@ -2635,7 +2839,7 @@ void v8::Object::TurnOnAccessCheck() {
   i::Deoptimizer::DeoptimizeGlobalObject(*obj);
 
   i::Handle<i::Map> new_map =
-    i::Factory::CopyMapDropTransitions(i::Handle<i::Map>(obj->map()));
+    FACTORY->CopyMapDropTransitions(i::Handle<i::Map>(obj->map()));
   new_map->set_is_access_check_needed(true);
   obj->set_map(*new_map);
 }
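// Illustrative sketch, not part of the patch: internal entry points now bind
// their handle scope to an explicit isolate instead of process-global state,
// e.g.
//
//   i::Isolate* isolate = i::Isolate::Current();
//   i::HandleScope scope(isolate);  // handles opened below belong to this
//                                   // isolate's handle-scope data
//
// while the public v8::HandleScope keeps its no-argument form for embedders,
// as the unchanged call sites elsewhere in this file show.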
@@ -2647,7 +2851,8 @@ bool v8::Object::IsDirty() {
 
 
 Local<v8::Object> v8::Object::Clone() {
-  ON_BAILOUT("v8::Object::Clone()", return Local<Object>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::Clone()", return Local<Object>());
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   EXCEPTION_PREAMBLE();
@@ -2659,9 +2864,10 @@ Local<v8::Object> v8::Object::Clone() {
 
 
 int v8::Object::GetIdentityHash() {
-  ON_BAILOUT("v8::Object::GetIdentityHash()", return 0);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::GetIdentityHash()", return 0);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> hidden_props_obj(i::GetHiddenProperties(self, true));
   if (!hidden_props_obj->IsJSObject()) {
@@ -2672,7 +2878,7 @@ int v8::Object::GetIdentityHash() {
   }
   i::Handle<i::JSObject> hidden_props =
       i::Handle<i::JSObject>::cast(hidden_props_obj);
-  i::Handle<i::String> hash_symbol = i::Factory::identity_hash_symbol();
+  i::Handle<i::String> hash_symbol = FACTORY->identity_hash_symbol();
   if (hidden_props->HasLocalProperty(*hash_symbol)) {
     i::Handle<i::Object> hash = i::GetProperty(hidden_props, hash_symbol);
     CHECK(!hash.is_null());
@@ -2685,7 +2891,7 @@ int v8::Object::GetIdentityHash() {
   do {
     // Generate a random 32-bit hash value but limit range to fit
     // within a smi.
-    hash_value = i::V8::Random() & i::Smi::kMaxValue;
+    hash_value = i::V8::Random(self->GetIsolate()) & i::Smi::kMaxValue;
     attempts++;
   } while (hash_value == 0 && attempts < 30);
   hash_value = hash_value != 0 ? hash_value : 1;  // never return 0
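// Illustrative sketch, not part of the patch: masking the random value with
// i::Smi::kMaxValue keeps the identity hash in smi range (on 32-bit targets
// kMaxValue is 2^30 - 1), i.e.
//
//   int hash = i::V8::Random(self->GetIsolate()) & i::Smi::kMaxValue;
//
// and the retry loop plus the trailing "!= 0" fixup guarantee a non-zero hash.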
@@ -2701,9 +2907,10 @@ int v8::Object::GetIdentityHash() {
 
 bool v8::Object::SetHiddenValue(v8::Handle<v8::String> key,
                                 v8::Handle<v8::Value> value) {
-  ON_BAILOUT("v8::Object::SetHiddenValue()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::SetHiddenValue()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, true));
   i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
@@ -2722,7 +2929,9 @@ bool v8::Object::SetHiddenValue(v8::Handle<v8::String> key,
 
 
 v8::Local<v8::Value> v8::Object::GetHiddenValue(v8::Handle<v8::String> key) {
-  ON_BAILOUT("v8::Object::GetHiddenValue()", return Local<v8::Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Object::GetHiddenValue()",
+             return Local<v8::Value>());
   ENTER_V8;
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
@@ -2742,9 +2951,10 @@ v8::Local<v8::Value> v8::Object::GetHiddenValue(v8::Handle<v8::String> key) {
 
 
 bool v8::Object::DeleteHiddenValue(v8::Handle<v8::String> key) {
-  ON_BAILOUT("v8::DeleteHiddenValue()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::DeleteHiddenValue()", return false);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
   if (hidden_props->IsUndefined()) {
@@ -2763,7 +2973,7 @@ void PrepareExternalArrayElements(i::Handle<i::JSObject> object,
                                   ExternalArrayType array_type,
                                   int length) {
   i::Handle<i::ExternalArray> array =
-      i::Factory::NewExternalArray(length, array_type, data);
+      FACTORY->NewExternalArray(length, array_type, data);
 
   // If the object already has external elements, create a new, unique
   // map if the element type is now changing, because assumptions about
@@ -2772,10 +2982,10 @@ void PrepareExternalArrayElements(i::Handle<i::JSObject> object,
   bool force_unique_map =
       elements->map()->IsUndefined() ||
       !elements->map()->has_external_array_elements() ||
-      elements->map() != i::Heap::MapForExternalArrayType(array_type);
+      elements->map() != HEAP->MapForExternalArrayType(array_type);
   if (force_unique_map) {
     i::Handle<i::Map> external_array_map =
-        i::Factory::NewExternalArrayElementsMap(
+        FACTORY->NewExternalArrayElementsMap(
             i::Handle<i::Map>(object->map()));
     object->set_map(*external_array_map);
   }
@@ -2786,9 +2996,10 @@ void PrepareExternalArrayElements(i::Handle<i::JSObject> object,
 
 
 void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
-  ON_BAILOUT("v8::SetElementsToPixelData()", return);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::SetElementsToPixelData()", return);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   if (!ApiCheck(length <= i::ExternalPixelArray::kMaxLength,
                 "v8::Object::SetIndexedPropertiesToPixelData()",
                 "length exceeds max acceptable value")) {
@@ -2805,14 +3016,16 @@ void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
 
 
 bool v8::Object::HasIndexedPropertiesInPixelData() {
-  ON_BAILOUT("v8::HasIndexedPropertiesInPixelData()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::HasIndexedPropertiesInPixelData()",
+             return false);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   return self->HasExternalPixelElements();
 }
 
 
 uint8_t* v8::Object::GetIndexedPropertiesPixelData() {
-  ON_BAILOUT("v8::GetIndexedPropertiesPixelData()", return NULL);
+  ON_BAILOUT(i::Isolate::Current(), "v8::GetIndexedPropertiesPixelData()",
+             return NULL);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   if (self->HasExternalPixelElements()) {
     return i::ExternalPixelArray::cast(self->elements())->
@@ -2824,7 +3037,8 @@ uint8_t* v8::Object::GetIndexedPropertiesPixelData() {
 
 
 int v8::Object::GetIndexedPropertiesPixelDataLength() {
-  ON_BAILOUT("v8::GetIndexedPropertiesPixelDataLength()", return -1);
+  ON_BAILOUT(i::Isolate::Current(), "v8::GetIndexedPropertiesPixelDataLength()",
+             return -1);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   if (self->HasExternalPixelElements()) {
     return i::ExternalPixelArray::cast(self->elements())->length();
@@ -2837,9 +3051,10 @@ void v8::Object::SetIndexedPropertiesToExternalArrayData(
     void* data,
     ExternalArrayType array_type,
     int length) {
-  ON_BAILOUT("v8::SetIndexedPropertiesToExternalArrayData()", return);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::SetIndexedPropertiesToExternalArrayData()", return);
   ENTER_V8;
-  HandleScope scope;
+  i::HandleScope scope(isolate);
   if (!ApiCheck(length <= i::ExternalArray::kMaxLength,
                 "v8::Object::SetIndexedPropertiesToExternalArrayData()",
                 "length exceeds max acceptable value")) {
@@ -2856,14 +3071,18 @@ void v8::Object::SetIndexedPropertiesToExternalArrayData(
 
 
 bool v8::Object::HasIndexedPropertiesInExternalArrayData() {
-  ON_BAILOUT("v8::HasIndexedPropertiesInExternalArrayData()", return false);
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::HasIndexedPropertiesInExternalArrayData()",
+             return false);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   return self->HasExternalArrayElements();
 }
 
 
 void* v8::Object::GetIndexedPropertiesExternalArrayData() {
-  ON_BAILOUT("v8::GetIndexedPropertiesExternalArrayData()", return NULL);
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::GetIndexedPropertiesExternalArrayData()",
+             return NULL);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   if (self->HasExternalArrayElements()) {
     return i::ExternalArray::cast(self->elements())->external_pointer();
@@ -2874,7 +3093,8 @@ void* v8::Object::GetIndexedPropertiesExternalArrayData() {
 
 
 ExternalArrayType v8::Object::GetIndexedPropertiesExternalArrayDataType() {
-  ON_BAILOUT("v8::GetIndexedPropertiesExternalArrayDataType()",
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::GetIndexedPropertiesExternalArrayDataType()",
              return static_cast<ExternalArrayType>(-1));
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   switch (self->elements()->map()->instance_type()) {
@@ -2901,7 +3121,9 @@ ExternalArrayType v8::Object::GetIndexedPropertiesExternalArrayDataType() {
 
 
 int v8::Object::GetIndexedPropertiesExternalArrayDataLength() {
-  ON_BAILOUT("v8::GetIndexedPropertiesExternalArrayDataLength()", return 0);
+  ON_BAILOUT(i::Isolate::Current(),
+             "v8::GetIndexedPropertiesExternalArrayDataLength()",
+             return 0);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   if (self->HasExternalArrayElements()) {
     return i::ExternalArray::cast(self->elements())->length();
@@ -2918,8 +3140,10 @@ Local<v8::Object> Function::NewInstance() const {
 
 Local<v8::Object> Function::NewInstance(int argc,
                                         v8::Handle<v8::Value> argv[]) const {
-  ON_BAILOUT("v8::Function::NewInstance()", return Local<v8::Object>());
-  LOG_API("Function::NewInstance");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Function::NewInstance()",
+             return Local<v8::Object>());
+  LOG_API(isolate, "Function::NewInstance");
   ENTER_V8;
   HandleScope scope;
   i::Handle<i::JSFunction> function = Utils::OpenHandle(this);
@@ -2935,12 +3159,13 @@ Local<v8::Object> Function::NewInstance(int argc,
 
 Local<v8::Value> Function::Call(v8::Handle<v8::Object> recv, int argc,
                                 v8::Handle<v8::Value> argv[]) {
-  ON_BAILOUT("v8::Function::Call()", return Local<v8::Value>());
-  LOG_API("Function::Call");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Function::Call()", return Local<v8::Value>());
+  LOG_API(isolate, "Function::Call");
   ENTER_V8;
   i::Object* raw_result = NULL;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::JSFunction> fun = Utils::OpenHandle(this);
     i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv);
     STATIC_ASSERT(sizeof(v8::Handle<v8::Value>) == sizeof(i::Object**));
@@ -2996,80 +3221,14 @@ int Function::GetScriptLineNumber() const {
 }
 
 
-namespace {
-
-// Tracks string usage to help make better decisions when
-// externalizing strings.
-//
-// Implementation note: internally this class only tracks fresh
-// strings and keeps a single use counter for them.
-class StringTracker {
- public:
-  // Records that the given string's characters were copied to some
-  // external buffer. If this happens often we should honor
-  // externalization requests for the string.
-  static void RecordWrite(i::Handle<i::String> string) {
-    i::Address address = reinterpret_cast<i::Address>(*string);
-    i::Address top = i::Heap::NewSpaceTop();
-    if (IsFreshString(address, top)) {
-      IncrementUseCount(top);
-    }
-  }
-
-  // Estimates freshness and use frequency of the given string based
-  // on how close it is to the new space top and the recorded usage
-  // history.
-  static inline bool IsFreshUnusedString(i::Handle<i::String> string) {
-    i::Address address = reinterpret_cast<i::Address>(*string);
-    i::Address top = i::Heap::NewSpaceTop();
-    return IsFreshString(address, top) && IsUseCountLow(top);
-  }
-
- private:
-  static inline bool IsFreshString(i::Address string, i::Address top) {
-    return top - kFreshnessLimit <= string && string <= top;
-  }
-
-  static inline bool IsUseCountLow(i::Address top) {
-    if (last_top_ != top) return true;
-    return use_count_ < kUseLimit;
-  }
-
-  static inline void IncrementUseCount(i::Address top) {
-    if (last_top_ != top) {
-      use_count_ = 0;
-      last_top_ = top;
-    }
-    ++use_count_;
-  }
-
-  // How close to the new space top a fresh string has to be.
-  static const int kFreshnessLimit = 1024;
-
-  // The number of uses required to consider a string useful.
-  static const int kUseLimit = 32;
-
-  // Single use counter shared by all fresh strings.
-  static int use_count_;
-
-  // Last new space top when the use count above was valid.
-  static i::Address last_top_;
-};
-
-int StringTracker::use_count_ = 0;
-i::Address StringTracker::last_top_ = NULL;
-
-}  // namespace
-
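// Illustrative sketch, not part of the patch: the StringTracker removed above
// reappears as per-isolate state, reached later in this file through
// isolate->string_tracker(). Its freshness test is an address-window check
// against the new-space allocation top, roughly
//
//   bool IsFreshString(i::Address str, i::Address top) {
//     return top - kFreshnessLimit <= str && str <= top;  // limit was 1024
//   }
//
// paired with a single use counter that resets whenever that top moves.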
-
 int String::Length() const {
-  if (IsDeadCheck("v8::String::Length()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::String::Length()")) return 0;
   return Utils::OpenHandle(this)->length();
 }
 
 
 int String::Utf8Length() const {
-  if (IsDeadCheck("v8::String::Utf8Length()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::String::Utf8Length()")) return 0;
   return Utils::OpenHandle(this)->Utf8Length();
 }
 
@@ -3078,11 +3237,13 @@ int String::WriteUtf8(char* buffer,
                       int capacity,
                       int* nchars_ref,
                       WriteHints hints) const {
-  if (IsDeadCheck("v8::String::WriteUtf8()")) return 0;
-  LOG_API("String::WriteUtf8");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::WriteUtf8()")) return 0;
+  LOG_API(isolate, "String::WriteUtf8");
   ENTER_V8;
+  i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
   i::Handle<i::String> str = Utils::OpenHandle(this);
-  StringTracker::RecordWrite(str);
+  isolate->string_tracker()->RecordWrite(str);
   if (hints & HINT_MANY_WRITES_EXPECTED) {
     // Flatten the string for efficiency.  This applies whether we are
     // using StringInputBuffer or Get(i) to access the characters.
@@ -3133,12 +3294,14 @@ int String::WriteAscii(char* buffer,
                        int start,
                        int length,
                        WriteHints hints) const {
-  if (IsDeadCheck("v8::String::WriteAscii()")) return 0;
-  LOG_API("String::WriteAscii");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::WriteAscii()")) return 0;
+  LOG_API(isolate, "String::WriteAscii");
   ENTER_V8;
+  i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
   ASSERT(start >= 0 && length >= -1);
   i::Handle<i::String> str = Utils::OpenHandle(this);
-  StringTracker::RecordWrite(str);
+  isolate->string_tracker()->RecordWrite(str);
   if (hints & HINT_MANY_WRITES_EXPECTED) {
     // Flatten the string for efficiency.  This applies whether we are
     // using StringInputBuffer or Get(i) to access the characters.
@@ -3165,12 +3328,13 @@ int String::Write(uint16_t* buffer,
                   int start,
                   int length,
                   WriteHints hints) const {
-  if (IsDeadCheck("v8::String::Write()")) return 0;
-  LOG_API("String::Write");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::Write()")) return 0;
+  LOG_API(isolate, "String::Write");
   ENTER_V8;
   ASSERT(start >= 0 && length >= -1);
   i::Handle<i::String> str = Utils::OpenHandle(this);
-  StringTracker::RecordWrite(str);
+  isolate->string_tracker()->RecordWrite(str);
   if (hints & HINT_MANY_WRITES_EXPECTED) {
     // Flatten the string for efficiency.  This applies whether we are
     // using StringInputBuffer or Get(i) to access the characters.
@@ -3230,21 +3394,21 @@ v8::String::ExternalAsciiStringResource*
 
 
 double Number::Value() const {
-  if (IsDeadCheck("v8::Number::Value()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Number::Value()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   return obj->Number();
 }
 
 
 bool Boolean::Value() const {
-  if (IsDeadCheck("v8::Boolean::Value()")) return false;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Boolean::Value()")) return false;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   return obj->IsTrue();
 }
 
 
 int64_t Integer::Value() const {
-  if (IsDeadCheck("v8::Integer::Value()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Integer::Value()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     return i::Smi::cast(*obj)->value();
@@ -3255,7 +3419,7 @@ int64_t Integer::Value() const {
 
 
 int32_t Int32::Value() const {
-  if (IsDeadCheck("v8::Int32::Value()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Int32::Value()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     return i::Smi::cast(*obj)->value();
@@ -3266,7 +3430,7 @@ int32_t Int32::Value() const {
 
 
 uint32_t Uint32::Value() const {
-  if (IsDeadCheck("v8::Uint32::Value()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Uint32::Value()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (obj->IsSmi()) {
     return i::Smi::cast(*obj)->value();
@@ -3277,14 +3441,18 @@ uint32_t Uint32::Value() const {
 
 
 int v8::Object::InternalFieldCount() {
-  if (IsDeadCheck("v8::Object::InternalFieldCount()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Object::InternalFieldCount()")) {
+    return 0;
+  }
   i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
   return obj->GetInternalFieldCount();
 }
 
 
 Local<Value> v8::Object::CheckedGetInternalField(int index) {
-  if (IsDeadCheck("v8::Object::GetInternalField()")) return Local<Value>();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Object::GetInternalField()")) {
+    return Local<Value>();
+  }
   i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
   if (!ApiCheck(index < obj->GetInternalFieldCount(),
                 "v8::Object::GetInternalField()",
@@ -3302,7 +3470,9 @@ Local<Value> v8::Object::CheckedGetInternalField(int index) {
 
 
 void v8::Object::SetInternalField(int index, v8::Handle<Value> value) {
-  if (IsDeadCheck("v8::Object::SetInternalField()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Object::SetInternalField()")) {
+    return;
+  }
   i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
   if (!ApiCheck(index < obj->GetInternalFieldCount(),
                 "v8::Object::SetInternalField()",
@@ -3339,7 +3509,7 @@ void v8::Object::SetPointerInInternalField(int index, void* value) {
   } else {
     HandleScope scope;
     i::Handle<i::Proxy> proxy =
-        i::Factory::NewProxy(reinterpret_cast<i::Address>(value), i::TENURED);
+        FACTORY->NewProxy(reinterpret_cast<i::Address>(value), i::TENURED);
     if (!proxy.is_null())
         Utils::OpenHandle(this)->SetInternalField(index, *proxy);
   }
@@ -3349,15 +3519,23 @@ void v8::Object::SetPointerInInternalField(int index, void* value) {
 
 // --- E n v i r o n m e n t ---
 
+
 bool v8::V8::Initialize() {
-  if (i::V8::IsRunning()) return true;
-  HandleScope scope;
-  if (i::Snapshot::Initialize()) return true;
-  return i::V8::Initialize(NULL);
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  if (isolate != NULL && isolate->IsInitialized()) {
+    return true;
+  }
+  return InitializeHelper();
 }
 
 
 bool v8::V8::Dispose() {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (!ApiCheck(isolate != NULL && isolate->IsDefaultIsolate(),
+                "v8::V8::Dispose()",
+                "Use v8::Isolate::Dispose() for a non-default isolate.")) {
+    return false;
+  }
   i::V8::TearDown();
   return true;
 }
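// Illustrative sketch, not part of the patch: per the ApiCheck message above,
// v8::V8::Dispose() now only tears down the default isolate; a non-default
// isolate would be shut down through its own handle, e.g.
//
//   v8::Isolate* other = ...;  // hypothetical non-default isolate
//   other->Dispose();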
@@ -3370,38 +3548,36 @@ HeapStatistics::HeapStatistics(): total_heap_size_(0),
 
 
 void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
-  heap_statistics->set_total_heap_size(i::Heap::CommittedMemory());
+  heap_statistics->set_total_heap_size(HEAP->CommittedMemory());
   heap_statistics->set_total_heap_size_executable(
-      i::Heap::CommittedMemoryExecutable());
-  heap_statistics->set_used_heap_size(i::Heap::SizeOfObjects());
-  heap_statistics->set_heap_size_limit(i::Heap::MaxReserved());
+      HEAP->CommittedMemoryExecutable());
+  heap_statistics->set_used_heap_size(HEAP->SizeOfObjects());
+  heap_statistics->set_heap_size_limit(HEAP->MaxReserved());
 }
 
 
 bool v8::V8::IdleNotification() {
   // Returning true tells the caller that it need not
   // continue to call IdleNotification.
-  if (!i::V8::IsRunning()) return true;
+  if (!i::Isolate::Current()->IsInitialized()) return true;
   return i::V8::IdleNotification();
 }
 
 
 void v8::V8::LowMemoryNotification() {
-  if (!i::V8::IsRunning()) return;
-  i::Heap::CollectAllGarbage(true);
+  if (!i::Isolate::Current()->IsInitialized()) return;
+  HEAP->CollectAllGarbage(true);
 }
 
 
 int v8::V8::ContextDisposedNotification() {
-  if (!i::V8::IsRunning()) return 0;
-  return i::Heap::NotifyContextDisposed();
+  if (!i::Isolate::Current()->IsInitialized()) return 0;
+  return HEAP->NotifyContextDisposed();
 }
 
 
 const char* v8::V8::GetVersion() {
-  static v8::internal::EmbeddedVector<char, 128> buffer;
-  v8::internal::Version::GetString(buffer);
-  return buffer.start();
+  return i::Version::GetVersion();
 }
 
 
@@ -3421,9 +3597,10 @@ Persistent<Context> v8::Context::New(
     v8::ExtensionConfiguration* extensions,
     v8::Handle<ObjectTemplate> global_template,
     v8::Handle<Value> global_object) {
-  EnsureInitialized("v8::Context::New()");
-  LOG_API("Context::New");
-  ON_BAILOUT("v8::Context::New()", return Persistent<Context>());
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Context::New()");
+  LOG_API(isolate, "Context::New");
+  ON_BAILOUT(isolate, "v8::Context::New()", return Persistent<Context>());
 
   // Enter V8 via an ENTER_V8 scope.
   i::Handle<i::Context> env;
@@ -3457,12 +3634,12 @@ Persistent<Context> v8::Context::New(
         proxy_constructor->set_needs_access_check(
             global_constructor->needs_access_check());
         global_constructor->set_needs_access_check(false);
-        global_constructor->set_access_check_info(i::Heap::undefined_value());
+        global_constructor->set_access_check_info(HEAP->undefined_value());
       }
     }
 
     // Create the environment.
-    env = i::Bootstrapper::CreateEnvironment(
+    env = isolate->bootstrapper()->CreateEnvironment(
         Utils::OpenHandle(*global_object),
         proxy_template,
         extensions);
@@ -3476,7 +3653,7 @@ Persistent<Context> v8::Context::New(
       global_constructor->set_needs_access_check(
           proxy_constructor->needs_access_check());
     }
-    i::RuntimeProfiler::Reset();
+    i::Isolate::Current()->runtime_profiler()->Reset();
   }
   // Leave V8.
 
@@ -3487,7 +3664,9 @@ Persistent<Context> v8::Context::New(
 
 
 void v8::Context::SetSecurityToken(Handle<Value> token) {
-  if (IsDeadCheck("v8::Context::SetSecurityToken()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::SetSecurityToken()")) {
+    return;
+  }
   ENTER_V8;
   i::Handle<i::Context> env = Utils::OpenHandle(this);
   i::Handle<i::Object> token_handle = Utils::OpenHandle(*token);
@@ -3496,7 +3675,10 @@ void v8::Context::SetSecurityToken(Handle<Value> token) {
 
 
 void v8::Context::UseDefaultSecurityToken() {
-  if (IsDeadCheck("v8::Context::UseDefaultSecurityToken()")) return;
+  if (IsDeadCheck(i::Isolate::Current(),
+                  "v8::Context::UseDefaultSecurityToken()")) {
+    return;
+  }
   ENTER_V8;
   i::Handle<i::Context> env = Utils::OpenHandle(this);
   env->set_security_token(env->global());
@@ -3504,7 +3686,9 @@ void v8::Context::UseDefaultSecurityToken() {
 
 
 Handle<Value> v8::Context::GetSecurityToken() {
-  if (IsDeadCheck("v8::Context::GetSecurityToken()")) return Handle<Value>();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::GetSecurityToken()")) {
+    return Handle<Value>();
+  }
   i::Handle<i::Context> env = Utils::OpenHandle(this);
   i::Object* security_token = env->security_token();
   i::Handle<i::Object> token_handle(security_token);
@@ -3519,13 +3703,16 @@ bool Context::HasOutOfMemoryException() {
 
 
 bool Context::InContext() {
-  return i::Top::context() != NULL;
+  return i::Isolate::Current()->context() != NULL;
 }
 
 
 v8::Local<v8::Context> Context::GetEntered() {
-  if (IsDeadCheck("v8::Context::GetEntered()")) return Local<Context>();
-  i::Handle<i::Object> last = thread_local.LastEnteredContext();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::GetEntered()")) {
+    return Local<Context>();
+  }
+  i::Handle<i::Object> last =
+      i::Isolate::Current()->handle_scope_implementer()->LastEnteredContext();
   if (last.is_null()) return Local<Context>();
   i::Handle<i::Context> context = i::Handle<i::Context>::cast(last);
   return Utils::ToLocal(context);
@@ -3533,8 +3720,10 @@ v8::Local<v8::Context> Context::GetEntered() {
 
 
 v8::Local<v8::Context> Context::GetCurrent() {
-  if (IsDeadCheck("v8::Context::GetCurrent()")) return Local<Context>();
-  i::Handle<i::Object> current = i::Top::global_context();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::GetCurrent()")) {
+    return Local<Context>();
+  }
+  i::Handle<i::Object> current = i::Isolate::Current()->global_context();
   if (current.is_null()) return Local<Context>();
   i::Handle<i::Context> context = i::Handle<i::Context>::cast(current);
   return Utils::ToLocal(context);
@@ -3542,8 +3731,11 @@ v8::Local<v8::Context> Context::GetCurrent() {
 
 
 v8::Local<v8::Context> Context::GetCalling() {
-  if (IsDeadCheck("v8::Context::GetCalling()")) return Local<Context>();
-  i::Handle<i::Object> calling = i::Top::GetCallingGlobalContext();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::GetCalling()")) {
+    return Local<Context>();
+  }
+  i::Handle<i::Object> calling =
+      i::Isolate::Current()->GetCallingGlobalContext();
   if (calling.is_null()) return Local<Context>();
   i::Handle<i::Context> context = i::Handle<i::Context>::cast(calling);
   return Utils::ToLocal(context);
@@ -3551,7 +3743,9 @@ v8::Local<v8::Context> Context::GetCalling() {
 
 
 v8::Local<v8::Object> Context::Global() {
-  if (IsDeadCheck("v8::Context::Global()")) return Local<v8::Object>();
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::Global()")) {
+    return Local<v8::Object>();
+  }
   i::Object** ctx = reinterpret_cast<i::Object**>(this);
   i::Handle<i::Context> context =
       i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
@@ -3561,22 +3755,25 @@ v8::Local<v8::Object> Context::Global() {
 
 
 void Context::DetachGlobal() {
-  if (IsDeadCheck("v8::Context::DetachGlobal()")) return;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Context::DetachGlobal()")) return;
   ENTER_V8;
   i::Object** ctx = reinterpret_cast<i::Object**>(this);
   i::Handle<i::Context> context =
       i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
-  i::Bootstrapper::DetachGlobal(context);
+  i::Isolate::Current()->bootstrapper()->DetachGlobal(context);
 }
 
 
 void Context::ReattachGlobal(Handle<Object> global_object) {
-  if (IsDeadCheck("v8::Context::ReattachGlobal()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Context::ReattachGlobal()")) return;
   ENTER_V8;
   i::Object** ctx = reinterpret_cast<i::Object**>(this);
   i::Handle<i::Context> context =
       i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
-  i::Bootstrapper::ReattachGlobal(context, Utils::OpenHandle(*global_object));
+  isolate->bootstrapper()->ReattachGlobal(
+      context,
+      Utils::OpenHandle(*global_object));
 }
 
 
@@ -3586,8 +3783,10 @@ void V8::SetWrapperClassId(i::Object** global_handle, uint16_t class_id) {
 
 
 Local<v8::Object> ObjectTemplate::NewInstance() {
-  ON_BAILOUT("v8::ObjectTemplate::NewInstance()", return Local<v8::Object>());
-  LOG_API("ObjectTemplate::NewInstance");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::ObjectTemplate::NewInstance()",
+             return Local<v8::Object>());
+  LOG_API(isolate, "ObjectTemplate::NewInstance");
   ENTER_V8;
   EXCEPTION_PREAMBLE();
   i::Handle<i::Object> obj =
@@ -3599,9 +3798,10 @@ Local<v8::Object> ObjectTemplate::NewInstance() {
 
 
 Local<v8::Function> FunctionTemplate::GetFunction() {
-  ON_BAILOUT("v8::FunctionTemplate::GetFunction()",
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::FunctionTemplate::GetFunction()",
              return Local<v8::Function>());
-  LOG_API("FunctionTemplate::GetFunction");
+  LOG_API(isolate, "FunctionTemplate::GetFunction");
   ENTER_V8;
   EXCEPTION_PREAMBLE();
   i::Handle<i::Object> obj =
@@ -3613,14 +3813,15 @@ Local<v8::Function> FunctionTemplate::GetFunction() {
 
 
 bool FunctionTemplate::HasInstance(v8::Handle<v8::Value> value) {
-  ON_BAILOUT("v8::FunctionTemplate::HasInstanceOf()", return false);
+  ON_BAILOUT(i::Isolate::Current(), "v8::FunctionTemplate::HasInstanceOf()",
+             return false);
   i::Object* obj = *Utils::OpenHandle(*value);
   return obj->IsInstanceOf(*Utils::OpenHandle(this));
 }
 
 
 static Local<External> ExternalNewImpl(void* data) {
-  return Utils::ToLocal(i::Factory::NewProxy(static_cast<i::Address>(data)));
+  return Utils::ToLocal(FACTORY->NewProxy(static_cast<i::Address>(data)));
 }
 
 static void* ExternalValueImpl(i::Handle<i::Object> obj) {
@@ -3629,9 +3830,10 @@ static void* ExternalValueImpl(i::Handle<i::Object> obj) {
 
 
 Local<Value> v8::External::Wrap(void* data) {
+  i::Isolate* isolate = i::Isolate::Current();
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
-  LOG_API("External::Wrap");
-  EnsureInitialized("v8::External::Wrap()");
+  LOG_API(isolate, "External::Wrap");
+  EnsureInitializedForIsolate(isolate, "v8::External::Wrap()");
   ENTER_V8;
 
   v8::Local<v8::Value> result = CanBeEncodedAsSmi(data)
@@ -3657,7 +3859,7 @@ void* v8::Object::SlowGetPointerFromInternalField(int index) {
 
 
 void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) {
-  if (IsDeadCheck("v8::External::Unwrap()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::External::Unwrap()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(*wrapper);
   void* result;
   if (obj->IsSmi()) {
@@ -3674,58 +3876,64 @@ void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) {
 
 Local<External> v8::External::New(void* data) {
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
-  LOG_API("External::New");
-  EnsureInitialized("v8::External::New()");
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "External::New");
+  EnsureInitializedForIsolate(isolate, "v8::External::New()");
   ENTER_V8;
   return ExternalNewImpl(data);
 }
 
 
 void* External::Value() const {
-  if (IsDeadCheck("v8::External::Value()")) return 0;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::External::Value()")) return 0;
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   return ExternalValueImpl(obj);
 }
 
 
 Local<String> v8::String::Empty() {
-  EnsureInitialized("v8::String::Empty()");
-  LOG_API("String::Empty()");
-  return Utils::ToLocal(i::Factory::empty_symbol());
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::Empty()");
+  LOG_API(isolate, "String::Empty()");
+  return Utils::ToLocal(isolate->factory()->empty_symbol());
 }
 
 
 Local<String> v8::String::New(const char* data, int length) {
-  EnsureInitialized("v8::String::New()");
-  LOG_API("String::New(char)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::New()");
+  LOG_API(isolate, "String::New(char)");
   if (length == 0) return Empty();
   ENTER_V8;
   if (length == -1) length = i::StrLength(data);
   i::Handle<i::String> result =
-      i::Factory::NewStringFromUtf8(i::Vector<const char>(data, length));
+      isolate->factory()->NewStringFromUtf8(
+          i::Vector<const char>(data, length));
   return Utils::ToLocal(result);
 }
 
 
 Local<String> v8::String::Concat(Handle<String> left, Handle<String> right) {
-  EnsureInitialized("v8::String::New()");
-  LOG_API("String::New(char)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::New()");
+  LOG_API(isolate, "String::New(char)");
   ENTER_V8;
   i::Handle<i::String> left_string = Utils::OpenHandle(*left);
   i::Handle<i::String> right_string = Utils::OpenHandle(*right);
-  i::Handle<i::String> result = i::Factory::NewConsString(left_string,
+  i::Handle<i::String> result = FACTORY->NewConsString(left_string,
                                                           right_string);
   return Utils::ToLocal(result);
 }
 
 
 Local<String> v8::String::NewUndetectable(const char* data, int length) {
-  EnsureInitialized("v8::String::NewUndetectable()");
-  LOG_API("String::NewUndetectable(char)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::NewUndetectable()");
+  LOG_API(isolate, "String::NewUndetectable(char)");
   ENTER_V8;
   if (length == -1) length = i::StrLength(data);
   i::Handle<i::String> result =
-      i::Factory::NewStringFromUtf8(i::Vector<const char>(data, length));
+      FACTORY->NewStringFromUtf8(i::Vector<const char>(data, length));
   result->MarkAsUndetectable();
   return Utils::ToLocal(result);
 }
@@ -3739,65 +3947,73 @@ static int TwoByteStringLength(const uint16_t* data) {
 
 
 Local<String> v8::String::New(const uint16_t* data, int length) {
-  EnsureInitialized("v8::String::New()");
-  LOG_API("String::New(uint16_)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::New()");
+  LOG_API(isolate, "String::New(uint16_)");
   if (length == 0) return Empty();
   ENTER_V8;
   if (length == -1) length = TwoByteStringLength(data);
   i::Handle<i::String> result =
-      i::Factory::NewStringFromTwoByte(i::Vector<const uint16_t>(data, length));
+      isolate->factory()->NewStringFromTwoByte(
+          i::Vector<const uint16_t>(data, length));
   return Utils::ToLocal(result);
 }
 
 
 Local<String> v8::String::NewUndetectable(const uint16_t* data, int length) {
-  EnsureInitialized("v8::String::NewUndetectable()");
-  LOG_API("String::NewUndetectable(uint16_)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::NewUndetectable()");
+  LOG_API(isolate, "String::NewUndetectable(uint16_)");
   ENTER_V8;
   if (length == -1) length = TwoByteStringLength(data);
   i::Handle<i::String> result =
-      i::Factory::NewStringFromTwoByte(i::Vector<const uint16_t>(data, length));
+      isolate->factory()->NewStringFromTwoByte(
+          i::Vector<const uint16_t>(data, length));
   result->MarkAsUndetectable();
   return Utils::ToLocal(result);
 }
 
 
-i::Handle<i::String> NewExternalStringHandle(
+i::Handle<i::String> NewExternalStringHandle(i::Isolate* isolate,
       v8::String::ExternalStringResource* resource) {
   i::Handle<i::String> result =
-      i::Factory::NewExternalStringFromTwoByte(resource);
+      isolate->factory()->NewExternalStringFromTwoByte(resource);
   return result;
 }
 
 
-i::Handle<i::String> NewExternalAsciiStringHandle(
+i::Handle<i::String> NewExternalAsciiStringHandle(i::Isolate* isolate,
       v8::String::ExternalAsciiStringResource* resource) {
   i::Handle<i::String> result =
-      i::Factory::NewExternalStringFromAscii(resource);
+      isolate->factory()->NewExternalStringFromAscii(resource);
   return result;
 }
 
 
 Local<String> v8::String::NewExternal(
       v8::String::ExternalStringResource* resource) {
-  EnsureInitialized("v8::String::NewExternal()");
-  LOG_API("String::NewExternal");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::NewExternal()");
+  LOG_API(isolate, "String::NewExternal");
   ENTER_V8;
-  i::Handle<i::String> result = NewExternalStringHandle(resource);
-  i::ExternalStringTable::AddString(*result);
+  i::Handle<i::String> result = NewExternalStringHandle(isolate, resource);
+  isolate->heap()->external_string_table()->AddString(*result);
   return Utils::ToLocal(result);
 }
 
 
 bool v8::String::MakeExternal(v8::String::ExternalStringResource* resource) {
-  if (IsDeadCheck("v8::String::MakeExternal()")) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::MakeExternal()")) return false;
   if (this->IsExternal()) return false;  // Already an external string.
   ENTER_V8;
   i::Handle<i::String> obj = Utils::OpenHandle(this);
-  if (StringTracker::IsFreshUnusedString(obj)) return false;
+  if (isolate->string_tracker()->IsFreshUnusedString(obj)) {
+    return false;
+  }
   bool result = obj->MakeExternal(resource);
   if (result && !obj->IsSymbol()) {
-    i::ExternalStringTable::AddString(*obj);
+    isolate->heap()->external_string_table()->AddString(*obj);
   }
   return result;
 }
@@ -3805,34 +4021,41 @@ bool v8::String::MakeExternal(v8::String::ExternalStringResource* resource) {
 
 Local<String> v8::String::NewExternal(
       v8::String::ExternalAsciiStringResource* resource) {
-  EnsureInitialized("v8::String::NewExternal()");
-  LOG_API("String::NewExternal");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::NewExternal()");
+  LOG_API(isolate, "String::NewExternal");
   ENTER_V8;
-  i::Handle<i::String> result = NewExternalAsciiStringHandle(resource);
-  i::ExternalStringTable::AddString(*result);
+  i::Handle<i::String> result = NewExternalAsciiStringHandle(isolate, resource);
+  isolate->heap()->external_string_table()->AddString(*result);
   return Utils::ToLocal(result);
 }
 
 
 bool v8::String::MakeExternal(
     v8::String::ExternalAsciiStringResource* resource) {
-  if (IsDeadCheck("v8::String::MakeExternal()")) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::MakeExternal()")) return false;
   if (this->IsExternal()) return false;  // Already an external string.
   ENTER_V8;
   i::Handle<i::String> obj = Utils::OpenHandle(this);
-  if (StringTracker::IsFreshUnusedString(obj)) return false;
+  if (isolate->string_tracker()->IsFreshUnusedString(obj)) {
+    return false;
+  }
   bool result = obj->MakeExternal(resource);
   if (result && !obj->IsSymbol()) {
-    i::ExternalStringTable::AddString(*obj);
+    isolate->heap()->external_string_table()->AddString(*obj);
   }
   return result;
 }
 
 
 bool v8::String::CanMakeExternal() {
-  if (IsDeadCheck("v8::String::CanMakeExternal()")) return false;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::String::CanMakeExternal()")) return false;
   i::Handle<i::String> obj = Utils::OpenHandle(this);
-  if (StringTracker::IsFreshUnusedString(obj)) return false;
+  if (isolate->string_tracker()->IsFreshUnusedString(obj)) {
+    return false;
+  }
   int size = obj->Size();  // Byte size of the original string.
   if (size < i::ExternalString::kSize)
     return false;
@@ -3842,18 +4065,20 @@ bool v8::String::CanMakeExternal() {
 
 
 Local<v8::Object> v8::Object::New() {
-  EnsureInitialized("v8::Object::New()");
-  LOG_API("Object::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Object::New()");
+  LOG_API(isolate, "Object::New");
   ENTER_V8;
   i::Handle<i::JSObject> obj =
-      i::Factory::NewJSObject(i::Top::object_function());
+      isolate->factory()->NewJSObject(i::Isolate::Current()->object_function());
   return Utils::ToLocal(obj);
 }
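// Illustrative note, not part of the patch: "isolate" is already in scope in
// Object::New() above, so the inner i::Isolate::Current() lookup is redundant;
// an equivalent isolate-local form would be
//
//   i::Handle<i::JSObject> obj =
//       isolate->factory()->NewJSObject(isolate->object_function());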
 
 
 Local<v8::Value> v8::Date::New(double time) {
-  EnsureInitialized("v8::Date::New()");
-  LOG_API("Date::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Date::New()");
+  LOG_API(isolate, "Date::New");
   if (isnan(time)) {
     // Introduce only canonical NaN value into the VM, to avoid signaling NaNs.
     time = i::OS::nan_value();
@@ -3868,8 +4093,9 @@ Local<v8::Value> v8::Date::New(double time) {
 
 
 double v8::Date::NumberValue() const {
-  if (IsDeadCheck("v8::Date::NumberValue()")) return 0;
-  LOG_API("Date::NumberValue");
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Date::NumberValue()")) return 0;
+  LOG_API(isolate, "Date::NumberValue");
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj);
   return jsvalue->value()->Number();
@@ -3877,16 +4103,18 @@ double v8::Date::NumberValue() const {
 
 
 void v8::Date::DateTimeConfigurationChangeNotification() {
-  ON_BAILOUT("v8::Date::DateTimeConfigurationChangeNotification()", return);
-  LOG_API("Date::DateTimeConfigurationChangeNotification");
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Date::DateTimeConfigurationChangeNotification()",
+             return);
+  LOG_API(isolate, "Date::DateTimeConfigurationChangeNotification");
   ENTER_V8;
 
-  HandleScope scope;
-
+  i::HandleScope scope(isolate);
   // Get the function ResetDateCache (defined in date-delay.js).
   i::Handle<i::String> func_name_str =
-      i::Factory::LookupAsciiSymbol("ResetDateCache");
-  i::MaybeObject* result = i::Top::builtins()->GetProperty(*func_name_str);
+      isolate->factory()->LookupAsciiSymbol("ResetDateCache");
+  i::MaybeObject* result =
+      isolate->js_builtins_object()->GetProperty(*func_name_str);
   i::Object* object_func;
   if (!result->ToObject(&object_func)) {
     return;
@@ -3899,7 +4127,7 @@ void v8::Date::DateTimeConfigurationChangeNotification() {
     // Call ResetDateCache() but expect no exceptions:
     bool caught_exception = false;
     i::Handle<i::Object> result =
-        i::Execution::TryCall(func, i::Top::builtins(), 0, NULL,
+        i::Execution::TryCall(func, isolate->js_builtins_object(), 0, NULL,
         &caught_exception);
   }
 }
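
Sketch of when an embedder calls the notification above (e.g. from its own "system timezone changed" handler); the handler name is illustrative:

void OnHostTimeZoneChanged() {
  // Drops the cached timezone/DST data kept by date-delay.js so that
  // subsequently created Date objects reflect the new configuration.
  v8::Date::DateTimeConfigurationChangeNotification();
}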
@@ -3912,15 +4140,16 @@ static i::Handle<i::String> RegExpFlagsToString(RegExp::Flags flags) {
   if ((flags & RegExp::kMultiline) != 0) flags_buf[num_flags++] = 'm';
   if ((flags & RegExp::kIgnoreCase) != 0) flags_buf[num_flags++] = 'i';
   ASSERT(num_flags <= static_cast<int>(ARRAY_SIZE(flags_buf)));
-  return i::Factory::LookupSymbol(
+  return FACTORY->LookupSymbol(
       i::Vector<const char>(flags_buf, num_flags));
 }
 
 
 Local<v8::RegExp> v8::RegExp::New(Handle<String> pattern,
                                   Flags flags) {
-  EnsureInitialized("v8::RegExp::New()");
-  LOG_API("RegExp::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::RegExp::New()");
+  LOG_API(isolate, "RegExp::New");
   ENTER_V8;
   EXCEPTION_PREAMBLE();
   i::Handle<i::JSRegExp> obj = i::Execution::NewJSRegExp(
@@ -3933,7 +4162,10 @@ Local<v8::RegExp> v8::RegExp::New(Handle<String> pattern,
 
 
 Local<v8::String> v8::RegExp::GetSource() const {
-  if (IsDeadCheck("v8::RegExp::GetSource()")) return Local<v8::String>();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::RegExp::GetSource()")) {
+    return Local<v8::String>();
+  }
   i::Handle<i::JSRegExp> obj = Utils::OpenHandle(this);
   return Utils::ToLocal(i::Handle<i::String>(obj->Pattern()));
 }
@@ -3950,25 +4182,29 @@ REGEXP_FLAG_ASSERT_EQ(kMultiline, MULTILINE);
 #undef REGEXP_FLAG_ASSERT_EQ
 
 v8::RegExp::Flags v8::RegExp::GetFlags() const {
-  if (IsDeadCheck("v8::RegExp::GetFlags()")) return v8::RegExp::kNone;
+  if (IsDeadCheck(i::Isolate::Current(), "v8::RegExp::GetFlags()")) {
+    return v8::RegExp::kNone;
+  }
   i::Handle<i::JSRegExp> obj = Utils::OpenHandle(this);
   return static_cast<RegExp::Flags>(obj->GetFlags().value());
 }
 
 
 Local<v8::Array> v8::Array::New(int length) {
-  EnsureInitialized("v8::Array::New()");
-  LOG_API("Array::New");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Array::New()");
+  LOG_API(isolate, "Array::New");
   ENTER_V8;
   int real_length = length > 0 ? length : 0;
-  i::Handle<i::JSArray> obj = i::Factory::NewJSArray(real_length);
-  obj->set_length(*i::Factory::NewNumberFromInt(real_length));
+  i::Handle<i::JSArray> obj = isolate->factory()->NewJSArray(real_length);
+  obj->set_length(*isolate->factory()->NewNumberFromInt(real_length));
   return Utils::ToLocal(obj);
 }
 
 
 uint32_t v8::Array::Length() const {
-  if (IsDeadCheck("v8::Array::Length()")) return 0;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::Array::Length()")) return 0;
   i::Handle<i::JSArray> obj = Utils::OpenHandle(this);
   i::Object* length = obj->length();
   if (length->IsSmi()) {
@@ -3980,7 +4216,8 @@ uint32_t v8::Array::Length() const {
 
 
 Local<Object> Array::CloneElementAt(uint32_t index) {
-  ON_BAILOUT("v8::Array::CloneElementAt()", return Local<Object>());
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Array::CloneElementAt()", return Local<Object>());
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   if (!self->HasFastElements()) {
     return Local<Object>();
@@ -4001,35 +4238,39 @@ Local<Object> Array::CloneElementAt(uint32_t index) {
 
 
 Local<String> v8::String::NewSymbol(const char* data, int length) {
-  EnsureInitialized("v8::String::NewSymbol()");
-  LOG_API("String::NewSymbol(char)");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::String::NewSymbol()");
+  LOG_API(isolate, "String::NewSymbol(char)");
   ENTER_V8;
   if (length == -1) length = i::StrLength(data);
   i::Handle<i::String> result =
-      i::Factory::LookupSymbol(i::Vector<const char>(data, length));
+      FACTORY->LookupSymbol(i::Vector<const char>(data, length));
   return Utils::ToLocal(result);
 }
 
 
 Local<Number> v8::Number::New(double value) {
-  EnsureInitialized("v8::Number::New()");
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Number::New()");
   if (isnan(value)) {
     // Introduce only canonical NaN value into the VM, to avoid signaling NaNs.
     value = i::OS::nan_value();
   }
   ENTER_V8;
-  i::Handle<i::Object> result = i::Factory::NewNumber(value);
+  i::Handle<i::Object> result = isolate->factory()->NewNumber(value);
   return Utils::NumberToLocal(result);
 }
 
 
 Local<Integer> v8::Integer::New(int32_t value) {
-  EnsureInitialized("v8::Integer::New()");
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  EnsureInitializedForIsolate(isolate, "v8::Integer::New()");
   if (i::Smi::IsValid(value)) {
-    return Utils::IntegerToLocal(i::Handle<i::Object>(i::Smi::FromInt(value)));
+    return Utils::IntegerToLocal(i::Handle<i::Object>(i::Smi::FromInt(value),
+                                                      isolate));
   }
   ENTER_V8;
-  i::Handle<i::Object> result = i::Factory::NewNumber(value);
+  i::Handle<i::Object> result = isolate->factory()->NewNumber(value);
   return Utils::IntegerToLocal(result);
 }
 
@@ -4040,26 +4281,28 @@ Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
     return Integer::New(static_cast<int32_t>(value));
   }
   ENTER_V8;
-  i::Handle<i::Object> result = i::Factory::NewNumber(value);
+  i::Handle<i::Object> result = FACTORY->NewNumber(value);
   return Utils::IntegerToLocal(result);
 }
 
 
 void V8::IgnoreOutOfMemoryException() {
-  thread_local.set_ignore_out_of_memory(true);
+  EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory(
+      true);
 }
 
 
 bool V8::AddMessageListener(MessageCallback that, Handle<Value> data) {
-  EnsureInitialized("v8::V8::AddMessageListener()");
-  ON_BAILOUT("v8::V8::AddMessageListener()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::V8::AddMessageListener()");
+  ON_BAILOUT(isolate, "v8::V8::AddMessageListener()", return false);
   ENTER_V8;
-  HandleScope scope;
-  NeanderArray listeners(i::Factory::message_listeners());
+  i::HandleScope scope(isolate);
+  NeanderArray listeners(isolate->factory()->message_listeners());
   NeanderObject obj(2);
-  obj.set(0, *i::Factory::NewProxy(FUNCTION_ADDR(that)));
+  obj.set(0, *isolate->factory()->NewProxy(FUNCTION_ADDR(that)));
   obj.set(1, data.IsEmpty() ?
-             i::Heap::undefined_value() :
+             HEAP->undefined_value() :
              *Utils::OpenHandle(*data));
   listeners.add(obj.value());
   return true;
@@ -4067,18 +4310,19 @@ bool V8::AddMessageListener(MessageCallback that, Handle<Value> data) {
 
 
 void V8::RemoveMessageListeners(MessageCallback that) {
-  EnsureInitialized("v8::V8::RemoveMessageListener()");
-  ON_BAILOUT("v8::V8::RemoveMessageListeners()", return);
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::V8::RemoveMessageListeners()");
+  ON_BAILOUT(isolate, "v8::V8::RemoveMessageListeners()", return);
   ENTER_V8;
-  HandleScope scope;
-  NeanderArray listeners(i::Factory::message_listeners());
+  i::HandleScope scope(isolate);
+  NeanderArray listeners(isolate->factory()->message_listeners());
   for (int i = 0; i < listeners.length(); i++) {
     if (listeners.get(i)->IsUndefined()) continue;  // skip deleted ones
 
     NeanderObject listener(i::JSObject::cast(listeners.get(i)));
     i::Handle<i::Proxy> callback_obj(i::Proxy::cast(listener.get(0)));
     if (callback_obj->proxy() == FUNCTION_ADDR(that)) {
-      listeners.set(i, i::Heap::undefined_value());
+      listeners.set(i, HEAP->undefined_value());
     }
   }
 }
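
A brief usage sketch for the listener registration above, assuming the MessageCallback signature from include/v8.h (Handle<Message>, Handle<Value>); the callback body and its use of stderr are illustrative only:

#include <cstdio>

static void OnMessage(v8::Handle<v8::Message> message,
                      v8::Handle<v8::Value> data) {
  v8::String::Utf8Value text(message->Get());
  fprintf(stderr, "uncaught exception: %s\n", *text ? *text : "<unprintable>");
}

void InstallMessageListener() {
  v8::V8::AddMessageListener(OnMessage);   // data defaults to an empty handle.
  // ... compile and run scripts ...
  v8::V8::RemoveMessageListeners(OnMessage);
}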
@@ -4088,7 +4332,7 @@ void V8::SetCaptureStackTraceForUncaughtExceptions(
       bool capture,
       int frame_limit,
       StackTrace::StackTraceOptions options) {
-  i::Top::SetCaptureStackTraceForUncaughtExceptions(
+  i::Isolate::Current()->SetCaptureStackTraceForUncaughtExceptions(
       capture,
       frame_limit,
       options);
@@ -4096,39 +4340,47 @@ void V8::SetCaptureStackTraceForUncaughtExceptions(
 
 
 void V8::SetCounterFunction(CounterLookupCallback callback) {
-  if (IsDeadCheck("v8::V8::SetCounterFunction()")) return;
-  i::StatsTable::SetCounterFunction(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetCounterFunction()")) return;
+  isolate->stats_table()->SetCounterFunction(callback);
 }
 
 void V8::SetCreateHistogramFunction(CreateHistogramCallback callback) {
-  if (IsDeadCheck("v8::V8::SetCreateHistogramFunction()")) return;
-  i::StatsTable::SetCreateHistogramFunction(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetCreateHistogramFunction()")) return;
+  isolate->stats_table()->SetCreateHistogramFunction(callback);
 }
 
 void V8::SetAddHistogramSampleFunction(AddHistogramSampleCallback callback) {
-  if (IsDeadCheck("v8::V8::SetAddHistogramSampleFunction()")) return;
-  i::StatsTable::SetAddHistogramSampleFunction(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetAddHistogramSampleFunction()")) return;
+  isolate->stats_table()->SetAddHistogramSampleFunction(callback);
 }
 
 void V8::EnableSlidingStateWindow() {
-  if (IsDeadCheck("v8::V8::EnableSlidingStateWindow()")) return;
-  i::Logger::EnableSlidingStateWindow();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::EnableSlidingStateWindow()")) return;
+  isolate->logger()->EnableSlidingStateWindow();
 }
 
 
 void V8::SetFailedAccessCheckCallbackFunction(
       FailedAccessCheckCallback callback) {
-  if (IsDeadCheck("v8::V8::SetFailedAccessCheckCallbackFunction()")) return;
-  i::Top::SetFailedAccessCheckCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetFailedAccessCheckCallbackFunction()")) {
+    return;
+  }
+  isolate->SetFailedAccessCheckCallback(callback);
 }
 
-
 void V8::AddObjectGroup(Persistent<Value>* objects,
                         size_t length,
                         RetainedObjectInfo* info) {
-  if (IsDeadCheck("v8::V8::AddObjectGroup()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AddObjectGroup()")) return;
   STATIC_ASSERT(sizeof(Persistent<Value>) == sizeof(i::Object**));
-  i::GlobalHandles::AddObjectGroup(
+  isolate->global_handles()->AddObjectGroup(
       reinterpret_cast<i::Object***>(objects), length, info);
 }
 
@@ -4136,69 +4388,82 @@ void V8::AddObjectGroup(Persistent<Value>* objects,
 void V8::AddImplicitReferences(Persistent<Object> parent,
                                Persistent<Value>* children,
                                size_t length) {
-  if (IsDeadCheck("v8::V8::AddImplicitReferences()")) return;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AddImplicitReferences()")) return;
   STATIC_ASSERT(sizeof(Persistent<Value>) == sizeof(i::Object**));
-  i::GlobalHandles::AddImplicitReferences(
+  isolate->global_handles()->AddImplicitReferences(
       *Utils::OpenHandle(*parent),
       reinterpret_cast<i::Object***>(children), length);
 }
 
 
 int V8::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
-  if (IsDeadCheck("v8::V8::AdjustAmountOfExternalAllocatedMemory()")) return 0;
-  return i::Heap::AdjustAmountOfExternalAllocatedMemory(change_in_bytes);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) {
+    return 0;
+  }
+  return isolate->heap()->AdjustAmountOfExternalAllocatedMemory(
+      change_in_bytes);
 }
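
Sketch of the usual calling convention for the accounting hook above; the backing-store helpers are illustrative:

#include <cstdlib>

void* AllocateBackingStore(size_t byte_size) {
  void* memory = malloc(byte_size);
  if (memory != NULL) {
    // Tell the GC that a JS-reachable object now pins external memory.
    v8::V8::AdjustAmountOfExternalAllocatedMemory(static_cast<int>(byte_size));
  }
  return memory;
}

void FreeBackingStore(void* memory, size_t byte_size) {
  free(memory);
  // Mirror the allocation with a negative adjustment when the memory is released.
  v8::V8::AdjustAmountOfExternalAllocatedMemory(-static_cast<int>(byte_size));
}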
 
 
 void V8::SetGlobalGCPrologueCallback(GCCallback callback) {
-  if (IsDeadCheck("v8::V8::SetGlobalGCPrologueCallback()")) return;
-  i::Heap::SetGlobalGCPrologueCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetGlobalGCPrologueCallback()")) return;
+  isolate->heap()->SetGlobalGCPrologueCallback(callback);
 }
 
 
 void V8::SetGlobalGCEpilogueCallback(GCCallback callback) {
-  if (IsDeadCheck("v8::V8::SetGlobalGCEpilogueCallback()")) return;
-  i::Heap::SetGlobalGCEpilogueCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::SetGlobalGCEpilogueCallback()")) return;
+  isolate->heap()->SetGlobalGCEpilogueCallback(callback);
 }
 
 
 void V8::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
-  if (IsDeadCheck("v8::V8::AddGCPrologueCallback()")) return;
-  i::Heap::AddGCPrologueCallback(callback, gc_type);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AddGCPrologueCallback()")) return;
+  isolate->heap()->AddGCPrologueCallback(callback, gc_type);
 }
 
 
 void V8::RemoveGCPrologueCallback(GCPrologueCallback callback) {
-  if (IsDeadCheck("v8::V8::RemoveGCPrologueCallback()")) return;
-  i::Heap::RemoveGCPrologueCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::RemoveGCPrologueCallback()")) return;
+  isolate->heap()->RemoveGCPrologueCallback(callback);
 }
 
 
 void V8::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
-  if (IsDeadCheck("v8::V8::AddGCEpilogueCallback()")) return;
-  i::Heap::AddGCEpilogueCallback(callback, gc_type);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AddGCEpilogueCallback()")) return;
+  isolate->heap()->AddGCEpilogueCallback(callback, gc_type);
 }
 
 
 void V8::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
-  if (IsDeadCheck("v8::V8::RemoveGCEpilogueCallback()")) return;
-  i::Heap::RemoveGCEpilogueCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::RemoveGCEpilogueCallback()")) return;
+  isolate->heap()->RemoveGCEpilogueCallback(callback);
 }
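
A small sketch of the prologue/epilogue hooks registered above, assuming the GCPrologueCallback/GCEpilogueCallback signatures (GCType, GCCallbackFlags) from include/v8.h; the callback bodies are illustrative:

static void OnGcPrologue(v8::GCType type, v8::GCCallbackFlags flags) {
  // Runs just before V8 starts a collection of the requested type.
}

static void OnGcEpilogue(v8::GCType type, v8::GCCallbackFlags flags) {
  // Runs just after the collection finishes.
}

void InstallGcHooks() {
  v8::V8::AddGCPrologueCallback(OnGcPrologue, v8::kGCTypeAll);
  v8::V8::AddGCEpilogueCallback(OnGcEpilogue, v8::kGCTypeScavenge);
  // ...
  v8::V8::RemoveGCPrologueCallback(OnGcPrologue);
  v8::V8::RemoveGCEpilogueCallback(OnGcEpilogue);
}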
 
 
 void V8::AddMemoryAllocationCallback(MemoryAllocationCallback callback,
                                      ObjectSpace space,
                                      AllocationAction action) {
-  if (IsDeadCheck("v8::V8::AddMemoryAllocationCallback()")) return;
-  i::MemoryAllocator::AddMemoryAllocationCallback(callback,
-                                                  space,
-                                                  action);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::AddMemoryAllocationCallback()")) return;
+  isolate->memory_allocator()->AddMemoryAllocationCallback(
+      callback, space, action);
 }
 
 
 void V8::RemoveMemoryAllocationCallback(MemoryAllocationCallback callback) {
-  if (IsDeadCheck("v8::V8::RemoveMemoryAllocationCallback()")) return;
-  i::MemoryAllocator::RemoveMemoryAllocationCallback(callback);
+  i::Isolate* isolate = i::Isolate::Current();
+  if (IsDeadCheck(isolate, "v8::V8::RemoveMemoryAllocationCallback()")) return;
+  isolate->memory_allocator()->RemoveMemoryAllocationCallback(callback);
 }
 
 
@@ -4218,7 +4483,7 @@ void V8::ResumeProfiler() {
 
 bool V8::IsProfilerPaused() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  return i::Logger::GetActiveProfilerModules() & PROFILER_MODULE_CPU;
+  return LOGGER->GetActiveProfilerModules() & PROFILER_MODULE_CPU;
 #else
   return true;
 #endif
@@ -4233,15 +4498,15 @@ void V8::ResumeProfilerEx(int flags, int tag) {
     // snapshot.
 
     // Make a GC prior to taking a snapshot.
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     // Reset snapshot flag and CPU module flags.
     flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
-    const int current_flags = i::Logger::GetActiveProfilerModules();
-    i::Logger::ResumeProfiler(flags, tag);
-    i::Heap::CollectAllGarbage(false);
-    i::Logger::PauseProfiler(~current_flags & flags, tag);
+    const int current_flags = LOGGER->GetActiveProfilerModules();
+    LOGGER->ResumeProfiler(flags, tag);
+    HEAP->CollectAllGarbage(false);
+    LOGGER->PauseProfiler(~current_flags & flags, tag);
   } else {
-    i::Logger::ResumeProfiler(flags, tag);
+    LOGGER->ResumeProfiler(flags, tag);
   }
 #endif
 }
@@ -4249,14 +4514,14 @@ void V8::ResumeProfilerEx(int flags, int tag) {
 
 void V8::PauseProfilerEx(int flags, int tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  i::Logger::PauseProfiler(flags, tag);
+  LOGGER->PauseProfiler(flags, tag);
 #endif
 }
 
 
 int V8::GetActiveProfilerModules() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  return i::Logger::GetActiveProfilerModules();
+  return LOGGER->GetActiveProfilerModules();
 #else
   return PROFILER_MODULE_NONE;
 #endif
@@ -4266,7 +4531,7 @@ int V8::GetActiveProfilerModules() {
 int V8::GetLogLines(int from_pos, char* dest_buf, int max_size) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   ASSERT(max_size >= kMinimumSizeForLogLinesBuffer);
-  return i::Logger::GetLogLines(from_pos, dest_buf, max_size);
+  return LOGGER->GetLogLines(from_pos, dest_buf, max_size);
 #endif
   return 0;
 }
@@ -4275,39 +4540,80 @@ int V8::GetLogLines(int from_pos, char* dest_buf, int max_size) {
 int V8::GetCurrentThreadId() {
   API_ENTRY_CHECK("V8::GetCurrentThreadId()");
   EnsureInitialized("V8::GetCurrentThreadId()");
-  return i::Top::thread_id();
+  return i::Isolate::Current()->thread_id();
 }
 
 
 void V8::TerminateExecution(int thread_id) {
-  if (!i::V8::IsRunning()) return;
+  if (!i::Isolate::Current()->IsInitialized()) return;
   API_ENTRY_CHECK("V8::GetCurrentThreadId()");
+  i::Isolate* isolate = i::Isolate::Current();
   // If the thread_id identifies the current thread just terminate
   // execution right away.  Otherwise, ask the thread manager to
   // terminate the thread with the given id if any.
-  if (thread_id == i::Top::thread_id()) {
-    i::StackGuard::TerminateExecution();
+  if (thread_id == isolate->thread_id()) {
+    isolate->stack_guard()->TerminateExecution();
   } else {
-    i::ThreadManager::TerminateExecution(thread_id);
+    isolate->thread_manager()->TerminateExecution(thread_id);
   }
 }
 
 
-void V8::TerminateExecution() {
-  if (!i::V8::IsRunning()) return;
-  i::StackGuard::TerminateExecution();
+void V8::TerminateExecution(Isolate* isolate) {
+  // If no isolate is supplied, use the default isolate.
+  if (isolate != NULL) {
+    reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->TerminateExecution();
+  } else {
+    i::Isolate::GetDefaultIsolateStackGuard()->TerminateExecution();
+  }
 }
 
 
 bool V8::IsExecutionTerminating() {
-  if (!i::V8::IsRunning()) return false;
-  if (i::Top::has_scheduled_exception()) {
-    return i::Top::scheduled_exception() == i::Heap::termination_exception();
+  i::Isolate* isolate = i::Isolate::Current();
+  if (!isolate->IsInitialized()) return false;
+  if (isolate->has_scheduled_exception()) {
+    return isolate->scheduled_exception() ==
+        isolate->heap()->termination_exception();
   }
   return false;
 }
 
 
+Isolate* Isolate::GetCurrent() {
+  i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+  return reinterpret_cast<Isolate*>(isolate);
+}
+
+
+Isolate* Isolate::New() {
+  i::Isolate* isolate = new i::Isolate();
+  return reinterpret_cast<Isolate*>(isolate);
+}
+
+
+void Isolate::Dispose() {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
+  if (!ApiCheck(!isolate->IsInUse(),
+                "v8::Isolate::Dispose()",
+                "Disposing the isolate that is entered by a thread.")) {
+    return;
+  }
+  isolate->TearDown();
+}
+
+
+void Isolate::Enter() {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
+  isolate->Enter();
+}
+
+
+void Isolate::Exit() {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
+  isolate->Exit();
+}
+
+
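
A minimal lifecycle sketch for the new public Isolate API exercised above (New/Enter/Exit/Dispose); error handling is omitted and the surrounding embedder code is illustrative:

void RunInFreshIsolate() {
  v8::Isolate* isolate = v8::Isolate::New();
  isolate->Enter();
  {
    v8::HandleScope handle_scope;
    v8::Persistent<v8::Context> context = v8::Context::New();
    v8::Context::Scope context_scope(context);
    // ... compile and run scripts bound to this isolate ...
    context.Dispose();
  }
  isolate->Exit();
  isolate->Dispose();  // The ApiCheck above rejects disposing an entered isolate.
}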
 String::Utf8Value::Utf8Value(v8::Handle<v8::Value> obj) {
   EnsureInitialized("v8::String::Utf8Value::Utf8Value()");
   if (obj.IsEmpty()) {
@@ -4389,14 +4695,15 @@ String::Value::~Value() {
 }
 
 Local<Value> Exception::RangeError(v8::Handle<v8::String> raw_message) {
-  LOG_API("RangeError");
-  ON_BAILOUT("v8::Exception::RangeError()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "RangeError");
+  ON_BAILOUT(isolate, "v8::Exception::RangeError()", return Local<Value>());
   ENTER_V8;
   i::Object* error;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::String> message = Utils::OpenHandle(*raw_message);
-    i::Handle<i::Object> result = i::Factory::NewRangeError(message);
+    i::Handle<i::Object> result = FACTORY->NewRangeError(message);
     error = *result;
   }
   i::Handle<i::Object> result(error);
@@ -4404,14 +4711,15 @@ Local<Value> Exception::RangeError(v8::Handle<v8::String> raw_message) {
 }
 
 Local<Value> Exception::ReferenceError(v8::Handle<v8::String> raw_message) {
-  LOG_API("ReferenceError");
-  ON_BAILOUT("v8::Exception::ReferenceError()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "ReferenceError");
+  ON_BAILOUT(isolate, "v8::Exception::ReferenceError()", return Local<Value>());
   ENTER_V8;
   i::Object* error;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::String> message = Utils::OpenHandle(*raw_message);
-    i::Handle<i::Object> result = i::Factory::NewReferenceError(message);
+    i::Handle<i::Object> result = FACTORY->NewReferenceError(message);
     error = *result;
   }
   i::Handle<i::Object> result(error);
@@ -4419,14 +4727,15 @@ Local<Value> Exception::ReferenceError(v8::Handle<v8::String> raw_message) {
 }
 
 Local<Value> Exception::SyntaxError(v8::Handle<v8::String> raw_message) {
-  LOG_API("SyntaxError");
-  ON_BAILOUT("v8::Exception::SyntaxError()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "SyntaxError");
+  ON_BAILOUT(isolate, "v8::Exception::SyntaxError()", return Local<Value>());
   ENTER_V8;
   i::Object* error;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::String> message = Utils::OpenHandle(*raw_message);
-    i::Handle<i::Object> result = i::Factory::NewSyntaxError(message);
+    i::Handle<i::Object> result = FACTORY->NewSyntaxError(message);
     error = *result;
   }
   i::Handle<i::Object> result(error);
@@ -4434,14 +4743,15 @@ Local<Value> Exception::SyntaxError(v8::Handle<v8::String> raw_message) {
 }
 
 Local<Value> Exception::TypeError(v8::Handle<v8::String> raw_message) {
-  LOG_API("TypeError");
-  ON_BAILOUT("v8::Exception::TypeError()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "TypeError");
+  ON_BAILOUT(isolate, "v8::Exception::TypeError()", return Local<Value>());
   ENTER_V8;
   i::Object* error;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::String> message = Utils::OpenHandle(*raw_message);
-    i::Handle<i::Object> result = i::Factory::NewTypeError(message);
+    i::Handle<i::Object> result = FACTORY->NewTypeError(message);
     error = *result;
   }
   i::Handle<i::Object> result(error);
@@ -4449,14 +4759,15 @@ Local<Value> Exception::TypeError(v8::Handle<v8::String> raw_message) {
 }
 
 Local<Value> Exception::Error(v8::Handle<v8::String> raw_message) {
-  LOG_API("Error");
-  ON_BAILOUT("v8::Exception::Error()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "Error");
+  ON_BAILOUT(isolate, "v8::Exception::Error()", return Local<Value>());
   ENTER_V8;
   i::Object* error;
   {
-    HandleScope scope;
+    i::HandleScope scope(isolate);
     i::Handle<i::String> message = Utils::OpenHandle(*raw_message);
-    i::Handle<i::Object> result = i::Factory::NewError(message);
+    i::Handle<i::Object> result = FACTORY->NewError(message);
     error = *result;
   }
   i::Handle<i::Object> result(error);
@@ -4468,82 +4779,99 @@ Local<Value> Exception::Error(v8::Handle<v8::String> raw_message) {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 
-static v8::Debug::EventCallback event_callback = NULL;
-
 static void EventCallbackWrapper(const v8::Debug::EventDetails& event_details) {
-  if (event_callback) {
-    event_callback(event_details.GetEvent(),
-                   event_details.GetExecutionState(),
-                   event_details.GetEventData(),
-                   event_details.GetCallbackData());
+  i::Isolate* isolate = i::Isolate::Current();
+  if (isolate->debug_event_callback() != NULL) {
+    isolate->debug_event_callback()(event_details.GetEvent(),
+                                    event_details.GetExecutionState(),
+                                    event_details.GetEventData(),
+                                    event_details.GetCallbackData());
   }
 }
 
 
 bool Debug::SetDebugEventListener(EventCallback that, Handle<Value> data) {
-  EnsureInitialized("v8::Debug::SetDebugEventListener()");
-  ON_BAILOUT("v8::Debug::SetDebugEventListener()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Debug::SetDebugEventListener()");
+  ON_BAILOUT(isolate, "v8::Debug::SetDebugEventListener()", return false);
   ENTER_V8;
 
-  event_callback = that;
+  isolate->set_debug_event_callback(that);
 
-  HandleScope scope;
-  i::Handle<i::Object> proxy = i::Factory::undefined_value();
+  i::HandleScope scope(isolate);
+  i::Handle<i::Object> proxy = isolate->factory()->undefined_value();
   if (that != NULL) {
-    proxy = i::Factory::NewProxy(FUNCTION_ADDR(EventCallbackWrapper));
+    proxy = isolate->factory()->NewProxy(FUNCTION_ADDR(EventCallbackWrapper));
   }
-  i::Debugger::SetEventListener(proxy, Utils::OpenHandle(*data));
+  isolate->debugger()->SetEventListener(proxy, Utils::OpenHandle(*data));
   return true;
 }
 
 
 bool Debug::SetDebugEventListener2(EventCallback2 that, Handle<Value> data) {
-  EnsureInitialized("v8::Debug::SetDebugEventListener2()");
-  ON_BAILOUT("v8::Debug::SetDebugEventListener2()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  EnsureInitializedForIsolate(isolate, "v8::Debug::SetDebugEventListener2()");
+  ON_BAILOUT(isolate, "v8::Debug::SetDebugEventListener2()", return false);
   ENTER_V8;
-  HandleScope scope;
-  i::Handle<i::Object> proxy = i::Factory::undefined_value();
+  i::HandleScope scope(isolate);
+  i::Handle<i::Object> proxy = isolate->factory()->undefined_value();
   if (that != NULL) {
-    proxy = i::Factory::NewProxy(FUNCTION_ADDR(that));
+    proxy = isolate->factory()->NewProxy(FUNCTION_ADDR(that));
   }
-  i::Debugger::SetEventListener(proxy, Utils::OpenHandle(*data));
+  isolate->debugger()->SetEventListener(proxy, Utils::OpenHandle(*data));
   return true;
 }
 
 
 bool Debug::SetDebugEventListener(v8::Handle<v8::Object> that,
                                   Handle<Value> data) {
-  ON_BAILOUT("v8::Debug::SetDebugEventListener()", return false);
+  i::Isolate* isolate = i::Isolate::Current();
+  ON_BAILOUT(isolate, "v8::Debug::SetDebugEventListener()", return false);
   ENTER_V8;
-  i::Debugger::SetEventListener(Utils::OpenHandle(*that),
-                                Utils::OpenHandle(*data));
+  isolate->debugger()->SetEventListener(Utils::OpenHandle(*that),
+                                        Utils::OpenHandle(*data));
   return true;
 }
 
 
-void Debug::DebugBreak() {
-  if (!i::V8::IsRunning()) return;
-  i::StackGuard::DebugBreak();
+void Debug::DebugBreak(Isolate* isolate) {
+  // If no isolate is supplied, use the default isolate.
+  if (isolate != NULL) {
+    reinterpret_cast<i::Isolate*>(isolate)->stack_guard()->DebugBreak();
+  } else {
+    i::Isolate::GetDefaultIsolateStackGuard()->DebugBreak();
+  }
 }
 
 
-void Debug::CancelDebugBreak() {
-  i::StackGuard::Continue(i::DEBUGBREAK);
+void Debug::CancelDebugBreak(Isolate* isolate) {
+  // If no isolate is supplied, use the default isolate.
+  if (isolate != NULL) {
+    i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+    internal_isolate->stack_guard()->Continue(i::DEBUGBREAK);
+  } else {
+    i::Isolate::GetDefaultIsolateStackGuard()->Continue(i::DEBUGBREAK);
+  }
 }
 
 
-void Debug::DebugBreakForCommand(ClientData* data) {
-  if (!i::V8::IsRunning()) return;
-  i::Debugger::EnqueueDebugCommand(data);
+void Debug::DebugBreakForCommand(ClientData* data, Isolate* isolate) {
+  // If no isolate is supplied, use the default isolate.
+  if (isolate != NULL) {
+    i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+    internal_isolate->debugger()->EnqueueDebugCommand(data);
+  } else {
+    i::Isolate::GetDefaultIsolateDebugger()->EnqueueDebugCommand(data);
+  }
 }
 
 
-static v8::Debug::MessageHandler message_handler = NULL;
-
 static void MessageHandlerWrapper(const v8::Debug::Message& message) {
-  if (message_handler) {
+  i::Isolate* isolate = i::Isolate::Current();
+  if (isolate->message_handler()) {
     v8::String::Value json(message.GetJSON());
-    message_handler(*json, json.length(), message.GetClientData());
+    (isolate->message_handler())(*json, json.length(), message.GetClientData());
   }
 }
 
@@ -4552,16 +4880,17 @@ void Debug::SetMessageHandler(v8::Debug::MessageHandler handler,
                               bool message_handler_thread) {
   EnsureInitialized("v8::Debug::SetMessageHandler");
   ENTER_V8;
+  i::Isolate* isolate = i::Isolate::Current();
   // Message handler thread not supported any more. Parameter temporarily left
   // in the API for client compatibility reasons.
   CHECK(!message_handler_thread);
 
   // TODO(sgjesse) support the old message handler API through a simple wrapper.
-  message_handler = handler;
-  if (message_handler != NULL) {
-    i::Debugger::SetMessageHandler(MessageHandlerWrapper);
+  isolate->set_message_handler(handler);
+  if (handler != NULL) {
+    isolate->debugger()->SetMessageHandler(MessageHandlerWrapper);
   } else {
-    i::Debugger::SetMessageHandler(NULL);
+    isolate->debugger()->SetMessageHandler(NULL);
   }
 }
 
@@ -4569,15 +4898,15 @@ void Debug::SetMessageHandler(v8::Debug::MessageHandler handler,
 void Debug::SetMessageHandler2(v8::Debug::MessageHandler2 handler) {
   EnsureInitialized("v8::Debug::SetMessageHandler");
   ENTER_V8;
-  i::Debugger::SetMessageHandler(handler);
+  i::Isolate::Current()->debugger()->SetMessageHandler(handler);
 }
 
 
 void Debug::SendCommand(const uint16_t* command, int length,
                         ClientData* client_data) {
-  if (!i::V8::IsRunning()) return;
-  i::Debugger::ProcessCommand(i::Vector<const uint16_t>(command, length),
-                              client_data);
+  if (!i::Isolate::Current()->IsInitialized()) return;
+  i::Isolate::Current()->debugger()->ProcessCommand(
+      i::Vector<const uint16_t>(command, length), client_data);
 }
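
Sketch of feeding a JSON protocol command through the entry point above; the ASCII widening helper is illustrative:

#include <cstring>
#include <vector>

void SendJsonDebugCommand(const char* json) {
  size_t length = strlen(json);
  if (length == 0) return;
  std::vector<uint16_t> command(length);
  for (size_t i = 0; i < length; ++i) {
    command[i] = static_cast<uint16_t>(json[i]);
  }
  // client_data defaults to NULL; replies arrive via the message handler.
  v8::Debug::SendCommand(&command[0], static_cast<int>(length));
}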
 
 
@@ -4585,7 +4914,7 @@ void Debug::SetHostDispatchHandler(HostDispatchHandler handler,
                                    int period) {
   EnsureInitialized("v8::Debug::SetHostDispatchHandler");
   ENTER_V8;
-  i::Debugger::SetHostDispatchHandler(handler, period);
+  i::Isolate::Current()->debugger()->SetHostDispatchHandler(handler, period);
 }
 
 
@@ -4593,25 +4922,28 @@ void Debug::SetDebugMessageDispatchHandler(
     DebugMessageDispatchHandler handler, bool provide_locker) {
   EnsureInitialized("v8::Debug::SetDebugMessageDispatchHandler");
   ENTER_V8;
-  i::Debugger::SetDebugMessageDispatchHandler(handler, provide_locker);
+  i::Isolate::Current()->debugger()->SetDebugMessageDispatchHandler(
+      handler, provide_locker);
 }
 
 
 Local<Value> Debug::Call(v8::Handle<v8::Function> fun,
                          v8::Handle<v8::Value> data) {
-  if (!i::V8::IsRunning()) return Local<Value>();
-  ON_BAILOUT("v8::Debug::Call()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  if (!isolate->IsInitialized()) return Local<Value>();
+  ON_BAILOUT(isolate, "v8::Debug::Call()", return Local<Value>());
   ENTER_V8;
   i::Handle<i::Object> result;
   EXCEPTION_PREAMBLE();
   if (data.IsEmpty()) {
-    result = i::Debugger::Call(Utils::OpenHandle(*fun),
-                               i::Factory::undefined_value(),
-                               &has_pending_exception);
+    result = isolate->debugger()->Call(Utils::OpenHandle(*fun),
+                                       isolate->factory()->undefined_value(),
+                                       &has_pending_exception);
   } else {
-    result = i::Debugger::Call(Utils::OpenHandle(*fun),
-                               Utils::OpenHandle(*data),
-                               &has_pending_exception);
+    result = isolate->debugger()->Call(Utils::OpenHandle(*fun),
+                                       Utils::OpenHandle(*data),
+                                       &has_pending_exception);
   }
   EXCEPTION_BAILOUT_CHECK(Local<Value>());
   return Utils::ToLocal(result);
@@ -4619,13 +4951,15 @@ Local<Value> Debug::Call(v8::Handle<v8::Function> fun,
 
 
 Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) {
-  if (!i::V8::IsRunning()) return Local<Value>();
-  ON_BAILOUT("v8::Debug::GetMirror()", return Local<Value>());
+  i::Isolate* isolate = i::Isolate::Current();
+  if (!isolate->IsInitialized()) return Local<Value>();
+  ON_BAILOUT(isolate, "v8::Debug::GetMirror()", return Local<Value>());
   ENTER_V8;
   v8::HandleScope scope;
-  i::Debug::Load();
-  i::Handle<i::JSObject> debug(i::Debug::debug_context()->global());
-  i::Handle<i::String> name = i::Factory::LookupAsciiSymbol("MakeMirror");
+  i::Debug* isolate_debug = isolate->debug();
+  isolate_debug->Load();
+  i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global());
+  i::Handle<i::String> name = FACTORY->LookupAsciiSymbol("MakeMirror");
   i::Handle<i::Object> fun_obj = i::GetProperty(debug, name);
   i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(fun_obj);
   v8::Handle<v8::Function> v8_fun = Utils::ToLocal(fun);
@@ -4641,7 +4975,8 @@ Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) {
 
 
 bool Debug::EnableAgent(const char* name, int port, bool wait_for_connection) {
-  return i::Debugger::StartAgent(name, port, wait_for_connection);
+  return i::Isolate::Current()->debugger()->StartAgent(name, port,
+                                                       wait_for_connection);
 }
 
 void Debug::ProcessDebugMessages() {
@@ -4651,7 +4986,7 @@ void Debug::ProcessDebugMessages() {
 Local<Context> Debug::GetDebugContext() {
   EnsureInitialized("v8::Debug::GetDebugContext()");
   ENTER_V8;
-  return Utils::ToLocal(i::Debugger::GetDebugContext());
+  return Utils::ToLocal(i::Isolate::Current()->debugger()->GetDebugContext());
 }
 
 #endif  // ENABLE_DEBUGGER_SUPPORT
@@ -4660,72 +4995,82 @@ Local<Context> Debug::GetDebugContext() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
 Handle<String> CpuProfileNode::GetFunctionName() const {
-  IsDeadCheck("v8::CpuProfileNode::GetFunctionName");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetFunctionName");
   const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
   const i::CodeEntry* entry = node->entry();
   if (!entry->has_name_prefix()) {
     return Handle<String>(ToApi<String>(
-        i::Factory::LookupAsciiSymbol(entry->name())));
+        isolate->factory()->LookupAsciiSymbol(entry->name())));
   } else {
-    return Handle<String>(ToApi<String>(i::Factory::NewConsString(
-        i::Factory::LookupAsciiSymbol(entry->name_prefix()),
-        i::Factory::LookupAsciiSymbol(entry->name()))));
+    return Handle<String>(ToApi<String>(isolate->factory()->NewConsString(
+        isolate->factory()->LookupAsciiSymbol(entry->name_prefix()),
+        isolate->factory()->LookupAsciiSymbol(entry->name()))));
   }
 }
 
 
 Handle<String> CpuProfileNode::GetScriptResourceName() const {
-  IsDeadCheck("v8::CpuProfileNode::GetScriptResourceName");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetScriptResourceName");
   const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
-  return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol(
+  return Handle<String>(ToApi<String>(FACTORY->LookupAsciiSymbol(
       node->entry()->resource_name())));
 }
 
 
 int CpuProfileNode::GetLineNumber() const {
-  IsDeadCheck("v8::CpuProfileNode::GetLineNumber");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetLineNumber");
   return reinterpret_cast<const i::ProfileNode*>(this)->entry()->line_number();
 }
 
 
 double CpuProfileNode::GetTotalTime() const {
-  IsDeadCheck("v8::CpuProfileNode::GetTotalTime");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetTotalTime");
   return reinterpret_cast<const i::ProfileNode*>(this)->GetTotalMillis();
 }
 
 
 double CpuProfileNode::GetSelfTime() const {
-  IsDeadCheck("v8::CpuProfileNode::GetSelfTime");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetSelfTime");
   return reinterpret_cast<const i::ProfileNode*>(this)->GetSelfMillis();
 }
 
 
 double CpuProfileNode::GetTotalSamplesCount() const {
-  IsDeadCheck("v8::CpuProfileNode::GetTotalSamplesCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetTotalSamplesCount");
   return reinterpret_cast<const i::ProfileNode*>(this)->total_ticks();
 }
 
 
 double CpuProfileNode::GetSelfSamplesCount() const {
-  IsDeadCheck("v8::CpuProfileNode::GetSelfSamplesCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetSelfSamplesCount");
   return reinterpret_cast<const i::ProfileNode*>(this)->self_ticks();
 }
 
 
 unsigned CpuProfileNode::GetCallUid() const {
-  IsDeadCheck("v8::CpuProfileNode::GetCallUid");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetCallUid");
   return reinterpret_cast<const i::ProfileNode*>(this)->entry()->GetCallUid();
 }
 
 
 int CpuProfileNode::GetChildrenCount() const {
-  IsDeadCheck("v8::CpuProfileNode::GetChildrenCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetChildrenCount");
   return reinterpret_cast<const i::ProfileNode*>(this)->children()->length();
 }
 
 
 const CpuProfileNode* CpuProfileNode::GetChild(int index) const {
-  IsDeadCheck("v8::CpuProfileNode::GetChild");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfileNode::GetChild");
   const i::ProfileNode* child =
       reinterpret_cast<const i::ProfileNode*>(this)->children()->at(index);
   return reinterpret_cast<const CpuProfileNode*>(child);
@@ -4733,42 +5078,48 @@ const CpuProfileNode* CpuProfileNode::GetChild(int index) const {
 
 
 unsigned CpuProfile::GetUid() const {
-  IsDeadCheck("v8::CpuProfile::GetUid");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfile::GetUid");
   return reinterpret_cast<const i::CpuProfile*>(this)->uid();
 }
 
 
 Handle<String> CpuProfile::GetTitle() const {
-  IsDeadCheck("v8::CpuProfile::GetTitle");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfile::GetTitle");
   const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
-  return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol(
+  return Handle<String>(ToApi<String>(FACTORY->LookupAsciiSymbol(
       profile->title())));
 }
 
 
 const CpuProfileNode* CpuProfile::GetBottomUpRoot() const {
-  IsDeadCheck("v8::CpuProfile::GetBottomUpRoot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfile::GetBottomUpRoot");
   const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
   return reinterpret_cast<const CpuProfileNode*>(profile->bottom_up()->root());
 }
 
 
 const CpuProfileNode* CpuProfile::GetTopDownRoot() const {
-  IsDeadCheck("v8::CpuProfile::GetTopDownRoot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfile::GetTopDownRoot");
   const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
   return reinterpret_cast<const CpuProfileNode*>(profile->top_down()->root());
 }
 
 
 int CpuProfiler::GetProfilesCount() {
-  IsDeadCheck("v8::CpuProfiler::GetProfilesCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfiler::GetProfilesCount");
   return i::CpuProfiler::GetProfilesCount();
 }
 
 
 const CpuProfile* CpuProfiler::GetProfile(int index,
                                           Handle<Value> security_token) {
-  IsDeadCheck("v8::CpuProfiler::GetProfile");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfiler::GetProfile");
   return reinterpret_cast<const CpuProfile*>(
       i::CpuProfiler::GetProfile(
           security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
@@ -4778,7 +5129,8 @@ const CpuProfile* CpuProfiler::GetProfile(int index,
 
 const CpuProfile* CpuProfiler::FindProfile(unsigned uid,
                                            Handle<Value> security_token) {
-  IsDeadCheck("v8::CpuProfiler::FindProfile");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfiler::FindProfile");
   return reinterpret_cast<const CpuProfile*>(
       i::CpuProfiler::FindProfile(
           security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
@@ -4787,14 +5139,16 @@ const CpuProfile* CpuProfiler::FindProfile(unsigned uid,
 
 
 void CpuProfiler::StartProfiling(Handle<String> title) {
-  IsDeadCheck("v8::CpuProfiler::StartProfiling");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfiler::StartProfiling");
   i::CpuProfiler::StartProfiling(*Utils::OpenHandle(*title));
 }
 
 
 const CpuProfile* CpuProfiler::StopProfiling(Handle<String> title,
                                              Handle<Value> security_token) {
-  IsDeadCheck("v8::CpuProfiler::StopProfiling");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::CpuProfiler::StopProfiling");
   return reinterpret_cast<const CpuProfile*>(
       i::CpuProfiler::StopProfiling(
           security_token.IsEmpty() ? NULL : *Utils::OpenHandle(*security_token),
@@ -4808,24 +5162,26 @@ static i::HeapGraphEdge* ToInternal(const HeapGraphEdge* edge) {
 }
 
 HeapGraphEdge::Type HeapGraphEdge::GetType() const {
-  IsDeadCheck("v8::HeapGraphEdge::GetType");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphEdge::GetType");
   return static_cast<HeapGraphEdge::Type>(ToInternal(this)->type());
 }
 
 
 Handle<Value> HeapGraphEdge::GetName() const {
-  IsDeadCheck("v8::HeapGraphEdge::GetName");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphEdge::GetName");
   i::HeapGraphEdge* edge = ToInternal(this);
   switch (edge->type()) {
     case i::HeapGraphEdge::kContextVariable:
     case i::HeapGraphEdge::kInternal:
     case i::HeapGraphEdge::kProperty:
     case i::HeapGraphEdge::kShortcut:
-      return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol(
+      return Handle<String>(ToApi<String>(FACTORY->LookupAsciiSymbol(
           edge->name())));
     case i::HeapGraphEdge::kElement:
     case i::HeapGraphEdge::kHidden:
-      return Handle<Number>(ToApi<Number>(i::Factory::NewNumberFromInt(
+      return Handle<Number>(ToApi<Number>(FACTORY->NewNumberFromInt(
           edge->index())));
     default: UNREACHABLE();
   }
@@ -4834,14 +5190,16 @@ Handle<Value> HeapGraphEdge::GetName() const {
 
 
 const HeapGraphNode* HeapGraphEdge::GetFromNode() const {
-  IsDeadCheck("v8::HeapGraphEdge::GetFromNode");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode");
   const i::HeapEntry* from = ToInternal(this)->From();
   return reinterpret_cast<const HeapGraphNode*>(from);
 }
 
 
 const HeapGraphNode* HeapGraphEdge::GetToNode() const {
-  IsDeadCheck("v8::HeapGraphEdge::GetToNode");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphEdge::GetToNode");
   const i::HeapEntry* to = ToInternal(this)->to();
   return reinterpret_cast<const HeapGraphNode*>(to);
 }
@@ -4882,91 +5240,105 @@ static i::HeapEntry* ToInternal(const HeapGraphNode* entry) {
 
 
 HeapGraphNode::Type HeapGraphNode::GetType() const {
-  IsDeadCheck("v8::HeapGraphNode::GetType");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphNode::GetType");
   return static_cast<HeapGraphNode::Type>(ToInternal(this)->type());
 }
 
 
 Handle<String> HeapGraphNode::GetName() const {
-  IsDeadCheck("v8::HeapGraphNode::GetName");
-  return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol(
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphNode::GetName");
+  return Handle<String>(ToApi<String>(FACTORY->LookupAsciiSymbol(
       ToInternal(this)->name())));
 }
 
 
 uint64_t HeapGraphNode::GetId() const {
-  IsDeadCheck("v8::HeapGraphNode::GetId");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
   ASSERT(ToInternal(this)->snapshot()->type() != i::HeapSnapshot::kAggregated);
   return ToInternal(this)->id();
 }
 
 
 int HeapGraphNode::GetInstancesCount() const {
-  IsDeadCheck("v8::HeapGraphNode::GetInstancesCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphNode::GetInstancesCount");
   ASSERT(ToInternal(this)->snapshot()->type() == i::HeapSnapshot::kAggregated);
   return static_cast<int>(ToInternal(this)->id());
 }
 
 
 int HeapGraphNode::GetSelfSize() const {
-  IsDeadCheck("v8::HeapGraphNode::GetSelfSize");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapGraphNode::GetSelfSize");
   return ToInternal(this)->self_size();
 }
 
 
 int HeapGraphNode::GetRetainedSize(bool exact) const {
-  IsDeadCheck("v8::HeapSnapshot::GetRetainedSize");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
   return ToInternal(this)->RetainedSize(exact);
 }
 
 
 int HeapGraphNode::GetChildrenCount() const {
-  IsDeadCheck("v8::HeapSnapshot::GetChildrenCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetChildrenCount");
   return ToInternal(this)->children().length();
 }
 
 
 const HeapGraphEdge* HeapGraphNode::GetChild(int index) const {
-  IsDeadCheck("v8::HeapSnapshot::GetChild");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetChild");
   return reinterpret_cast<const HeapGraphEdge*>(
       &ToInternal(this)->children()[index]);
 }
 
 
 int HeapGraphNode::GetRetainersCount() const {
-  IsDeadCheck("v8::HeapSnapshot::GetRetainersCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainersCount");
   return ToInternal(this)->retainers().length();
 }
 
 
 const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const {
-  IsDeadCheck("v8::HeapSnapshot::GetRetainer");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainer");
   return reinterpret_cast<const HeapGraphEdge*>(
       ToInternal(this)->retainers()[index]);
 }
 
 
 int HeapGraphNode::GetRetainingPathsCount() const {
-  IsDeadCheck("v8::HeapSnapshot::GetRetainingPathsCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainingPathsCount");
   return ToInternal(this)->GetRetainingPaths()->length();
 }
 
 
 const HeapGraphPath* HeapGraphNode::GetRetainingPath(int index) const {
-  IsDeadCheck("v8::HeapSnapshot::GetRetainingPath");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainingPath");
   return reinterpret_cast<const HeapGraphPath*>(
       ToInternal(this)->GetRetainingPaths()->at(index));
 }
 
 
 const HeapGraphNode* HeapGraphNode::GetDominatorNode() const {
-  IsDeadCheck("v8::HeapSnapshot::GetDominatorNode");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetDominatorNode");
   return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->dominator());
 }
 
 
 const HeapGraphNode* HeapSnapshotsDiff::GetAdditionsRoot() const {
-  IsDeadCheck("v8::HeapSnapshotsDiff::GetAdditionsRoot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshotsDiff::GetAdditionsRoot");
   i::HeapSnapshotsDiff* diff =
       const_cast<i::HeapSnapshotsDiff*>(
           reinterpret_cast<const i::HeapSnapshotsDiff*>(this));
@@ -4975,7 +5347,8 @@ const HeapGraphNode* HeapSnapshotsDiff::GetAdditionsRoot() const {
 
 
 const HeapGraphNode* HeapSnapshotsDiff::GetDeletionsRoot() const {
-  IsDeadCheck("v8::HeapSnapshotsDiff::GetDeletionsRoot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshotsDiff::GetDeletionsRoot");
   i::HeapSnapshotsDiff* diff =
       const_cast<i::HeapSnapshotsDiff*>(
           reinterpret_cast<const i::HeapSnapshotsDiff*>(this));
@@ -4990,32 +5363,37 @@ static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) {
 
 
 HeapSnapshot::Type HeapSnapshot::GetType() const {
-  IsDeadCheck("v8::HeapSnapshot::GetType");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetType");
   return static_cast<HeapSnapshot::Type>(ToInternal(this)->type());
 }
 
 
 unsigned HeapSnapshot::GetUid() const {
-  IsDeadCheck("v8::HeapSnapshot::GetUid");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetUid");
   return ToInternal(this)->uid();
 }
 
 
 Handle<String> HeapSnapshot::GetTitle() const {
-  IsDeadCheck("v8::HeapSnapshot::GetTitle");
-  return Handle<String>(ToApi<String>(i::Factory::LookupAsciiSymbol(
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetTitle");
+  return Handle<String>(ToApi<String>(FACTORY->LookupAsciiSymbol(
       ToInternal(this)->title())));
 }
 
 
 const HeapGraphNode* HeapSnapshot::GetRoot() const {
-  IsDeadCheck("v8::HeapSnapshot::GetHead");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetHead");
   return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->root());
 }
 
 
 const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
-  IsDeadCheck("v8::HeapSnapshot::GetNodeById");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById");
   return reinterpret_cast<const HeapGraphNode*>(
       ToInternal(this)->GetEntryById(id));
 }
@@ -5023,7 +5401,8 @@ const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
 
 const HeapSnapshotsDiff* HeapSnapshot::CompareWith(
     const HeapSnapshot* snapshot) const {
-  IsDeadCheck("v8::HeapSnapshot::CompareWith");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::CompareWith");
   return reinterpret_cast<const HeapSnapshotsDiff*>(
       ToInternal(this)->CompareWith(ToInternal(snapshot)));
 }
@@ -5031,7 +5410,8 @@ const HeapSnapshotsDiff* HeapSnapshot::CompareWith(
 
 void HeapSnapshot::Serialize(OutputStream* stream,
                              HeapSnapshot::SerializationFormat format) const {
-  IsDeadCheck("v8::HeapSnapshot::Serialize");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapSnapshot::Serialize");
   ApiCheck(format == kJSON,
            "v8::HeapSnapshot::Serialize",
            "Unknown serialization format");
@@ -5047,20 +5427,23 @@ void HeapSnapshot::Serialize(OutputStream* stream,
 
 
 int HeapProfiler::GetSnapshotsCount() {
-  IsDeadCheck("v8::HeapProfiler::GetSnapshotsCount");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotsCount");
   return i::HeapProfiler::GetSnapshotsCount();
 }
 
 
 const HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
-  IsDeadCheck("v8::HeapProfiler::GetSnapshot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshot");
   return reinterpret_cast<const HeapSnapshot*>(
       i::HeapProfiler::GetSnapshot(index));
 }
 
 
 const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
-  IsDeadCheck("v8::HeapProfiler::FindSnapshot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapProfiler::FindSnapshot");
   return reinterpret_cast<const HeapSnapshot*>(
       i::HeapProfiler::FindSnapshot(uid));
 }
@@ -5069,7 +5452,8 @@ const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
 const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
                                                HeapSnapshot::Type type,
                                                ActivityControl* control) {
-  IsDeadCheck("v8::HeapProfiler::TakeSnapshot");
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapProfiler::TakeSnapshot");
   i::HeapSnapshot::Type internal_type = i::HeapSnapshot::kFull;
   switch (type) {
     case HeapSnapshot::kFull:
@@ -5089,7 +5473,8 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
 
 void HeapProfiler::DefineWrapperClass(uint16_t class_id,
                                       WrapperInfoCallback callback) {
-  i::HeapProfiler::DefineWrapperClass(class_id, callback);
+  i::Isolate::Current()->heap_profiler()->DefineWrapperClass(class_id,
+                                                             callback);
 }
 
 #endif  // ENABLE_LOGGING_AND_PROFILING
@@ -5164,24 +5549,15 @@ void Testing::DeoptimizeAll() {
 namespace internal {
 
 
-HandleScopeImplementer* HandleScopeImplementer::instance() {
-  return &thread_local;
-}
-
-
 void HandleScopeImplementer::FreeThreadResources() {
-  thread_local.Free();
+  Free();
 }
 
 
 char* HandleScopeImplementer::ArchiveThread(char* storage) {
-  return thread_local.ArchiveThreadHelper(storage);
-}
-
-
-char* HandleScopeImplementer::ArchiveThreadHelper(char* storage) {
+  Isolate* isolate = Isolate::Current();
   v8::ImplementationUtilities::HandleScopeData* current =
-      v8::ImplementationUtilities::CurrentHandleScope();
+      isolate->handle_scope_data();
   handle_scope_data_ = *current;
   memcpy(storage, this, sizeof(*this));
 
@@ -5193,18 +5569,13 @@ char* HandleScopeImplementer::ArchiveThreadHelper(char* storage) {
 
 
 int HandleScopeImplementer::ArchiveSpacePerThread() {
-  return sizeof(thread_local);
+  return sizeof(HandleScopeImplementer);
 }
 
 
 char* HandleScopeImplementer::RestoreThread(char* storage) {
-  return thread_local.RestoreThreadHelper(storage);
-}
-
-
-char* HandleScopeImplementer::RestoreThreadHelper(char* storage) {
   memcpy(this, storage, sizeof(*this));
-  *v8::ImplementationUtilities::CurrentHandleScope() = handle_scope_data_;
+  *Isolate::Current()->handle_scope_data() = handle_scope_data_;
   return storage + ArchiveSpacePerThread();
 }
 
@@ -5230,9 +5601,9 @@ void HandleScopeImplementer::IterateThis(ObjectVisitor* v) {
 
 void HandleScopeImplementer::Iterate(ObjectVisitor* v) {
   v8::ImplementationUtilities::HandleScopeData* current =
-      v8::ImplementationUtilities::CurrentHandleScope();
-  thread_local.handle_scope_data_ = *current;
-  thread_local.IterateThis(v);
+      Isolate::Current()->handle_scope_data();
+  handle_scope_data_ = *current;
+  IterateThis(v);
 }
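
For illustration only (not part of the patch): the hunk above turns ArchiveThread/RestoreThread into instance methods on the isolate-owned HandleScopeImplementer, which snapshots the isolate's handle scope data and memcpy's itself into flat per-thread storage. The self-contained sketch below mirrors that archive/restore round trip with invented names (ThreadState, ScopeState); it is a simplified model, not V8's code.

#include <cstring>
#include <iostream>

// Simplified model of the archive/restore pattern: the per-thread state
// object is copied wholesale into flat storage and copied back when the
// thread re-enters. All names here are illustrative.
struct ScopeState {
  int level;
  void* next;
};

class ThreadState {
 public:
  static int ArchiveSpacePerThread() { return sizeof(ThreadState); }

  char* ArchiveThread(char* storage) {
    // The real ArchiveThread() first snapshots isolate->handle_scope_data()
    // into the object; here we just copy the whole object out.
    std::memcpy(storage, this, sizeof(*this));
    return storage + ArchiveSpacePerThread();
  }

  char* RestoreThread(char* storage) {
    std::memcpy(this, storage, sizeof(*this));
    return storage + ArchiveSpacePerThread();
  }

  ScopeState scope_state_;
};

int main() {
  ThreadState state;
  state.scope_state_.level = 7;
  state.scope_state_.next = nullptr;

  char storage[sizeof(ThreadState)];
  state.ArchiveThread(storage);

  state.scope_state_.level = 0;  // some other thread ran in the meantime
  state.RestoreThread(storage);
  std::cout << state.scope_state_.level << "\n";  // prints 7
}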
 
 
index d07d75b91292bb7392321ff49a9f6856e911fdd1..6d46713eff10a511e000735c625995192953c54e 100644 (file)
--- a/src/api.h
+++ b/src/api.h
@@ -122,7 +122,7 @@ template <typename T> static inline T ToCData(v8::internal::Object* obj) {
 template <typename T>
 static inline v8::internal::Handle<v8::internal::Object> FromCData(T obj) {
   STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
-  return v8::internal::Factory::NewProxy(
+  return FACTORY->NewProxy(
       reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(obj)));
 }
 
@@ -157,7 +157,6 @@ class RegisteredExtension {
   RegisteredExtension* next_auto_;
   ExtensionTraversalState state_;
   static RegisteredExtension* first_extension_;
-  static RegisteredExtension* first_auto_extension_;
 };
 
 
@@ -321,16 +320,83 @@ MAKE_OPEN_HANDLE(StackFrame, JSObject)
 
 namespace internal {
 
+// Tracks string usage to help make better decisions when
+// externalizing strings.
+//
+// Implementation note: internally this class only tracks fresh
+// strings and keeps a single use counter for them.
+class StringTracker {
+ public:
+  // Records that the given string's characters were copied to some
+  // external buffer. If this happens often we should honor
+  // externalization requests for the string.
+  void RecordWrite(Handle<String> string) {
+    Address address = reinterpret_cast<Address>(*string);
+    Address top = isolate_->heap()->NewSpaceTop();
+    if (IsFreshString(address, top)) {
+      IncrementUseCount(top);
+    }
+  }
+
+  // Estimates freshness and use frequency of the given string based
+  // on how close it is to the new space top and the recorded usage
+  // history.
+  inline bool IsFreshUnusedString(Handle<String> string) {
+    Address address = reinterpret_cast<Address>(*string);
+    Address top = isolate_->heap()->NewSpaceTop();
+    return IsFreshString(address, top) && IsUseCountLow(top);
+  }
+
+ private:
+  StringTracker() : use_count_(0), last_top_(NULL), isolate_(NULL) { }
+
+  static inline bool IsFreshString(Address string, Address top) {
+    return top - kFreshnessLimit <= string && string <= top;
+  }
+
+  inline bool IsUseCountLow(Address top) {
+    if (last_top_ != top) return true;
+    return use_count_ < kUseLimit;
+  }
+
+  inline void IncrementUseCount(Address top) {
+    if (last_top_ != top) {
+      use_count_ = 0;
+      last_top_ = top;
+    }
+    ++use_count_;
+  }
+
+  // Single use counter shared by all fresh strings.
+  int use_count_;
+
+  // Last new space top when the use count above was valid.
+  Address last_top_;
+
+  Isolate* isolate_;
+
+  // How close to the new space top a string must be to count as fresh.
+  static const int kFreshnessLimit = 1024;
+
+  // The number of uses required to consider a string useful.
+  static const int kUseLimit = 32;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(StringTracker);
+};
+
+
 // This class is here in order to be able to declare it a friend of
 // HandleScope.  Moving these methods to be members of HandleScope would be
-// neat in some ways, but it would expose external implementation details in
+// neat in some ways, but it would expose internal implementation details in
 // our public header file, which is undesirable.
 //
-// There is a singleton instance of this class to hold the per-thread data.
-// For multithreaded V8 programs this data is copied in and out of storage
+// An isolate has a single instance of this class to hold the current thread's
+// data. In multithreaded V8 programs this data is copied in and out of storage
 // so that the currently executing thread always has its own copy of this
 // data.
-class HandleScopeImplementer {
+ISOLATED_CLASS HandleScopeImplementer {
  public:
 
   HandleScopeImplementer()
@@ -341,16 +407,14 @@ class HandleScopeImplementer {
         ignore_out_of_memory_(false),
         call_depth_(0) { }
 
-  static HandleScopeImplementer* instance();
-
   // Threading support for handle data.
   static int ArchiveSpacePerThread();
-  static char* RestoreThread(char* from);
-  static char* ArchiveThread(char* to);
-  static void FreeThreadResources();
+  char* RestoreThread(char* from);
+  char* ArchiveThread(char* to);
+  void FreeThreadResources();
 
   // Garbage collection support.
-  static void Iterate(v8::internal::ObjectVisitor* v);
+  void Iterate(v8::internal::ObjectVisitor* v);
   static char* Iterate(v8::internal::ObjectVisitor* v, char* data);
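
For illustration only (not part of the patch): the StringTracker added to src/api.h above treats a string as "fresh" when it sits within kFreshnessLimit bytes of the new-space top, and all fresh strings share one use counter that resets whenever the top moves. The stand-alone sketch below reproduces that heuristic with plain addresses instead of V8 handles; MiniStringTracker and its driver are invented names.

#include <cstdint>
#include <iostream>

// Stand-alone model of the freshness heuristic: a string is "fresh" if it
// lies within kFreshnessLimit bytes below the new-space top, and all fresh
// strings share a single use counter that resets when the top moves.
class MiniStringTracker {
 public:
  // Called when a string's characters are copied to an external buffer.
  void RecordWrite(uintptr_t string, uintptr_t top) {
    if (IsFresh(string, top)) Increment(top);
  }

  // A fresh string with a low use count is probably not worth externalizing.
  bool IsFreshUnused(uintptr_t string, uintptr_t top) const {
    return IsFresh(string, top) &&
           (last_top_ != top || use_count_ < kUseLimit);
  }

 private:
  static bool IsFresh(uintptr_t string, uintptr_t top) {
    return top - kFreshnessLimit <= string && string <= top;
  }
  void Increment(uintptr_t top) {
    if (last_top_ != top) {
      use_count_ = 0;
      last_top_ = top;
    }
    ++use_count_;
  }

  static const int kFreshnessLimit = 1024;
  static const int kUseLimit = 32;
  int use_count_ = 0;
  uintptr_t last_top_ = 0;
};

int main() {
  MiniStringTracker tracker;
  const uintptr_t top = 0x10000;
  const uintptr_t str = top - 16;  // allocated just below the new-space top
  for (int i = 0; i < 40; ++i) tracker.RecordWrite(str, top);
  // After enough copies the string no longer counts as fresh-and-unused,
  // so an externalization request for it would be honored.
  std::cout << std::boolalpha << tracker.IsFreshUnused(str, top) << "\n";
}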
 
 
index 9683aa43bb5c5d01ef55a8ea1794823725cacc5f..149ede59c32bf1254dd6b36af3480070a350867a 100644 (file)
@@ -68,8 +68,6 @@ class ImplementationUtilities {
   // to access the HandleScope data.
   typedef v8::HandleScope::Data HandleScopeData;
 
-  static HandleScopeData* CurrentHandleScope();
-
 #ifdef DEBUG
   static void ZapHandleRange(internal::Object** begin, internal::Object** end);
 #endif
index 5cf8deaa5942f5ba967203046cdf0d6e8a4e7e94..c80548f4672c720352d2bba790c9b5e8661fca73 100644 (file)
@@ -65,7 +65,6 @@ class Arguments BASE_EMBEDDED {
   int length() const { return length_; }
 
   Object** arguments() { return arguments_; }
-
  private:
   int length_;
   Object** arguments_;
@@ -77,15 +76,16 @@ class Arguments BASE_EMBEDDED {
 // can.
 class CustomArguments : public Relocatable {
  public:
-  inline CustomArguments(Object* data,
+  inline CustomArguments(Isolate* isolate,
+                         Object* data,
                          Object* self,
-                         JSObject* holder) {
+                         JSObject* holder) : Relocatable(isolate) {
     values_[2] = self;
     values_[1] = holder;
     values_[0] = data;
   }
 
-  inline CustomArguments() {
+  inline explicit CustomArguments(Isolate* isolate) : Relocatable(isolate) {
 #ifdef DEBUG
     for (size_t i = 0; i < ARRAY_SIZE(values_); i++) {
       values_[i] = reinterpret_cast<Object*>(kZapValue);
@@ -99,6 +99,8 @@ class CustomArguments : public Relocatable {
   Object* values_[3];
 };
 
+#define RUNTIME_CALLING_CONVENTION Arguments args, Isolate* isolate
+#define RUNTIME_GET_ISOLATE ASSERT(isolate == Isolate::Current())
 
 } }  // namespace v8::internal
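
For illustration only (not part of the patch): RUNTIME_CALLING_CONVENTION and RUNTIME_GET_ISOLATE spell out the new runtime-function signature, in which the isolate arrives as an explicit second parameter and is asserted to match Isolate::Current(). The toy program below shows the same macro idiom with invented names (DEMO_*, Runtime_DemoLength); it is a sketch of the convention, not V8's implementation.

#include <cassert>
#include <iostream>

// Toy stand-ins; only the calling-convention macro pattern is the point.
struct Isolate {
  static Isolate* Current() { static Isolate the_isolate; return &the_isolate; }
};
struct Arguments { int length; };

// Same shape as the macros above, with hypothetical names.
#define DEMO_CALLING_CONVENTION Arguments args, Isolate* isolate
#define DEMO_GET_ISOLATE assert(isolate == Isolate::Current())

// A runtime-style function declared with the convention macro: the isolate
// is an explicit parameter rather than a thread-local lookup inside the body.
int Runtime_DemoLength(DEMO_CALLING_CONVENTION) {
  DEMO_GET_ISOLATE;
  return args.length;
}

int main() {
  Arguments args = { 3 };
  std::cout << Runtime_DemoLength(args, Isolate::Current()) << "\n";  // 3
}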
 
index 3b811021b3ac4b75bb837718b0014ba6b8d60c5b..bd76d9ab5607633135fa04cea35345f3e55bcdb8 100644 (file)
@@ -203,11 +203,12 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(target_reference_address());
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
-             ((RelocInfo::IsJSReturn(mode) &&
+  // TODO(isolates): Get a cached isolate below.
+  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
-              IsPatchedDebugBreakSlotSequence()))) {
+              IsPatchedDebugBreakSlotSequence())) &&
+             Isolate::Current()->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (mode == RelocInfo::RUNTIME_ENTRY) {
@@ -217,10 +218,10 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
 
 
 template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
-    StaticVisitor::VisitPointer(target_object_address());
+    StaticVisitor::VisitPointer(heap, target_object_address());
   } else if (RelocInfo::IsCodeTarget(mode)) {
     StaticVisitor::VisitCodeTarget(this);
   } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
@@ -228,7 +229,7 @@ void RelocInfo::Visit() {
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(target_reference_address());
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
+  } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
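
For illustration only (not part of the patch): both Visit overloads above stop consulting process-wide singletons; the templated version now takes a Heap* and forwards it to the static visitor. The stand-alone sketch below shows that pattern of threading an explicit heap context through a static-visitor template; every name in it is invented.

#include <iostream>

// A static visitor that now receives the heap explicitly instead of asking
// a global. Heap, Object, CountingVisitor and VisitSlot are illustrative.
struct Object;
struct Heap { const char* name; };

struct CountingVisitor {
  static int visited;
  static void VisitPointer(Heap* heap, Object** /*slot*/) {
    std::cout << "visiting a pointer owned by heap " << heap->name << "\n";
    ++visited;
  }
};
int CountingVisitor::visited = 0;

template <typename StaticVisitor>
void VisitSlot(Heap* heap, Object** slot) {
  // With one heap per isolate, the caller must say which heap the slot
  // belongs to; the visitor can no longer assume a single global heap.
  StaticVisitor::VisitPointer(heap, slot);
}

int main() {
  Heap heap = { "isolate-0" };
  Object* slot = nullptr;
  VisitSlot<CountingVisitor>(&heap, &slot);
  std::cout << CountingVisitor::visited << "\n";  // 1
}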
index e58d968c5fb4bf00644abb652cf92bcf16c09279..09a68f3ad178c707bc22a7a91e08b571722e6886 100644 (file)
 namespace v8 {
 namespace internal {
 
-// Safe default is no features.
-unsigned CpuFeatures::supported_ = 0;
-unsigned CpuFeatures::enabled_ = 0;
-unsigned CpuFeatures::found_by_runtime_probing_ = 0;
-
+CpuFeatures::CpuFeatures()
+    : supported_(0),
+      enabled_(0),
+      found_by_runtime_probing_(0) {
+}
 
 #ifdef __arm__
 static uint64_t CpuFeaturesImpliedByCompiler() {
@@ -148,7 +148,7 @@ Operand::Operand(Handle<Object> handle) {
   rm_ = no_reg;
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
-  ASSERT(!Heap::InNewSpace(obj));
+  ASSERT(!HEAP->InNewSpace(obj));
   if (obj->IsHeapObject()) {
     imm32_ = reinterpret_cast<intptr_t>(handle.location());
     rmode_ = RelocInfo::EMBEDDED_OBJECT;
@@ -266,22 +266,22 @@ const Instr kLdrStrOffsetMask = 0x00000fff;
 
 // Spare buffer.
 static const int kMinimalBufferSize = 4*KB;
-static byte* spare_buffer_ = NULL;
 
 
 Assembler::Assembler(void* buffer, int buffer_size)
     : positions_recorder_(this),
       allow_peephole_optimization_(false),
       emit_debug_code_(FLAG_debug_code) {
+  Isolate* isolate = Isolate::Current();
   allow_peephole_optimization_ = FLAG_peephole_optimization;
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
       buffer_size = kMinimalBufferSize;
 
-      if (spare_buffer_ != NULL) {
-        buffer = spare_buffer_;
-        spare_buffer_ = NULL;
+      if (isolate->assembler_spare_buffer() != NULL) {
+        buffer = isolate->assembler_spare_buffer();
+        isolate->set_assembler_spare_buffer(NULL);
       }
     }
     if (buffer == NULL) {
@@ -314,10 +314,12 @@ Assembler::Assembler(void* buffer, int buffer_size)
 
 
 Assembler::~Assembler() {
+  Isolate* isolate = Isolate::Current();
   ASSERT(const_pool_blocked_nesting_ == 0);
   if (own_buffer_) {
-    if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
-      spare_buffer_ = buffer_;
+    if (isolate->assembler_spare_buffer() == NULL &&
+        buffer_size_ == kMinimalBufferSize) {
+      isolate->set_assembler_spare_buffer(buffer_);
     } else {
       DeleteArray(buffer_);
     }
@@ -714,7 +716,7 @@ static bool fits_shifter(uint32_t imm32,
         *instr ^= kMovMvnFlip;
         return true;
       } else if ((*instr & kMovLeaveCCMask) == kMovLeaveCCPattern) {
-        if (CpuFeatures::IsSupported(ARMv7)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
           if (imm32 < 0x10000) {
             *instr ^= kMovwLeaveCCFlip;
             *instr |= EncodeMovwImmediate(imm32);
@@ -777,7 +779,8 @@ bool Operand::is_single_instruction(Instr instr) const {
     // constant pool is required. For a mov instruction not setting the
     // condition code additional instruction conventions can be used.
     if ((instr & ~kCondMask) == 13*B21) {  // mov, S not set
-      if (must_use_constant_pool() || !CpuFeatures::IsSupported(ARMv7)) {
+      if (must_use_constant_pool() ||
+          !Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
         // mov instruction will be an ldr from constant pool (one instruction).
         return true;
       } else {
@@ -819,7 +822,8 @@ void Assembler::addrmod1(Instr instr,
       CHECK(!rn.is(ip));  // rn should never be ip, or will be trashed
       Condition cond = Instruction::ConditionField(instr);
       if ((instr & ~kCondMask) == 13*B21) {  // mov, S not set
-        if (x.must_use_constant_pool() || !CpuFeatures::IsSupported(ARMv7)) {
+        if (x.must_use_constant_pool() ||
+            !Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
           RecordRelocInfo(x.rmode_, x.imm32_);
           ldr(rd, MemOperand(pc, 0), cond);
         } else {
@@ -1262,7 +1266,7 @@ void Assembler::usat(Register dst,
                      const Operand& src,
                      Condition cond) {
   // v6 and above.
-  ASSERT(CpuFeatures::IsSupported(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
   ASSERT(!dst.is(pc) && !src.rm_.is(pc));
   ASSERT((satpos >= 0) && (satpos <= 31));
   ASSERT((src.shift_op_ == ASR) || (src.shift_op_ == LSL));
@@ -1290,7 +1294,7 @@ void Assembler::ubfx(Register dst,
                      int width,
                      Condition cond) {
   // v7 and above.
-  ASSERT(CpuFeatures::IsSupported(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
   ASSERT(!dst.is(pc) && !src.is(pc));
   ASSERT((lsb >= 0) && (lsb <= 31));
   ASSERT((width >= 1) && (width <= (32 - lsb)));
@@ -1310,7 +1314,7 @@ void Assembler::sbfx(Register dst,
                      int width,
                      Condition cond) {
   // v7 and above.
-  ASSERT(CpuFeatures::IsSupported(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
   ASSERT(!dst.is(pc) && !src.is(pc));
   ASSERT((lsb >= 0) && (lsb <= 31));
   ASSERT((width >= 1) && (width <= (32 - lsb)));
@@ -1325,7 +1329,7 @@ void Assembler::sbfx(Register dst,
 //   bfc dst, #lsb, #width
 void Assembler::bfc(Register dst, int lsb, int width, Condition cond) {
   // v7 and above.
-  ASSERT(CpuFeatures::IsSupported(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
   ASSERT(!dst.is(pc));
   ASSERT((lsb >= 0) && (lsb <= 31));
   ASSERT((width >= 1) && (width <= (32 - lsb)));
@@ -1344,7 +1348,7 @@ void Assembler::bfi(Register dst,
                     int width,
                     Condition cond) {
   // v7 and above.
-  ASSERT(CpuFeatures::IsSupported(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
   ASSERT(!dst.is(pc) && !src.is(pc));
   ASSERT((lsb >= 0) && (lsb <= 31));
   ASSERT((width >= 1) && (width <= (32 - lsb)));
@@ -1616,7 +1620,7 @@ void Assembler::ldrsh(Register dst, const MemOperand& src, Condition cond) {
 
 void Assembler::ldrd(Register dst1, Register dst2,
                      const MemOperand& src, Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7));
   ASSERT(src.rm().is(no_reg));
   ASSERT(!dst1.is(lr));  // r14.
   ASSERT_EQ(0, dst1.code() % 2);
@@ -1631,7 +1635,7 @@ void Assembler::strd(Register src1, Register src2,
   ASSERT(!src1.is(lr));  // r14.
   ASSERT_EQ(0, src1.code() % 2);
   ASSERT_EQ(src1.code() + 1, src2.code());
-  ASSERT(CpuFeatures::IsEnabled(ARMv7));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7));
   addrmod3(cond | B7 | B6 | B5 | B4, src1, dst);
 }
 
@@ -1867,7 +1871,7 @@ void Assembler::vldr(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-628.
   // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
   // Vdst(15-12) | 1011(11-8) | offset
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   int u = 1;
   if (offset < 0) {
     offset = -offset;
@@ -1909,7 +1913,7 @@ void Assembler::vldr(const SwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-628.
   // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
   // Vdst(15-12) | 1010(11-8) | offset
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   int u = 1;
   if (offset < 0) {
     offset = -offset;
@@ -1953,7 +1957,7 @@ void Assembler::vstr(const DwVfpRegister src,
   // Instruction details available in ARM DDI 0406A, A8-786.
   // cond(31-28) | 1101(27-24)| U000(23-20) | | Rbase(19-16) |
   // Vsrc(15-12) | 1011(11-8) | (offset/4)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   int u = 1;
   if (offset < 0) {
     offset = -offset;
@@ -1994,7 +1998,7 @@ void Assembler::vstr(const SwVfpRegister src,
   // Instruction details available in ARM DDI 0406A, A8-786.
   // cond(31-28) | 1101(27-24)| U000(23-20) | Rbase(19-16) |
   // Vdst(15-12) | 1010(11-8) | (offset/4)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   int u = 1;
   if (offset < 0) {
     offset = -offset;
@@ -2040,7 +2044,7 @@ static void DoubleAsTwoUInt32(double d, uint32_t* lo, uint32_t* hi) {
 // Only works for little endian floating point formats.
 // We don't support VFP on the mixed endian floating point platform.
 static bool FitsVMOVDoubleImmediate(double d, uint32_t *encoding) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
 
   // VMOV can accept an immediate of the form:
   //
@@ -2093,7 +2097,7 @@ void Assembler::vmov(const DwVfpRegister dst,
                      const Condition cond) {
   // Dd = immediate
   // Instruction details available in ARM DDI 0406B, A8-640.
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
 
   uint32_t enc;
   if (FitsVMOVDoubleImmediate(imm, &enc)) {
@@ -2130,7 +2134,7 @@ void Assembler::vmov(const SwVfpRegister dst,
                      const Condition cond) {
   // Sd = Sm
   // Instruction details available in ARM DDI 0406B, A8-642.
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   int sd, d, sm, m;
   dst.split_code(&sd, &d);
   src.split_code(&sm, &m);
@@ -2143,7 +2147,7 @@ void Assembler::vmov(const DwVfpRegister dst,
                      const Condition cond) {
   // Dd = Dm
   // Instruction details available in ARM DDI 0406B, A8-642.
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0xB*B20 |
        dst.code()*B12 | 0x5*B9 | B8 | B6 | src.code());
 }
@@ -2157,7 +2161,7 @@ void Assembler::vmov(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-646.
   // cond(31-28) | 1100(27-24)| 010(23-21) | op=0(20) | Rt2(19-16) |
   // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   ASSERT(!src1.is(pc) && !src2.is(pc));
   emit(cond | 0xC*B24 | B22 | src2.code()*B16 |
        src1.code()*B12 | 0xB*B8 | B4 | dst.code());
@@ -2172,7 +2176,7 @@ void Assembler::vmov(const Register dst1,
   // Instruction details available in ARM DDI 0406A, A8-646.
   // cond(31-28) | 1100(27-24)| 010(23-21) | op=1(20) | Rt2(19-16) |
   // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   ASSERT(!dst1.is(pc) && !dst2.is(pc));
   emit(cond | 0xC*B24 | B22 | B20 | dst2.code()*B16 |
        dst1.code()*B12 | 0xB*B8 | B4 | src.code());
@@ -2186,7 +2190,7 @@ void Assembler::vmov(const SwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-642.
   // cond(31-28) | 1110(27-24)| 000(23-21) | op=0(20) | Vn(19-16) |
   // Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   ASSERT(!src.is(pc));
   int sn, n;
   dst.split_code(&sn, &n);
@@ -2201,7 +2205,7 @@ void Assembler::vmov(const Register dst,
   // Instruction details available in ARM DDI 0406A, A8-642.
   // cond(31-28) | 1110(27-24)| 000(23-21) | op=1(20) | Vn(19-16) |
   // Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   ASSERT(!dst.is(pc));
   int sn, n;
   src.split_code(&sn, &n);
@@ -2326,7 +2330,7 @@ void Assembler::vcvt_f64_s32(const DwVfpRegister dst,
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(F64, dst.code(), S32, src.code(), mode, cond));
 }
 
@@ -2335,7 +2339,7 @@ void Assembler::vcvt_f32_s32(const SwVfpRegister dst,
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(F32, dst.code(), S32, src.code(), mode, cond));
 }
 
@@ -2344,7 +2348,7 @@ void Assembler::vcvt_f64_u32(const DwVfpRegister dst,
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(F64, dst.code(), U32, src.code(), mode, cond));
 }
 
@@ -2353,7 +2357,7 @@ void Assembler::vcvt_s32_f64(const SwVfpRegister dst,
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(S32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -2362,7 +2366,7 @@ void Assembler::vcvt_u32_f64(const SwVfpRegister dst,
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(U32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -2371,7 +2375,7 @@ void Assembler::vcvt_f64_f32(const DwVfpRegister dst,
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(F64, dst.code(), F32, src.code(), mode, cond));
 }
 
@@ -2380,7 +2384,7 @@ void Assembler::vcvt_f32_f64(const SwVfpRegister dst,
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(EncodeVCVT(F32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -2410,7 +2414,7 @@ void Assembler::vadd(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-536.
   // cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
        dst.code()*B12 | 0x5*B9 | B8 | src2.code());
 }
@@ -2425,7 +2429,7 @@ void Assembler::vsub(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-784.
   // cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 1(6) | M=?(5) | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
        dst.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
 }
@@ -2440,7 +2444,7 @@ void Assembler::vmul(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-784.
   // cond(31-28) | 11100(27-23)| D=?(22) | 10(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0x2*B20 | src1.code()*B16 |
        dst.code()*B12 | 0x5*B9 | B8 | src2.code());
 }
@@ -2455,7 +2459,7 @@ void Assembler::vdiv(const DwVfpRegister dst,
   // Instruction details available in ARM DDI 0406A, A8-584.
   // cond(31-28) | 11101(27-23)| D=?(22) | 00(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=? | 0(6) | M=?(5) | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | B23 | src1.code()*B16 |
        dst.code()*B12 | 0x5*B9 | B8 | src2.code());
 }
@@ -2468,7 +2472,7 @@ void Assembler::vcmp(const DwVfpRegister src1,
   // Instruction details available in ARM DDI 0406A, A8-570.
   // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0100 (19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 |
        src1.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
 }
@@ -2481,7 +2485,7 @@ void Assembler::vcmp(const DwVfpRegister src1,
   // Instruction details available in ARM DDI 0406A, A8-570.
   // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | 0000(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   ASSERT(src2 == 0.0);
   emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 |
        src1.code()*B12 | 0x5*B9 | B8 | B6);
@@ -2492,7 +2496,7 @@ void Assembler::vmsr(Register dst, Condition cond) {
   // Instruction details available in ARM DDI 0406A, A8-652.
   // cond(31-28) | 1110 (27-24) | 1110(23-20)| 0001 (19-16) |
   // Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0xE*B20 |  B16 |
        dst.code()*B12 | 0xA*B8 | B4);
 }
@@ -2502,7 +2506,7 @@ void Assembler::vmrs(Register dst, Condition cond) {
   // Instruction details available in ARM DDI 0406A, A8-652.
   // cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) |
   // Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | 0xF*B20 |  B16 |
        dst.code()*B12 | 0xA*B8 | B4);
 }
@@ -2513,7 +2517,7 @@ void Assembler::vsqrt(const DwVfpRegister dst,
                       const Condition cond) {
   // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0001 (19-16) |
   // Vd(15-12) | 101(11-9) | sz(8)=1 | 11 (7-6) | M(5)=? | 0(4) | Vm(3-0)
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
   emit(cond | 0xE*B24 | B23 | 0x3*B20 | B16 |
        dst.code()*B12 | 0x5*B9 | B8 | 3*B6 | src.code());
 }
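
For illustration only (not part of the patch): the file-static spare_buffer_ is gone; the Assembler constructor now borrows a cached minimal buffer from the isolate and the destructor returns one. The self-contained sketch below models that single-slot buffer cache with invented names (BufferOwner, AcquireBuffer, ReleaseBuffer); it is a simplified analogue, not the V8 code.

#include <cstddef>
#include <iostream>

// One cached "spare" buffer per owner, mirroring the constructor/destructor
// logic above. All names are illustrative.
class BufferOwner {
 public:
  static constexpr size_t kMinimalBufferSize = 4 * 1024;

  char* AcquireBuffer(size_t requested) {
    if (requested <= kMinimalBufferSize && spare_ != nullptr) {
      char* buffer = spare_;  // reuse the cached minimal buffer
      spare_ = nullptr;
      return buffer;
    }
    size_t size = requested < kMinimalBufferSize ? kMinimalBufferSize
                                                 : requested;
    return new char[size];
  }

  void ReleaseBuffer(char* buffer, size_t size) {
    if (spare_ == nullptr && size == kMinimalBufferSize) {
      spare_ = buffer;  // keep one minimal buffer around for the next user
    } else {
      delete[] buffer;
    }
  }

  ~BufferOwner() { delete[] spare_; }

 private:
  char* spare_ = nullptr;
};

int main() {
  BufferOwner owner;  // imagine one of these per isolate
  char* a = owner.AcquireBuffer(100);  // rounded up and freshly allocated
  owner.ReleaseBuffer(a, BufferOwner::kMinimalBufferSize);  // cached as spare
  char* b = owner.AcquireBuffer(100);  // reuses the cached buffer
  std::cout << (a == b) << "\n";       // prints 1
  owner.ReleaseBuffer(b, BufferOwner::kMinimalBufferSize);
}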
index 22eee0f95ea89c256e52acc9aff1d2ba70ea5192..cf42dc8096cc3126c88afc9fc1e17a2bae830852 100644 (file)
@@ -468,20 +468,20 @@ class MemOperand BASE_EMBEDDED {
 
 // CpuFeatures keeps track of which features are supported by the target CPU.
 // Supported features must be enabled by a Scope before use.
-class CpuFeatures : public AllStatic {
+class CpuFeatures {
  public:
   // Detect features of the target CPU. Set safe defaults if the serializer
   // is enabled (snapshots must be portable).
-  static void Probe(bool portable);
+  void Probe(bool portable);
 
   // Check whether a feature is supported by the target CPU.
-  static bool IsSupported(CpuFeature f) {
+  bool IsSupported(CpuFeature f) const {
     if (f == VFP3 && !FLAG_enable_vfp3) return false;
     return (supported_ & (1u << f)) != 0;
   }
 
   // Check whether a feature is currently enabled.
-  static bool IsEnabled(CpuFeature f) {
+  bool IsEnabled(CpuFeature f) const {
     return (enabled_ & (1u << f)) != 0;
   }
 
@@ -489,16 +489,23 @@ class CpuFeatures : public AllStatic {
   class Scope BASE_EMBEDDED {
 #ifdef DEBUG
    public:
-    explicit Scope(CpuFeature f) {
-      ASSERT(CpuFeatures::IsSupported(f));
+    explicit Scope(CpuFeature f)
+        : cpu_features_(Isolate::Current()->cpu_features()),
+          isolate_(Isolate::Current()) {
+      ASSERT(cpu_features_->IsSupported(f));
       ASSERT(!Serializer::enabled() ||
-             (found_by_runtime_probing_ & (1u << f)) == 0);
-      old_enabled_ = CpuFeatures::enabled_;
-      CpuFeatures::enabled_ |= 1u << f;
+             (cpu_features_->found_by_runtime_probing_ & (1u << f)) == 0);
+      old_enabled_ = cpu_features_->enabled_;
+      cpu_features_->enabled_ |= 1u << f;
+    }
+    ~Scope() {
+      ASSERT_EQ(Isolate::Current(), isolate_);
+      cpu_features_->enabled_ = old_enabled_;
     }
-    ~Scope() { CpuFeatures::enabled_ = old_enabled_; }
    private:
     unsigned old_enabled_;
+    CpuFeatures* cpu_features_;
+    Isolate* isolate_;
 #else
    public:
     explicit Scope(CpuFeature f) {}
@@ -506,9 +513,15 @@ class CpuFeatures : public AllStatic {
   };
 
  private:
-  static unsigned supported_;
-  static unsigned enabled_;
-  static unsigned found_by_runtime_probing_;
+  CpuFeatures();
+
+  unsigned supported_;
+  unsigned enabled_;
+  unsigned found_by_runtime_probing_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(CpuFeatures);
 };
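
For illustration only (not part of the patch): CpuFeatures is no longer AllStatic; each isolate owns its own supported/enabled bitmasks, and Scope saves and restores the enabled bits of that specific object. The stand-alone RAII sketch below shows the same save-enable-restore pattern with invented names (Features, MarkSupported); it is a model of the idiom, not V8's class.

#include <cassert>
#include <iostream>

// Per-owner feature bitmasks with an RAII scope that temporarily enables a
// feature, mirroring CpuFeatures::Scope above. All names are illustrative.
enum Feature { kVFP3 = 0, kARMv7 = 1 };

class Features {
 public:
  bool IsSupported(Feature f) const { return (supported_ & (1u << f)) != 0; }
  bool IsEnabled(Feature f) const { return (enabled_ & (1u << f)) != 0; }
  void MarkSupported(Feature f) { supported_ |= 1u << f; }

  class Scope {
   public:
    Scope(Features* features, Feature f)
        : features_(features), old_enabled_(features->enabled_) {
      assert(features->IsSupported(f));
      features->enabled_ |= 1u << f;  // enabled for the dynamic extent
    }
    ~Scope() { features_->enabled_ = old_enabled_; }  // restored on exit

   private:
    Features* features_;
    unsigned old_enabled_;
  };

 private:
  unsigned supported_ = 0;
  unsigned enabled_ = 0;
};

int main() {
  Features features;  // imagine one of these per isolate
  features.MarkSupported(kVFP3);
  {
    Features::Scope scope(&features, kVFP3);
    std::cout << features.IsEnabled(kVFP3) << "\n";  // 1
  }
  std::cout << features.IsEnabled(kVFP3) << "\n";    // 0
}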
 
 
index 961d3ce5b981e830d95bbc9c35e53e66d4edfde9..bbf2e06626b524935e4aa4a0e5661bf800824881 100644 (file)
@@ -325,7 +325,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                        r5,
                        JSArray::kPreallocatedArrayElements,
                        call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1, r3, r4);
   // Setup return value, remove receiver from stack and return.
   __ mov(r0, r2);
   __ add(sp, sp, Operand(kPointerSize));
@@ -361,7 +361,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r7,
                   true,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1, r2, r4);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1, r2, r4);
   // Setup return value, remove receiver and argument from stack and return.
   __ mov(r0, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
@@ -385,7 +385,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r7,
                   false,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1, r2, r6);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1, r2, r6);
 
   // Fill arguments as array elements. Copy from the top of the stack (last
   // element) to the array backing store filling it backwards. Note:
@@ -442,7 +442,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // Jump to the generic array code if the specialized code cannot handle
   // the construction.
   __ bind(&generic_array_code);
-  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::ArrayCodeGeneric);
   Handle<Code> array_code(code);
   __ Jump(array_code, RelocInfo::CODE_TARGET);
 }
@@ -474,7 +475,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // Jump to the generic construct code in case the specialized code cannot
   // handle the construction.
   __ bind(&generic_constructor);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
 }
@@ -488,7 +490,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
-  __ IncrementCounter(&Counters::string_ctor_calls, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_ctor_calls(), 1, r2, r3);
 
   Register function = r1;
   if (FLAG_debug_code) {
@@ -518,7 +520,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
       r5,        // Scratch.
       false,     // Is it a Smi?
       &not_cached);
-  __ IncrementCounter(&Counters::string_ctor_cached_number, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->string_ctor_cached_number(), 1, r3, r4);
   __ bind(&argument_is_string);
 
   // ----------- S t a t e -------------
@@ -572,13 +574,13 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   __ tst(r3, Operand(kIsNotStringMask));
   __ b(ne, &convert_argument);
   __ mov(argument, r0);
-  __ IncrementCounter(&Counters::string_ctor_conversions, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1, r3, r4);
   __ b(&argument_is_string);
 
   // Invoke the conversion builtin and put the result into r2.
   __ bind(&convert_argument);
   __ push(function);  // Preserve the function.
-  __ IncrementCounter(&Counters::string_ctor_conversions, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1, r3, r4);
   __ EnterInternalFrame();
   __ push(r0);
   __ InvokeBuiltin(Builtins::TO_STRING, CALL_JS);
@@ -597,7 +599,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   // At this point the argument is already a string. Call runtime to
   // create a string wrapper.
   __ bind(&gc_required);
-  __ IncrementCounter(&Counters::string_ctor_gc_required, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->string_ctor_gc_required(), 1, r3, r4);
   __ EnterInternalFrame();
   __ push(argument);
   __ CallRuntime(Runtime::kNewStringWrapper, 1);
@@ -633,8 +635,8 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
   // Set expected number of arguments to zero (not changing r0).
   __ mov(r2, Operand(0, RelocInfo::NONE));
   __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
-  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-          RelocInfo::CODE_TARGET);
+  __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+      ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
 }
 
 
@@ -906,7 +908,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   if (is_api_function) {
     __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
     Handle<Code> code = Handle<Code>(
-        Builtins::builtin(Builtins::HandleApiCallConstruct));
+        Isolate::Current()->builtins()->builtin(
+            Builtins::HandleApiCallConstruct));
     ParameterCount expected(0);
     __ InvokeCode(code, expected, expected,
                   RelocInfo::CODE_TARGET, CALL_FUNCTION);
@@ -963,7 +966,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   __ LeaveConstructFrame();
   __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
   __ add(sp, sp, Operand(kPointerSize));
-  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2);
   __ Jump(lr);
 }
 
@@ -1039,8 +1042,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Invoke the code and pass argc as r0.
   __ mov(r0, Operand(r3));
   if (is_construct) {
-    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-            RelocInfo::CODE_TARGET);
+    __ Call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(r0);
     __ InvokeFunction(r1, actual, CALL_FUNCTION);
@@ -1169,7 +1172,7 @@ void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Probe the CPU to set the supported features, because this builtin
   // may be called before the initialization performs CPU setup.
-  CpuFeatures::Probe(false);
+  Isolate::Current()->cpu_features()->Probe(false);
 
   // Lookup the function in the JavaScript frame and push it as an
   // argument to the on-stack replacement function.
@@ -1332,8 +1335,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     // Expected number of arguments is 0 for CALL_NON_FUNCTION.
     __ mov(r2, Operand(0, RelocInfo::NONE));
     __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
-    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-                         RelocInfo::CODE_TARGET);
+    __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
     __ bind(&function);
   }
 
@@ -1348,8 +1351,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ mov(r2, Operand(r2, ASR, kSmiTagSize));
   __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
   __ cmp(r2, r0);  // Check formal and actual parameter counts.
-  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-          RelocInfo::CODE_TARGET, ne);
+  __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+      ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET, ne);
 
   ParameterCount expected(0);
   __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
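
For illustration only (not part of the patch): throughout this file, &Counters::foo becomes COUNTERS->foo() and Builtins::builtin(...) becomes Isolate::Current()->builtins()->builtin(...), i.e. former statics are reached through the current isolate. The toy program below shows that accessor-macro idiom with invented names (DEMO_COUNTERS and friends); the real COUNTERS presumably resolves to the current isolate's counters object.

#include <iostream>

// Toy version of routing former statics through the current isolate behind
// an accessor macro. StatsCounter, Counters, Isolate and DEMO_COUNTERS are
// illustrative stand-ins.
struct StatsCounter {
  int value = 0;
  void Increment(int by) { value += by; }
};

struct Counters {
  StatsCounter array_function_native_;
  StatsCounter* array_function_native() { return &array_function_native_; }
};

struct Isolate {
  Counters counters_;
  Counters* counters() { return &counters_; }
  static Isolate* Current() { static Isolate the_isolate; return &the_isolate; }
};

// The macro hides the isolate lookup at every call site.
#define DEMO_COUNTERS (Isolate::Current()->counters())

int main() {
  DEMO_COUNTERS->array_function_native()->Increment(1);
  DEMO_COUNTERS->array_function_native()->Increment(1);
  std::cout << DEMO_COUNTERS->array_function_native()->value << "\n";  // 2
}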
index 7fbbc8e6ff00b277680da775c14234c940d24017..61d9d8324a5562589bde5f62078cc6696c2a3c3e 100644 (file)
@@ -502,7 +502,7 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
                                    FloatingPointHelper::Destination destination,
                                    Register scratch1,
                                    Register scratch2) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ mov(scratch1, Operand(r0, ASR, kSmiTagSize));
     __ vmov(d7.high(), scratch1);
@@ -570,7 +570,8 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
 
   // Handle loading a double from a heap number.
-  if (CpuFeatures::IsSupported(VFP3) && destination == kVFPRegisters) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3) &&
+      destination == kVFPRegisters) {
     CpuFeatures::Scope scope(VFP3);
     // Load the double from tagged HeapNumber to double register.
     __ sub(scratch1, object, Operand(kHeapObjectTag));
@@ -584,7 +585,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
 
   // Handle loading a double from a smi.
   __ bind(&is_smi);
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     // Convert smi to double using VFP instructions.
     __ SmiUntag(scratch1, object);
@@ -675,7 +676,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
 
   __ JumpIfNotSmi(object, &obj_is_not_smi);
   __ SmiUntag(scratch1, object);
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(single_scratch, scratch1);
     __ vcvt_f64_s32(double_dst, single_scratch);
@@ -743,7 +744,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
 
   // Load the number.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     // Load the double value.
     __ sub(scratch1, object, Operand(kHeapObjectTag));
@@ -817,7 +818,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
 
   // Object is a heap number.
   // Convert the floating point value to a 32-bit integer.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     SwVfpRegister single_scratch = double_scratch.low();
     // Load the double value.
@@ -1028,7 +1029,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
   // The two objects are identical.  If we know that one of them isn't NaN then
   // we now know they test equal.
   if (cond != eq || !never_nan_nan) {
-    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
+    // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
     // so we do the second best thing - test it ourselves.
     // They are both equal and they are not both Smis so both of them are not
     // Smis.  If it's not a heap number, then return equal.
@@ -1151,7 +1152,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
   }
 
   // Lhs is a smi, rhs is a number.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     // Convert lhs to a double in d7.
     CpuFeatures::Scope scope(VFP3);
     __ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
@@ -1191,7 +1192,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
   }
 
   // Rhs is a smi, lhs is a heap number.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     // Load the double from lhs, tagged HeapNumber r1, to d7.
     __ sub(r7, lhs, Operand(kHeapObjectTag));
@@ -1371,7 +1372,7 @@ static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
 
   // Both are heap numbers.  Load them up then jump to the code we have
   // for that.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ sub(r7, rhs, Operand(kHeapObjectTag));
     __ vldr(d6, r7, HeapNumber::kValueOffset);
@@ -1460,7 +1461,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
   Label load_result_from_cache;
   if (!object_is_smi) {
     __ JumpIfSmi(object, &is_smi);
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       __ CheckMap(object,
                   scratch1,
@@ -1517,7 +1518,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
   __ bind(&load_result_from_cache);
   __ ldr(result,
          FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
-  __ IncrementCounter(&Counters::number_to_string_native,
+  __ IncrementCounter(COUNTERS->number_to_string_native(),
                       1,
                       scratch1,
                       scratch2);
@@ -1593,7 +1594,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
   __ bind(&both_loaded_as_doubles);
   // The arguments have been converted to doubles and stored in d6 and d7, if
   // VFP3 is supported, or in r0, r1, r2, and r3.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     __ bind(&lhs_not_nan);
     CpuFeatures::Scope scope(VFP3);
     Label no_nan;
@@ -1663,7 +1664,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
 
   __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);
 
-  __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1, r2, r3);
   StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                      lhs_,
                                                      rhs_,
@@ -1703,7 +1704,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
 // The stub returns zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   // This stub uses VFP3 instructions.
-  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
 
   Label false_result;
   Label not_heap_number;
@@ -1789,7 +1790,9 @@ void GenericBinaryOpStub::HandleBinaryOpSlowCases(
     Register rhs,
     const Builtins::JavaScript& builtin) {
   Label slow, slow_reverse, do_the_call;
-  bool use_fp_registers = CpuFeatures::IsSupported(VFP3) && Token::MOD != op_;
+  bool use_fp_registers =
+      Isolate::Current()->cpu_features()->IsSupported(VFP3) &&
+      Token::MOD != op_;
 
   ASSERT((lhs.is(r0) && rhs.is(r1)) || (lhs.is(r1) && rhs.is(r0)));
   Register heap_number_map = r6;
@@ -1805,7 +1808,7 @@ void GenericBinaryOpStub::HandleBinaryOpSlowCases(
 
     // If we have floating point hardware, inline ADD, SUB, MUL, and DIV,
     // using registers d7 and d6 for the double values.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       __ mov(r7, Operand(rhs, ASR, kSmiTagSize));
       __ vmov(s15, r7);
@@ -1901,7 +1904,7 @@ void GenericBinaryOpStub::HandleBinaryOpSlowCases(
       __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
       }
 
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         CpuFeatures::Scope scope(VFP3);
         // Convert smi in r0 to double in d7.
         __ mov(r7, Operand(r0, ASR, kSmiTagSize));
@@ -1958,7 +1961,7 @@ void GenericBinaryOpStub::HandleBinaryOpSlowCases(
       __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
       }
 
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         CpuFeatures::Scope scope(VFP3);
         // Convert smi in r1 to double in d6.
         __ mov(r7, Operand(r1, ASR, kSmiTagSize));
@@ -2170,7 +2173,7 @@ void GenericBinaryOpStub::HandleNonSmiBitwiseOp(MacroAssembler* masm,
       // The code below for writing into heap numbers isn't capable of writing
       // the register as an unsigned int so we go to slow case if we hit this
       // case.
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         __ b(mi, &result_not_a_smi);
       } else {
         __ b(mi, &slow);
@@ -2218,7 +2221,7 @@ void GenericBinaryOpStub::HandleNonSmiBitwiseOp(MacroAssembler* masm,
   // result.
   __ mov(r0, Operand(r5));
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     // Convert the int32 in r2 to the heap number in r0. r3 is corrupted.
     CpuFeatures::Scope scope(VFP3);
     __ vmov(s0, r2);
@@ -2891,7 +2894,8 @@ void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
 const char* TypeRecordingBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -3065,7 +3069,8 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
       // Load left and right operands into d6 and d7 or r0/r1 and r2/r3
       // depending on whether VFP3 is available or not.
       FloatingPointHelper::Destination destination =
-          CpuFeatures::IsSupported(VFP3) && op_ != Token::MOD ?
+          Isolate::Current()->cpu_features()->IsSupported(VFP3) &&
+          op_ != Token::MOD ?
           FloatingPointHelper::kVFPRegisters :
           FloatingPointHelper::kCoreRegisters;
 
@@ -3177,7 +3182,7 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
           // The code below for writing into heap numbers isn't capable of
           // writing the register as an unsigned int so we go to slow case if we
           // hit this case.
-          if (CpuFeatures::IsSupported(VFP3)) {
+          if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
             __ b(mi, &result_not_a_smi);
           } else {
             __ b(mi, not_numbers);
@@ -3216,7 +3221,7 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
       // result.
       __ mov(r0, Operand(r5));
 
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         // Convert the int32 in r2 to the heap number in r0. r3 is corrupted. As
         // mentioned above SHR needs to always produce a positive result.
         CpuFeatures::Scope scope(VFP3);
@@ -3345,7 +3350,8 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
     // Jump to type transition if they are not. The registers r0 and r1 (right
     // and left) are preserved for the runtime call.
     FloatingPointHelper::Destination destination =
-        CpuFeatures::IsSupported(VFP3) && op_ != Token::MOD ?
+        Isolate::Current()->cpu_features()->IsSupported(VFP3) &&
+        op_ != Token::MOD ?
         FloatingPointHelper::kVFPRegisters :
         FloatingPointHelper::kCoreRegisters;
 
@@ -3520,7 +3526,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
           // to return a heap number if we can.
           // The non vfp3 code does not support this special case, so jump to
           // runtime if we don't support it.
-          if (CpuFeatures::IsSupported(VFP3)) {
+          if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
             __ b(mi,
                  (result_type_ <= TRBinaryOpIC::INT32) ? &transition
                                                        : &return_heap_number);
@@ -3546,7 +3552,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
       __ Ret();
 
       __ bind(&return_heap_number);
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         CpuFeatures::Scope scope(VFP3);
         heap_number_result = r5;
         GenerateHeapResultAllocation(masm,
@@ -3750,7 +3756,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   const Register cache_entry = r0;
   const bool tagged = (argument_type_ == TAGGED);
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     if (tagged) {
       // Argument is a number and is on stack and in r0.
@@ -3785,8 +3791,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
     __ eor(r1, r2, Operand(r3));
     __ eor(r1, r1, Operand(r1, ASR, 16));
     __ eor(r1, r1, Operand(r1, ASR, 8));
-    ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
-    __ And(r1, r1, Operand(TranscendentalCache::kCacheSize - 1));
+    ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
+    __ And(r1, r1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));
 
     // r2 = low 32 bits of double value.
     // r3 = high 32 bits of double value.
@@ -3794,8 +3800,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
     __ mov(cache_entry,
            Operand(ExternalReference::transcendental_cache_array_address()));
     // r0 points to cache array.
-    __ ldr(cache_entry, MemOperand(cache_entry,
-        type_ * sizeof(TranscendentalCache::caches_[0])));
+    __ ldr(cache_entry, MemOperand(cache_entry, type_ * sizeof(
+        Isolate::Current()->transcendental_cache()->caches_[0])));
     // r0 points to the cache for the type type_.
     // If NULL, the cache hasn't been initialized yet, so go through runtime.
     __ cmp(cache_entry, Operand(0, RelocInfo::NONE));
@@ -3803,7 +3809,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
 
 #ifdef DEBUG
     // Check that the layout of cache elements match expectations.
-    { TranscendentalCache::Element test_elem[2];
+    { TranscendentalCache::SubCache::Element test_elem[2];
       char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
       char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
       char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
@@ -3835,14 +3841,14 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
        __ vldr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
     }
     __ Ret();
-  }  // if (CpuFeatures::IsSupported(VFP3))
+  }  // if (Isolate::Current()->cpu_features()->IsSupported(VFP3))
 
   __ bind(&calculate);
   if (tagged) {
     __ bind(&invalid_cache);
     __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
   } else {
-    if (!CpuFeatures::IsSupported(VFP3)) UNREACHABLE();
+    if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) UNREACHABLE();
     CpuFeatures::Scope scope(VFP3);
 
     Label no_update;
@@ -4039,7 +4045,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
       __ mov(r0, Operand(r2));
     }
 
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       // Convert the int32 in r1 to the heap number in r0. r2 is corrupted.
       CpuFeatures::Scope scope(VFP3);
       __ vmov(s0, r1);
@@ -4080,7 +4086,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
 void MathPowStub::Generate(MacroAssembler* masm) {
   Label call_runtime;
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
 
     Label base_not_smi;
@@ -4240,6 +4246,9 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   }
 #endif
 
+  __ mov(r2, Operand(ExternalReference::isolate_address()));
+
+
   // TODO(1242173): To let the GC traverse the return address of the exit
   // frames, we need to know where the return address is. Right now,
   // we store it on the stack to be able to find it again, but we never
@@ -4295,13 +4304,13 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   // Retrieve the pending exception and clear the variable.
   __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
   __ ldr(r3, MemOperand(ip));
-  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
+  __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
   __ ldr(r0, MemOperand(ip));
   __ str(r3, MemOperand(ip));
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
-  __ cmp(r0, Operand(Factory::termination_exception()));
+  __ cmp(r0, Operand(FACTORY->termination_exception()));
   __ b(eq, throw_termination_exception);
 
   // Handle normal exception.
@@ -4413,7 +4422,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   __ mov(r7, Operand(Smi::FromInt(marker)));
   __ mov(r6, Operand(Smi::FromInt(marker)));
-  __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  __ mov(r5, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
   __ ldr(r5, MemOperand(r5));
   __ Push(r8, r7, r6, r5);
 
@@ -4422,7 +4431,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   // If this is the outermost JS call, set js_entry_sp value.
-  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
+  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
   __ mov(r5, Operand(ExternalReference(js_entry_sp)));
   __ ldr(r6, MemOperand(r5));
   __ cmp(r6, Operand(0, RelocInfo::NONE));
@@ -4436,7 +4445,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // exception field in the JSEnv and return a failure sentinel.
   // Coming in here the fp will be invalid because the PushTryHandler below
   // sets it to 0 to signal the existence of the JSEntry frame.
-  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
+  __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
   __ str(r0, MemOperand(ip));
   __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
   __ b(&exit);
@@ -4453,7 +4462,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Clear any pending exceptions.
   __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
   __ ldr(r5, MemOperand(ip));
-  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
+  __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
   __ str(r5, MemOperand(ip));
 
   // Invoke the function by calling through JS entry trampoline builtin.
@@ -4486,7 +4495,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // displacement since the current stack pointer (sp) points directly
   // to the stack handler.
   __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
-  __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
+  __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address)));
   __ str(r3, MemOperand(ip));
   // No need to restore registers
   __ add(sp, sp, Operand(StackHandlerConstants::kSize));
@@ -4504,7 +4513,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ bind(&exit);  // r0 holds result
   // Restore the top frame descriptors from the stack.
   __ pop(r3);
-  __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  __ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
   __ str(r3, MemOperand(ip));
 
   // Reset the stack to the callee saved registers.
@@ -4661,7 +4670,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ b(ne, &slow);
 
   // Null is not instance of anything.
-  __ cmp(scratch, Operand(Factory::null_value()));
+  __ cmp(scratch, Operand(FACTORY->null_value()));
   __ b(ne, &object_not_null);
   __ mov(r0, Operand(Smi::FromInt(1)));
   __ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -5059,15 +5068,20 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // subject: Subject string
   // regexp_data: RegExp data (FixedArray)
   // All checks done. Now push arguments for native regexp code.
-  __ IncrementCounter(&Counters::regexp_entry_native, 1, r0, r2);
+  __ IncrementCounter(COUNTERS->regexp_entry_native(), 1, r0, r2);
 
-  static const int kRegExpExecuteArguments = 7;
+  // Isolates: note we add an additional parameter here (isolate pointer).
+  static const int kRegExpExecuteArguments = 8;
   static const int kParameterRegisters = 4;
   __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
 
   // Stack pointer now points to cell where return address is to be written.
   // Arguments are before that on the stack or in registers.
 
+  // Argument 8 (sp[16]): Pass current isolate address.
+  __ mov(r0, Operand(ExternalReference::isolate_address()));
+  __ str(r0, MemOperand(sp, 4 * kPointerSize));
+
   // Argument 7 (sp[12]): Indicate that this is a direct call from JavaScript.
   __ mov(r0, Operand(1));
   __ str(r0, MemOperand(sp, 3 * kPointerSize));
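
The hunk above grows the native RegExp call from 7 to 8 arguments so the isolate pointer can ride along. As a sanity check on the stack slots it uses, here is a tiny self-contained sketch (hypothetical helper, not part of this patch): with the first four arguments in r0-r3 and sp[0] reserved as the return-address cell, stack argument N sits (N - 4) words above sp, which is exactly the sp[12] and sp[16] offsets written above.

    #include <cassert>

    int main() {
      const int kPointerSize = 4;          // 32-bit ARM
      const int kParameterRegisters = 4;   // r0-r3 carry arguments 1-4

      // sp[0] is the cell where the return address will be written, so stack
      // argument N (N > kParameterRegisters) lives at sp + (N - 4) words.
      auto stack_offset = [=](int n) { return (n - kParameterRegisters) * kPointerSize; };

      assert(stack_offset(7) == 3 * kPointerSize);  // Argument 7 (sp[12]), direct-call flag
      assert(stack_offset(8) == 4 * kPointerSize);  // Argument 8 (sp[16]), the isolate address
      return 0;
    }
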
@@ -5131,7 +5145,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   __ mov(r1, Operand(ExternalReference::the_hole_value_location()));
   __ ldr(r1, MemOperand(r1, 0));
-  __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
+  __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address)));
   __ ldr(r0, MemOperand(r2, 0));
   __ cmp(r0, r1);
   __ b(eq, &runtime);
@@ -5151,7 +5165,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   __ bind(&failure);
   // For failure and exception return null.
-  __ mov(r0, Operand(Factory::null_value()));
+  __ mov(r0, Operand(FACTORY->null_value()));
   __ add(sp, sp, Operand(4 * kPointerSize));
   __ Ret();
 
@@ -5256,7 +5270,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   // Interleave operations for better latency.
   __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
   __ add(r3, r0, Operand(JSRegExpResult::kSize));
-  __ mov(r4, Operand(Factory::empty_fixed_array()));
+  __ mov(r4, Operand(FACTORY->empty_fixed_array()));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
   __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
   __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
@@ -5277,13 +5291,13 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   // r5: Number of elements in array, untagged.
 
   // Set map.
-  __ mov(r2, Operand(Factory::fixed_array_map()));
+  __ mov(r2, Operand(FACTORY->fixed_array_map()));
   __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
   // Set FixedArray length.
   __ mov(r6, Operand(r5, LSL, kSmiTagSize));
   __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
   // Fill contents of fixed-array with the-hole.
-  __ mov(r2, Operand(Factory::the_hole_value()));
+  __ mov(r2, Operand(FACTORY->the_hole_value()));
   __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   // Fill fixed array elements with hole.
   // r0: JSArray, tagged.
@@ -5360,7 +5374,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ mov(r0, Operand(argc_));  // Set up the number of arguments.
   __ mov(r2, Operand(0, RelocInfo::NONE));
   __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
-  __ Jump(Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)),
+  __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+              Builtins::ArgumentsAdaptorTrampoline)),
           RelocInfo::CODE_TARGET);
 }
 
@@ -5373,7 +5388,8 @@ const char* CompareStub::GetName() {
 
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
 
   const char* cc_name;
@@ -6167,7 +6183,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   Label make_two_character_string;
   StringHelper::GenerateTwoCharacterSymbolTableProbe(
       masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
-  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
@@ -6176,7 +6192,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ bind(&make_two_character_string);
   __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
   __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
-  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
@@ -6202,7 +6218,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
   StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
                                            COPY_ASCII | DEST_ALWAYS_ALIGNED);
-  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
@@ -6234,7 +6250,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   StringHelper::GenerateCopyCharactersLong(
       masm, r1, r5, r2, r3, r4, r6, r7, r9, DEST_ALWAYS_ALIGNED);
-  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
@@ -6317,7 +6333,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(r0, Operand(Smi::FromInt(EQUAL)));
-  __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1, r1, r2);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6327,7 +6343,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   __ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
 
   // Compare flat ASCII strings natively. Remove arguments from stack first.
-  __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
 
@@ -6397,7 +6413,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
     __ cmp(r3, Operand(Smi::FromInt(0)), ne);
     __ b(ne, &strings_not_empty);  // If either string was empty, return r0.
 
-    __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+    __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
     __ add(sp, sp, Operand(2 * kPointerSize));
     __ Ret();
 
@@ -6442,7 +6458,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   Label make_two_character_string;
   StringHelper::GenerateTwoCharacterSymbolTableProbe(
       masm, r2, r3, r6, r7, r4, r5, r9, &make_two_character_string);
-  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6455,7 +6471,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ mov(r6, Operand(2));
   __ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime);
   __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
-  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6492,7 +6508,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
   __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
   __ mov(r0, Operand(r7));
-  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6574,7 +6590,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   // r7: result string.
   StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
   __ mov(r0, Operand(r7));
-  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6615,7 +6631,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false);
 
   __ mov(r0, Operand(r7));
-  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
   __ Ret();
 
@@ -6718,7 +6734,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
 
   // Inlining the double comparison and falling back to the general compare
   // stub if NaN is involved or VFP3 is unsupported.
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
 
     // Load left and right operand
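
All of the VFP3 checks rewritten in this file follow the same mechanical shape: the process-wide static CpuFeatures::IsSupported(VFP3) query becomes a query on the current isolate's CpuFeatures object. A minimal sketch of that shape, using simplified stand-in types rather than the real V8 classes:

    #include <cassert>

    enum CpuFeature { VFP3 };

    class CpuFeatures {
     public:
      void Probe(bool portable) { (void)portable; vfp3_supported_ = true; }  // detection elided
      bool IsSupported(CpuFeature f) const { return f == VFP3 && vfp3_supported_; }
     private:
      bool vfp3_supported_ = false;
    };

    class Isolate {
     public:
      static Isolate* Current() { return current_; }  // the real V8 keeps this per thread
      static void Enter(Isolate* isolate) { current_ = isolate; }
      CpuFeatures* cpu_features() { return &cpu_features_; }
     private:
      static Isolate* current_;
      CpuFeatures cpu_features_;
    };

    Isolate* Isolate::current_ = nullptr;

    int main() {
      Isolate isolate;
      Isolate::Enter(&isolate);
      isolate.cpu_features()->Probe(true);
      // The per-isolate form used throughout the hunks above:
      assert(Isolate::Current()->cpu_features()->IsSupported(VFP3));
      return 0;
    }
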
index f94ec0cf56b6d48022e92b090fa02839d166b582..5615b7e6878619a9ee6688f671b96e75c7906c06 100644 (file)
@@ -235,7 +235,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
         operands_type_(TRBinaryOpIC::UNINITIALIZED),
         result_type_(TRBinaryOpIC::UNINITIALIZED),
         name_(NULL) {
-    use_vfp3_ = CpuFeatures::IsSupported(VFP3);
+    use_vfp3_ = Isolate::Current()->cpu_features()->IsSupported(VFP3);
     ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
   }
 
index 01cdaf1e88b1182c0b6d5c01fb2b7178a258553e..12d5c08b31cb0df74eacebf18cbb6af905e7ee1f 100644 (file)
@@ -132,8 +132,6 @@ TypeInfoCodeGenState::~TypeInfoCodeGenState() {
 // -------------------------------------------------------------------------
 // CodeGenerator implementation
 
-int CodeGenerator::inlined_write_barrier_size_ = -1;
-
 CodeGenerator::CodeGenerator(MacroAssembler* masm)
     : deferred_(8),
       masm_(masm),
@@ -307,7 +305,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
     if (!scope()->HasIllegalRedeclaration()) {
       Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-      bool is_builtin = Bootstrapper::IsActive();
+      bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
       bool should_trace =
           is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
       if (should_trace) {
@@ -772,7 +770,7 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
     true_target->Branch(eq);
 
     // Slow case.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       // Implements the slow case by using ToBooleanStub.
       // The ToBooleanStub takes a single argument, and
@@ -969,7 +967,8 @@ void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
 void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
   ASSERT(Token::IsBitOp(op_));
 
-  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
+  if ((op_ == Token::SHR) &&
+      !Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     // >>> requires an unsigned to double conversion and the non VFP code
     // does not support this conversion.
     __ b(cond, entry_label());
@@ -1073,7 +1072,7 @@ void DeferredInlineSmiOperation::Generate() {
 void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                    Register heap_number,
                                                    Register scratch) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(s0, answer);
     if (op_ == Token::SHR) {
@@ -1143,7 +1142,7 @@ void DeferredInlineSmiOperation::GenerateNonSmiInput() {
         // SHR is special because it is required to produce a positive answer.
         __ cmp(int32, Operand(0, RelocInfo::NONE));
       }
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         __ b(mi, &result_not_a_smi);
       } else {
         // Non VFP code cannot convert from unsigned to double, so fall back
@@ -1724,7 +1723,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   // Load applicand.apply onto the stack. This will usually
   // give us a megamorphic load site. Not super, but it works.
   Load(applicand);
-  Handle<String> name = Factory::LookupAsciiSymbol("apply");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
   frame_->Dup();
   frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
   frame_->EmitPush(r0);
@@ -1787,7 +1786,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   __ JumpIfSmi(r0, &build_args);
   __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
   __ b(ne, &build_args);
-  Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+  Handle<Code> apply_code(
+      Isolate::Current()->builtins()->builtin(Builtins::FunctionApply));
   __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
   __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ cmp(r1, Operand(apply_code));
@@ -2002,7 +2002,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
   // If we have a function or a constant, we need to initialize the variable.
   Expression* val = NULL;
   if (node->mode() == Variable::CONST) {
-    val = new Literal(Factory::the_hole_value());
+    val = new Literal(FACTORY->the_hole_value());
   } else {
     val = node->fun();  // NULL if we don't have a function
   }
@@ -2859,7 +2859,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from try chain.
   if (has_valid_frame()) {
@@ -2975,7 +2975,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from the try
   // chain and set the state on the frame to FALLING.
@@ -3130,8 +3130,8 @@ void CodeGenerator::InstantiateFunction(
     frame_->EmitPush(cp);
     frame_->EmitPush(Operand(function_info));
     frame_->EmitPush(Operand(pretenure
-                             ? Factory::true_value()
-                             : Factory::false_value()));
+                             ? FACTORY->true_value()
+                             : FACTORY->false_value()));
     frame_->CallRuntime(Runtime::kNewClosure, 3);
     frame_->EmitPush(r0);
   }
@@ -3631,7 +3631,8 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
         // else fall through
       case ObjectLiteral::Property::COMPUTED:
         if (key->handle()->IsSymbol()) {
-          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+          Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+              Builtins::StoreIC_Initialize));
           Load(value);
           if (property->emit_store()) {
             frame_->PopToR0();
@@ -3694,11 +3695,11 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
   frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
   frame_->EmitPush(Operand(node->constant_elements()));
   int length = node->values()->length();
-  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     frame_->CallStub(&stub, 3);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+    __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2);
   } else if (node->depth() > 1) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -4254,7 +4255,8 @@ void CodeGenerator::VisitCall(Call* node) {
     // Set up the name register and call the IC initialization code.
     __ mov(r2, Operand(var->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     CodeForSourcePosition(node->position());
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                            arg_count + 1);
@@ -4349,7 +4351,7 @@ void CodeGenerator::VisitCall(Call* node) {
         __ mov(r2, Operand(name));
         InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
         Handle<Code> stub =
-            StubCache::ComputeCallInitialize(arg_count, in_loop);
+            ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
         CodeForSourcePosition(node->position());
         frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
         __ ldr(cp, frame_->Context());
@@ -4391,7 +4393,8 @@ void CodeGenerator::VisitCall(Call* node) {
         // Load the key into r2 and call the IC initialization code.
         InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
         Handle<Code> stub =
-            StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+            ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count,
+                                                              in_loop);
         CodeForSourcePosition(node->position());
         frame_->SpillAll();
         __ ldr(r2, frame_->ElementAt(arg_count + 1));
@@ -4456,7 +4459,8 @@ void CodeGenerator::VisitCallNew(CallNew* node) {
   // Call the construct call builtin that handles allocation and
   // constructor invocation.
   CodeForSourcePosition(node->position());
-  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructCall));
   frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
   frame_->EmitPush(r0);
 
@@ -4505,13 +4509,13 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
 
   // Functions have class 'Function'.
   function.Bind();
-  __ mov(tos, Operand(Factory::function_class_symbol()));
+  __ mov(tos, Operand(FACTORY->function_class_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();
 
   // Objects with a non-function constructor have class 'Object'.
   non_function_constructor.Bind();
-  __ mov(tos, Operand(Factory::Object_symbol()));
+  __ mov(tos, Operand(FACTORY->Object_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();
 
@@ -4612,7 +4616,7 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
   Load(args->at(0));
   Load(args->at(1));
 
-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_pow, 2);
     frame_->EmitPush(r0);
   } else {
@@ -4766,7 +4770,7 @@ void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
   Load(args->at(0));
 
-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_sqrt, 1);
     frame_->EmitPush(r0);
   } else {
@@ -5152,7 +5156,7 @@ class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
     Label entry, loop;
     // The use of ip to store the valueOf symbol assumes that it is not otherwise
     // used in the loop below.
-    __ mov(ip, Operand(Factory::value_of_symbol()));
+    __ mov(ip, Operand(FACTORY->value_of_symbol()));
     __ jmp(&entry);
     __ bind(&loop);
     __ ldr(scratch2_, MemOperand(map_result_, 0));
@@ -5355,7 +5359,7 @@ void CodeGenerator::GenerateRandomHeapNumber(
   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     __ PrepareCallCFunction(0, r1);
     __ CallCFunction(ExternalReference::random_uint32_function(), 0);
 
@@ -5479,7 +5483,7 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      Isolate::Current()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
     frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
@@ -5669,7 +5673,7 @@ void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
@@ -5684,7 +5688,7 @@ void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::COS,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
@@ -5699,7 +5703,7 @@ void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
@@ -5804,7 +5808,7 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
 
   ZoneList<Expression*>* args = node->arguments();
   Comment cmnt(masm_, "[ CallRuntime");
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
 
   if (function == NULL) {
     // Prepare stack for calling JS runtime function.
@@ -5828,7 +5832,8 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
     // Call the JS runtime function.
     __ mov(r2, Operand(node->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
     __ ldr(cp, frame_->Context());
     frame_->EmitPush(r0);
@@ -6363,7 +6368,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 
     Register scratch = VirtualFrame::scratch0();
 
-    if (check->Equals(Heap::number_symbol())) {
+    if (check->Equals(HEAP->number_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       true_target()->Branch(eq);
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
@@ -6371,7 +6376,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ cmp(tos, ip);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::string_symbol())) {
+    } else if (check->Equals(HEAP->string_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
 
@@ -6387,7 +6392,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
       cc_reg_ = lt;
 
-    } else if (check->Equals(Heap::boolean_symbol())) {
+    } else if (check->Equals(HEAP->boolean_symbol())) {
       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
@@ -6395,7 +6400,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ cmp(tos, ip);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::undefined_symbol())) {
+    } else if (check->Equals(HEAP->undefined_symbol())) {
       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
@@ -6411,7 +6416,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::function_symbol())) {
+    } else if (check->Equals(HEAP->function_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
       Register map_reg = scratch;
@@ -6421,7 +6426,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::object_symbol())) {
+    } else if (check->Equals(HEAP->object_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
 
@@ -6583,8 +6588,9 @@ void DeferredReferenceGetNamedValue::Generate() {
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
   ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
-  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->named_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1,
+      scratch1, scratch2);
 
   // Ensure receiver in r0 and name in r2 to match load ic calling convention.
   __ Move(r0, receiver_);
@@ -6592,7 +6598,8 @@ void DeferredReferenceGetNamedValue::Generate() {
 
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     RelocInfo::Mode mode = is_contextual_
         ? RelocInfo::CODE_TARGET_CONTEXT
         : RelocInfo::CODE_TARGET;
@@ -6654,8 +6661,9 @@ void DeferredReferenceGetKeyedValue::Generate() {
 
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
-  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->keyed_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->keyed_load_inline_miss(),
+      1, scratch1, scratch2);
 
   // Ensure key in r0 and receiver in r1 to match keyed load ic calling
   // convention.
@@ -6666,7 +6674,8 @@ void DeferredReferenceGetKeyedValue::Generate() {
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // keyed load has been inlined.
@@ -6713,9 +6722,9 @@ class DeferredReferenceSetKeyedValue: public DeferredCode {
 void DeferredReferenceSetKeyedValue::Generate() {
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
-  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(
-      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->keyed_store_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->keyed_store_inline_miss(),
+                      1, scratch1, scratch2);
 
   // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
   // calling convention.
@@ -6728,7 +6737,7 @@ void DeferredReferenceSetKeyedValue::Generate() {
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed store IC. It has the arguments value, key and receiver in r0,
     // r1 and r2.
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
         (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                       : Builtins::KeyedStoreIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
@@ -6783,7 +6792,7 @@ void DeferredReferenceSetNamedValue::Generate() {
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed store IC. It has the arguments value, key and receiver in r0,
     // r1 and r2.
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
         (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                       : Builtins::StoreIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
@@ -6809,7 +6818,7 @@ void DeferredReferenceSetNamedValue::Generate() {
 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
   bool contextual_load_in_builtin =
       is_contextual &&
-      (Bootstrapper::IsActive() ||
+      (ISOLATE->bootstrapper()->IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
 
   if (scope()->is_global_scope() ||
@@ -6831,10 +6840,10 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
     if (is_contextual) {
-      __ IncrementCounter(&Counters::named_load_global_inline, 1,
+      __ IncrementCounter(COUNTERS->named_load_global_inline(), 1,
                           frame_->scratch0(), frame_->scratch1());
     } else {
-      __ IncrementCounter(&Counters::named_load_inline, 1,
+      __ IncrementCounter(COUNTERS->named_load_inline(), 1,
                           frame_->scratch0(), frame_->scratch1());
     }
 
@@ -6867,7 +6876,7 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
         }
       }
       if (is_dont_delete) {
-        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1,
+        __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1,
                             frame_->scratch0(), frame_->scratch1());
       }
     }
@@ -6904,7 +6913,7 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
       // Check the map. The null map used below is patched by the inline cache
       // code.  Therefore we can't use a LoadRoot call.
       __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ mov(scratch2, Operand(Factory::null_value()));
+      __ mov(scratch2, Operand(FACTORY->null_value()));
       __ cmp(scratch, scratch2);
       deferred->Branch(ne);
 
@@ -6913,7 +6922,7 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
         InlinedNamedLoadInstructions += 1;
 #endif
         // Load the (initially invalid) cell and get its value.
-        masm()->mov(receiver, Operand(Factory::null_value()));
+        masm()->mov(receiver, Operand(FACTORY->null_value()));
         __ ldr(receiver,
                FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));
 
@@ -6923,13 +6932,13 @@ void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 #ifdef DEBUG
           InlinedNamedLoadInstructions += 3;
 #endif
-          __ cmp(receiver, Operand(Factory::the_hole_value()));
+          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
           deferred->Branch(eq);
         } else if (FLAG_debug_code) {
 #ifdef DEBUG
           InlinedNamedLoadInstructions += 3;
 #endif
-          __ cmp(receiver, Operand(Factory::the_hole_value()));
+          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
           __ b(&check_the_hole, eq);
           __ bind(&cont);
         }
@@ -6997,7 +7006,7 @@ void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
       Label check_inlined_codesize;
       masm_->bind(&check_inlined_codesize);
 #endif
-      __ mov(scratch0, Operand(Factory::null_value()));
+      __ mov(scratch0, Operand(FACTORY->null_value()));
       __ cmp(scratch0, scratch1);
       deferred->Branch(ne);
 
@@ -7027,11 +7036,11 @@ void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
       // Check that this is the first inlined write barrier or that
       // this inlined write barrier has the same size as all the other
       // inlined write barriers.
-      ASSERT((inlined_write_barrier_size_ == -1) ||
-             (inlined_write_barrier_size_ ==
+      ASSERT((Isolate::Current()->inlined_write_barrier_size() == -1) ||
+             (Isolate::Current()->inlined_write_barrier_size() ==
               masm()->InstructionsGeneratedSince(&record_write_start)));
-      inlined_write_barrier_size_ =
-          masm()->InstructionsGeneratedSince(&record_write_start);
+      Isolate::Current()->set_inlined_write_barrier_size(
+          masm()->InstructionsGeneratedSince(&record_write_start));
 
       // Make sure that the expected number of instructions are generated.
       ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
@@ -7053,7 +7062,7 @@ void CodeGenerator::EmitKeyedLoad() {
 
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
-    __ IncrementCounter(&Counters::keyed_load_inline, 1,
+    __ IncrementCounter(COUNTERS->keyed_load_inline(), 1,
                         frame_->scratch0(), frame_->scratch1());
 
     // Load the key and receiver from the stack.
@@ -7090,7 +7099,7 @@ void CodeGenerator::EmitKeyedLoad() {
       Label check_inlined_codesize;
       masm_->bind(&check_inlined_codesize);
 #endif
-      __ mov(scratch2, Operand(Factory::null_value()));
+      __ mov(scratch2, Operand(FACTORY->null_value()));
       __ cmp(scratch1, scratch2);
       deferred->Branch(ne);
 
@@ -7140,7 +7149,7 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
 
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
-    __ IncrementCounter(&Counters::keyed_store_inline, 1,
+    __ IncrementCounter(COUNTERS->keyed_store_inline(), 1,
                         scratch1, scratch2);
 
 
@@ -7221,7 +7230,7 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
       // comparison to always fail so that we will hit the IC call in the
       // deferred code which will allow the debugger to break for fast case
       // stores.
-      __ mov(scratch3, Operand(Factory::fixed_array_map()));
+      __ mov(scratch3, Operand(FACTORY->fixed_array_map()));
       __ cmp(scratch2, scratch3);
       deferred->Branch(ne);
 
@@ -7391,7 +7400,7 @@ void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
 const char* GenericBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int len = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(len);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(len);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -7411,7 +7420,6 @@ const char* GenericBinaryOpStub::GetName() {
   return name_;
 }
 
-
 #undef __
 
 } }  // namespace v8::internal
index f3c888ca24fb8ad49a94d7f97db405d0edf97583..9b1f103d5a7bca6681b585675931ac89f8e9a2ac 100644 (file)
@@ -270,8 +270,8 @@ class CodeGenerator: public AstVisitor {
   }
   static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
   static int GetInlinedNamedStoreInstructionsAfterPatch() {
-    ASSERT(inlined_write_barrier_size_ != -1);
-    return inlined_write_barrier_size_ + 4;
+    ASSERT(Isolate::Current()->inlined_write_barrier_size() != -1);
+    return Isolate::Current()->inlined_write_barrier_size() + 4;
   }
 
  private:
@@ -576,15 +576,14 @@ class CodeGenerator: public AstVisitor {
   // to some unlinking code).
   bool function_return_is_shadowed_;
 
-  // Size of inlined write barriers generated by EmitNamedStore.
-  static int inlined_write_barrier_size_;
-
   friend class VirtualFrame;
+  friend class Isolate;
   friend class JumpTarget;
   friend class Reference;
   friend class FastCodeGenerator;
   friend class FullCodeGenerator;
   friend class FullCodeGenSyntaxChecker;
+  friend class InlineRuntimeFunctionsTable;
   friend class LCodeGen;
 
   DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
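
The pair of codegen-arm changes above retires the static CodeGenerator::inlined_write_barrier_size_ in favour of a value stored on the isolate, so two isolates measuring different inlined write-barrier sizes no longer share one global. A compact sketch of the accessor pair, with hypothetical stand-in types that only mirror the names used in the hunks:

    #include <cassert>

    class Isolate {
     public:
      static Isolate* Current() { return current_; }
      static void Enter(Isolate* isolate) { current_ = isolate; }

      // -1 means "no inlined write barrier measured yet", matching the ASSERTs above.
      int inlined_write_barrier_size() const { return inlined_write_barrier_size_; }
      void set_inlined_write_barrier_size(int size) { inlined_write_barrier_size_ = size; }

     private:
      static Isolate* current_;
      int inlined_write_barrier_size_ = -1;
    };

    Isolate* Isolate::current_ = nullptr;

    int main() {
      Isolate a, b;
      Isolate::Enter(&a);
      Isolate::Current()->set_inlined_write_barrier_size(7);
      Isolate::Enter(&b);
      // Each isolate keeps its own measurement instead of racing on one static.
      assert(Isolate::Current()->inlined_write_barrier_size() == -1);
      Isolate::Enter(&a);
      assert(Isolate::Current()->inlined_write_barrier_size() == 7);
      return 0;
    }
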
index 51c84b3354693fd34d0b21ccf4ff19680936e603..0f5bf56b54dd3b05b0338d2f31ddbfd7c40a1c3c 100644 (file)
@@ -42,8 +42,9 @@ namespace v8 {
 namespace internal {
 
 void CPU::Setup() {
-  CpuFeatures::Probe(true);
-  if (!CpuFeatures::IsSupported(VFP3) || Serializer::enabled()) {
+  CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+  cpu_features->Probe(true);
+  if (!cpu_features->IsSupported(VFP3) || Serializer::enabled()) {
     V8::DisableCrankshaft();
   }
 }
@@ -61,7 +62,7 @@ void CPU::FlushICache(void* start, size_t size) {
   // that the Icache was flushed.
   // None of this code ends up in the snapshot so there are no issues
   // around whether or not to generate the code when building snapshots.
-  Simulator::FlushICache(start, size);
+  Simulator::FlushICache(Isolate::Current()->simulator_i_cache(), start, size);
 #else
   // Ideally, we would call
   //   syscall(__ARM_NR_cacheflush, start,
index 22640ca1c572c833c23fb1b3d8a4a1e2b8261db5..93fa5c37066db835077299456d87aebad7b729e7 100644 (file)
@@ -65,7 +65,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() {
   patcher.masm()->mov(v8::internal::lr, v8::internal::pc);
   patcher.masm()->ldr(v8::internal::pc, MemOperand(v8::internal::pc, -4));
 #endif
-  patcher.Emit(Debug::debug_break_return()->entry());
+  patcher.Emit(Isolate::Current()->debug()->debug_break_return()->entry());
   patcher.masm()->bkpt(0);
 }
 
@@ -115,7 +115,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() {
   patcher.masm()->mov(v8::internal::lr, v8::internal::pc);
   patcher.masm()->ldr(v8::internal::pc, MemOperand(v8::internal::pc, -4));
 #endif
-  patcher.Emit(Debug::debug_break_slot()->entry());
+  patcher.Emit(Isolate::Current()->debug()->debug_break_slot()->entry());
 }
 
 
index 40e0fc152bc91f0302cf9489c11e39b2a6a95964..8e970ec746cb6396af862c7854a7ad3af874baa4 100644 (file)
@@ -106,8 +106,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 
   // Add the deoptimizing code to the list.
   DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
-  node->set_next(deoptimizing_code_list_);
-  deoptimizing_code_list_ = node;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  node->set_next(data->deoptimizing_code_list_);
+  data->deoptimizing_code_list_ = node;
 
   // Set the code for the function to non-optimized version.
   function->ReplaceCode(function->shared()->code());
@@ -315,7 +316,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
         optimized_code_->entry() + pc_offset);
     output_[0]->SetPc(pc);
   }
-  Code* continuation = Builtins::builtin(Builtins::NotifyOSR);
+  Code* continuation = Isolate::Current()->builtins()->builtin(
+    Builtins::NotifyOSR);
   output_[0]->SetContinuation(
       reinterpret_cast<uint32_t>(continuation->entry()));
 
@@ -489,11 +491,13 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
       FullCodeGenerator::StateField::decode(pc_and_state);
   output_frame->SetState(Smi::FromInt(state));
 
+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
+    Builtins* builtins = isolate_->builtins();
     Code* continuation = (bailout_type_ == EAGER)
-        ? Builtins::builtin(Builtins::NotifyDeoptimized)
-        : Builtins::builtin(Builtins::NotifyLazyDeoptimized);
+        ? builtins->builtin(Builtins::NotifyDeoptimized)
+        : builtins->builtin(Builtins::NotifyLazyDeoptimized);
     output_frame->SetContinuation(
         reinterpret_cast<uint32_t>(continuation->entry()));
   }
index f154839c762bac3155970a343a92e82078f23cc4..09a26260ee8001db728352173d7ba7b0623e5ffc 100644 (file)
@@ -1362,9 +1362,8 @@ namespace disasm {
 
 
 const char* NameConverter::NameOfAddress(byte* addr) const {
-  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
-  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
-  return tmp_buffer.start();
+  v8::internal::OS::SNPrintF(tmp_buffer_, "%p", addr);
+  return tmp_buffer_.start();
 }
 
 
index 4aa8d6aa9ac1340237b7317a0d94c837c7c947cb..152172300bd83efec5a5390b8b2dbe9300d58771 100644 (file)
@@ -28,6 +28,8 @@
 #ifndef V8_ARM_FRAMES_ARM_H_
 #define V8_ARM_FRAMES_ARM_H_
 
+#include "memory.h"
+
 namespace v8 {
 namespace internal {
 
index c0bd302af179c3e04620be51bf59e1ae7fed582f..a53a4ebbf1c04c4ca15c12b0ce1da0888d536c36 100644 (file)
@@ -562,7 +562,7 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
 void FullCodeGenerator::DoTest(Label* if_true,
                                Label* if_false,
                                Label* fall_through) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     // Emit the inlined tests assumed by the stub.
     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
@@ -781,9 +781,9 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
              prop->key()->AsLiteral()->handle()->IsSmi());
       __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
 
-      Handle<Code> ic(Builtins::builtin(
-          is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
-                           : Builtins::KeyedStoreIC_Initialize));
+      Handle<Code> ic(isolate()->builtins()->builtin(is_strict_mode()
+          ? Builtins::KeyedStoreIC_Initialize_Strict
+          : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
       // Value in r0 is ignored (declarations are statements).
     }
@@ -1186,7 +1186,8 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
                  ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                    slow));
           __ mov(r0, Operand(key_literal->handle()));
-          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          Handle<Code> ic(isolate()->builtins()->builtin(
+              Builtins::KeyedLoadIC_Initialize));
           EmitCallIC(ic, RelocInfo::CODE_TARGET);
           __ jmp(done);
         }
@@ -1252,7 +1253,8 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   EmitCallIC(ic, mode);
 }
 
@@ -1270,7 +1272,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     // object (receiver) in r0.
     __ ldr(r0, GlobalObjectOperand());
     __ mov(r2, Operand(var->name()));
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
     context()->Plug(r0);
 
@@ -1329,7 +1332,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     __ mov(r0, Operand(key_literal->handle()));
 
     // Call keyed load IC. It has arguments key and receiver in r0 and r1.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     context()->Plug(r0);
   }
@@ -1432,7 +1436,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
             VisitForAccumulatorValue(value);
             __ mov(r2, Operand(key->handle()));
             __ ldr(r1, MemOperand(sp));
-            Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+            Handle<Code> ic(isolate()->builtins()->builtin(
+                Builtins::StoreIC_Initialize));
             EmitCallIC(ic, RelocInfo::CODE_TARGET);
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           } else {
@@ -1490,11 +1495,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(expr->constant_elements()));
   __ Push(r3, r2, r1);
-  if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (expr->constant_elements()->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     __ CallStub(&stub);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+    __ IncrementCounter(
+        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
   } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1678,7 +1685,8 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   Literal* key = prop->key()->AsLiteral();
   __ mov(r2, Operand(key->handle()));
   // Call load IC. It has arguments receiver and property name r0 and r2.
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
@@ -1686,7 +1694,8 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   // Call keyed load IC. It has arguments key and receiver in r0 and r1.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
@@ -1831,7 +1840,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
       __ mov(r1, r0);
       __ pop(r0);  // Restore value.
       __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1854,7 +1863,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
         __ pop(r2);
       }
       __ pop(r0);  // Restore value.
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1880,7 +1889,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
     // r2, and the global object in r1.
     __ mov(r2, Operand(var->name()));
     __ ldr(r1, GlobalObjectOperand());
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(isolate()->builtins()->builtin(
         is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
@@ -1989,7 +1998,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
     __ pop(r1);
   }
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                        : Builtins::StoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -2035,7 +2044,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
     __ pop(r2);
   }
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                        : Builtins::KeyedStoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -2088,7 +2097,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
   SetSourcePosition(expr->position());
   // Call the IC initialization code.
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
   // Restore context register.
@@ -2121,7 +2131,8 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   SetSourcePosition(expr->position());
   // Call the IC initialization code.
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
@@ -2320,7 +2331,8 @@ void FullCodeGenerator::VisitCall(Call* expr) {
         // Record source code position for IC call.
         SetSourcePosition(prop->position());
 
-        Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+        Handle<Code> ic(isolate()->builtins()->builtin(
+            Builtins::KeyedLoadIC_Initialize));
         EmitCallIC(ic, RelocInfo::CODE_TARGET);
         __ ldr(r1, GlobalObjectOperand());
         __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
@@ -2339,7 +2351,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     // also use the fast code generator.
     FunctionLiteral* lit = fun->AsFunctionLiteral();
     if (lit != NULL &&
-        lit->name()->Equals(Heap::empty_string()) &&
+        lit->name()->Equals(isolate()->heap()->empty_string()) &&
         loop_depth() == 0) {
       lit->set_try_full_codegen(true);
     }
@@ -2388,7 +2400,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ mov(r0, Operand(arg_count));
   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
 
-  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> construct_builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
   context()->Plug(r0);
 }
@@ -2783,7 +2796,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (isolate()->cpu_features()->IsSupported(VFP3)) {
     __ PrepareCallCFunction(0, r1);
     __ CallCFunction(ExternalReference::random_uint32_function(), 0);
 
@@ -3204,7 +3217,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      isolate()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3583,7 +3596,8 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   if (expr->is_jsruntime()) {
     // Call the JS runtime function.
     __ mov(r2, Operand(expr->name()));
-    Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, NOT_IN_LOOP);
+    Handle<Code> ic =
+        isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     // Restore context register.
     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3887,7 +3901,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case NAMED_PROPERTY: {
       __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
       __ pop(r1);
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3904,7 +3918,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case KEYED_PROPERTY: {
       __ pop(r1);  // Key.
       __ pop(r2);  // Receiver.
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3930,7 +3944,8 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
     Comment cmnt(masm_, "Global variable");
     __ ldr(r0, GlobalObjectOperand());
     __ mov(r2, Operand(proxy->name()));
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3983,13 +3998,13 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
   }
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
 
-  if (check->Equals(Heap::number_symbol())) {
+  if (check->Equals(isolate()->heap()->number_symbol())) {
     __ JumpIfSmi(r0, if_true);
     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
     __ cmp(r0, ip);
     Split(eq, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::string_symbol())) {
+  } else if (check->Equals(isolate()->heap()->string_symbol())) {
     __ JumpIfSmi(r0, if_false);
     // Check for undetectable objects => false.
     __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
@@ -3997,12 +4012,12 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
     __ tst(r1, Operand(1 << Map::kIsUndetectable));
     Split(eq, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::boolean_symbol())) {
+  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
     __ CompareRoot(r0, Heap::kTrueValueRootIndex);
     __ b(eq, if_true);
     __ CompareRoot(r0, Heap::kFalseValueRootIndex);
     Split(eq, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::undefined_symbol())) {
+  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
     __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
     __ b(eq, if_true);
     __ JumpIfSmi(r0, if_false);
@@ -4012,12 +4027,12 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ tst(r1, Operand(1 << Map::kIsUndetectable));
     Split(ne, if_true, if_false, fall_through);
 
-  } else if (check->Equals(Heap::function_symbol())) {
+  } else if (check->Equals(isolate()->heap()->function_symbol())) {
     __ JumpIfSmi(r0, if_false);
     __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
     Split(ge, if_true, if_false, fall_through);
 
-  } else if (check->Equals(Heap::object_symbol())) {
+  } else if (check->Equals(isolate()->heap()->object_symbol())) {
     __ JumpIfSmi(r0, if_false);
     __ CompareRoot(r0, Heap::kNullValueRootIndex);
     __ b(eq, if_true);
@@ -4203,16 +4218,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->named_load_full(), 1, r1, r2);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->keyed_load_full(), 1, r1, r2);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->named_store_full(), 1, r1, r2);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->keyed_store_full(), 1, r1, r2);
     default:
       break;
   }
@@ -4224,16 +4239,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->named_load_full(), 1, r1, r2);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->keyed_load_full(), 1, r1, r2);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->named_store_full(), 1, r1, r2);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+      __ IncrementCounter(COUNTERS->keyed_store_full(), 1, r1, r2);
     default:
       break;
   }
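Note on the counter plumbing above: COUNTERS is assumed to resolve to the
current isolate's Counters table (roughly Isolate::Current()->counters() or an
equivalent isolate-scoped accessor), so each StatsCounter such as
named_load_full() is now looked up per isolate instead of through the former
static &Counters:: fields.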
index 85a0c09b411be171ba4ad555120acf691d9a204d..d92406bdc0adc1a6b86bf9307aba2a53b629bf93 100644 (file)
@@ -552,7 +552,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                          Code::kNoExtraICState,
                                          NORMAL,
                                          argc);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
@@ -591,7 +592,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
 
   // Probe the stub cache for the value object.
   __ bind(&probe);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   __ bind(&miss);
 }
@@ -646,9 +648,9 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
   // -----------------------------------
 
   if (id == IC::kCallIC_Miss) {
-    __ IncrementCounter(&Counters::call_miss, 1, r3, r4);
+    __ IncrementCounter(COUNTERS->call_miss(), 1, r3, r4);
   } else {
-    __ IncrementCounter(&Counters::keyed_call_miss, 1, r3, r4);
+    __ IncrementCounter(COUNTERS->keyed_call_miss(), 1, r3, r4);
   }
 
   // Get the receiver of the function from the stack.
@@ -763,7 +765,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
 
   GenerateFastArrayLoad(
       masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1, r0, r3);
 
   __ bind(&do_call);
   // receiver in r1 is not used after this point.
@@ -782,13 +784,13 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ mov(r0, Operand(r2, ASR, kSmiTagSize));
   // r0: untagged index
   GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1, r0, r3);
   __ jmp(&do_call);
 
   __ bind(&slow_load);
   // This branch is taken when calling KeyedCallIC_Miss is neither required
   // nor beneficial.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1, r0, r3);
   __ EnterInternalFrame();
   __ push(r2);  // save the key
   __ Push(r1, r2);  // pass the receiver and the key
@@ -815,11 +817,11 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ b(ne, &lookup_monomorphic_cache);
 
   GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1, r0, r3);
   __ jmp(&do_call);
 
   __ bind(&lookup_monomorphic_cache);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1, r0, r3);
   GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
   // Fall through on miss.
 
@@ -830,7 +832,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // - the value loaded is not a function,
   // - there is hope that the runtime will create a monomorphic call stub
   //   that will get fetched next time.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1, r0, r3);
   GenerateMiss(masm, argc);
 
   __ bind(&index_string);
@@ -873,7 +875,8 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC);
-  StubCache::GenerateProbe(masm, flags, r0, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r0, r2, r3, r4, r5);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -909,7 +912,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- sp[0] : receiver
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::load_miss, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->load_miss(), 1, r3, r4);
 
   __ mov(r3, r0);
   __ Push(r3, r2);
@@ -1057,7 +1060,7 @@ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
   // Update the offsets if initializing the inlined store. No reason
   // to update the offsets when clearing the inlined version because
   // it will bail out in the map check.
-  if (map != Heap::null_value()) {
+  if (map != HEAP->null_value()) {
     // Patch the offset in the actual store instruction.
     Address str_property_instr_address =
         ldr_map_instr_address + 3 * Assembler::kInstrSize;
@@ -1141,7 +1144,7 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- r1     : receiver
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::keyed_load_miss, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->keyed_load_miss(), 1, r3, r4);
 
   __ Push(r1, r0);
 
@@ -1192,7 +1195,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
 
   GenerateFastArrayLoad(
       masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1, r2, r3);
   __ Ret();
 
   __ bind(&check_number_dictionary);
@@ -1212,7 +1215,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
 
   // Slow case, key and receiver still in r0 and r1.
   __ bind(&slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1, r2, r3);
   GenerateRuntimeGetProperty(masm);
 
   __ bind(&check_string);
@@ -1267,7 +1270,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ add(r6, r6, r5);  // Index from start of object.
   __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
   __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
   __ Ret();
 
   // Load property array property.
@@ -1275,7 +1278,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
   __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
   __ Ret();
 
   // Do a quick inline probe of the receiver's dictionary, if it
@@ -1289,7 +1292,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
   // Load the property to r0.
   GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
-  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1, r2, r3);
   __ Ret();
 
   __ bind(&index_string);
@@ -1527,7 +1530,9 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC,
                                          strict_mode);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1611,11 +1616,11 @@ void StoreIC::GenerateNormal(MacroAssembler* masm) {
   GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
 
   GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
-  __ IncrementCounter(&Counters::store_normal_hit, 1, r4, r5);
+  __ IncrementCounter(COUNTERS->store_normal_hit(), 1, r4, r5);
   __ Ret();
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::store_normal_miss, 1, r4, r5);
+  __ IncrementCounter(COUNTERS->store_normal_miss(), 1, r4, r5);
   GenerateMiss(masm);
 }
 
index 861ea381049b087dfbcb0e69182379f816f796fa..4244cf50f61d7fbe079d880671b2fd9dff34c983 100644 (file)
@@ -25,6 +25,8 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
 #include "lithium-allocator-inl.h"
 #include "arm/lithium-arm.h"
 #include "arm/lithium-codegen-arm.h"
index a148fdbea45211f32265b5f26e0d3819a1911c7b..e8652861755a259997f5f935e64a4bf17d423a38 100644 (file)
@@ -1421,7 +1421,7 @@ class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
   DECLARE_HYDROGEN_ACCESSOR(CallRuntime)
 
-  Runtime::Function* function() const { return hydrogen()->function(); }
+  const Runtime::Function* function() const { return hydrogen()->function(); }
   int arity() const { return hydrogen()->argument_count(); }
 };
 
index 4bfa048cf04ceb27a21583323292e98f7a297213..cb18a8d9d0378afd31a4f5a30c536b1bd76b8883 100644 (file)
@@ -25,6 +25,8 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
 #include "arm/lithium-codegen-arm.h"
 #include "arm/lithium-gap-resolver-arm.h"
 #include "code-stubs.h"
@@ -489,7 +491,7 @@ void LCodeGen::CallCode(Handle<Code> code,
 }
 
 
-void LCodeGen::CallRuntime(Runtime::Function* function,
+void LCodeGen::CallRuntime(const Runtime::Function* function,
                            int num_arguments,
                            LInstruction* instr) {
   ASSERT(instr != NULL);
@@ -588,14 +590,14 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   if (length == 0) return;
   ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
-      Factory::NewDeoptimizationInputData(length, TENURED);
+      factory()->NewDeoptimizationInputData(length, TENURED);
 
   Handle<ByteArray> translations = translations_.CreateByteArray();
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
   Handle<FixedArray> literals =
-      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
+      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
   for (int i = 0; i < deoptimization_literals_.length(); i++) {
     literals->set(i, *deoptimization_literals_[i]);
   }
@@ -1900,8 +1902,8 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
 
   Label true_value, done;
   __ tst(r0, r0);
-  __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
-  __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
+  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
+  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
 }
 
 
@@ -1960,13 +1962,13 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   // We use Factory::the_hole_value() on purpose instead of loading from the
   // root array to force relocation to be able to later patch with
   // the cached map.
-  __ mov(ip, Operand(Factory::the_hole_value()));
+  __ mov(ip, Operand(factory()->the_hole_value()));
   __ cmp(map, Operand(ip));
   __ b(ne, &cache_miss);
   // We use Factory::the_hole_value() on purpose instead of loading from the
   // root array to force relocation to be able to later patch
   // with true or false.
-  __ mov(result, Operand(Factory::the_hole_value()));
+  __ mov(result, Operand(factory()->the_hole_value()));
   __ b(&done);
 
   // The inlined call site cache did not match. Check null and string before
@@ -2179,7 +2181,8 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
 
   // Name is always in r2.
   __ mov(r2, Operand(instr->name()));
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(
+      isolate()->builtins()->builtin(Builtins::LoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2310,7 +2313,8 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(r1));
   ASSERT(ToRegister(instr->key()).is(r0));
 
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2815,7 +2819,8 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic =
+      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
@@ -2825,7 +2830,8 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+      arity, NOT_IN_LOOP);
   __ mov(r2, Operand(instr->name()));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   // Restore context register.
@@ -2848,7 +2854,8 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic =
+      isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
   __ mov(r2, Operand(instr->name()));
   CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2866,7 +2873,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->InputAt(0)).is(r1));
   ASSERT(ToRegister(instr->result()).is(r0));
 
-  Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ mov(r0, Operand(instr->arity()));
   CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
 }
@@ -2915,7 +2923,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
 
   // Name is always in r2.
   __ mov(r2, Operand(instr->name()));
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -2970,7 +2978,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->key()).is(r1));
   ASSERT(ToRegister(instr->value()).is(r0));
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
                          : Builtins::KeyedStoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3582,9 +3590,9 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
 
 void LCodeGen::LoadHeapObject(Register result,
                               Handle<HeapObject> object) {
-  if (Heap::InNewSpace(*object)) {
+  if (heap()->InNewSpace(*object)) {
     Handle<JSGlobalPropertyCell> cell =
-        Factory::NewJSGlobalPropertyCell(object);
+        factory()->NewJSGlobalPropertyCell(object);
     __ mov(result, Operand(cell));
     __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
   } else {
@@ -3735,8 +3743,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   } else {
     __ mov(r2, Operand(shared_info));
     __ mov(r1, Operand(pretenure
-                       ? Factory::true_value()
-                       : Factory::false_value()));
+                       ? factory()->true_value()
+                       : factory()->false_value()));
     __ Push(cp, r2, r1);
     CallRuntime(Runtime::kNewClosure, 3, instr);
   }
@@ -3795,14 +3803,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                  Handle<String> type_name) {
   Condition final_branch_condition = kNoCondition;
   Register scratch = scratch0();
-  if (type_name->Equals(Heap::number_symbol())) {
+  if (type_name->Equals(heap()->number_symbol())) {
     __ JumpIfSmi(input, true_label);
     __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
     __ cmp(input, Operand(ip));
     final_branch_condition = eq;
 
-  } else if (type_name->Equals(Heap::string_symbol())) {
+  } else if (type_name->Equals(heap()->string_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
     __ b(ge, false_label);
@@ -3810,13 +3818,13 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
     __ tst(ip, Operand(1 << Map::kIsUndetectable));
     final_branch_condition = eq;
 
-  } else if (type_name->Equals(Heap::boolean_symbol())) {
+  } else if (type_name->Equals(heap()->boolean_symbol())) {
     __ CompareRoot(input, Heap::kTrueValueRootIndex);
     __ b(eq, true_label);
     __ CompareRoot(input, Heap::kFalseValueRootIndex);
     final_branch_condition = eq;
 
-  } else if (type_name->Equals(Heap::undefined_symbol())) {
+  } else if (type_name->Equals(heap()->undefined_symbol())) {
     __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
     __ b(eq, true_label);
     __ JumpIfSmi(input, false_label);
@@ -3826,12 +3834,12 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
     __ tst(ip, Operand(1 << Map::kIsUndetectable));
     final_branch_condition = ne;
 
-  } else if (type_name->Equals(Heap::function_symbol())) {
+  } else if (type_name->Equals(heap()->function_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
     final_branch_condition = ge;
 
-  } else if (type_name->Equals(Heap::object_symbol())) {
+  } else if (type_name->Equals(heap()->object_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CompareRoot(input, Heap::kNullValueRootIndex);
     __ b(eq, true_label);
index e2b572ecb9817cf740ec02f125910482543ef19b..da341762a569afcbdebe04814bf2442fb07b7702 100644 (file)
@@ -65,6 +65,9 @@ class LCodeGen BASE_EMBEDDED {
   // Simple accessors.
   MacroAssembler* masm() const { return masm_; }
   CompilationInfo* info() const { return info_; }
+  Isolate* isolate() const { return info_->isolate(); }
+  Factory* factory() const { return isolate()->factory(); }
+  Heap* heap() const { return isolate()->heap(); }
 
   // Support for converting LOperands to assembler types.
   // LOperand must be a register.
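These three accessors are the hook the rest of this patch leans on in
lithium-codegen-arm.cc: former static Heap/Factory lookups are routed through
the CompilationInfo's isolate. For example, PopulateDeoptimizationData in the
hunks above now allocates via

    Handle<FixedArray> literals =
        factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);

where it previously went through the static Factory::NewFixedArray().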
@@ -172,13 +175,13 @@ class LCodeGen BASE_EMBEDDED {
   void CallCode(Handle<Code> code,
                 RelocInfo::Mode mode,
                 LInstruction* instr);
-  void CallRuntime(Runtime::Function* function,
+  void CallRuntime(const Runtime::Function* function,
                    int num_arguments,
                    LInstruction* instr);
   void CallRuntime(Runtime::FunctionId id,
                    int num_arguments,
                    LInstruction* instr) {
-    Runtime::Function* function = Runtime::FunctionForId(id);
+    const Runtime::Function* function = Runtime::FunctionForId(id);
     CallRuntime(function, num_arguments, instr);
   }
 
index 1a2326b748d5a38642d864a6972504bb6f5d9868..02608a695042ea06cc830b972149a8b3b2661ea4 100644 (file)
@@ -25,6 +25,8 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
 #include "arm/lithium-gap-resolver-arm.h"
 #include "arm/lithium-codegen-arm.h"
 
index 9e5417f92747d28c83f04e473af66a78de057339..ce653eaadb854b2dcb085190f1718f37b5b7e82f 100644 (file)
@@ -43,7 +43,7 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
     : Assembler(buffer, size),
       generating_stub_(false),
       allow_stub_calls_(true),
-      code_object_(Heap::undefined_value()) {
+      code_object_(HEAP->undefined_value()) {
 }
 
 
@@ -292,7 +292,7 @@ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
 
   } else if (!src2.is_single_instruction() &&
              !src2.must_use_constant_pool() &&
-             CpuFeatures::IsSupported(ARMv7) &&
+             Isolate::Current()->cpu_features()->IsSupported(ARMv7) &&
              IsPowerOf2(src2.immediate() + 1)) {
     ubfx(dst, src1, 0, WhichPowerOf2(src2.immediate() + 1), cond);
 
@@ -305,7 +305,7 @@ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
 void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                           Condition cond) {
   ASSERT(lsb < 32);
-  if (!CpuFeatures::IsSupported(ARMv7)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
     and_(dst, src1, Operand(mask), LeaveCC, cond);
     if (lsb != 0) {
@@ -320,7 +320,7 @@ void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
 void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                           Condition cond) {
   ASSERT(lsb < 32);
-  if (!CpuFeatures::IsSupported(ARMv7)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
     and_(dst, src1, Operand(mask), LeaveCC, cond);
     int shift_up = 32 - lsb - width;
@@ -348,7 +348,7 @@ void MacroAssembler::Bfi(Register dst,
   ASSERT(lsb + width < 32);
   ASSERT(!scratch.is(dst));
   if (width == 0) return;
-  if (!CpuFeatures::IsSupported(ARMv7)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
     bic(dst, dst, Operand(mask));
     and_(scratch, src, Operand((1 << width) - 1));
@@ -362,7 +362,7 @@ void MacroAssembler::Bfi(Register dst,
 
 void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
   ASSERT(lsb < 32);
-  if (!CpuFeatures::IsSupported(ARMv7)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
     bic(dst, dst, Operand(mask));
   } else {
@@ -373,7 +373,7 @@ void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
 
 void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                           Condition cond) {
-  if (!CpuFeatures::IsSupported(ARMv7)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     ASSERT(!dst.is(pc) && !src.rm().is(pc));
     ASSERT((satpos >= 0) && (satpos <= 31));
 
@@ -619,7 +619,7 @@ void MacroAssembler::Ldrd(Register dst1, Register dst2,
   ASSERT_EQ(dst1.code() + 1, dst2.code());
 
   // Generate two ldr instructions if ldrd is not available.
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     CpuFeatures::Scope scope(ARMv7);
     ldrd(dst1, dst2, src, cond);
   } else {
@@ -644,7 +644,7 @@ void MacroAssembler::Strd(Register src1, Register src2,
   ASSERT_EQ(src1.code() + 1, src2.code());
 
   // Generate two str instructions if strd is not available.
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     CpuFeatures::Scope scope(ARMv7);
     strd(src1, src2, dst, cond);
   } else {
@@ -739,9 +739,9 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
   str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));
 
   // Save the frame pointer and the context in top.
-  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
   str(fp, MemOperand(ip));
-  mov(ip, Operand(ExternalReference(Top::k_context_address)));
+  mov(ip, Operand(ExternalReference(Isolate::k_context_address)));
   str(cp, MemOperand(ip));
 
   // Optionally save all double registers.
@@ -817,11 +817,11 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles,
 
   // Clear top frame.
   mov(r3, Operand(0, RelocInfo::NONE));
-  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
   str(r3, MemOperand(ip));
 
   // Restore current context from top and clear it in debug mode.
-  mov(ip, Operand(ExternalReference(Top::k_context_address)));
+  mov(ip, Operand(ExternalReference(Isolate::k_context_address)));
   ldr(cp, MemOperand(ip));
 #ifdef DEBUG
   str(r3, MemOperand(ip));
@@ -903,7 +903,8 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
     }
 
     Handle<Code> adaptor =
-        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::ArgumentsAdaptorTrampoline));
     if (flag == CALL_FUNCTION) {
       if (call_wrapper != NULL) {
         call_wrapper->BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
@@ -1070,7 +1071,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
            && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
     stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
     // Save the current handler as the next handler.
-    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+    mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
     ldr(r1, MemOperand(r3));
     ASSERT(StackHandlerConstants::kNextOffset == 0);
     push(r1);
@@ -1089,7 +1090,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
            && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
     stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
     // Save the current handler as the next handler.
-    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
+    mov(r7, Operand(ExternalReference(Isolate::k_handler_address)));
     ldr(r6, MemOperand(r7));
     ASSERT(StackHandlerConstants::kNextOffset == 0);
     push(r6);
@@ -1102,7 +1103,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
 void MacroAssembler::PopTryHandler() {
   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
   pop(r1);
-  mov(ip, Operand(ExternalReference(Top::k_handler_address)));
+  mov(ip, Operand(ExternalReference(Isolate::k_handler_address)));
   add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
   str(r1, MemOperand(ip));
 }
@@ -1118,7 +1119,7 @@ void MacroAssembler::Throw(Register value) {
   STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
 
   // Drop the sp to the top of the handler.
-  mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+  mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
   ldr(sp, MemOperand(r3));
 
   // Restore the next handler and frame pointer, discard handler state.
@@ -1157,7 +1158,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   }
 
   // Drop sp to the top stack handler.
-  mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+  mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
   ldr(sp, MemOperand(r3));
 
   // Unwind the handlers until the ENTRY handler is found.
@@ -1181,7 +1182,8 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
 
   if (type == OUT_OF_MEMORY) {
     // Set external caught exception to false.
-    ExternalReference external_caught(Top::k_external_caught_exception_address);
+    ExternalReference external_caught(
+        Isolate::k_external_caught_exception_address);
     mov(r0, Operand(false, RelocInfo::NONE));
     mov(r2, Operand(external_caught));
     str(r0, MemOperand(r2));
@@ -1189,7 +1191,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
     // Set pending exception and r0 to out of memory exception.
     Failure* out_of_memory = Failure::OutOfMemoryException();
     mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
-    mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
+    mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address)));
     str(r0, MemOperand(r2));
   }
 
@@ -1896,7 +1898,7 @@ void MacroAssembler::ConvertToInt32(Register source,
                                     Register scratch2,
                                     DwVfpRegister double_scratch,
                                     Label *not_int32) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     sub(scratch, source, Operand(kHeapObjectTag));
     vldr(double_scratch, scratch, HeapNumber::kValueOffset);
@@ -1992,7 +1994,7 @@ void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
                                      Register scratch1,
                                      Register scratch2,
                                      CheckForInexactConversion check_inexact) {
-  ASSERT(CpuFeatures::IsSupported(VFP3));
+  ASSERT(Isolate::Current()->cpu_features()->IsSupported(VFP3));
   CpuFeatures::Scope scope(VFP3);
   Register prev_fpscr = scratch1;
   Register scratch = scratch2;
@@ -2150,7 +2152,7 @@ void MacroAssembler::EmitECMATruncate(Register result,
 void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                          Register src,
                                          int num_least_bits) {
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     ubfx(dst, src, kSmiTagSize, num_least_bits);
   } else {
     mov(dst, Operand(src, ASR, kSmiTagSize));
@@ -2166,7 +2168,8 @@ void MacroAssembler::GetLeastBitsFromInt32(Register dst,
 }
 
 
-void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
+void MacroAssembler::CallRuntime(const Runtime::Function* f,
+                                 int num_arguments) {
   // All parameters are on the stack.  r0 has the return value after call.
 
   // If the expected number of arguments of the runtime function is
@@ -2194,7 +2197,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
 
 
 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
-  Runtime::Function* function = Runtime::FunctionForId(id);
+  const Runtime::Function* function = Runtime::FunctionForId(id);
   mov(r0, Operand(function->nargs));
   mov(r1, Operand(ExternalReference(function)));
   CEntryStub stub(1);
@@ -2771,11 +2774,17 @@ void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
   b(ne, failure);
 }
 
+static const int kRegisterPassedArguments = 4;
 
 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
   int frame_alignment = ActivationFrameAlignment();
+
+  // Reserve space for the Isolate address, which is always passed last.
+  num_arguments += 1;
+
   // Up to four simple arguments are passed in registers r0..r3.
-  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
+  int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ?
+                               0 : num_arguments - kRegisterPassedArguments;
   if (frame_alignment > kPointerSize) {
     // Make stack end at alignment and make room for num_arguments - 4 words
     // and the original value of sp.
@@ -2792,12 +2801,36 @@ void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
 
 void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_arguments) {
-  mov(ip, Operand(function));
-  CallCFunction(ip, num_arguments);
+  CallCFunctionHelper(no_reg, function, ip, num_arguments);
+}
+
+void MacroAssembler::CallCFunction(Register function,
+                                   Register scratch,
+                                   int num_arguments) {
+  CallCFunctionHelper(function,
+                      ExternalReference::the_hole_value_location(),
+                      scratch,
+                      num_arguments);
 }
 
 
-void MacroAssembler::CallCFunction(Register function, int num_arguments) {
+void MacroAssembler::CallCFunctionHelper(Register function,
+                                         ExternalReference function_reference,
+                                         Register scratch,
+                                         int num_arguments) {
+  // Pass the Isolate address as the last argument.
+  if (num_arguments < kRegisterPassedArguments) {
+    Register arg_to_reg[] = {r0, r1, r2, r3};
+    Register r = arg_to_reg[num_arguments];
+    mov(r, Operand(ExternalReference::isolate_address()));
+  } else {
+    int stack_passed_arguments = num_arguments - kRegisterPassedArguments;
+    // Push Isolate address on the stack after the arguments.
+    mov(scratch, Operand(ExternalReference::isolate_address()));
+    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
+  }
+  num_arguments += 1;
+
   // Make sure that the stack is aligned before calling a C function unless
   // running in the simulator. The simulator has its own alignment check which
   // provides more information.
@@ -2821,8 +2854,13 @@ void MacroAssembler::CallCFunction(Register function, int num_arguments) {
   // Just call directly. The function called cannot cause a GC, or
   // allow preemption, so the return address in the link register
   // stays correct.
+  if (function.is(no_reg)) {
+    mov(scratch, Operand(function_reference));
+    function = scratch;
+  }
   Call(function);
-  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
+  int stack_passed_arguments = (num_arguments <= kRegisterPassedArguments) ?
+                               0 : num_arguments - kRegisterPassedArguments;
   if (OS::ActivationFrameAlignment() > kPointerSize) {
     ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
   } else {
index f72bbedc6cdb10ef3ff8c478da7efedd4b3cc80e..a2a7e2f9c2a097c2e9efa7cd832de65029551dd2 100644 (file)
@@ -708,7 +708,7 @@ class MacroAssembler: public Assembler {
                                                Condition cond = al);
 
   // Call a runtime routine.
-  void CallRuntime(Runtime::Function* f, int num_arguments);
+  void CallRuntime(const Runtime::Function* f, int num_arguments);
   void CallRuntimeSaveDoubles(Runtime::FunctionId id);
 
   // Convenience function: Same as above, but takes the fid instead.
@@ -752,7 +752,7 @@ class MacroAssembler: public Assembler {
   // return address (unless this is somehow accounted for by the called
   // function).
   void CallCFunction(ExternalReference function, int num_arguments);
-  void CallCFunction(Register function, int num_arguments);
+  void CallCFunction(Register function, Register scratch, int num_arguments);
 
   void GetCFunctionDoubleResult(const DoubleRegister dst);
 
@@ -934,6 +934,11 @@ class MacroAssembler: public Assembler {
 
 
  private:
+  void CallCFunctionHelper(Register function,
+                           ExternalReference function_reference,
+                           Register scratch,
+                           int num_arguments);
+
   void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
   int CallSize(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
   void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
index 1f6ed6712d5bbe03f4ea55309512612a8104e0e1..47a33220bea3a3f47255fc3dc385aae8757945ec 100644 (file)
@@ -60,6 +60,7 @@ namespace internal {
  * Each call to a public method should retain this convention.
  *
  * The stack will have the following structure:
+ *  - fp[52]  Isolate* isolate   (Address of the current isolate)
  *  - fp[48]  direct_call  (if 1, direct call from JavaScript code,
  *                          if 0, call through the runtime system).
  *  - fp[44]  stack_area_base (High end of the memory area to use as
@@ -804,10 +805,10 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
 
   CodeDesc code_desc;
   masm_->GetCode(&code_desc);
-  Handle<Code> code = Factory::NewCode(code_desc,
+  Handle<Code> code = FACTORY->NewCode(code_desc,
                                        Code::ComputeFlags(Code::REGEXP),
                                        masm_->CodeObject());
-  PROFILE(RegExpCodeCreateEvent(*code, *source));
+  PROFILE(Isolate::Current(), RegExpCodeCreateEvent(*code, *source));
   return Handle<Object>::cast(code);
 }
 
@@ -1013,8 +1014,10 @@ static T& frame_entry(Address re_frame, int frame_offset) {
 int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
                                                   Code* re_code,
                                                   Address re_frame) {
-  if (StackGuard::IsStackOverflow()) {
-    Top::StackOverflow();
+  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
+  ASSERT(isolate == Isolate::Current());
+  if (isolate->stack_guard()->IsStackOverflow()) {
+    isolate->StackOverflow();
     return EXCEPTION;
   }
 
index d9d0b3562ed405d6e55fe38c8ffe75a4da46c17b..b57d0ebb6a1c37a190bf95a795fe57ef0c9437c0 100644 (file)
@@ -127,6 +127,7 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
   static const int kRegisterOutput = kSecondaryReturnAddress + kPointerSize;
   static const int kStackHighEnd = kRegisterOutput + kPointerSize;
   static const int kDirectCall = kStackHighEnd + kPointerSize;
+  static const int kIsolate = kDirectCall + kPointerSize;
 
   // Below the frame pointer.
   // Register parameters stored by setup code.
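For reference, the new constant lines up with the frame diagram added in
regexp-macro-assembler-arm.cc: with kPointerSize == 4 on 32-bit ARM,
kDirectCall sits at fp[48], so kIsolate = kDirectCall + kPointerSize places
the Isolate* at fp[52], the slot CheckStackGuardState reads back via
frame_entry<Isolate*>(re_frame, kIsolate).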
index cedce6d4ba26d8d4f3560e47d0e01962d9c561d9..28954f63007e6aa35e4f67b5d6a903ab2ee9619e 100644 (file)
@@ -49,12 +49,12 @@ namespace internal {
 // Windows C Run-Time Library does not provide vsscanf.
 #define SScanF sscanf  // NOLINT
 
-// The Debugger class is used by the simulator while debugging simulated ARM
+// The ArmDebugger class is used by the simulator while debugging simulated ARM
 // code.
-class Debugger {
+class ArmDebugger {
  public:
-  explicit Debugger(Simulator* sim);
-  ~Debugger();
+  explicit ArmDebugger(Simulator* sim);
+  ~ArmDebugger();
 
   void Stop(Instruction* instr);
   void Debug();
@@ -83,12 +83,12 @@ class Debugger {
 };
 
 
-Debugger::Debugger(Simulator* sim) {
+ArmDebugger::ArmDebugger(Simulator* sim) {
   sim_ = sim;
 }
 
 
-Debugger::~Debugger() {
+ArmDebugger::~ArmDebugger() {
 }
 
 
@@ -105,7 +105,7 @@ static void InitializeCoverage() {
 }
 
 
-void Debugger::Stop(Instruction* instr) {
+void ArmDebugger::Stop(Instruction* instr) {
   // Get the stop code.
   uint32_t code = instr->SvcValue() & kStopCodeMask;
   // Retrieve the encoded address, which comes just after this stop.
@@ -137,7 +137,7 @@ static void InitializeCoverage() {
 }
 
 
-void Debugger::Stop(Instruction* instr) {
+void ArmDebugger::Stop(Instruction* instr) {
   // Get the stop code.
   uint32_t code = instr->SvcValue() & kStopCodeMask;
   // Retrieve the encoded address, which comes just after this stop.
@@ -159,7 +159,7 @@ void Debugger::Stop(Instruction* instr) {
 #endif
 
 
-int32_t Debugger::GetRegisterValue(int regnum) {
+int32_t ArmDebugger::GetRegisterValue(int regnum) {
   if (regnum == kPCRegister) {
     return sim_->get_pc();
   } else {
@@ -168,12 +168,12 @@ int32_t Debugger::GetRegisterValue(int regnum) {
 }
 
 
-double Debugger::GetVFPDoubleRegisterValue(int regnum) {
+double ArmDebugger::GetVFPDoubleRegisterValue(int regnum) {
   return sim_->get_double_from_d_register(regnum);
 }
 
 
-bool Debugger::GetValue(const char* desc, int32_t* value) {
+bool ArmDebugger::GetValue(const char* desc, int32_t* value) {
   int regnum = Registers::Number(desc);
   if (regnum != kNoRegister) {
     *value = GetRegisterValue(regnum);
@@ -189,7 +189,7 @@ bool Debugger::GetValue(const char* desc, int32_t* value) {
 }
 
 
-bool Debugger::GetVFPSingleValue(const char* desc, float* value) {
+bool ArmDebugger::GetVFPSingleValue(const char* desc, float* value) {
   bool is_double;
   int regnum = VFPRegisters::Number(desc, &is_double);
   if (regnum != kNoRegister && !is_double) {
@@ -200,7 +200,7 @@ bool Debugger::GetVFPSingleValue(const char* desc, float* value) {
 }
 
 
-bool Debugger::GetVFPDoubleValue(const char* desc, double* value) {
+bool ArmDebugger::GetVFPDoubleValue(const char* desc, double* value) {
   bool is_double;
   int regnum = VFPRegisters::Number(desc, &is_double);
   if (regnum != kNoRegister && is_double) {
@@ -211,7 +211,7 @@ bool Debugger::GetVFPDoubleValue(const char* desc, double* value) {
 }
 
 
-bool Debugger::SetBreakpoint(Instruction* breakpc) {
+bool ArmDebugger::SetBreakpoint(Instruction* breakpc) {
   // Check if a breakpoint can be set. If not return without any side-effects.
   if (sim_->break_pc_ != NULL) {
     return false;
@@ -226,7 +226,7 @@ bool Debugger::SetBreakpoint(Instruction* breakpc) {
 }
 
 
-bool Debugger::DeleteBreakpoint(Instruction* breakpc) {
+bool ArmDebugger::DeleteBreakpoint(Instruction* breakpc) {
   if (sim_->break_pc_ != NULL) {
     sim_->break_pc_->SetInstructionBits(sim_->break_instr_);
   }
@@ -237,21 +237,21 @@ bool Debugger::DeleteBreakpoint(Instruction* breakpc) {
 }
 
 
-void Debugger::UndoBreakpoints() {
+void ArmDebugger::UndoBreakpoints() {
   if (sim_->break_pc_ != NULL) {
     sim_->break_pc_->SetInstructionBits(sim_->break_instr_);
   }
 }
 
 
-void Debugger::RedoBreakpoints() {
+void ArmDebugger::RedoBreakpoints() {
   if (sim_->break_pc_ != NULL) {
     sim_->break_pc_->SetInstructionBits(kBreakpointInstr);
   }
 }
 
 
-void Debugger::Debug() {
+void ArmDebugger::Debug() {
   intptr_t last_pc = -1;
   bool done = false;
 
@@ -539,7 +539,7 @@ void Debugger::Debug() {
         PrintF("    Stops are debug instructions inserted by\n");
         PrintF("    the Assembler::stop() function.\n");
         PrintF("    When hitting a stop, the Simulator will\n");
-        PrintF("    stop and and give control to the Debugger.\n");
+        PrintF("    stop and and give control to the ArmDebugger.\n");
         PrintF("    The first %d stop codes are watched:\n",
                Simulator::kNumOfWatchedStops);
         PrintF("    - They can be enabled / disabled: the Simulator\n");
@@ -593,7 +593,9 @@ static bool AllOnOnePage(uintptr_t start, int size) {
 }
 
 
-void Simulator::FlushICache(void* start_addr, size_t size) {
+void Simulator::FlushICache(v8::internal::HashMap* i_cache,
+                            void* start_addr,
+                            size_t size) {
   intptr_t start = reinterpret_cast<intptr_t>(start_addr);
   int intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -602,22 +604,22 @@ void Simulator::FlushICache(void* start_addr, size_t size) {
   int offset = (start & CachePage::kPageMask);
   while (!AllOnOnePage(start, size - 1)) {
     int bytes_to_flush = CachePage::kPageSize - offset;
-    FlushOnePage(start, bytes_to_flush);
+    FlushOnePage(i_cache, start, bytes_to_flush);
     start += bytes_to_flush;
     size -= bytes_to_flush;
     ASSERT_EQ(0, start & CachePage::kPageMask);
     offset = 0;
   }
   if (size != 0) {
-    FlushOnePage(start, size);
+    FlushOnePage(i_cache, start, size);
   }
 }
 
 
-CachePage* Simulator::GetCachePage(void* page) {
-  v8::internal::HashMap::Entry* entry = i_cache_->Lookup(page,
-                                                         ICacheHash(page),
-                                                         true);
+CachePage* Simulator::GetCachePage(v8::internal::HashMap* i_cache, void* page) {
+  v8::internal::HashMap::Entry* entry = i_cache->Lookup(page,
+                                                        ICacheHash(page),
+                                                        true);
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
     entry->value = new_page;
@@ -627,25 +629,28 @@ CachePage* Simulator::GetCachePage(void* page) {
 
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(intptr_t start, int size) {
+void Simulator::FlushOnePage(v8::internal::HashMap* i_cache,
+                             intptr_t start,
+                             int size) {
   ASSERT(size <= CachePage::kPageSize);
   ASSERT(AllOnOnePage(start, size - 1));
   ASSERT((start & CachePage::kLineMask) == 0);
   ASSERT((size & CachePage::kLineMask) == 0);
   void* page = reinterpret_cast<void*>(start & (~CachePage::kPageMask));
   int offset = (start & CachePage::kPageMask);
-  CachePage* cache_page = GetCachePage(page);
+  CachePage* cache_page = GetCachePage(i_cache, page);
   char* valid_bytemap = cache_page->ValidityByte(offset);
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
 
-void Simulator::CheckICache(Instruction* instr) {
+void Simulator::CheckICache(v8::internal::HashMap* i_cache,
+                            Instruction* instr) {
   intptr_t address = reinterpret_cast<intptr_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
   int offset = (address & CachePage::kPageMask);
-  CachePage* cache_page = GetCachePage(page);
+  CachePage* cache_page = GetCachePage(i_cache, page);
   char* cache_valid_byte = cache_page->ValidityByte(offset);
   bool cache_hit = (*cache_valid_byte == CachePage::LINE_VALID);
   char* cached_line = cache_page->CachedData(offset & ~CachePage::kLineMask);
@@ -662,27 +667,18 @@ void Simulator::CheckICache(Instruction* instr) {
 }
 
 
-// Create one simulator per thread and keep it in thread local storage.
-static v8::internal::Thread::LocalStorageKey simulator_key;
-
-
-bool Simulator::initialized_ = false;
-
-
 void Simulator::Initialize() {
-  if (initialized_) return;
-  simulator_key = v8::internal::Thread::CreateThreadLocalKey();
-  initialized_ = true;
+  if (Isolate::Current()->simulator_initialized()) return;
+  Isolate::Current()->set_simulator_initialized(true);
   ::v8::internal::ExternalReference::set_redirector(&RedirectExternalReference);
 }
 
 
-v8::internal::HashMap* Simulator::i_cache_ = NULL;
-
-
-Simulator::Simulator() {
+Simulator::Simulator() : isolate_(Isolate::Current()) {
+  i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
     i_cache_ = new v8::internal::HashMap(&ICacheMatch);
+    isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize();
   // Setup simulator support first. Some of this information is needed to
@@ -748,11 +744,14 @@ class Redirection {
       : external_function_(external_function),
         swi_instruction_(al | (0xf*B24) | kCallRtRedirected),
         type_(type),
-        next_(list_) {
-    Simulator::current()->
-        FlushICache(reinterpret_cast<void*>(&swi_instruction_),
-                      Instruction::kInstrSize);
-    list_ = this;
+        next_(NULL) {
+    Isolate* isolate = Isolate::Current();
+    next_ = isolate->simulator_redirection();
+    Simulator::current(isolate)->
+        FlushICache(isolate->simulator_i_cache(),
+                    reinterpret_cast<void*>(&swi_instruction_),
+                    Instruction::kInstrSize);
+    isolate->set_simulator_redirection(this);
   }
 
   void* address_of_swi_instruction() {
@@ -764,8 +763,9 @@ class Redirection {
 
   static Redirection* Get(void* external_function,
                           ExternalReference::Type type) {
-    Redirection* current;
-    for (current = list_; current != NULL; current = current->next_) {
+    Isolate* isolate = Isolate::Current();
+    Redirection* current = isolate->simulator_redirection();
+    for (; current != NULL; current = current->next_) {
       if (current->external_function_ == external_function) return current;
     }
     return new Redirection(external_function, type);
@@ -783,13 +783,9 @@ class Redirection {
   uint32_t swi_instruction_;
   ExternalReference::Type type_;
   Redirection* next_;
-  static Redirection* list_;
 };
 
 
-Redirection* Redirection::list_ = NULL;
-
-
 void* Simulator::RedirectExternalReference(void* external_function,
                                            ExternalReference::Type type) {
   Redirection* redirection = Redirection::Get(external_function, type);
@@ -798,14 +794,20 @@ void* Simulator::RedirectExternalReference(void* external_function,
 
 
 // Get the active Simulator for the current thread.
-Simulator* Simulator::current() {
-  Initialize();
-  Simulator* sim = reinterpret_cast<Simulator*>(
-      v8::internal::Thread::GetThreadLocal(simulator_key));
+Simulator* Simulator::current(Isolate* isolate) {
+  v8::internal::Isolate::PerIsolateThreadData* isolate_data =
+      Isolate::CurrentPerIsolateThreadData();
+  if (isolate_data == NULL) {
+    Isolate::EnterDefaultIsolate();
+    isolate_data = Isolate::CurrentPerIsolateThreadData();
+  }
+  ASSERT(isolate_data != NULL);
+
+  Simulator* sim = isolate_data->simulator();
   if (sim == NULL) {
-    // TODO(146): delete the simulator object when a thread goes away.
+    // TODO(146): delete the simulator object when a thread/isolate goes away.
     sim = new Simulator();
-    v8::internal::Thread::SetThreadLocal(simulator_key, sim);
+    isolate_data->set_simulator(sim);
   }
   return sim;
 }
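Net effect of this hunk: simulators are now keyed per (isolate, thread) pair.
Simulator::current(isolate) resolves the calling thread's PerIsolateThreadData
(entering the default isolate if none is active), lazily constructs a
Simulator there, and caches it with set_simulator(), replacing the
process-wide thread-local key removed above.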
@@ -1533,7 +1535,8 @@ typedef int64_t (*SimulatorRuntimeCall)(int32_t arg0,
                                         int32_t arg1,
                                         int32_t arg2,
                                         int32_t arg3,
-                                        int32_t arg4);
+                                        int32_t arg4,
+                                        int32_t arg5);
 typedef double (*SimulatorRuntimeFPCall)(int32_t arg0,
                                          int32_t arg1,
                                          int32_t arg2,
@@ -1564,7 +1567,8 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       int32_t arg2 = get_register(r2);
       int32_t arg3 = get_register(r3);
       int32_t* stack_pointer = reinterpret_cast<int32_t*>(get_register(sp));
-      int32_t arg4 = *stack_pointer;
+      int32_t arg4 = stack_pointer[0];
+      int32_t arg5 = stack_pointer[1];
       // This is dodgy but it works because the C entry stubs are never moved.
       // See comment in codegen-arm.cc and bug 1242173.
       int32_t saved_lr = get_register(lr);
@@ -1627,20 +1631,22 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
             reinterpret_cast<SimulatorRuntimeCall>(external);
         if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
           PrintF(
-              "Call to host function at %p args %08x, %08x, %08x, %08x, %0xc",
+              "Call to host function at %p"
+              "args %08x, %08x, %08x, %08x, %08x, %08x",
               FUNCTION_ADDR(target),
               arg0,
               arg1,
               arg2,
               arg3,
-              arg4);
+              arg4,
+              arg5);
           if (!stack_aligned) {
             PrintF(" with unaligned stack %08x\n", get_register(sp));
           }
           PrintF("\n");
         }
         CHECK(stack_aligned);
-        int64_t result = target(arg0, arg1, arg2, arg3, arg4);
+        int64_t result = target(arg0, arg1, arg2, arg3, arg4, arg5);
         int32_t lo_res = static_cast<int32_t>(result);
         int32_t hi_res = static_cast<int32_t>(result >> 32);
         if (::v8::internal::FLAG_trace_sim) {
@@ -1654,7 +1660,7 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       break;
     }
     case kBreakpoint: {
-      Debugger dbg(this);
+      ArmDebugger dbg(this);
       dbg.Debug();
       break;
     }
@@ -1668,7 +1674,7 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
         // Stop if it is enabled, otherwise go on jumping over the stop
         // and the message address.
         if (isEnabledStop(code)) {
-          Debugger dbg(this);
+          ArmDebugger dbg(this);
           dbg.Stop(instr);
         } else {
           set_pc(get_pc() + 2 * Instruction::kInstrSize);
@@ -1976,7 +1982,7 @@ void Simulator::DecodeType01(Instruction* instr) {
           break;
         }
         case BKPT: {
-          Debugger dbg(this);
+          ArmDebugger dbg(this);
           PrintF("Simulator hit BKPT.\n");
           dbg.Debug();
           break;
@@ -2964,7 +2970,7 @@ void Simulator::DecodeType6CoprocessorIns(Instruction* instr) {
 // Executes the current instruction.
 void Simulator::InstructionDecode(Instruction* instr) {
   if (v8::internal::FLAG_check_icache) {
-    CheckICache(instr);
+    CheckICache(isolate_->simulator_i_cache(), instr);
   }
   pc_modified_ = false;
   if (::v8::internal::FLAG_trace_sim) {
@@ -3047,7 +3053,7 @@ void Simulator::Execute() {
       Instruction* instr = reinterpret_cast<Instruction*>(program_counter);
       icount_++;
       if (icount_ == ::v8::internal::FLAG_stop_sim_at) {
-        Debugger dbg(this);
+        ArmDebugger dbg(this);
         dbg.Debug();
       } else {
         InstructionDecode(instr);
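
The SoftwareInterrupt changes above widen the runtime-call trampoline from five to six integer arguments: r0-r3 carry the first four, arg4 and arg5 are read from the simulated stack, and the 64-bit result is split back into two 32-bit register halves. A small self-contained sketch of that calling convention (hypothetical harness, not the simulator itself):

    #include <cstdint>
    #include <cstdio>

    typedef int64_t (*SimulatorRuntimeCall)(int32_t, int32_t, int32_t,
                                            int32_t, int32_t, int32_t);

    static int64_t SampleTarget(int32_t a0, int32_t a1, int32_t a2,
                                int32_t a3, int32_t a4, int32_t a5) {
      return static_cast<int64_t>(a0 + a1 + a2 + a3 + a4 + a5);
    }

    int main() {
      int32_t r0 = 1, r1 = 2, r2 = 3, r3 = 4;  // register arguments
      int32_t stack[2] = {5, 6};               // arg4 and arg5 live on the stack
      int32_t arg4 = stack[0];
      int32_t arg5 = stack[1];
      SimulatorRuntimeCall target = SampleTarget;
      int64_t result = target(r0, r1, r2, r3, arg4, arg5);
      int32_t lo_res = static_cast<int32_t>(result);        // goes back in r0
      int32_t hi_res = static_cast<int32_t>(result >> 32);  // goes back in r1
      std::printf("lo=%d hi=%d\n", lo_res, hi_res);
    }
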
index bdf1f8a10663cc29444c15e0dac453a048f7ab17..debdffcbf52fdd6a43cfe8713d7954803624b24e 100644 (file)
@@ -56,11 +56,12 @@ typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
 // should act as a function matching the type arm_regexp_matcher.
 // The fifth argument is a dummy that reserves the space used for
 // the return address added by the ExitFrame in native calls.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
-  (FUNCTION_CAST<arm_regexp_matcher>(entry)(p0, p1, p2, p3, NULL, p4, p5, p6))
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+  (FUNCTION_CAST<arm_regexp_matcher>(entry)(                              \
+      p0, p1, p2, p3, NULL, p4, p5, p6, p7))
 
 #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
-  (reinterpret_cast<TryCatch*>(try_catch_address))
+  reinterpret_cast<TryCatch*>(try_catch_address)
 
 // The stack limit beyond which we will throw stack overflow errors in
 // generated code. Because generated code on arm uses the C stack, we
@@ -78,8 +79,6 @@ class SimulatorStack : public v8::internal::AllStatic {
   static inline void UnregisterCTryCatch() { }
 };
 
-} }  // namespace v8::internal
-
 #else  // !defined(USE_SIMULATOR)
 // Running with a simulator.
 
@@ -123,7 +122,7 @@ class CachePage {
 
 class Simulator {
  public:
-  friend class Debugger;
+  friend class ArmDebugger;
   enum Register {
     no_reg = -1,
     r0 = 0, r1, r2, r3, r4, r5, r6, r7,
@@ -147,7 +146,7 @@ class Simulator {
 
   // The currently executing Simulator instance. Potentially there can be one
   // for each native thread.
-  static Simulator* current();
+  static Simulator* current(v8::internal::Isolate* isolate);
 
   // Accessors for register state. Reading the pc value adheres to the ARM
   // architecture specification and is off by 8 from the currently executing
@@ -191,7 +190,8 @@ class Simulator {
   uintptr_t PopAddress();
 
   // ICache checking.
-  static void FlushICache(void* start, size_t size);
+  static void FlushICache(v8::internal::HashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_lr, end_sim_pc).
@@ -287,9 +287,10 @@ class Simulator {
   void InstructionDecode(Instruction* instr);
 
   // ICache.
-  static void CheckICache(Instruction* instr);
-  static void FlushOnePage(intptr_t start, int size);
-  static CachePage* GetCachePage(void* page);
+  static void CheckICache(v8::internal::HashMap* i_cache, Instruction* instr);
+  static void FlushOnePage(v8::internal::HashMap* i_cache, intptr_t start,
+                           int size);
+  static CachePage* GetCachePage(v8::internal::HashMap* i_cache, void* page);
 
   // Runtime call support.
   static void* RedirectExternalReference(
@@ -333,15 +334,16 @@ class Simulator {
   char* stack_;
   bool pc_modified_;
   int icount_;
-  static bool initialized_;
 
   // Icache simulation
-  static v8::internal::HashMap* i_cache_;
+  v8::internal::HashMap* i_cache_;
 
   // Registered breakpoints.
   Instruction* break_pc_;
   Instr break_instr_;
 
+  v8::internal::Isolate* isolate_;
+
   // A stop is watched if its code is less than kNumOfWatchedStops.
   // Only watched stops support enabling/disabling and the counter feature.
   static const uint32_t kNumOfWatchedStops = 256;
@@ -364,15 +366,16 @@ class Simulator {
 // When running with the simulator transition into simulated execution at this
 // point.
 #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
-  reinterpret_cast<Object*>(Simulator::current()->Call( \
+  reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
       FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
 
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
-  Simulator::current()->Call(entry, 8, p0, p1, p2, p3, NULL, p4, p5, p6)
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+  Simulator::current(Isolate::Current())->Call( \
+      entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
 
-#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
-  try_catch_address == \
-      NULL ? NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
+#define TRY_CATCH_FROM_ADDRESS(try_catch_address)                              \
+  try_catch_address == NULL ?                                                  \
+      NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
 
 
 // The simulator has its own stack. Thus it has a different stack limit from
@@ -383,16 +386,16 @@ class Simulator {
 class SimulatorStack : public v8::internal::AllStatic {
  public:
   static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) {
-    return Simulator::current()->StackLimit();
+    return Simulator::current(Isolate::Current())->StackLimit();
   }
 
   static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
-    Simulator* sim = Simulator::current();
+    Simulator* sim = Simulator::current(Isolate::Current());
     return sim->PushAddress(try_catch_address);
   }
 
   static inline void UnregisterCTryCatch() {
-    Simulator::current()->PopAddress();
+    Simulator::current(Isolate::Current())->PopAddress();
   }
 };
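
With the header changes above, the simulated instruction cache is no longer static Simulator state; the isolate owns the HashMap and it is passed explicitly into CheckICache, FlushICache, FlushOnePage and GetCachePage. A rough standalone model of that flow, with a plain map standing in for v8::internal::HashMap:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>

    using ICache = std::unordered_map<uintptr_t, bool>;  // page address -> "valid"
    constexpr uintptr_t kPageSize = 0x1000;              // illustrative 4 KB pages
    constexpr uintptr_t kPageMask = ~(kPageSize - 1);

    void FlushICache(ICache* i_cache, void* start, size_t size) {
      uintptr_t from = reinterpret_cast<uintptr_t>(start) & kPageMask;
      uintptr_t to = (reinterpret_cast<uintptr_t>(start) + size - 1) & kPageMask;
      for (uintptr_t page = from; page <= to; page += kPageSize) {
        (*i_cache)[page] = false;  // mark the page stale
      }
    }

    bool CheckICache(ICache* i_cache, void* instr) {
      uintptr_t page = reinterpret_cast<uintptr_t>(instr) & kPageMask;
      auto it = i_cache->find(page);
      bool was_valid = (it != i_cache->end()) && it->second;
      (*i_cache)[page] = true;  // validate the page after checking it
      return was_valid;
    }

    int main() {
      ICache cache;  // in the patch this would be isolate_->simulator_i_cache()
      int code[16] = {0};
      std::printf("%d\n", CheckICache(&cache, code));  // 0: page not yet cached
      std::printf("%d\n", CheckICache(&cache, code));  // 1: cached now
      FlushICache(&cache, code, sizeof(code));
      std::printf("%d\n", CheckICache(&cache, code));  // 0: flushed again
    }
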
 
index 61f7801aff30d9377798d993836025efa5bd4072..5678676fb0ceb94ffc12eba9ab3c297032753ee3 100644 (file)
@@ -39,15 +39,16 @@ namespace internal {
 #define __ ACCESS_MASM(masm)
 
 
-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
                        Register offset,
                        Register scratch,
                        Register scratch2) {
-  ExternalReference key_offset(SCTableReference::keyReference(table));
-  ExternalReference value_offset(SCTableReference::valueReference(table));
+  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
 
   uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
   uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
@@ -101,8 +102,8 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                              Register scratch0,
                                              Register scratch1) {
   ASSERT(name->IsSymbol());
-  __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1);
-  __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
+  __ IncrementCounter(COUNTERS->negative_lookups(), 1, scratch0, scratch1);
+  __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1);
 
   Label done;
 
@@ -198,7 +199,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
     }
   }
   __ bind(&done);
-  __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
+  __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1);
 }
 
 
@@ -209,6 +210,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
                               Register scratch,
                               Register extra,
                               Register extra2) {
+  Isolate* isolate = Isolate::Current();
   Label miss;
 
   // Make sure that code is valid. The shifting code relies on the
@@ -248,7 +250,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
           Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
 
   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
 
   // Primary miss: Compute hash for secondary probe.
   __ sub(scratch, scratch, Operand(name));
@@ -258,7 +260,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
           Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
 
   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
 
   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
@@ -288,11 +290,12 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
     MacroAssembler* masm, int index, Register prototype, Label* miss) {
   // Check we're still in the same context.
   __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
-  __ Move(ip, Top::global());
+  __ Move(ip, Isolate::Current()->global());
   __ cmp(prototype, ip);
   __ b(ne, miss);
   // Get the global function with the given index.
-  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
+  JSFunction* function = JSFunction::cast(
+      Isolate::Current()->global_context()->get(index));
   // Load its initial map. The global functions all have initial maps.
   __ Move(prototype, Handle<Map>(function->initial_map()));
   // Load the prototype from the initial map.
@@ -505,9 +508,9 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
   Code* code = NULL;
   if (kind == Code::LOAD_IC) {
-    code = Builtins::builtin(Builtins::LoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
   } else {
-    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
   }
 
   Handle<Code> ic(code);
@@ -548,7 +551,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
                                      JSObject* holder_obj) {
   __ push(name);
   InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
-  ASSERT(!Heap::InNewSpace(interceptor));
+  ASSERT(!HEAP->InNewSpace(interceptor));
   Register scratch = name;
   __ mov(scratch, Operand(Handle<Object>(interceptor)));
   __ push(scratch);
@@ -616,7 +619,7 @@ static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
   // Pass the additional arguments FastHandleApiCall expects.
   Object* call_data = optimization.api_call_info()->data();
   Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
-  if (Heap::InNewSpace(call_data)) {
+  if (HEAP->InNewSpace(call_data)) {
     __ Move(r0, api_call_info_handle);
     __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
   } else {
@@ -710,7 +713,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                      name,
                      holder,
                      miss);
-      return Heap::undefined_value();
+      return HEAP->undefined_value();
     }
   }
 
@@ -746,11 +749,11 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                             (depth2 != kInvalidProtoDepth);
     }
 
-    __ IncrementCounter(&Counters::call_const_interceptor, 1,
+    __ IncrementCounter(COUNTERS->call_const_interceptor(), 1,
                       scratch1, scratch2);
 
     if (can_do_fast_api_call) {
-      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1,
+      __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1,
                           scratch1, scratch2);
       ReserveSpaceForFastApiCall(masm, scratch1);
     }
@@ -811,7 +814,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
       FreeSpaceForFastApiCall(masm);
     }
 
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   void CompileRegular(MacroAssembler* masm,
@@ -942,7 +945,7 @@ static void StoreIntAsFloat(MacroAssembler* masm,
                             Register fval,
                             Register scratch1,
                             Register scratch2) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(s0, ival);
     __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
@@ -1080,7 +1083,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
         !current->IsJSGlobalObject() &&
         !current->IsJSGlobalProxy()) {
       if (!name->IsSymbol()) {
-        MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
+        MaybeObject* maybe_lookup_result = HEAP->LookupSymbol(name);
         Object* lookup_result = NULL;  // Initialization to please compiler.
         if (!maybe_lookup_result->ToObject(&lookup_result)) {
           set_failure(Failure::cast(maybe_lookup_result));
@@ -1100,7 +1103,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
       __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
       reg = holder_reg;  // from now the object is in holder_reg
       __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
-    } else if (Heap::InNewSpace(prototype)) {
+    } else if (HEAP->InNewSpace(prototype)) {
       // Get the map of the current object.
       __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
       __ cmp(scratch1, Operand(Handle<Map>(current->map())));
@@ -1154,7 +1157,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
   __ b(ne, miss);
 
   // Log the check depth.
-  LOG(IntEvent("check-maps-depth", depth + 1));
+  LOG(Isolate::Current(), IntEvent("check-maps-depth", depth + 1));
 
   // Perform security check for access to the global object.
   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -1248,7 +1251,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
   __ push(receiver);
   __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
   Handle<AccessorInfo> callback_handle(callback);
-  if (Heap::InNewSpace(callback_handle->data())) {
+  if (HEAP->InNewSpace(callback_handle->data())) {
     __ Move(scratch3, callback_handle);
     __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
   } else {
@@ -1462,7 +1465,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
   __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
 
   // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
+  if (HEAP->InNewSpace(function)) {
     // We can't embed a pointer to a function in new space so we have
     // to verify that the shared function info is unchanged. This has
     // the nice side effect that multiple closures based on the same
@@ -1486,8 +1489,8 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
 
 
 MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
-                                                      kind_);
+  MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss(
+      arguments().immediate(), kind_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1547,7 +1550,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss;
 
@@ -1707,7 +1710,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss, return_undefined, call_builtin;
 
@@ -1794,7 +1797,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1880,7 +1883,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1971,7 +1974,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -2042,14 +2045,16 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
 
-  if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value();
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3))
+    return HEAP->undefined_value();
+
   CpuFeatures::Scope scope_vfp3(VFP3);
 
   const int argc = arguments().immediate();
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss, slow;
   GenerateNameCheck(name, &miss);
@@ -2191,7 +2196,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -2288,11 +2293,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   ASSERT(optimization.is_simple_api_call());
   // Bail out if object is a global object as we don't want to
   // repatch it to global receiver.
-  if (object->IsGlobalObject()) return Heap::undefined_value();
-  if (cell != NULL) return Heap::undefined_value();
+  if (object->IsGlobalObject()) return HEAP->undefined_value();
+  if (cell != NULL) return HEAP->undefined_value();
   int depth = optimization.GetPrototypeDepthOfExpectedType(
             JSObject::cast(object), holder);
-  if (depth == kInvalidProtoDepth) return Heap::undefined_value();
+  if (depth == kInvalidProtoDepth) return HEAP->undefined_value();
 
   Label miss, miss_before_stack_reserved;
 
@@ -2306,8 +2311,8 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   __ tst(r1, Operand(kSmiTagMask));
   __ b(eq, &miss_before_stack_reserved);
 
-  __ IncrementCounter(&Counters::call_const, 1, r0, r3);
-  __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3);
+  __ IncrementCounter(COUNTERS->call_const_fast_api(), 1, r0, r3);
 
   ReserveSpaceForFastApiCall(masm(), r0);
 
@@ -2371,7 +2376,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
   SharedFunctionInfo* function_info = function->shared();
   switch (check) {
     case RECEIVER_MAP_CHECK:
-      __ IncrementCounter(&Counters::call_const, 1, r0, r3);
+      __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3);
 
       // Check that the maps haven't changed.
       CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
@@ -2562,7 +2567,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
   __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
 
   // Jump to the cached code (tail call).
-  __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4);
   ASSERT(function->is_compiled());
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
@@ -2579,7 +2584,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
 
   // Handle call cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
+  __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3);
   Object* obj;
   { MaybeObject* maybe_obj = GenerateMissBranch();
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -2609,7 +2614,8 @@ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                      r1, r2, r3,
                      &miss);
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2657,7 +2663,8 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2705,7 +2712,8 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2742,13 +2750,14 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
   // Store the value in the cell.
   __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
 
-  __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3);
+  __ IncrementCounter(COUNTERS->named_store_global_inline(), 1, r4, r3);
   __ Ret();
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1, r4, r3);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2795,7 +2804,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, Heap::empty_string());
+  return GetCode(NONEXISTENT, HEAP->empty_string());
 }
 
 
@@ -2930,11 +2939,11 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
   }
 
   __ mov(r0, r4);
-  __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3);
+  __ IncrementCounter(COUNTERS->named_load_global_stub(), 1, r1, r3);
   __ Ret();
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3);
+  __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1, r1, r3);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
@@ -3079,7 +3088,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
   //  -- r1    : receiver
   // -----------------------------------
   Label miss;
-  __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3);
 
   // Check the key is the cached one.
   __ cmp(r0, Operand(Handle<String>(name)));
@@ -3087,7 +3096,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
 
   GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);
+  __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3);
 
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
@@ -3103,7 +3112,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3);
 
   // Check the name hasn't changed.
   __ cmp(r0, Operand(Handle<String>(name)));
@@ -3111,7 +3120,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
 
   GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
+  __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   return GetCode(CALLBACKS, name);
@@ -3179,7 +3188,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4);
 
   // Check that the name has not changed.
   __ cmp(r1, Operand(Handle<String>(name)));
@@ -3195,8 +3204,9 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                      &miss);
   __ bind(&miss);
 
-  __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  __ DecrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedStoreIC_Miss));
 
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
@@ -3240,7 +3250,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
   __ ldr(elements_reg,
          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
   __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
-  __ cmp(scratch, Operand(Handle<Map>(Factory::fixed_array_map())));
+  __ cmp(scratch, Operand(Handle<Map>(FACTORY->fixed_array_map())));
   __ b(ne, &miss);
 
   // Check that the key is within bounds.
@@ -3267,7 +3277,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
   __ Ret();
 
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  Handle<Code> ic(
+      Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -3403,14 +3414,15 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   // Remove caller arguments and receiver from the stack and return.
   __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
   __ add(sp, sp, Operand(kPointerSize));
-  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
-  __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2);
+  __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1, r1, r2);
   __ Jump(lr);
 
   // Jump to the generic stub in case the specialized code cannot handle the
   // construction.
   __ bind(&generic_stub_call);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
 
@@ -3499,7 +3511,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
       __ ldr(value, MemOperand(r3, key, LSL, 1));
       break;
     case kExternalFloatArray:
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         CpuFeatures::Scope scope(VFP3);
         __ add(r2, r3, Operand(key, LSL, 1));
         __ vldr(s0, r2, 0);
@@ -3538,7 +3550,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
     // Now we can use r0 for the result as key is not needed any more.
     __ mov(r0, r5);
 
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       __ vmov(s0, value);
       __ vcvt_f64_s32(d0, s0);
@@ -3553,7 +3565,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
     // The test is different for unsigned int values. Since we need
     // the value to be in the range of a positive smi, we can't
     // handle either of the top two bits being set in the value.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       Label box_int, done;
       __ tst(value, Operand(0xC0000000));
@@ -3617,7 +3629,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
   } else if (array_type == kExternalFloatArray) {
     // For the floating-point array type, we need to always allocate a
     // HeapNumber.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       // Allocate a HeapNumber for the result. Don't use r0 and r1 as
       // AllocateHeapNumber clobbers all registers - also when jumping due to
@@ -3693,7 +3705,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
 
   // Slow case, key and receiver still in r0 and r1.
   __ bind(&slow);
-  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1, r2, r3);
 
   // ---------- S t a t e --------------
   //  -- lr     : return address
@@ -3808,7 +3820,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
     // The WebGL specification leaves the behavior of storing NaN and
     // +/-Infinity into integer arrays basically undefined. For more
     // reproducible behavior, convert these to zero.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
 
 
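
Throughout the stub compiler above, former static calls (Heap::InNewSpace, &Counters::foo, Builtins::builtin) become instance calls on objects the isolate owns, reached through the HEAP, FACTORY and COUNTERS shorthands or an explicit Isolate::Current(). A toy model of that accessor pattern (hypothetical macro definitions mirroring the usage, not the real ones):

    #include <cstdio>

    struct Heap { bool InNewSpace(const void*) const { return false; } };
    struct Counters { int call_const = 0; };

    struct Isolate {
      static Isolate* Current() { static Isolate isolate; return &isolate; }
      Heap* heap() { return &heap_; }
      Counters* counters() { return &counters_; }
     private:
      Heap heap_;
      Counters counters_;
    };

    // Shorthands route every use through the current isolate.
    #define HEAP (Isolate::Current()->heap())
    #define COUNTERS (Isolate::Current()->counters())

    int main() {
      int dummy = 0;
      COUNTERS->call_const++;  // was a static Counters::call_const
      std::printf("in_new_space=%d call_const=%d\n",
                  HEAP->InNewSpace(&dummy),  // was static Heap::InNewSpace(...)
                  COUNTERS->call_const);
    }
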
index 544e405dbbcce99ec3692fe326b0c5d4d45a66a4..c4919b84be573919d78718549b766e34dc0b5c2e 100644 (file)
@@ -288,7 +288,7 @@ void VirtualFrame::CallJSFunction(int arg_count) {
 }
 
 
-void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
+void VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
   SpillAll();
   Forget(arg_count);
   ASSERT(cgen()->HasValidEntryRegisters());
@@ -321,7 +321,8 @@ void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
 
 
 void VirtualFrame::CallLoadIC(Handle<String> name, RelocInfo::Mode mode) {
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   PopToR0();
   SpillAll();
   __ mov(r2, Operand(name));
@@ -332,7 +333,7 @@ void VirtualFrame::CallLoadIC(Handle<String> name, RelocInfo::Mode mode) {
 void VirtualFrame::CallStoreIC(Handle<String> name,
                                bool is_contextual,
                                StrictModeFlag strict_mode) {
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                    : Builtins::StoreIC_Initialize));
   PopToR0();
@@ -352,7 +353,8 @@ void VirtualFrame::CallStoreIC(Handle<String> name,
 
 
 void VirtualFrame::CallKeyedLoadIC() {
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   PopToR1R0();
   SpillAll();
   CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
@@ -360,7 +362,7 @@ void VirtualFrame::CallKeyedLoadIC() {
 
 
 void VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                    : Builtins::KeyedStoreIC_Initialize));
   PopToR1R0();
@@ -385,7 +387,8 @@ void VirtualFrame::CallCodeObject(Handle<Code> code,
       ASSERT(dropped_args == 0);
       break;
     case Code::BUILTIN:
-      ASSERT(*code == Builtins::builtin(Builtins::JSConstructCall));
+      ASSERT(*code == Isolate::Current()->builtins()->builtin(
+          Builtins::JSConstructCall));
       break;
     default:
       UNREACHABLE();
@@ -422,9 +425,6 @@ const VirtualFrame::TopOfStack VirtualFrame::kStateAfterPush[TOS_STATES] =
     { R0_TOS, R1_R0_TOS, R0_R1_TOS, R0_R1_TOS, R1_R0_TOS };
 
 
-bool VirtualFrame::SpilledScope::is_spilled_ = false;
-
-
 void VirtualFrame::Drop(int count) {
   ASSERT(count >= 0);
   ASSERT(height() >= count);
index 76470bdc53e937c20bdce1ab404805736606bc5d..6d67e70c0364ae6a0c8fe537bfac55e1b94f2c37 100644 (file)
@@ -55,23 +55,26 @@ class VirtualFrame : public ZoneObject {
   class SpilledScope BASE_EMBEDDED {
    public:
     explicit SpilledScope(VirtualFrame* frame)
-      : old_is_spilled_(is_spilled_) {
+      : old_is_spilled_(
+          Isolate::Current()->is_virtual_frame_in_spilled_scope()) {
       if (frame != NULL) {
-        if (!is_spilled_) {
+        if (!old_is_spilled_) {
           frame->SpillAll();
         } else {
           frame->AssertIsSpilled();
         }
       }
-      is_spilled_ = true;
+      Isolate::Current()->set_is_virtual_frame_in_spilled_scope(true);
     }
     ~SpilledScope() {
-      is_spilled_ = old_is_spilled_;
+      Isolate::Current()->set_is_virtual_frame_in_spilled_scope(
+          old_is_spilled_);
+    }
+    static bool is_spilled() {
+      return Isolate::Current()->is_virtual_frame_in_spilled_scope();
     }
-    static bool is_spilled() { return is_spilled_; }
 
    private:
-    static bool is_spilled_;
     int old_is_spilled_;
 
     SpilledScope() { }
@@ -274,7 +277,7 @@ class VirtualFrame : public ZoneObject {
 
   // Call runtime given the number of arguments expected on (and
   // removed from) the stack.
-  void CallRuntime(Runtime::Function* f, int arg_count);
+  void CallRuntime(const Runtime::Function* f, int arg_count);
   void CallRuntime(Runtime::FunctionId id, int arg_count);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
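
The SpilledScope rewrite above moves the "currently spilled" flag from a static class member into isolate state while keeping the same save/restore-on-destruction behaviour. A compact self-contained sketch of that RAII pattern (hypothetical single-flag Isolate):

    #include <cstdio>

    struct Isolate {
      static Isolate* Current() { static Isolate isolate; return &isolate; }
      bool is_virtual_frame_in_spilled_scope() const { return spilled_; }
      void set_is_virtual_frame_in_spilled_scope(bool v) { spilled_ = v; }
     private:
      bool spilled_ = false;
    };

    class SpilledScope {
     public:
      SpilledScope()
          : old_is_spilled_(
                Isolate::Current()->is_virtual_frame_in_spilled_scope()) {
        Isolate::Current()->set_is_virtual_frame_in_spilled_scope(true);
      }
      ~SpilledScope() {
        Isolate::Current()->set_is_virtual_frame_in_spilled_scope(old_is_spilled_);
      }
      static bool is_spilled() {
        return Isolate::Current()->is_virtual_frame_in_spilled_scope();
      }
     private:
      bool old_is_spilled_;  // previous value, restored on scope exit
    };

    int main() {
      std::printf("%d\n", SpilledScope::is_spilled());    // 0
      {
        SpilledScope scope;
        std::printf("%d\n", SpilledScope::is_spilled());  // 1 inside the scope
      }
      std::printf("%d\n", SpilledScope::is_spilled());    // 0 restored
    }
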
index de3711b341b33ca098a347188126237ee7b9e129..a0e3dc6678ad2f15ead688a866c19026bc52982d 100644 (file)
@@ -217,7 +217,7 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
 #ifdef DEBUG
   byte* begin_pos = pos_;
 #endif
-  Counters::reloc_info_count.Increment();
+  COUNTERS->reloc_info_count()->Increment();
   ASSERT(rinfo->pc() - last_pc_ >= 0);
   ASSERT(RelocInfo::NUMBER_OF_MODES <= kMaxRelocModes);
   // Use unsigned delta-encoding for pc.
@@ -526,7 +526,7 @@ void RelocInfo::Verify() {
       ASSERT(addr != NULL);
       // Check that we can find the right code object.
       Code* code = Code::GetCodeFromTargetAddress(addr);
-      Object* found = Heap::FindCodeObject(addr);
+      Object* found = HEAP->FindCodeObject(addr);
       ASSERT(found->IsCode());
       ASSERT(code->address() == HeapObject::cast(found)->address());
       break;
@@ -562,31 +562,36 @@ ExternalReference::ExternalReference(
 
 
 ExternalReference::ExternalReference(Builtins::Name name)
-  : address_(Builtins::builtin_address(name)) {}
+  : address_(Isolate::Current()->builtins()->builtin_address(name)) {}
 
 
 ExternalReference::ExternalReference(Runtime::FunctionId id)
   : address_(Redirect(Runtime::FunctionForId(id)->entry)) {}
 
 
-ExternalReference::ExternalReference(Runtime::Function* f)
+ExternalReference::ExternalReference(const Runtime::Function* f)
   : address_(Redirect(f->entry)) {}
 
 
+ExternalReference ExternalReference::isolate_address() {
+  return ExternalReference(Isolate::Current());
+}
+
+
 ExternalReference::ExternalReference(const IC_Utility& ic_utility)
   : address_(Redirect(ic_utility.address())) {}
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 ExternalReference::ExternalReference(const Debug_Address& debug_address)
-  : address_(debug_address.address()) {}
+  : address_(debug_address.address(Isolate::Current())) {}
 #endif
 
 ExternalReference::ExternalReference(StatsCounter* counter)
   : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
 
 
-ExternalReference::ExternalReference(Top::AddressId id)
-  : address_(Top::get_address_from_id(id)) {}
+ExternalReference::ExternalReference(Isolate::AddressId id)
+  : address_(Isolate::Current()->get_address_from_id(id)) {}
 
 
 ExternalReference::ExternalReference(const SCTableReference& table_ref)
@@ -616,7 +621,8 @@ ExternalReference ExternalReference::random_uint32_function() {
 
 
 ExternalReference ExternalReference::transcendental_cache_array_address() {
-  return ExternalReference(TranscendentalCache::cache_array_address());
+  return ExternalReference(Isolate::Current()->transcendental_cache()->
+      cache_array_address());
 }
 
 
@@ -633,72 +639,78 @@ ExternalReference ExternalReference::compute_output_frames_function() {
 
 
 ExternalReference ExternalReference::global_contexts_list() {
-  return ExternalReference(Heap::global_contexts_list_address());
+  return ExternalReference(Isolate::Current()->
+      heap()->global_contexts_list_address());
 }
 
 
 ExternalReference ExternalReference::keyed_lookup_cache_keys() {
-  return ExternalReference(KeyedLookupCache::keys_address());
+  return ExternalReference(Isolate::Current()->
+      keyed_lookup_cache()->keys_address());
 }
 
 
 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets() {
-  return ExternalReference(KeyedLookupCache::field_offsets_address());
+  return ExternalReference(Isolate::Current()->
+      keyed_lookup_cache()->field_offsets_address());
 }
 
 
 ExternalReference ExternalReference::the_hole_value_location() {
-  return ExternalReference(Factory::the_hole_value().location());
+  return ExternalReference(FACTORY->the_hole_value().location());
 }
 
 
 ExternalReference ExternalReference::arguments_marker_location() {
-  return ExternalReference(Factory::arguments_marker().location());
+  return ExternalReference(FACTORY->arguments_marker().location());
 }
 
 
 ExternalReference ExternalReference::roots_address() {
-  return ExternalReference(Heap::roots_address());
+  return ExternalReference(HEAP->roots_address());
 }
 
 
 ExternalReference ExternalReference::address_of_stack_limit() {
-  return ExternalReference(StackGuard::address_of_jslimit());
+  return ExternalReference(
+      Isolate::Current()->stack_guard()->address_of_jslimit());
 }
 
 
 ExternalReference ExternalReference::address_of_real_stack_limit() {
-  return ExternalReference(StackGuard::address_of_real_jslimit());
+  return ExternalReference(
+      Isolate::Current()->stack_guard()->address_of_real_jslimit());
 }
 
 
 ExternalReference ExternalReference::address_of_regexp_stack_limit() {
-  return ExternalReference(RegExpStack::limit_address());
+  return ExternalReference(
+      Isolate::Current()->regexp_stack()->limit_address());
 }
 
 
 ExternalReference ExternalReference::new_space_start() {
-  return ExternalReference(Heap::NewSpaceStart());
+  return ExternalReference(HEAP->NewSpaceStart());
 }
 
 
 ExternalReference ExternalReference::new_space_mask() {
-  return ExternalReference(reinterpret_cast<Address>(Heap::NewSpaceMask()));
+  return ExternalReference(reinterpret_cast<Address>(HEAP->NewSpaceMask()));
 }
 
 
 ExternalReference ExternalReference::new_space_allocation_top_address() {
-  return ExternalReference(Heap::NewSpaceAllocationTopAddress());
+  return ExternalReference(HEAP->NewSpaceAllocationTopAddress());
 }
 
 
 ExternalReference ExternalReference::heap_always_allocate_scope_depth() {
-  return ExternalReference(Heap::always_allocate_scope_depth_address());
+  return ExternalReference(HEAP->always_allocate_scope_depth_address());
 }
 
 
 ExternalReference ExternalReference::new_space_allocation_limit_address() {
-  return ExternalReference(Heap::NewSpaceAllocationLimitAddress());
+  return ExternalReference(HEAP->NewSpaceAllocationLimitAddress());
 }
 
 
@@ -718,7 +730,7 @@ ExternalReference ExternalReference::handle_scope_limit_address() {
 
 
 ExternalReference ExternalReference::scheduled_exception_address() {
-  return ExternalReference(Top::scheduled_exception_address());
+  return ExternalReference(Isolate::Current()->scheduled_exception_address());
 }
 
 
@@ -784,15 +796,18 @@ ExternalReference ExternalReference::re_word_character_map() {
 }
 
 ExternalReference ExternalReference::address_of_static_offsets_vector() {
-  return ExternalReference(OffsetsVector::static_offsets_vector_address());
+  return ExternalReference(OffsetsVector::static_offsets_vector_address(
+      Isolate::Current()));
 }
 
 ExternalReference ExternalReference::address_of_regexp_stack_memory_address() {
-  return ExternalReference(RegExpStack::memory_address());
+  return ExternalReference(
+      Isolate::Current()->regexp_stack()->memory_address());
 }
 
 ExternalReference ExternalReference::address_of_regexp_stack_memory_size() {
-  return ExternalReference(RegExpStack::memory_size_address());
+  return ExternalReference(
+      Isolate::Current()->regexp_stack()->memory_size_address());
 }
 
 #endif  // V8_INTERPRETED_REGEXP
@@ -943,10 +958,6 @@ ExternalReference ExternalReference::compare_doubles() {
 }
 
 
-ExternalReference::ExternalReferenceRedirector*
-    ExternalReference::redirector_ = NULL;
-
-
 #ifdef ENABLE_DEBUGGER_SUPPORT
 ExternalReference ExternalReference::debug_break() {
   return ExternalReference(Redirect(FUNCTION_ADDR(Debug::Break)));
@@ -954,7 +965,7 @@ ExternalReference ExternalReference::debug_break() {
 
 
 ExternalReference ExternalReference::debug_step_in_fp_address() {
-  return ExternalReference(Debug::step_in_fp_addr());
+  return ExternalReference(Isolate::Current()->debug()->step_in_fp_addr());
 }
 #endif
 
index b66421a2e72a1cb97137d45d66bbba441ac41fb0..d3207040b3e55c986c1ccca4f3dbd240c4bfbfff 100644 (file)
@@ -37,7 +37,6 @@
 
 #include "gdb-jit.h"
 #include "runtime.h"
-#include "top.h"
 #include "token.h"
 
 namespace v8 {
@@ -318,7 +317,7 @@ class RelocInfo BASE_EMBEDDED {
   INLINE(void set_call_object(Object* target));
   INLINE(Object** call_object_address());
 
-  template<typename StaticVisitor> inline void Visit();
+  template<typename StaticVisitor> inline void Visit(Heap* heap);
   inline void Visit(ObjectVisitor* v);
 
   // Patch the code with some other code.
@@ -510,7 +509,7 @@ class ExternalReference BASE_EMBEDDED {
 
   explicit ExternalReference(Runtime::FunctionId id);
 
-  explicit ExternalReference(Runtime::Function* f);
+  explicit ExternalReference(const Runtime::Function* f);
 
   explicit ExternalReference(const IC_Utility& ic_utility);
 
@@ -520,10 +519,13 @@ class ExternalReference BASE_EMBEDDED {
 
   explicit ExternalReference(StatsCounter* counter);
 
-  explicit ExternalReference(Top::AddressId id);
+  explicit ExternalReference(Isolate::AddressId id);
 
   explicit ExternalReference(const SCTableReference& table_ref);
 
+  // Isolate::Current() as an external reference.
+  static ExternalReference isolate_address();
+
   // One-of-a-kind references. These references are not part of a general
   // pattern. This means that they have to be added to the
   // ExternalReferenceTable in serialize.cc manually.
@@ -627,29 +629,35 @@ class ExternalReference BASE_EMBEDDED {
   // This lets you register a function that rewrites all external references.
   // Used by the ARM simulator to catch calls to external references.
   static void set_redirector(ExternalReferenceRedirector* redirector) {
-    ASSERT(redirector_ == NULL);  // We can't stack them.
-    redirector_ = redirector;
+    // We can't stack them.
+    ASSERT(Isolate::Current()->external_reference_redirector() == NULL);
+    Isolate::Current()->set_external_reference_redirector(
+        reinterpret_cast<ExternalReferenceRedirectorPointer*>(redirector));
   }
 
  private:
   explicit ExternalReference(void* address)
       : address_(address) {}
 
-  static ExternalReferenceRedirector* redirector_;
-
   static void* Redirect(void* address,
                         Type type = ExternalReference::BUILTIN_CALL) {
-    if (redirector_ == NULL) return address;
-    void* answer = (*redirector_)(address, type);
+    ExternalReferenceRedirector* redirector =
+        reinterpret_cast<ExternalReferenceRedirector*>(
+            Isolate::Current()->external_reference_redirector());
+    if (redirector == NULL) return address;
+    void* answer = (*redirector)(address, type);
     return answer;
   }
 
   static void* Redirect(Address address_arg,
                         Type type = ExternalReference::BUILTIN_CALL) {
+    ExternalReferenceRedirector* redirector =
+        reinterpret_cast<ExternalReferenceRedirector*>(
+            Isolate::Current()->external_reference_redirector());
     void* address = reinterpret_cast<void*>(address_arg);
-    void* answer = (redirector_ == NULL) ?
+    void* answer = (redirector == NULL) ?
                    address :
-                   (*redirector_)(address, type);
+                   (*redirector)(address, type);
     return answer;
   }
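
In the assembler changes above, the external-reference redirector also stops being a static member: Redirect() now fetches whichever redirector the current isolate registered, so each isolate's simulator can rewrite addresses independently. A minimal sketch of that per-isolate indirection (hypothetical Isolate and redirector types):

    #include <cstdio>

    using Redirector = void* (*)(void* address, int type);

    struct Isolate {
      static Isolate* Current() { static Isolate isolate; return &isolate; }
      Redirector external_reference_redirector() const { return redirector_; }
      void set_external_reference_redirector(Redirector r) { redirector_ = r; }
     private:
      Redirector redirector_ = nullptr;
    };

    void* Redirect(void* address, int type = 0) {
      Redirector redirector = Isolate::Current()->external_reference_redirector();
      if (redirector == nullptr) return address;  // no simulator: keep the address
      return (*redirector)(address, type);
    }

    void* LoggingRedirector(void* address, int) {
      std::printf("redirecting %p\n", address);  // stand-in for a simulator trampoline
      return address;
    }

    int main() {
      int target = 0;
      Redirect(&target);  // passes through: nothing registered yet
      Isolate::Current()->set_external_reference_redirector(LoggingRedirector);
      Redirect(&target);  // now goes through the registered redirector
    }
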
 
index 184aaa5c3ded39daf86828fe100b97954395fe0e..a61632243b7193232efd83b883e1d9b60e28fcd1 100644 (file)
 namespace v8 {
 namespace internal {
 
-unsigned AstNode::current_id_ = 0;
-unsigned AstNode::count_ = 0;
-VariableProxySentinel VariableProxySentinel::this_proxy_(true);
-VariableProxySentinel VariableProxySentinel::identifier_proxy_(false);
-ValidLeftHandSideSentinel ValidLeftHandSideSentinel::instance_;
-Property Property::this_property_(VariableProxySentinel::this_proxy(), NULL, 0);
-Call Call::sentinel_(NULL, NULL, 0);
+AstSentinels::AstSentinels()
+    : this_proxy_(true),
+      identifier_proxy_(false),
+      valid_left_hand_side_sentinel_(),
+      this_property_(&this_proxy_, NULL, 0),
+      call_sentinel_(NULL, NULL, 0) {
+}
 
 
 // ----------------------------------------------------------------------------
@@ -170,7 +170,7 @@ ObjectLiteral::Property::Property(Literal* key, Expression* value) {
   key_ = key;
   value_ = value;
   Object* k = *key->handle();
-  if (k->IsSymbol() && Heap::Proto_symbol()->Equals(String::cast(k))) {
+  if (k->IsSymbol() && HEAP->Proto_symbol()->Equals(String::cast(k))) {
     kind_ = PROTOTYPE;
   } else if (value_->AsMaterializedLiteral() != NULL) {
     kind_ = MATERIALIZED_LITERAL;
@@ -249,10 +249,11 @@ void ObjectLiteral::CalculateEmitStore() {
     uint32_t hash;
     HashMap* table;
     void* key;
+    Factory* factory = Isolate::Current()->factory();
     if (handle->IsSymbol()) {
       Handle<String> name(String::cast(*handle));
       if (name->AsArrayIndex(&hash)) {
-        Handle<Object> key_handle = Factory::NewNumberFromUint(hash);
+        Handle<Object> key_handle = factory->NewNumberFromUint(hash);
         key = key_handle.location();
         table = &elements;
       } else {
@@ -269,7 +270,7 @@ void ObjectLiteral::CalculateEmitStore() {
       char arr[100];
       Vector<char> buffer(arr, ARRAY_SIZE(arr));
       const char* str = DoubleToCString(num, buffer);
-      Handle<String> name = Factory::NewStringFromAscii(CStrVector(str));
+      Handle<String> name = factory->NewStringFromAscii(CStrVector(str));
       key = name.location();
       hash = name->Hash();
       table = &properties;
@@ -634,7 +635,7 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
       Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
       // If the function is in new space we assume it's more likely to
       // change and thus prefer the general IC code.
-      if (!Heap::InNewSpace(*candidate) &&
+      if (!HEAP->InNewSpace(*candidate) &&
           CanCallWithoutIC(candidate, arguments()->length())) {
         target_ = candidate;
         return true;
@@ -699,7 +700,7 @@ void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
 
 bool AstVisitor::CheckStackOverflow() {
   if (stack_overflow_) return true;
-  StackLimitCheck check;
+  StackLimitCheck check(Isolate::Current());
   if (!check.HasOverflowed()) return false;
   return (stack_overflow_ = true);
 }
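
The ast.cc hunks above make AST node ids and counts per-isolate rather than static, so parsing in one isolate no longer advances another isolate's id counter. A small standalone model of the id bookkeeping (hypothetical Isolate accessors mirroring the ones used above):

    #include <cstdio>

    struct Isolate {
      static Isolate* Current() { static Isolate isolate; return &isolate; }
      unsigned ast_node_id() const { return ast_node_id_; }
      void set_ast_node_id(unsigned id) { ast_node_id_ = id; }
     private:
      unsigned ast_node_id_ = 0;
    };

    unsigned GetNextId() {
      Isolate* isolate = Isolate::Current();
      unsigned tmp = isolate->ast_node_id();
      isolate->set_ast_node_id(tmp + 1);
      return tmp;
    }

    unsigned ReserveIdRange(int n) {
      Isolate* isolate = Isolate::Current();
      unsigned tmp = isolate->ast_node_id();
      isolate->set_ast_node_id(tmp + n);  // claims ids tmp .. tmp + n - 1
      return tmp;
    }

    int main() {
      unsigned first = GetNextId();        // 0
      unsigned range = ReserveIdRange(5);  // 1, reserving 1..5
      unsigned next = GetNextId();         // 6
      std::printf("%u %u %u\n", first, range, next);
    }
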
index b00612dad6e425e81070c6af51b95703192c8cf0..19d3580c4bf48dca3779864922ae2702ab843a87 100644 (file)
--- a/src/ast.h
+++ b/src/ast.h
@@ -135,7 +135,10 @@ class AstNode: public ZoneObject {
 
   static const int kNoNumber = -1;
 
-  AstNode() : id_(GetNextId()) { count_++; }
+  AstNode() : id_(GetNextId()) {
+    Isolate* isolate = Isolate::Current();
+    isolate->set_ast_node_count(isolate->ast_node_count() + 1);
+  }
 
   virtual ~AstNode() { }
 
@@ -159,21 +162,25 @@ class AstNode: public ZoneObject {
   // True if the node is simple enough for us to inline calls containing it.
   virtual bool IsInlineable() const { return false; }
 
-  static int Count() { return count_; }
-  static void ResetIds() { current_id_ = 0; }
+  static int Count() { return Isolate::Current()->ast_node_count(); }
+  static void ResetIds() { Isolate::Current()->set_ast_node_id(0); }
   unsigned id() const { return id_; }
 
  protected:
-  static unsigned GetNextId() { return current_id_++; }
+  static unsigned GetNextId() {
+    Isolate* isolate = Isolate::Current();
+    unsigned tmp = isolate->ast_node_id();
+    isolate->set_ast_node_id(tmp + 1);
+    return tmp;
+  }
   static unsigned ReserveIdRange(int n) {
-    unsigned tmp = current_id_;
-    current_id_ += n;
+    Isolate* isolate = Isolate::Current();
+    unsigned tmp = isolate->ast_node_id();
+    isolate->set_ast_node_id(tmp + n);
     return tmp;
   }
 
  private:
-  static unsigned current_id_;
-  static unsigned count_;
   unsigned id_;
 
   friend class CaseClause;  // Generates AST IDs.
@@ -335,10 +342,6 @@ class ValidLeftHandSideSentinel: public Expression {
  public:
   virtual bool IsValidLeftHandSide() { return true; }
   virtual void Accept(AstVisitor* v) { UNREACHABLE(); }
-  static ValidLeftHandSideSentinel* instance() { return &instance_; }
-
- private:
-  static ValidLeftHandSideSentinel instance_;
 };
 
 
@@ -898,10 +901,17 @@ class Literal: public Expression {
   virtual bool ToBooleanIsFalse() { return handle_->ToBoolean()->IsFalse(); }
 
   // Identity testers.
-  bool IsNull() const { return handle_.is_identical_to(Factory::null_value()); }
-  bool IsTrue() const { return handle_.is_identical_to(Factory::true_value()); }
+  bool IsNull() const {
+    ASSERT(!handle_.is_null());
+    return handle_->IsNull();
+  }
+  bool IsTrue() const {
+    ASSERT(!handle_.is_null());
+    return handle_->IsTrue();
+  }
   bool IsFalse() const {
-    return handle_.is_identical_to(Factory::false_value());
+    ASSERT(!handle_.is_null());
+    return handle_->IsFalse();
   }
 
   Handle<Object> handle() const { return handle_; }
@@ -1138,15 +1148,11 @@ class VariableProxy: public Expression {
 class VariableProxySentinel: public VariableProxy {
  public:
   virtual bool IsValidLeftHandSide() { return !is_this(); }
-  static VariableProxySentinel* this_proxy() { return &this_proxy_; }
-  static VariableProxySentinel* identifier_proxy() {
-    return &identifier_proxy_;
-  }
 
  private:
   explicit VariableProxySentinel(bool is_this) : VariableProxy(is_this) { }
-  static VariableProxySentinel this_proxy_;
-  static VariableProxySentinel identifier_proxy_;
+
+  friend class AstSentinels;
 };
 
 
@@ -1253,10 +1259,6 @@ class Property: public Expression {
     return monomorphic_receiver_type_;
   }
 
-  // Returns a property singleton property access on 'this'.  Used
-  // during preparsing.
-  static Property* this_property() { return &this_property_; }
-
  private:
   Expression* obj_;
   Expression* key_;
@@ -1272,9 +1274,6 @@ class Property: public Expression {
   bool is_arguments_access_ : 1;
   Handle<Map> monomorphic_receiver_type_;
   ExternalArrayType array_type_;
-
-  // Dummy property used during preparsing.
-  static Property this_property_;
 };
 
 
@@ -1312,8 +1311,6 @@ class Call: public Expression {
   // Bailout support.
   int ReturnId() const { return return_id_; }
 
-  static Call* sentinel() { return &sentinel_; }
-
 #ifdef DEBUG
   // Used to assert that the FullCodeGenerator records the return site.
   bool return_is_recorded_;
@@ -1332,8 +1329,36 @@ class Call: public Expression {
   Handle<JSGlobalPropertyCell> cell_;
 
   int return_id_;
+};
 
-  static Call sentinel_;
+
+class AstSentinels {
+ public:
+  ~AstSentinels() { }
+
+  // Returns a property singleton property access on 'this'.  Used
+  // during preparsing.
+  Property* this_property() { return &this_property_; }
+  VariableProxySentinel* this_proxy() { return &this_proxy_; }
+  VariableProxySentinel* identifier_proxy() { return &identifier_proxy_; }
+  ValidLeftHandSideSentinel* valid_left_hand_side_sentinel() {
+    return &valid_left_hand_side_sentinel_;
+  }
+  Call* call_sentinel() { return &call_sentinel_; }
+  EmptyStatement* empty_statement() { return &empty_statement_; }
+
+ private:
+  AstSentinels();
+  VariableProxySentinel this_proxy_;
+  VariableProxySentinel identifier_proxy_;
+  ValidLeftHandSideSentinel valid_left_hand_side_sentinel_;
+  Property this_property_;
+  Call call_sentinel_;
+  EmptyStatement empty_statement_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(AstSentinels);
 };
 
 
@@ -1364,7 +1389,7 @@ class CallNew: public Expression {
 class CallRuntime: public Expression {
  public:
   CallRuntime(Handle<String> name,
-              Runtime::Function* function,
+              const Runtime::Function* function,
               ZoneList<Expression*>* arguments)
       : name_(name), function_(function), arguments_(arguments) { }
 
@@ -1373,13 +1398,13 @@ class CallRuntime: public Expression {
   virtual bool IsInlineable() const;
 
   Handle<String> name() const { return name_; }
-  Runtime::Function* function() const { return function_; }
+  const Runtime::Function* function() const { return function_; }
   ZoneList<Expression*>* arguments() const { return arguments_; }
   bool is_jsruntime() const { return function_ == NULL; }
 
  private:
   Handle<String> name_;
-  Runtime::Function* function_;
+  const Runtime::Function* function_;
   ZoneList<Expression*>* arguments_;
 };
 
@@ -1707,7 +1732,7 @@ class FunctionLiteral: public Expression {
         is_expression_(is_expression),
         contains_loops_(contains_loops),
         function_token_position_(RelocInfo::kNoPosition),
-        inferred_name_(Heap::empty_string()),
+        inferred_name_(HEAP->empty_string()),
         try_full_codegen_(false),
         pretenure_(false) { }
 
@@ -2189,6 +2214,7 @@ class AstVisitor BASE_EMBEDDED {
   bool stack_overflow_;
 };
 
+
 } }  // namespace v8::internal
 
 #endif  // V8_AST_H_
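
The new AstSentinels class above bundles what used to be per-class static sentinel singletons (this_proxy_, identifier_proxy_, the valid-LHS sentinel, the property and call sentinels) into one object an isolate can own. A trimmed sketch of that ownership pattern; the ast_sentinels() accessor is assumed here and is not shown in the hunks:

    #include <cstdio>

    struct VariableProxySentinel { bool is_this; };

    class AstSentinels {
     public:
      VariableProxySentinel* this_proxy() { return &this_proxy_; }
      VariableProxySentinel* identifier_proxy() { return &identifier_proxy_; }
     private:
      AstSentinels() : this_proxy_{true}, identifier_proxy_{false} {}
      VariableProxySentinel this_proxy_;
      VariableProxySentinel identifier_proxy_;
      friend struct Isolate;  // only the isolate constructs the sentinels
    };

    struct Isolate {
      static Isolate* Current() { static Isolate isolate; return &isolate; }
      AstSentinels* ast_sentinels() { return &sentinels_; }  // assumed accessor
     private:
      AstSentinels sentinels_;
    };

    int main() {
      // was: VariableProxySentinel::this_proxy() as a class-level static
      VariableProxySentinel* proxy = Isolate::Current()->ast_sentinels()->this_proxy();
      std::printf("is_this=%d\n", proxy->is_this);
    }
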
index ffa5775fe0d1b53f84d7d79f7895b21136d1189d..015e702c959de76919868f3cf9f0542267d4fac6 100644 (file)
 namespace v8 {
 namespace internal {
 
-// A SourceCodeCache uses a FixedArray to store pairs of
-// (AsciiString*, JSFunction*), mapping names of native code files
-// (runtime.js, etc.) to precompiled functions. Instead of mapping
-// names to functions it might make sense to let the JS2C tool
-// generate an index for each native JS file.
-class SourceCodeCache BASE_EMBEDDED {
- public:
-  explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { }
-
-  void Initialize(bool create_heap_objects) {
-    cache_ = create_heap_objects ? Heap::empty_fixed_array() : NULL;
-  }
-
-  void Iterate(ObjectVisitor* v) {
-    v->VisitPointer(BitCast<Object**>(&cache_));
-  }
-
-
-  bool Lookup(Vector<const char> name, Handle<SharedFunctionInfo>* handle) {
-    for (int i = 0; i < cache_->length(); i+=2) {
-      SeqAsciiString* str = SeqAsciiString::cast(cache_->get(i));
-      if (str->IsEqualTo(name)) {
-        *handle = Handle<SharedFunctionInfo>(
-            SharedFunctionInfo::cast(cache_->get(i + 1)));
-        return true;
-      }
-    }
-    return false;
-  }
-
 
-  void Add(Vector<const char> name, Handle<SharedFunctionInfo> shared) {
-    HandleScope scope;
-    int length = cache_->length();
-    Handle<FixedArray> new_array =
-        Factory::NewFixedArray(length + 2, TENURED);
-    cache_->CopyTo(0, *new_array, 0, cache_->length());
-    cache_ = *new_array;
-    Handle<String> str = Factory::NewStringFromAscii(name, TENURED);
-    cache_->set(length, *str);
-    cache_->set(length + 1, *shared);
-    Script::cast(shared->script())->set_type(Smi::FromInt(type_));
-  }
-
- private:
-  Script::Type type_;
-  FixedArray* cache_;
-  DISALLOW_COPY_AND_ASSIGN(SourceCodeCache);
-};
-
-static SourceCodeCache extensions_cache(Script::TYPE_EXTENSION);
-// This is for delete, not delete[].
-static List<char*>* delete_these_non_arrays_on_tear_down = NULL;
-// This is for delete[]
-static List<char*>* delete_these_arrays_on_tear_down = NULL;
-
-
-NativesExternalStringResource::NativesExternalStringResource(const char* source)
+NativesExternalStringResource::NativesExternalStringResource(
+    Bootstrapper* bootstrapper,
+    const char* source)
     : data_(source), length_(StrLength(source)) {
-  if (delete_these_non_arrays_on_tear_down == NULL) {
-    delete_these_non_arrays_on_tear_down = new List<char*>(2);
+  if (bootstrapper->delete_these_non_arrays_on_tear_down_ == NULL) {
+    bootstrapper->delete_these_non_arrays_on_tear_down_ = new List<char*>(2);
   }
   // The resources are small objects and we only make a fixed number of
   // them, but let's clean them up on exit for neatness.
-  delete_these_non_arrays_on_tear_down->
+  bootstrapper->delete_these_non_arrays_on_tear_down_->
       Add(reinterpret_cast<char*>(this));
 }
 
 
+Bootstrapper::Bootstrapper()
+    : nesting_(0),
+      extensions_cache_(Script::TYPE_EXTENSION),
+      delete_these_non_arrays_on_tear_down_(NULL),
+      delete_these_arrays_on_tear_down_(NULL) {
+}
+
+
 Handle<String> Bootstrapper::NativesSourceLookup(int index) {
   ASSERT(0 <= index && index < Natives::GetBuiltinsCount());
-  if (Heap::natives_source_cache()->get(index)->IsUndefined()) {
+  if (HEAP->natives_source_cache()->get(index)->IsUndefined()) {
     if (!Snapshot::IsEnabled() || FLAG_new_snapshot) {
       // We can use external strings for the natives.
       NativesExternalStringResource* resource =
-          new NativesExternalStringResource(
+          new NativesExternalStringResource(this,
               Natives::GetScriptSource(index).start());
       Handle<String> source_code =
-          Factory::NewExternalStringFromAscii(resource);
-      Heap::natives_source_cache()->set(index, *source_code);
+          FACTORY->NewExternalStringFromAscii(resource);
+      HEAP->natives_source_cache()->set(index, *source_code);
     } else {
       // Old snapshot code can't cope with external strings at all.
       Handle<String> source_code =
-        Factory::NewStringFromAscii(Natives::GetScriptSource(index));
-      Heap::natives_source_cache()->set(index, *source_code);
+        FACTORY->NewStringFromAscii(Natives::GetScriptSource(index));
+      HEAP->natives_source_cache()->set(index, *source_code);
     }
   }
-  Handle<Object> cached_source(Heap::natives_source_cache()->get(index));
+  Handle<Object> cached_source(HEAP->natives_source_cache()->get(index));
   return Handle<String>::cast(cached_source);
 }
 
 
 void Bootstrapper::Initialize(bool create_heap_objects) {
-  extensions_cache.Initialize(create_heap_objects);
+  extensions_cache_.Initialize(create_heap_objects);
   GCExtension::Register();
   ExternalizeStringExtension::Register();
 }
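
NativesSourceLookup and Initialize above switch every static Heap:: and Factory:: call over to the HEAP and FACTORY shorthands. Their definitions are not part of this section; assuming they expand to accessors on the current isolate, the conversion is purely mechanical, as in this sketch:

// Assumed expansions of the shorthands used above (not shown in this section):
//   #define HEAP (v8::internal::Isolate::Current()->heap())
//   #define FACTORY (v8::internal::Isolate::Current()->factory())
//
// Equivalent explicit form for one of the lookups above, with the isolate
// passed in rather than fetched through the macro:
Handle<Object> CachedNativesSource(Isolate* isolate, int index) {
  return Handle<Object>(isolate->heap()->natives_source_cache()->get(index));
}
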
@@ -146,39 +100,39 @@ void Bootstrapper::Initialize(bool create_heap_objects) {
 char* Bootstrapper::AllocateAutoDeletedArray(int bytes) {
   char* memory = new char[bytes];
   if (memory != NULL) {
-    if (delete_these_arrays_on_tear_down == NULL) {
-      delete_these_arrays_on_tear_down = new List<char*>(2);
+    if (delete_these_arrays_on_tear_down_ == NULL) {
+      delete_these_arrays_on_tear_down_ = new List<char*>(2);
     }
-    delete_these_arrays_on_tear_down->Add(memory);
+    delete_these_arrays_on_tear_down_->Add(memory);
   }
   return memory;
 }
 
 
 void Bootstrapper::TearDown() {
-  if (delete_these_non_arrays_on_tear_down != NULL) {
-    int len = delete_these_non_arrays_on_tear_down->length();
+  if (delete_these_non_arrays_on_tear_down_ != NULL) {
+    int len = delete_these_non_arrays_on_tear_down_->length();
     ASSERT(len < 20);  // Don't use this mechanism for unbounded allocations.
     for (int i = 0; i < len; i++) {
-      delete delete_these_non_arrays_on_tear_down->at(i);
-      delete_these_non_arrays_on_tear_down->at(i) = NULL;
+      delete delete_these_non_arrays_on_tear_down_->at(i);
+      delete_these_non_arrays_on_tear_down_->at(i) = NULL;
     }
-    delete delete_these_non_arrays_on_tear_down;
-    delete_these_non_arrays_on_tear_down = NULL;
+    delete delete_these_non_arrays_on_tear_down_;
+    delete_these_non_arrays_on_tear_down_ = NULL;
   }
 
-  if (delete_these_arrays_on_tear_down != NULL) {
-    int len = delete_these_arrays_on_tear_down->length();
+  if (delete_these_arrays_on_tear_down_ != NULL) {
+    int len = delete_these_arrays_on_tear_down_->length();
     ASSERT(len < 1000);  // Don't use this mechanism for unbounded allocations.
     for (int i = 0; i < len; i++) {
-      delete[] delete_these_arrays_on_tear_down->at(i);
-      delete_these_arrays_on_tear_down->at(i) = NULL;
+      delete[] delete_these_arrays_on_tear_down_->at(i);
+      delete_these_arrays_on_tear_down_->at(i) = NULL;
     }
-    delete delete_these_arrays_on_tear_down;
-    delete_these_arrays_on_tear_down = NULL;
+    delete delete_these_arrays_on_tear_down_;
+    delete_these_arrays_on_tear_down_ = NULL;
   }
 
-  extensions_cache.Initialize(false);  // Yes, symmetrical
+  extensions_cache_.Initialize(false);  // Yes, symmetrical
 }
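
AllocateAutoDeletedArray and TearDown complete the same conversion: the former file-level statics (extensions_cache and both tear-down lists) are now Bootstrapper data members, initialized in the constructor added above, so each isolate frees only its own allocations. The matching bootstrapper.h change is not shown here; a sketch of the member layout it implies, with names taken from the diff and the access control assumed:

class Bootstrapper {
 public:
  Bootstrapper();
  // Remaining public interface (Initialize, TearDown, NativesSourceLookup,
  // AllocateAutoDeletedArray, Iterate, ...) is unchanged.
 private:
  int nesting_;
  SourceCodeCache extensions_cache_;
  List<char*>* delete_these_non_arrays_on_tear_down_;  // released with delete
  List<char*>* delete_these_arrays_on_tear_down_;      // released with delete[]
  friend class Isolate;
  friend class NativesExternalStringResource;  // registers itself for tear-down
  DISALLOW_COPY_AND_ASSIGN(Bootstrapper);
};
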
 
 
@@ -306,7 +260,7 @@ class Genesis BASE_EMBEDDED {
 
 
 void Bootstrapper::Iterate(ObjectVisitor* v) {
-  extensions_cache.Iterate(v);
+  extensions_cache_.Iterate(v);
   v->Synchronize("Extensions");
 }
 
@@ -331,16 +285,16 @@ Handle<Context> Bootstrapper::CreateEnvironment(
 static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
   // object.__proto__ = proto;
   Handle<Map> old_to_map = Handle<Map>(object->map());
-  Handle<Map> new_to_map = Factory::CopyMapDropTransitions(old_to_map);
+  Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
   new_to_map->set_prototype(*proto);
   object->set_map(*new_to_map);
 }
 
 
 void Bootstrapper::DetachGlobal(Handle<Context> env) {
-  JSGlobalProxy::cast(env->global_proxy())->set_context(*Factory::null_value());
+  JSGlobalProxy::cast(env->global_proxy())->set_context(*FACTORY->null_value());
   SetObjectPrototype(Handle<JSObject>(env->global_proxy()),
-                     Factory::null_value());
+                     FACTORY->null_value());
   env->set_global_proxy(env->global());
   env->global()->set_global_receiver(env->global());
 }
@@ -364,11 +318,12 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
                                           Handle<JSObject> prototype,
                                           Builtins::Name call,
                                           bool is_ecma_native) {
-  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
-  Handle<Code> call_code = Handle<Code>(Builtins::builtin(call));
+  Handle<String> symbol = FACTORY->LookupAsciiSymbol(name);
+  Handle<Code> call_code = Handle<Code>(
+      Isolate::Current()->builtins()->builtin(call));
   Handle<JSFunction> function = prototype.is_null() ?
-    Factory::NewFunctionWithoutPrototype(symbol, call_code) :
-    Factory::NewFunctionWithPrototype(symbol,
+    FACTORY->NewFunctionWithoutPrototype(symbol, call_code) :
+    FACTORY->NewFunctionWithPrototype(symbol,
                                       type,
                                       instance_size,
                                       prototype,
@@ -385,28 +340,28 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
 Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
     PrototypePropertyMode prototypeMode) {
   Handle<DescriptorArray> descriptors =
-      Factory::NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
+      FACTORY->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
   PropertyAttributes attributes =
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
 
   {  // Add length.
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionLength);
-    CallbacksDescriptor d(*Factory::length_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionLength);
+    CallbacksDescriptor d(*FACTORY->length_symbol(), *proxy, attributes);
     descriptors->Set(0, &d);
   }
   {  // Add name.
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionName);
-    CallbacksDescriptor d(*Factory::name_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionName);
+    CallbacksDescriptor d(*FACTORY->name_symbol(), *proxy, attributes);
     descriptors->Set(1, &d);
   }
   {  // Add arguments.
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionArguments);
-    CallbacksDescriptor d(*Factory::arguments_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionArguments);
+    CallbacksDescriptor d(*FACTORY->arguments_symbol(), *proxy, attributes);
     descriptors->Set(2, &d);
   }
   {  // Add caller.
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionCaller);
-    CallbacksDescriptor d(*Factory::caller_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionCaller);
+    CallbacksDescriptor d(*FACTORY->caller_symbol(), *proxy, attributes);
     descriptors->Set(3, &d);
   }
   if (prototypeMode != DONT_ADD_PROTOTYPE) {
@@ -414,8 +369,8 @@ Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
     if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
       attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
     }
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionPrototype);
-    CallbacksDescriptor d(*Factory::prototype_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionPrototype);
+    CallbacksDescriptor d(*FACTORY->prototype_symbol(), *proxy, attributes);
     descriptors->Set(4, &d);
   }
   descriptors->Sort();
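
The InstallFunction hunk above also shows the other recurring substitution in this file: static Builtins::builtin(name) lookups become calls on the isolate's Builtins instance. In isolation the converted lookup looks like the sketch below; the Builtins::Name enum and the builtin() signature are unchanged by this section:

// Converted builtin lookup, matching the call sites rewritten above.
Handle<Code> GetBuiltinCode(Builtins::Name name) {
  return Handle<Code>(Isolate::Current()->builtins()->builtin(name));
}
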
@@ -424,7 +379,7 @@ Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
 
 
 Handle<Map> Genesis::CreateFunctionMap(PrototypePropertyMode prototype_mode) {
-  Handle<Map> map = Factory::NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+  Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   Handle<DescriptorArray> descriptors =
       ComputeFunctionInstanceDescriptor(prototype_mode);
   map->set_instance_descriptors(*descriptors);
@@ -458,41 +413,43 @@ Handle<JSFunction> Genesis::CreateEmptyFunction() {
   function_instance_map_writable_prototype_ =
       CreateFunctionMap(ADD_WRITEABLE_PROTOTYPE);
 
-  Handle<String> object_name = Handle<String>(Heap::Object_symbol());
+  Handle<String> object_name = Handle<String>(HEAP->Object_symbol());
 
   {  // --- O b j e c t ---
     Handle<JSFunction> object_fun =
-        Factory::NewFunction(object_name, Factory::null_value());
+        FACTORY->NewFunction(object_name, FACTORY->null_value());
     Handle<Map> object_function_map =
-        Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+        FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
     object_fun->set_initial_map(*object_function_map);
     object_function_map->set_constructor(*object_fun);
 
     global_context()->set_object_function(*object_fun);
 
     // Allocate a new prototype for the object function.
-    Handle<JSObject> prototype = Factory::NewJSObject(Top::object_function(),
-                                                      TENURED);
+    Handle<JSObject> prototype = FACTORY->NewJSObject(
+        Isolate::Current()->object_function(),
+        TENURED);
 
     global_context()->set_initial_object_prototype(*prototype);
     SetPrototype(object_fun, prototype);
     object_function_map->
-      set_instance_descriptors(Heap::empty_descriptor_array());
+      set_instance_descriptors(HEAP->empty_descriptor_array());
   }
 
   // Allocate the empty function as the prototype for functions
   // (ECMAScript 262, section 15.3.4).
-  Handle<String> symbol = Factory::LookupAsciiSymbol("Empty");
+  Handle<String> symbol = FACTORY->LookupAsciiSymbol("Empty");
   Handle<JSFunction> empty_function =
-      Factory::NewFunctionWithoutPrototype(symbol, kNonStrictMode);
+      FACTORY->NewFunctionWithoutPrototype(symbol, kNonStrictMode);
 
   // --- E m p t y ---
   Handle<Code> code =
-      Handle<Code>(Builtins::builtin(Builtins::EmptyFunction));
+      Handle<Code>(Isolate::Current()->builtins()->builtin(
+          Builtins::EmptyFunction));
   empty_function->set_code(*code);
   empty_function->shared()->set_code(*code);
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("() {}"));
-  Handle<Script> script = Factory::NewScript(source);
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("() {}"));
+  Handle<Script> script = FACTORY->NewScript(source);
   script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
   empty_function->shared()->set_script(*script);
   empty_function->shared()->set_start_position(0);
@@ -509,7 +466,7 @@ Handle<JSFunction> Genesis::CreateEmptyFunction() {
   // Allocate the function map first and then patch the prototype later
   Handle<Map> function_without_prototype_map(
       global_context()->function_without_prototype_map());
-  Handle<Map> empty_fm = Factory::CopyMapDropDescriptors(
+  Handle<Map> empty_fm = FACTORY->CopyMapDropDescriptors(
       function_without_prototype_map);
   empty_fm->set_instance_descriptors(
       function_without_prototype_map->instance_descriptors());
@@ -524,26 +481,26 @@ Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor(
     Handle<FixedArray> arguments,
     Handle<FixedArray> caller) {
   Handle<DescriptorArray> descriptors =
-      Factory::NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
+      FACTORY->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
   PropertyAttributes attributes = static_cast<PropertyAttributes>(
       DONT_ENUM | DONT_DELETE | READ_ONLY);
 
   {  // length
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionLength);
-    CallbacksDescriptor d(*Factory::length_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionLength);
+    CallbacksDescriptor d(*FACTORY->length_symbol(), *proxy, attributes);
     descriptors->Set(0, &d);
   }
   {  // name
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionName);
-    CallbacksDescriptor d(*Factory::name_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionName);
+    CallbacksDescriptor d(*FACTORY->name_symbol(), *proxy, attributes);
     descriptors->Set(1, &d);
   }
   {  // arguments
-    CallbacksDescriptor d(*Factory::arguments_symbol(), *arguments, attributes);
+    CallbacksDescriptor d(*FACTORY->arguments_symbol(), *arguments, attributes);
     descriptors->Set(2, &d);
   }
   {  // caller
-    CallbacksDescriptor d(*Factory::caller_symbol(), *caller, attributes);
+    CallbacksDescriptor d(*FACTORY->caller_symbol(), *caller, attributes);
     descriptors->Set(3, &d);
   }
 
@@ -552,8 +509,8 @@ Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor(
     if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
       attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
     }
-    Handle<Proxy> proxy = Factory::NewProxy(&Accessors::FunctionPrototype);
-    CallbacksDescriptor d(*Factory::prototype_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = FACTORY->NewProxy(&Accessors::FunctionPrototype);
+    CallbacksDescriptor d(*FACTORY->prototype_symbol(), *proxy, attributes);
     descriptors->Set(4, &d);
   }
 
@@ -565,10 +522,11 @@ Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor(
 // ECMAScript 5th Edition, 13.2.3
 Handle<JSFunction> Genesis::CreateThrowTypeErrorFunction(
     Builtins::Name builtin) {
-  Handle<String> name = Factory::LookupAsciiSymbol("ThrowTypeError");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("ThrowTypeError");
   Handle<JSFunction> throw_type_error =
-      Factory::NewFunctionWithoutPrototype(name, kStrictMode);
-  Handle<Code> code = Handle<Code>(Builtins::builtin(builtin));
+      FACTORY->NewFunctionWithoutPrototype(name, kStrictMode);
+  Handle<Code> code = Handle<Code>(
+      Isolate::Current()->builtins()->builtin(builtin));
 
   throw_type_error->set_map(global_context()->strict_mode_function_map());
   throw_type_error->set_code(*code);
@@ -586,7 +544,7 @@ Handle<Map> Genesis::CreateStrictModeFunctionMap(
     Handle<JSFunction> empty_function,
     Handle<FixedArray> arguments_callbacks,
     Handle<FixedArray> caller_callbacks) {
-  Handle<Map> map = Factory::NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+  Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   Handle<DescriptorArray> descriptors =
       ComputeStrictFunctionInstanceDescriptor(prototype_mode,
                                               arguments_callbacks,
@@ -601,8 +559,8 @@ Handle<Map> Genesis::CreateStrictModeFunctionMap(
 void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
   // Create the callbacks arrays for ThrowTypeError functions.
   // The get/set callbacks are filled in after the maps are created below.
-  Handle<FixedArray> arguments = Factory::NewFixedArray(2, TENURED);
-  Handle<FixedArray> caller = Factory::NewFixedArray(2, TENURED);
+  Handle<FixedArray> arguments = FACTORY->NewFixedArray(2, TENURED);
+  Handle<FixedArray> caller = FACTORY->NewFixedArray(2, TENURED);
 
   // Allocate map for the strict mode function instances.
   global_context()->set_strict_mode_function_instance_map(
@@ -643,32 +601,33 @@ void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
 
 static void AddToWeakGlobalContextList(Context* context) {
   ASSERT(context->IsGlobalContext());
+  Heap* heap = Isolate::Current()->heap();
 #ifdef DEBUG
   { // NOLINT
     ASSERT(context->get(Context::NEXT_CONTEXT_LINK)->IsUndefined());
     // Check that context is not in the list yet.
-    for (Object* current = Heap::global_contexts_list();
+    for (Object* current = heap->global_contexts_list();
          !current->IsUndefined();
          current = Context::cast(current)->get(Context::NEXT_CONTEXT_LINK)) {
       ASSERT(current != context);
     }
   }
 #endif
-  context->set(Context::NEXT_CONTEXT_LINK, Heap::global_contexts_list());
-  Heap::set_global_contexts_list(context);
+  context->set(Context::NEXT_CONTEXT_LINK, heap->global_contexts_list());
+  heap->set_global_contexts_list(context);
 }
 
 
 void Genesis::CreateRoots() {
+  Isolate* isolate = Isolate::Current();
   // Allocate the global context FixedArray first and then patch the
   // closure and extension object later (we need the empty function
   // and the global object, but in order to create those, we need the
   // global context).
-  global_context_ =
-      Handle<Context>::cast(
-          GlobalHandles::Create(*Factory::NewGlobalContext()));
+  global_context_ = Handle<Context>::cast(isolate->global_handles()->Create(
+              *isolate->factory()->NewGlobalContext()));
   AddToWeakGlobalContextList(*global_context_);
-  Top::set_context(*global_context());
+  isolate->set_context(*global_context());
 
   // Allocate the message listeners object.
   {
@@ -712,10 +671,11 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
   }
 
   if (js_global_template.is_null()) {
-    Handle<String> name = Handle<String>(Heap::empty_symbol());
-    Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
+    Handle<String> name = Handle<String>(HEAP->empty_symbol());
+    Handle<Code> code = Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::Illegal));
     js_global_function =
-        Factory::NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
+        FACTORY->NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
                              JSGlobalObject::kSize, code, true);
     // Change the constructor property of the prototype of the
     // hidden global function to refer to the Object function.
@@ -723,18 +683,21 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
         Handle<JSObject>(
             JSObject::cast(js_global_function->instance_prototype()));
     SetLocalPropertyNoThrow(
-        prototype, Factory::constructor_symbol(), Top::object_function(), NONE);
+        prototype,
+        FACTORY->constructor_symbol(),
+        Isolate::Current()->object_function(),
+        NONE);
   } else {
     Handle<FunctionTemplateInfo> js_global_constructor(
         FunctionTemplateInfo::cast(js_global_template->constructor()));
     js_global_function =
-        Factory::CreateApiFunction(js_global_constructor,
-                                   Factory::InnerGlobalObject);
+        FACTORY->CreateApiFunction(js_global_constructor,
+                                   FACTORY->InnerGlobalObject);
   }
 
   js_global_function->initial_map()->set_is_hidden_prototype();
   Handle<GlobalObject> inner_global =
-      Factory::NewGlobalObject(js_global_function);
+      FACTORY->NewGlobalObject(js_global_function);
   if (inner_global_out != NULL) {
     *inner_global_out = inner_global;
   }
@@ -742,10 +705,11 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
   // Step 2: create or re-initialize the global proxy object.
   Handle<JSFunction> global_proxy_function;
   if (global_template.IsEmpty()) {
-    Handle<String> name = Handle<String>(Heap::empty_symbol());
-    Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
+    Handle<String> name = Handle<String>(HEAP->empty_symbol());
+    Handle<Code> code = Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::Illegal));
     global_proxy_function =
-        Factory::NewFunction(name, JS_GLOBAL_PROXY_TYPE,
+        FACTORY->NewFunction(name, JS_GLOBAL_PROXY_TYPE,
                              JSGlobalProxy::kSize, code, true);
   } else {
     Handle<ObjectTemplateInfo> data =
@@ -753,11 +717,11 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
     Handle<FunctionTemplateInfo> global_constructor(
             FunctionTemplateInfo::cast(data->constructor()));
     global_proxy_function =
-        Factory::CreateApiFunction(global_constructor,
-                                   Factory::OuterGlobalObject);
+        FACTORY->CreateApiFunction(global_constructor,
+                                   FACTORY->OuterGlobalObject);
   }
 
-  Handle<String> global_name = Factory::LookupAsciiSymbol("global");
+  Handle<String> global_name = FACTORY->LookupAsciiSymbol("global");
   global_proxy_function->shared()->set_instance_class_name(*global_name);
   global_proxy_function->initial_map()->set_is_access_check_needed(true);
 
@@ -771,7 +735,7 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
         Handle<JSGlobalProxy>::cast(global_object));
   } else {
     return Handle<JSGlobalProxy>::cast(
-        Factory::NewJSObject(global_proxy_function, TENURED));
+        FACTORY->NewJSObject(global_proxy_function, TENURED));
   }
 }
 
@@ -796,7 +760,7 @@ void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) {
   static const PropertyAttributes attributes =
       static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
   ForceSetProperty(builtins_global,
-                   Factory::LookupAsciiSymbol("global"),
+                   FACTORY->LookupAsciiSymbol("global"),
                    inner_global,
                    attributes);
   // Setup the reference from the global object to the builtins object.
@@ -824,9 +788,9 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   // object reinitialization.
   global_context()->set_security_token(*inner_global);
 
-  Handle<String> object_name = Handle<String>(Heap::Object_symbol());
+  Handle<String> object_name = Handle<String>(HEAP->Object_symbol());
   SetLocalPropertyNoThrow(inner_global, object_name,
-                          Top::object_function(), DONT_ENUM);
+                          Isolate::Current()->object_function(), DONT_ENUM);
 
   Handle<JSObject> global = Handle<JSObject>(global_context()->global());
 
@@ -837,20 +801,20 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   {  // --- A r r a y ---
     Handle<JSFunction> array_function =
         InstallFunction(global, "Array", JS_ARRAY_TYPE, JSArray::kSize,
-                        Top::initial_object_prototype(), Builtins::ArrayCode,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::ArrayCode, true);
     array_function->shared()->set_construct_stub(
-        Builtins::builtin(Builtins::ArrayConstructCode));
+        Isolate::Current()->builtins()->builtin(Builtins::ArrayConstructCode));
     array_function->shared()->DontAdaptArguments();
 
     // This seems a bit hackish, but we need to make sure Array.length
     // is 1.
     array_function->shared()->set_length(1);
     Handle<DescriptorArray> array_descriptors =
-        Factory::CopyAppendProxyDescriptor(
-            Factory::empty_descriptor_array(),
-            Factory::length_symbol(),
-            Factory::NewProxy(&Accessors::ArrayLength),
+        FACTORY->CopyAppendProxyDescriptor(
+            FACTORY->empty_descriptor_array(),
+            FACTORY->length_symbol(),
+            FACTORY->NewProxy(&Accessors::ArrayLength),
             static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
 
     // Cache the fast JavaScript array map
@@ -867,33 +831,33 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   {  // --- N u m b e r ---
     Handle<JSFunction> number_fun =
         InstallFunction(global, "Number", JS_VALUE_TYPE, JSValue::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::Illegal, true);
     global_context()->set_number_function(*number_fun);
   }
 
   {  // --- B o o l e a n ---
     Handle<JSFunction> boolean_fun =
         InstallFunction(global, "Boolean", JS_VALUE_TYPE, JSValue::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::Illegal, true);
     global_context()->set_boolean_function(*boolean_fun);
   }
 
   {  // --- S t r i n g ---
     Handle<JSFunction> string_fun =
         InstallFunction(global, "String", JS_VALUE_TYPE, JSValue::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::Illegal, true);
     string_fun->shared()->set_construct_stub(
-        Builtins::builtin(Builtins::StringConstructCode));
+        Isolate::Current()->builtins()->builtin(Builtins::StringConstructCode));
     global_context()->set_string_function(*string_fun);
     // Add 'length' property to strings.
     Handle<DescriptorArray> string_descriptors =
-        Factory::CopyAppendProxyDescriptor(
-            Factory::empty_descriptor_array(),
-            Factory::length_symbol(),
-            Factory::NewProxy(&Accessors::StringLength),
+        FACTORY->CopyAppendProxyDescriptor(
+            FACTORY->empty_descriptor_array(),
+            FACTORY->length_symbol(),
+            FACTORY->NewProxy(&Accessors::StringLength),
             static_cast<PropertyAttributes>(DONT_ENUM |
                                             DONT_DELETE |
                                             READ_ONLY));
@@ -907,8 +871,8 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     // Builtin functions for Date.prototype.
     Handle<JSFunction> date_fun =
         InstallFunction(global, "Date", JS_VALUE_TYPE, JSValue::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::Illegal, true);
 
     global_context()->set_date_function(*date_fun);
   }
@@ -918,8 +882,8 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     // Builtin functions for RegExp.prototype.
     Handle<JSFunction> regexp_fun =
         InstallFunction(global, "RegExp", JS_REGEXP_TYPE, JSRegExp::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        true);
+                        Isolate::Current()->initial_object_prototype(),
+                        Builtins::Illegal, true);
     global_context()->set_regexp_function(*regexp_fun);
 
     ASSERT(regexp_fun->has_initial_map());
@@ -927,13 +891,13 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
 
     ASSERT_EQ(0, initial_map->inobject_properties());
 
-    Handle<DescriptorArray> descriptors = Factory::NewDescriptorArray(5);
+    Handle<DescriptorArray> descriptors = FACTORY->NewDescriptorArray(5);
     PropertyAttributes final =
         static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
     int enum_index = 0;
     {
       // ECMA-262, section 15.10.7.1.
-      FieldDescriptor field(Heap::source_symbol(),
+      FieldDescriptor field(HEAP->source_symbol(),
                             JSRegExp::kSourceFieldIndex,
                             final,
                             enum_index++);
@@ -941,7 +905,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     }
     {
       // ECMA-262, section 15.10.7.2.
-      FieldDescriptor field(Heap::global_symbol(),
+      FieldDescriptor field(HEAP->global_symbol(),
                             JSRegExp::kGlobalFieldIndex,
                             final,
                             enum_index++);
@@ -949,7 +913,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     }
     {
       // ECMA-262, section 15.10.7.3.
-      FieldDescriptor field(Heap::ignore_case_symbol(),
+      FieldDescriptor field(HEAP->ignore_case_symbol(),
                             JSRegExp::kIgnoreCaseFieldIndex,
                             final,
                             enum_index++);
@@ -957,7 +921,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     }
     {
       // ECMA-262, section 15.10.7.4.
-      FieldDescriptor field(Heap::multiline_symbol(),
+      FieldDescriptor field(HEAP->multiline_symbol(),
                             JSRegExp::kMultilineFieldIndex,
                             final,
                             enum_index++);
@@ -967,7 +931,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
       // ECMA-262, section 15.10.7.5.
       PropertyAttributes writable =
           static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
-      FieldDescriptor field(Heap::last_index_symbol(),
+      FieldDescriptor field(HEAP->last_index_symbol(),
                             JSRegExp::kLastIndexFieldIndex,
                             writable,
                             enum_index++);
@@ -986,13 +950,13 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   }
 
   {  // -- J S O N
-    Handle<String> name = Factory::NewStringFromAscii(CStrVector("JSON"));
-    Handle<JSFunction> cons = Factory::NewFunction(
+    Handle<String> name = FACTORY->NewStringFromAscii(CStrVector("JSON"));
+    Handle<JSFunction> cons = FACTORY->NewFunction(
         name,
-        Factory::the_hole_value());
+        FACTORY->the_hole_value());
     cons->SetInstancePrototype(global_context()->initial_object_prototype());
     cons->SetInstanceClassName(*name);
-    Handle<JSObject> json_object = Factory::NewJSObject(cons, TENURED);
+    Handle<JSObject> json_object = FACTORY->NewJSObject(cons, TENURED);
     ASSERT(json_object->IsJSObject());
     SetLocalPropertyNoThrow(global, name, json_object, DONT_ENUM);
     global_context()->set_json_object(*json_object);
@@ -1002,14 +966,15 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     // Make sure we can recognize argument objects at runtime.
     // This is done by introducing an anonymous function whose
     // class_name is 'Arguments'.
-    Handle<String> symbol = Factory::LookupAsciiSymbol("Arguments");
-    Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
+    Handle<String> symbol = FACTORY->LookupAsciiSymbol("Arguments");
+    Handle<Code> code = Handle<Code>(
+        Isolate::Current()->builtins()->builtin(Builtins::Illegal));
     Handle<JSObject> prototype =
         Handle<JSObject>(
             JSObject::cast(global_context()->object_function()->prototype()));
 
     Handle<JSFunction> function =
-        Factory::NewFunctionWithPrototype(symbol,
+        FACTORY->NewFunctionWithPrototype(symbol,
                                           JS_OBJECT_TYPE,
                                           JSObject::kHeaderSize,
                                           prototype,
@@ -1018,25 +983,25 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     ASSERT(!function->has_initial_map());
     function->shared()->set_instance_class_name(*symbol);
     function->shared()->set_expected_nof_properties(2);
-    Handle<JSObject> result = Factory::NewJSObject(function);
+    Handle<JSObject> result = FACTORY->NewJSObject(function);
 
     global_context()->set_arguments_boilerplate(*result);
     // Note: length must be added as the first property and
     //       callee must be added as the second property.
-    SetLocalPropertyNoThrow(result, Factory::length_symbol(),
-                            Factory::undefined_value(),
+    SetLocalPropertyNoThrow(result, FACTORY->length_symbol(),
+                            FACTORY->undefined_value(),
                             DONT_ENUM);
-    SetLocalPropertyNoThrow(result, Factory::callee_symbol(),
-                            Factory::undefined_value(),
+    SetLocalPropertyNoThrow(result, FACTORY->callee_symbol(),
+                            FACTORY->undefined_value(),
                             DONT_ENUM);
 
 #ifdef DEBUG
     LookupResult lookup;
-    result->LocalLookup(Heap::callee_symbol(), &lookup);
+    result->LocalLookup(HEAP->callee_symbol(), &lookup);
     ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
     ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex);
 
-    result->LocalLookup(Heap::length_symbol(), &lookup);
+    result->LocalLookup(HEAP->length_symbol(), &lookup);
     ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
     ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
 
@@ -1054,8 +1019,8 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
 
     // Create the ThrowTypeError functions.
-    Handle<FixedArray> callee = Factory::NewFixedArray(2, TENURED);
-    Handle<FixedArray> caller = Factory::NewFixedArray(2, TENURED);
+    Handle<FixedArray> callee = FACTORY->NewFixedArray(2, TENURED);
+    Handle<FixedArray> caller = FACTORY->NewFixedArray(2, TENURED);
 
     Handle<JSFunction> callee_throw =
         CreateThrowTypeErrorFunction(Builtins::StrictArgumentsCallee);
@@ -1069,23 +1034,23 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     caller->set(1, *caller_throw);
 
     // Create the descriptor array for the arguments object.
-    Handle<DescriptorArray> descriptors = Factory::NewDescriptorArray(3);
+    Handle<DescriptorArray> descriptors = FACTORY->NewDescriptorArray(3);
     {  // length
-      FieldDescriptor d(*Factory::length_symbol(), 0, DONT_ENUM);
+      FieldDescriptor d(*FACTORY->length_symbol(), 0, DONT_ENUM);
       descriptors->Set(0, &d);
     }
     {  // callee
-      CallbacksDescriptor d(*Factory::callee_symbol(), *callee, attributes);
+      CallbacksDescriptor d(*FACTORY->callee_symbol(), *callee, attributes);
       descriptors->Set(1, &d);
     }
     {  // caller
-      CallbacksDescriptor d(*Factory::caller_symbol(), *caller, attributes);
+      CallbacksDescriptor d(*FACTORY->caller_symbol(), *caller, attributes);
       descriptors->Set(2, &d);
     }
     descriptors->Sort();
 
     // Create the map. Allocate one in-object field for length.
-    Handle<Map> map = Factory::NewMap(JS_OBJECT_TYPE,
+    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE,
                                       Heap::kArgumentsObjectSizeStrict);
     map->set_instance_descriptors(*descriptors);
     map->set_function_with_prototype(true);
@@ -1098,17 +1063,17 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
       global_context()->arguments_boilerplate()->map()->constructor());
 
     // Allocate the arguments boilerplate object.
-    Handle<JSObject> result = Factory::NewJSObjectFromMap(map);
+    Handle<JSObject> result = FACTORY->NewJSObjectFromMap(map);
     global_context()->set_strict_mode_arguments_boilerplate(*result);
 
     // Add length property only for strict mode boilerplate.
-    SetLocalPropertyNoThrow(result, Factory::length_symbol(),
-                            Factory::undefined_value(),
+    SetLocalPropertyNoThrow(result, FACTORY->length_symbol(),
+                            FACTORY->undefined_value(),
                             DONT_ENUM);
 
 #ifdef DEBUG
     LookupResult lookup;
-    result->LocalLookup(Heap::length_symbol(), &lookup);
+    result->LocalLookup(HEAP->length_symbol(), &lookup);
     ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
     ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
 
@@ -1122,15 +1087,16 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
 
   {  // --- context extension
     // Create a function for the context extension objects.
-    Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
+    Handle<Code> code = Handle<Code>(
+        Isolate::Current()->builtins()->builtin(Builtins::Illegal));
     Handle<JSFunction> context_extension_fun =
-        Factory::NewFunction(Factory::empty_symbol(),
+        FACTORY->NewFunction(FACTORY->empty_symbol(),
                              JS_CONTEXT_EXTENSION_OBJECT_TYPE,
                              JSObject::kHeaderSize,
                              code,
                              true);
 
-    Handle<String> name = Factory::LookupAsciiSymbol("context_extension");
+    Handle<String> name = FACTORY->LookupAsciiSymbol("context_extension");
     context_extension_fun->shared()->set_instance_class_name(*name);
     global_context()->set_context_extension_function(*context_extension_fun);
   }
@@ -1139,9 +1105,10 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   {
     // Setup the call-as-function delegate.
     Handle<Code> code =
-        Handle<Code>(Builtins::builtin(Builtins::HandleApiCallAsFunction));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::HandleApiCallAsFunction));
     Handle<JSFunction> delegate =
-        Factory::NewFunction(Factory::empty_symbol(), JS_OBJECT_TYPE,
+        FACTORY->NewFunction(FACTORY->empty_symbol(), JS_OBJECT_TYPE,
                              JSObject::kHeaderSize, code, true);
     global_context()->set_call_as_function_delegate(*delegate);
     delegate->shared()->DontAdaptArguments();
@@ -1150,44 +1117,47 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
   {
     // Setup the call-as-constructor delegate.
     Handle<Code> code =
-        Handle<Code>(Builtins::builtin(Builtins::HandleApiCallAsConstructor));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::HandleApiCallAsConstructor));
     Handle<JSFunction> delegate =
-        Factory::NewFunction(Factory::empty_symbol(), JS_OBJECT_TYPE,
+        FACTORY->NewFunction(FACTORY->empty_symbol(), JS_OBJECT_TYPE,
                              JSObject::kHeaderSize, code, true);
     global_context()->set_call_as_constructor_delegate(*delegate);
     delegate->shared()->DontAdaptArguments();
   }
 
   // Initialize the out of memory slot.
-  global_context()->set_out_of_memory(Heap::false_value());
+  global_context()->set_out_of_memory(HEAP->false_value());
 
   // Initialize the data slot.
-  global_context()->set_data(Heap::undefined_value());
+  global_context()->set_data(HEAP->undefined_value());
 }
 
 
 bool Genesis::CompileBuiltin(int index) {
   Vector<const char> name = Natives::GetScriptName(index);
-  Handle<String> source_code = Bootstrapper::NativesSourceLookup(index);
+  Handle<String> source_code =
+      Isolate::Current()->bootstrapper()->NativesSourceLookup(index);
   return CompileNative(name, source_code);
 }
 
 
 bool Genesis::CompileNative(Vector<const char> name, Handle<String> source) {
   HandleScope scope;
+  Isolate* isolate = Isolate::Current();
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debugger::set_compiling_natives(true);
+  isolate->debugger()->set_compiling_natives(true);
 #endif
   bool result = CompileScriptCached(name,
                                     source,
                                     NULL,
                                     NULL,
-                                    Handle<Context>(Top::context()),
+                                    Handle<Context>(isolate->context()),
                                     true);
-  ASSERT(Top::has_pending_exception() != result);
-  if (!result) Top::clear_pending_exception();
+  ASSERT(isolate->has_pending_exception() != result);
+  if (!result) isolate->clear_pending_exception();
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debugger::set_compiling_natives(false);
+  isolate->debugger()->set_compiling_natives(false);
 #endif
   return result;
 }
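
CompileNative above also shows how former Top:: statics are handled: the isolate is fetched once into a local, and its instance accessors (context(), debugger(), has_pending_exception(), clear_pending_exception()) are used from then on. The same idiom in isolation:

// Typical conversion pattern: cache Isolate::Current() in a local instead of
// calling the former Top:: statics.
static void ClearPendingExceptionIfAny() {
  Isolate* isolate = Isolate::Current();
  if (isolate->has_pending_exception()) {
    isolate->clear_pending_exception();
  }
}
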
@@ -1206,7 +1176,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
   // function and insert it into the cache.
   if (cache == NULL || !cache->Lookup(name, &function_info)) {
     ASSERT(source->IsAsciiRepresentation());
-    Handle<String> script_name = Factory::NewStringFromUtf8(name);
+    Handle<String> script_name = FACTORY->NewStringFromUtf8(name);
     function_info = Compiler::Compile(
         source,
         script_name,
@@ -1229,7 +1199,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
                       ? Handle<Context>(top_context->runtime_context())
                       : top_context);
   Handle<JSFunction> fun =
-      Factory::NewFunctionFromSharedFunctionInfo(function_info, context);
+      FACTORY->NewFunctionFromSharedFunctionInfo(function_info, context);
 
   // Call function using either the runtime object or the global
   // object as the receiver. Provide no parameters.
@@ -1246,10 +1216,11 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
 
 
 #define INSTALL_NATIVE(Type, name, var)                                     \
-  Handle<String> var##_name = Factory::LookupAsciiSymbol(name);             \
+  Handle<String> var##_name = FACTORY->LookupAsciiSymbol(name);             \
   global_context()->set_##var(Type::cast(                                   \
       global_context()->builtins()->GetPropertyNoExceptionThrown(*var##_name)));
 
+
 void Genesis::InstallNativeFunctions() {
   HandleScope scope;
   INSTALL_NATIVE(JSFunction, "CreateDate", create_date_fun);
@@ -1273,21 +1244,24 @@ void Genesis::InstallNativeFunctions() {
 
 bool Genesis::InstallNatives() {
   HandleScope scope;
+  Isolate* isolate = Isolate::Current();
+  Factory* factory = isolate->factory();
 
   // Create a function for the builtins object. Allocate space for the
   // JavaScript builtins, a reference to the builtins object
   // (itself) and a reference to the global_context directly in the object.
-  Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
+  Handle<Code> code = Handle<Code>(
+      isolate->builtins()->builtin(Builtins::Illegal));
   Handle<JSFunction> builtins_fun =
-      Factory::NewFunction(Factory::empty_symbol(), JS_BUILTINS_OBJECT_TYPE,
+      factory->NewFunction(factory->empty_symbol(), JS_BUILTINS_OBJECT_TYPE,
                            JSBuiltinsObject::kSize, code, true);
 
-  Handle<String> name = Factory::LookupAsciiSymbol("builtins");
+  Handle<String> name = factory->LookupAsciiSymbol("builtins");
   builtins_fun->shared()->set_instance_class_name(*name);
 
   // Allocate the builtins object.
   Handle<JSBuiltinsObject> builtins =
-      Handle<JSBuiltinsObject>::cast(Factory::NewGlobalObject(builtins_fun));
+      Handle<JSBuiltinsObject>::cast(factory->NewGlobalObject(builtins_fun));
   builtins->set_builtins(*builtins);
   builtins->set_global_context(*global_context());
   builtins->set_global_receiver(*builtins);
@@ -1298,7 +1272,7 @@ bool Genesis::InstallNatives() {
   // global object.
   static const PropertyAttributes attributes =
       static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
-  Handle<String> global_symbol = Factory::LookupAsciiSymbol("global");
+  Handle<String> global_symbol = factory->LookupAsciiSymbol("global");
   Handle<Object> global_obj(global_context()->global());
   SetLocalPropertyNoThrow(builtins, global_symbol, global_obj, attributes);
 
@@ -1307,12 +1281,12 @@ bool Genesis::InstallNatives() {
 
   // Create a bridge function that has context in the global context.
   Handle<JSFunction> bridge =
-      Factory::NewFunction(Factory::empty_symbol(), Factory::undefined_value());
-  ASSERT(bridge->context() == *Top::global_context());
+      factory->NewFunction(factory->empty_symbol(), factory->undefined_value());
+  ASSERT(bridge->context() == *isolate->global_context());
 
   // Allocate the builtins context.
   Handle<Context> context =
-    Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
+    factory->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
   context->set_global(*builtins);  // override builtins global object
 
   global_context()->set_runtime_context(*context);
@@ -1321,113 +1295,113 @@ bool Genesis::InstallNatives() {
     // Builtin functions for Script.
     Handle<JSFunction> script_fun =
         InstallFunction(builtins, "Script", JS_VALUE_TYPE, JSValue::kSize,
-                        Top::initial_object_prototype(), Builtins::Illegal,
-                        false);
+                        isolate->initial_object_prototype(),
+                        Builtins::Illegal, false);
     Handle<JSObject> prototype =
-        Factory::NewJSObject(Top::object_function(), TENURED);
+        factory->NewJSObject(isolate->object_function(), TENURED);
     SetPrototype(script_fun, prototype);
     global_context()->set_script_function(*script_fun);
 
     // Add 'source' and 'data' property to scripts.
     PropertyAttributes common_attributes =
         static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
-    Handle<Proxy> proxy_source = Factory::NewProxy(&Accessors::ScriptSource);
+    Handle<Proxy> proxy_source = factory->NewProxy(&Accessors::ScriptSource);
     Handle<DescriptorArray> script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
-            Factory::empty_descriptor_array(),
-            Factory::LookupAsciiSymbol("source"),
+        factory->CopyAppendProxyDescriptor(
+            factory->empty_descriptor_array(),
+            factory->LookupAsciiSymbol("source"),
             proxy_source,
             common_attributes);
-    Handle<Proxy> proxy_name = Factory::NewProxy(&Accessors::ScriptName);
+    Handle<Proxy> proxy_name = factory->NewProxy(&Accessors::ScriptName);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("name"),
+            factory->LookupAsciiSymbol("name"),
             proxy_name,
             common_attributes);
-    Handle<Proxy> proxy_id = Factory::NewProxy(&Accessors::ScriptId);
+    Handle<Proxy> proxy_id = factory->NewProxy(&Accessors::ScriptId);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("id"),
+            factory->LookupAsciiSymbol("id"),
             proxy_id,
             common_attributes);
     Handle<Proxy> proxy_line_offset =
-        Factory::NewProxy(&Accessors::ScriptLineOffset);
+        factory->NewProxy(&Accessors::ScriptLineOffset);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("line_offset"),
+            factory->LookupAsciiSymbol("line_offset"),
             proxy_line_offset,
             common_attributes);
     Handle<Proxy> proxy_column_offset =
-        Factory::NewProxy(&Accessors::ScriptColumnOffset);
+        factory->NewProxy(&Accessors::ScriptColumnOffset);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("column_offset"),
+            factory->LookupAsciiSymbol("column_offset"),
             proxy_column_offset,
             common_attributes);
-    Handle<Proxy> proxy_data = Factory::NewProxy(&Accessors::ScriptData);
+    Handle<Proxy> proxy_data = factory->NewProxy(&Accessors::ScriptData);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("data"),
+            factory->LookupAsciiSymbol("data"),
             proxy_data,
             common_attributes);
-    Handle<Proxy> proxy_type = Factory::NewProxy(&Accessors::ScriptType);
+    Handle<Proxy> proxy_type = factory->NewProxy(&Accessors::ScriptType);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("type"),
+            factory->LookupAsciiSymbol("type"),
             proxy_type,
             common_attributes);
     Handle<Proxy> proxy_compilation_type =
-        Factory::NewProxy(&Accessors::ScriptCompilationType);
+        factory->NewProxy(&Accessors::ScriptCompilationType);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("compilation_type"),
+            factory->LookupAsciiSymbol("compilation_type"),
             proxy_compilation_type,
             common_attributes);
     Handle<Proxy> proxy_line_ends =
-        Factory::NewProxy(&Accessors::ScriptLineEnds);
+        factory->NewProxy(&Accessors::ScriptLineEnds);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("line_ends"),
+            factory->LookupAsciiSymbol("line_ends"),
             proxy_line_ends,
             common_attributes);
     Handle<Proxy> proxy_context_data =
-        Factory::NewProxy(&Accessors::ScriptContextData);
+        factory->NewProxy(&Accessors::ScriptContextData);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("context_data"),
+            factory->LookupAsciiSymbol("context_data"),
             proxy_context_data,
             common_attributes);
     Handle<Proxy> proxy_eval_from_script =
-        Factory::NewProxy(&Accessors::ScriptEvalFromScript);
+        factory->NewProxy(&Accessors::ScriptEvalFromScript);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("eval_from_script"),
+            factory->LookupAsciiSymbol("eval_from_script"),
             proxy_eval_from_script,
             common_attributes);
     Handle<Proxy> proxy_eval_from_script_position =
-        Factory::NewProxy(&Accessors::ScriptEvalFromScriptPosition);
+        factory->NewProxy(&Accessors::ScriptEvalFromScriptPosition);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("eval_from_script_position"),
+            factory->LookupAsciiSymbol("eval_from_script_position"),
             proxy_eval_from_script_position,
             common_attributes);
     Handle<Proxy> proxy_eval_from_function_name =
-        Factory::NewProxy(&Accessors::ScriptEvalFromFunctionName);
+        factory->NewProxy(&Accessors::ScriptEvalFromFunctionName);
     script_descriptors =
-        Factory::CopyAppendProxyDescriptor(
+        factory->CopyAppendProxyDescriptor(
             script_descriptors,
-            Factory::LookupAsciiSymbol("eval_from_function_name"),
+            factory->LookupAsciiSymbol("eval_from_function_name"),
             proxy_eval_from_function_name,
             common_attributes);
 
@@ -1435,9 +1409,9 @@ bool Genesis::InstallNatives() {
     script_map->set_instance_descriptors(*script_descriptors);
 
     // Allocate the empty script.
-    Handle<Script> script = Factory::NewScript(Factory::empty_string());
+    Handle<Script> script = factory->NewScript(factory->empty_string());
     script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
-    Heap::public_set_empty_script(*script);
+    HEAP->public_set_empty_script(*script);
   }
   {
     // Builtin function for OpaqueReference -- a JSValue-based object,
@@ -1445,10 +1419,11 @@ bool Genesis::InstallNatives() {
     // objects, that JavaScript code may not access.
     Handle<JSFunction> opaque_reference_fun =
         InstallFunction(builtins, "OpaqueReference", JS_VALUE_TYPE,
-                        JSValue::kSize, Top::initial_object_prototype(),
+                        JSValue::kSize,
+                        isolate->initial_object_prototype(),
                         Builtins::Illegal, false);
     Handle<JSObject> prototype =
-        Factory::NewJSObject(Top::object_function(), TENURED);
+        factory->NewJSObject(isolate->object_function(), TENURED);
     SetPrototype(opaque_reference_fun, prototype);
     global_context()->set_opaque_reference_function(*opaque_reference_fun);
   }
@@ -1467,23 +1442,23 @@ bool Genesis::InstallNatives() {
                         "InternalArray",
                         JS_ARRAY_TYPE,
                         JSArray::kSize,
-                        Top::initial_object_prototype(),
+                        isolate->initial_object_prototype(),
                         Builtins::ArrayCode,
                         true);
     Handle<JSObject> prototype =
-        Factory::NewJSObject(Top::object_function(), TENURED);
+        factory->NewJSObject(isolate->object_function(), TENURED);
     SetPrototype(array_function, prototype);
 
     array_function->shared()->set_construct_stub(
-        Builtins::builtin(Builtins::ArrayConstructCode));
+        isolate->builtins()->builtin(Builtins::ArrayConstructCode));
     array_function->shared()->DontAdaptArguments();
 
     // Make "length" magic on instances.
     Handle<DescriptorArray> array_descriptors =
-        Factory::CopyAppendProxyDescriptor(
-            Factory::empty_descriptor_array(),
-            Factory::length_symbol(),
-            Factory::NewProxy(&Accessors::ArrayLength),
+        factory->CopyAppendProxyDescriptor(
+            factory->empty_descriptor_array(),
+            factory->length_symbol(),
+            factory->NewProxy(&Accessors::ArrayLength),
             static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
 
     array_function->initial_map()->set_instance_descriptors(
@@ -1520,9 +1495,9 @@ bool Genesis::InstallNatives() {
   InstallBuiltinFunctionIds();
 
   // Install Function.prototype.call and apply.
-  { Handle<String> key = Factory::function_class_symbol();
+  { Handle<String> key = factory->function_class_symbol();
     Handle<JSFunction> function =
-        Handle<JSFunction>::cast(GetProperty(Top::global(), key));
+        Handle<JSFunction>::cast(GetProperty(isolate->global(), key));
     Handle<JSObject> proto =
         Handle<JSObject>(JSObject::cast(function->instance_prototype()));
 
@@ -1564,7 +1539,7 @@ bool Genesis::InstallNatives() {
 
     // Add initial map.
     Handle<Map> initial_map =
-        Factory::NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
+        factory->NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
     initial_map->set_constructor(*array_constructor);
 
     // Set prototype on map.
@@ -1578,13 +1553,13 @@ bool Genesis::InstallNatives() {
     ASSERT_EQ(1, array_descriptors->number_of_descriptors());
 
     Handle<DescriptorArray> reresult_descriptors =
-        Factory::NewDescriptorArray(3);
+        factory->NewDescriptorArray(3);
 
     reresult_descriptors->CopyFrom(0, *array_descriptors, 0);
 
     int enum_index = 0;
     {
-      FieldDescriptor index_field(Heap::index_symbol(),
+      FieldDescriptor index_field(HEAP->index_symbol(),
                                   JSRegExpResult::kIndexIndex,
                                   NONE,
                                   enum_index++);
@@ -1592,7 +1567,7 @@ bool Genesis::InstallNatives() {
     }
 
     {
-      FieldDescriptor input_field(Heap::input_symbol(),
+      FieldDescriptor input_field(HEAP->input_symbol(),
                                   JSRegExpResult::kInputIndex,
                                   NONE,
                                   enum_index++);
@@ -1624,13 +1599,13 @@ static Handle<JSObject> ResolveBuiltinIdHolder(
   const char* period_pos = strchr(holder_expr, '.');
   if (period_pos == NULL) {
     return Handle<JSObject>::cast(
-        GetProperty(global, Factory::LookupAsciiSymbol(holder_expr)));
+        GetProperty(global, FACTORY->LookupAsciiSymbol(holder_expr)));
   }
   ASSERT_EQ(".prototype", period_pos);
   Vector<const char> property(holder_expr,
                               static_cast<int>(period_pos - holder_expr));
   Handle<JSFunction> function = Handle<JSFunction>::cast(
-      GetProperty(global, Factory::LookupSymbol(property)));
+      GetProperty(global, FACTORY->LookupSymbol(property)));
   return Handle<JSObject>(JSObject::cast(function->prototype()));
 }
 
@@ -1638,7 +1613,7 @@ static Handle<JSObject> ResolveBuiltinIdHolder(
 static void InstallBuiltinFunctionId(Handle<JSObject> holder,
                                      const char* function_name,
                                      BuiltinFunctionId id) {
-  Handle<String> name = Factory::LookupAsciiSymbol(function_name);
+  Handle<String> name = FACTORY->LookupAsciiSymbol(function_name);
   Object* function_object = holder->GetProperty(*name)->ToObjectUnchecked();
   Handle<JSFunction> function(JSFunction::cast(function_object));
   function->shared()->set_function_data(Smi::FromInt(id));
@@ -1670,7 +1645,7 @@ static FixedArray* CreateCache(int size, JSFunction* factory) {
   int array_size = JSFunctionResultCache::kEntriesIndex + 2 * size;
   // Cannot use cast as object is not fully initialized yet.
   JSFunctionResultCache* cache = reinterpret_cast<JSFunctionResultCache*>(
-      *Factory::NewFixedArrayWithHoles(array_size, TENURED));
+      *FACTORY->NewFixedArrayWithHoles(array_size, TENURED));
   cache->set(JSFunctionResultCache::kFactoryIndex, factory);
   cache->MakeZeroSize();
   return cache;
@@ -1684,7 +1659,7 @@ void Genesis::InstallJSFunctionResultCaches() {
 #undef F
   ;
 
-  Handle<FixedArray> caches = Factory::NewFixedArray(kNumberOfCaches, TENURED);
+  Handle<FixedArray> caches = FACTORY->NewFixedArray(kNumberOfCaches, TENURED);
 
   int index = 0;
 
@@ -1703,19 +1678,17 @@ void Genesis::InstallJSFunctionResultCaches() {
 
 void Genesis::InitializeNormalizedMapCaches() {
   Handle<FixedArray> array(
-      Factory::NewFixedArray(NormalizedMapCache::kEntries, TENURED));
+      FACTORY->NewFixedArray(NormalizedMapCache::kEntries, TENURED));
   global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
 }
 
 
-int BootstrapperActive::nesting_ = 0;
-
-
 bool Bootstrapper::InstallExtensions(Handle<Context> global_context,
                                      v8::ExtensionConfiguration* extensions) {
+  Isolate* isolate = Isolate::Current();
   BootstrapperActive active;
-  SaveContext saved_context;
-  Top::set_context(*global_context);
+  SaveContext saved_context(isolate);
+  isolate->set_context(*global_context);
   if (!Genesis::InstallExtensions(global_context, extensions)) return false;
   Genesis::InstallSpecialObjects(global_context);
   return true;
@@ -1729,14 +1702,14 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
   // Expose the natives in global if a name for it is specified.
   if (FLAG_expose_natives_as != NULL && strlen(FLAG_expose_natives_as) != 0) {
     Handle<String> natives_string =
-        Factory::LookupAsciiSymbol(FLAG_expose_natives_as);
+        FACTORY->LookupAsciiSymbol(FLAG_expose_natives_as);
     SetLocalPropertyNoThrow(js_global, natives_string,
                             Handle<JSObject>(js_global->builtins()), DONT_ENUM);
   }
 
   Handle<Object> Error = GetProperty(js_global, "Error");
   if (Error->IsJSObject()) {
-    Handle<String> name = Factory::LookupAsciiSymbol("stackTraceLimit");
+    Handle<String> name = FACTORY->LookupAsciiSymbol("stackTraceLimit");
     SetLocalPropertyNoThrow(Handle<JSObject>::cast(Error),
                             name,
                             Handle<Smi>(Smi::FromInt(FLAG_stack_trace_limit)),
@@ -1746,18 +1719,19 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Expose the debug global object in global if a name for it is specified.
   if (FLAG_expose_debug_as != NULL && strlen(FLAG_expose_debug_as) != 0) {
+    Debug* debug = Isolate::Current()->debug();
     // If loading fails we just bail out without installing the
     // debugger but without tanking the whole context.
-    if (!Debug::Load()) return;
+    if (!debug->Load()) return;
     // Set the security token for the debugger context to the same as
     // the shell global context to allow calling between these (otherwise
     // exposing debug global object doesn't make much sense).
-    Debug::debug_context()->set_security_token(
+    debug->debug_context()->set_security_token(
         global_context->security_token());
 
     Handle<String> debug_string =
-        Factory::LookupAsciiSymbol(FLAG_expose_debug_as);
-    Handle<Object> global_proxy(Debug::debug_context()->global_proxy());
+        FACTORY->LookupAsciiSymbol(FLAG_expose_debug_as);
+    Handle<Object> global_proxy(debug->debug_context()->global_proxy());
     SetLocalPropertyNoThrow(js_global, debug_string, global_proxy, DONT_ENUM);
   }
 #endif
@@ -1766,6 +1740,10 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
 
 bool Genesis::InstallExtensions(Handle<Context> global_context,
                                 v8::ExtensionConfiguration* extensions) {
+  // TODO(isolates): Extensions on multiple isolates may take a little more
+  //                 effort. (The external API reads 'ignore'-- does that mean
+  //                 we can break the interface?)
+
   // Clear coloring of extension list
   v8::RegisteredExtension* current = v8::RegisteredExtension::first_extension();
   while (current != NULL) {
@@ -1834,16 +1812,18 @@ bool Genesis::InstallExtension(v8::RegisteredExtension* current) {
     if (!InstallExtension(extension->dependencies()[i])) return false;
   }
   Vector<const char> source = CStrVector(extension->source());
-  Handle<String> source_code = Factory::NewStringFromAscii(source);
+  Handle<String> source_code = FACTORY->NewStringFromAscii(source);
   bool result = CompileScriptCached(CStrVector(extension->name()),
                                     source_code,
-                                    &extensions_cache,
+                                    Isolate::Current()->bootstrapper()->
+                                        extensions_cache(),
                                     extension,
-                                    Handle<Context>(Top::context()),
+                                    Handle<Context>(
+                                        Isolate::Current()->context()),
                                     false);
-  ASSERT(Top::has_pending_exception() != result);
+  ASSERT(Isolate::Current()->has_pending_exception() != result);
   if (!result) {
-    Top::clear_pending_exception();
+    Isolate::Current()->clear_pending_exception();
   }
   current->set_state(v8::INSTALLED);
   return result;
@@ -1854,7 +1834,7 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
   HandleScope scope;
   for (int i = 0; i < Builtins::NumberOfJavaScriptBuiltins(); i++) {
     Builtins::JavaScript id = static_cast<Builtins::JavaScript>(i);
-    Handle<String> name = Factory::LookupAsciiSymbol(Builtins::GetName(id));
+    Handle<String> name = FACTORY->LookupAsciiSymbol(Builtins::GetName(id));
     Object* function_object = builtins->GetPropertyNoExceptionThrown(*name);
     Handle<JSFunction> function
         = Handle<JSFunction>(JSFunction::cast(function_object));
@@ -1907,8 +1887,8 @@ bool Genesis::ConfigureApiObject(Handle<JSObject> object,
   Handle<JSObject> obj =
       Execution::InstantiateObject(object_template, &pending_exception);
   if (pending_exception) {
-    ASSERT(Top::has_pending_exception());
-    Top::clear_pending_exception();
+    ASSERT(Isolate::Current()->has_pending_exception());
+    Isolate::Current()->clear_pending_exception();
     return false;
   }
   TransferObject(obj, object);
@@ -1999,7 +1979,7 @@ void Genesis::TransferIndexedProperties(Handle<JSObject> from,
   // Cloning the elements array is sufficient.
   Handle<FixedArray> from_elements =
       Handle<FixedArray>(FixedArray::cast(from->elements()));
-  Handle<FixedArray> to_elements = Factory::CopyFixedArray(from_elements);
+  Handle<FixedArray> to_elements = FACTORY->CopyFixedArray(from_elements);
   to->set_elements(*to_elements);
 }
 
@@ -2015,7 +1995,7 @@ void Genesis::TransferObject(Handle<JSObject> from, Handle<JSObject> to) {
 
   // Transfer the prototype (new map is needed).
   Handle<Map> old_to_map = Handle<Map>(to->map());
-  Handle<Map> new_to_map = Factory::CopyMapDropTransitions(old_to_map);
+  Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
   new_to_map->set_prototype(from->map()->prototype());
   to->set_map(*new_to_map);
 }
@@ -2039,6 +2019,7 @@ void Genesis::MakeFunctionInstancePrototypeWritable() {
 Genesis::Genesis(Handle<Object> global_object,
                  v8::Handle<v8::ObjectTemplate> global_template,
                  v8::ExtensionConfiguration* extensions) {
+  Isolate* isolate = Isolate::Current();
   result_ = Handle<Context>::null();
   // If V8 isn't running and cannot be initialized, just return.
   if (!V8::IsRunning() && !V8::Initialize(NULL)) return;
@@ -2046,15 +2027,15 @@ Genesis::Genesis(Handle<Object> global_object,
   // Before creating the roots we must save the context and restore it
   // on all function exits.
   HandleScope scope;
-  SaveContext saved_context;
+  SaveContext saved_context(isolate);
 
   Handle<Context> new_context = Snapshot::NewContextFromSnapshot();
   if (!new_context.is_null()) {
     global_context_ =
-      Handle<Context>::cast(GlobalHandles::Create(*new_context));
+        Handle<Context>::cast(isolate->global_handles()->Create(*new_context));
     AddToWeakGlobalContextList(*global_context_);
-    Top::set_context(*global_context_);
-    i::Counters::contexts_created_by_snapshot.Increment();
+    isolate->set_context(*global_context_);
+    isolate->counters()->contexts_created_by_snapshot()->Increment();
     Handle<GlobalObject> inner_global;
     Handle<JSGlobalProxy> global_proxy =
         CreateNewGlobals(global_template,
@@ -2082,7 +2063,7 @@ Genesis::Genesis(Handle<Object> global_object,
     MakeFunctionInstancePrototypeWritable();
 
     if (!ConfigureGlobalObjects(global_template)) return;
-    i::Counters::contexts_created_from_scratch.Increment();
+    isolate->counters()->contexts_created_from_scratch()->Increment();
   }
 
   result_ = global_context_;
@@ -2093,46 +2074,28 @@ Genesis::Genesis(Handle<Object> global_object,
 
 // Reserve space for statics needing saving and restoring.
 int Bootstrapper::ArchiveSpacePerThread() {
-  return BootstrapperActive::ArchiveSpacePerThread();
+  return sizeof(NestingCounterType);
 }
 
 
 // Archive statics that are thread local.
 char* Bootstrapper::ArchiveState(char* to) {
-  return BootstrapperActive::ArchiveState(to);
+  *reinterpret_cast<NestingCounterType*>(to) = nesting_;
+  nesting_ = 0;
+  return to + sizeof(NestingCounterType);
 }
 
 
 // Restore statics that are thread local.
 char* Bootstrapper::RestoreState(char* from) {
-  return BootstrapperActive::RestoreState(from);
+  nesting_ = *reinterpret_cast<NestingCounterType*>(from);
+  return from + sizeof(NestingCounterType);
 }
 
 
 // Called when the top-level V8 mutex is destroyed.
 void Bootstrapper::FreeThreadResources() {
-  ASSERT(!BootstrapperActive::IsActive());
-}
-
-
-// Reserve space for statics needing saving and restoring.
-int BootstrapperActive::ArchiveSpacePerThread() {
-  return sizeof(nesting_);
-}
-
-
-// Archive statics that are thread local.
-char* BootstrapperActive::ArchiveState(char* to) {
-  *reinterpret_cast<int*>(to) = nesting_;
-  nesting_ = 0;
-  return to + sizeof(nesting_);
-}
-
-
-// Restore statics that are thread local.
-char* BootstrapperActive::RestoreState(char* from) {
-  nesting_ = *reinterpret_cast<int*>(from);
-  return from + sizeof(nesting_);
+  ASSERT(!IsActive());
 }
 
 } }  // namespace v8::internal
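
As a rough illustration of the preemption protocol implemented above, a minimal sketch of the archive/restore round trip (the caller shown is hypothetical; the real one lives in the thread-management code):

// Illustrative only: archive the per-thread nesting counter, let another
// thread run, then restore it, using the methods defined above.
static void ArchiveAndRestoreBootstrapperState() {
  Bootstrapper* bootstrapper = Isolate::Current()->bootstrapper();
  char* buffer = new char[bootstrapper->ArchiveSpacePerThread()];
  char* end = bootstrapper->ArchiveState(buffer);  // saves nesting_, zeroes it
  ASSERT(end == buffer + bootstrapper->ArchiveSpacePerThread());
  USE(end);
  // ... another thread runs and may bootstrap with its own nesting count ...
  bootstrapper->RestoreState(buffer);              // puts nesting_ back
  delete[] buffer;
}
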
index 2b789e28eaf4d45f54316df9ae7d397c3659b4e9..3e158d662a08250ae73f43e86a76b17f6c59ac39 100644
@@ -33,73 +33,140 @@ namespace v8 {
 namespace internal {
 
 
-class BootstrapperActive BASE_EMBEDDED {
+// A SourceCodeCache uses a FixedArray to store pairs of
+// (AsciiString*, JSFunction*), mapping names of native code files
+// (runtime.js, etc.) to precompiled functions. Instead of mapping
+// names to functions it might make sense to let the JS2C tool
+// generate an index for each native JS file.
+class SourceCodeCache BASE_EMBEDDED {
  public:
-  BootstrapperActive() { nesting_++; }
-  ~BootstrapperActive() { nesting_--; }
+  explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { }
 
-  // Support for thread preemption.
-  static int ArchiveSpacePerThread();
-  static char* ArchiveState(char* to);
-  static char* RestoreState(char* from);
+  void Initialize(bool create_heap_objects) {
+    cache_ = create_heap_objects ? HEAP->empty_fixed_array() : NULL;
+  }
+
+  void Iterate(ObjectVisitor* v) {
+    v->VisitPointer(BitCast<Object**, FixedArray**>(&cache_));
+  }
+
+  bool Lookup(Vector<const char> name, Handle<SharedFunctionInfo>* handle) {
+    for (int i = 0; i < cache_->length(); i+=2) {
+      SeqAsciiString* str = SeqAsciiString::cast(cache_->get(i));
+      if (str->IsEqualTo(name)) {
+        *handle = Handle<SharedFunctionInfo>(
+            SharedFunctionInfo::cast(cache_->get(i + 1)));
+        return true;
+      }
+    }
+    return false;
+  }
+
+  void Add(Vector<const char> name, Handle<SharedFunctionInfo> shared) {
+    HandleScope scope;
+    int length = cache_->length();
+    Handle<FixedArray> new_array =
+        FACTORY->NewFixedArray(length + 2, TENURED);
+    cache_->CopyTo(0, *new_array, 0, cache_->length());
+    cache_ = *new_array;
+    Handle<String> str = FACTORY->NewStringFromAscii(name, TENURED);
+    cache_->set(length, *str);
+    cache_->set(length + 1, *shared);
+    Script::cast(shared->script())->set_type(Smi::FromInt(type_));
+  }
 
  private:
-  static bool IsActive() { return nesting_ != 0; }
-  static int nesting_;
-  friend class Bootstrapper;
+  Script::Type type_;
+  FixedArray* cache_;
+  DISALLOW_COPY_AND_ASSIGN(SourceCodeCache);
 };
 
 
 // The Bootstrapper is the public interface for creating a JavaScript global
 // context.
-class Bootstrapper : public AllStatic {
+class Bootstrapper {
  public:
   // Requires: Heap::Setup has been called.
-  static void Initialize(bool create_heap_objects);
-  static void TearDown();
+  void Initialize(bool create_heap_objects);
+  void TearDown();
 
   // Creates a JavaScript Global Context with initial object graph.
   // The returned value is a global handle casted to V8Environment*.
-  static Handle<Context> CreateEnvironment(
+  Handle<Context> CreateEnvironment(
       Handle<Object> global_object,
       v8::Handle<v8::ObjectTemplate> global_template,
       v8::ExtensionConfiguration* extensions);
 
   // Detach the environment from its outer global object.
-  static void DetachGlobal(Handle<Context> env);
+  void DetachGlobal(Handle<Context> env);
 
   // Reattach an outer global object to an environment.
-  static void ReattachGlobal(Handle<Context> env, Handle<Object> global_object);
+  void ReattachGlobal(Handle<Context> env, Handle<Object> global_object);
 
   // Traverses the pointers for memory management.
-  static void Iterate(ObjectVisitor* v);
+  void Iterate(ObjectVisitor* v);
 
   // Accessor for the native scripts source code.
-  static Handle<String> NativesSourceLookup(int index);
+  Handle<String> NativesSourceLookup(int index);
 
   // Tells whether bootstrapping is active.
-  static bool IsActive() { return BootstrapperActive::IsActive(); }
+  bool IsActive() const { return nesting_ != 0; }
 
   // Support for thread preemption.
-  static int ArchiveSpacePerThread();
-  static char* ArchiveState(char* to);
-  static char* RestoreState(char* from);
-  static void FreeThreadResources();
+  RLYSTC int ArchiveSpacePerThread();
+  char* ArchiveState(char* to);
+  char* RestoreState(char* from);
+  void FreeThreadResources();
 
   // This will allocate a char array that is deleted when V8 is shut down.
   // It should only be used for strictly finite allocations.
-  static char* AllocateAutoDeletedArray(int bytes);
+  char* AllocateAutoDeletedArray(int bytes);
 
   // Used for new context creation.
-  static bool InstallExtensions(Handle<Context> global_context,
-                                v8::ExtensionConfiguration* extensions);
+  bool InstallExtensions(Handle<Context> global_context,
+                         v8::ExtensionConfiguration* extensions);
+
+  SourceCodeCache* extensions_cache() { return &extensions_cache_; }
+
+ private:
+  typedef int NestingCounterType;
+  NestingCounterType nesting_;
+  SourceCodeCache extensions_cache_;
+  // This is for delete, not delete[].
+  List<char*>* delete_these_non_arrays_on_tear_down_;
+  // This is for delete[]
+  List<char*>* delete_these_arrays_on_tear_down_;
+
+  friend class BootstrapperActive;
+  friend class Isolate;
+  friend class NativesExternalStringResource;
+
+  Bootstrapper();
+
+  DISALLOW_COPY_AND_ASSIGN(Bootstrapper);
+};
+
+
+class BootstrapperActive BASE_EMBEDDED {
+ public:
+  BootstrapperActive() {
+    ++Isolate::Current()->bootstrapper()->nesting_;
+  }
+
+  ~BootstrapperActive() {
+    --Isolate::Current()->bootstrapper()->nesting_;
+  }
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(BootstrapperActive);
 };
 
 
 class NativesExternalStringResource
     : public v8::String::ExternalAsciiStringResource {
  public:
-  explicit NativesExternalStringResource(const char* source);
+  explicit NativesExternalStringResource(Bootstrapper* bootstrapper,
+                                         const char* source);
 
   const char* data() const {
     return data_;
index 75b961c7276b43320ad28acd309fb161b1223fd0..4604872aaf581fcf02fea1862a09b1f86ebe8254 100644
@@ -107,7 +107,6 @@ BUILTIN_LIST_C(DEF_ARG_TYPE)
 
 }  // namespace
 
-
 // ----------------------------------------------------------------------------
 // Support macro for defining builtins in C++.
 // ----------------------------------------------------------------------------
@@ -123,26 +122,27 @@ BUILTIN_LIST_C(DEF_ARG_TYPE)
 
 #ifdef DEBUG
 
-#define BUILTIN(name)                                                \
-  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(           \
-      name##ArgumentsType args);                                     \
-  MUST_USE_RESULT static MaybeObject* Builtin_##name(                \
-      name##ArgumentsType args) {     \
-    args.Verify();                                                   \
-    return Builtin_Impl_##name(args);                                \
-  }                                                                  \
-  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(           \
-    name##ArgumentsType args)
+#define BUILTIN(name)                                      \
+  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
+      name##ArgumentsType args, Isolate* isolate);         \
+  MUST_USE_RESULT static MaybeObject* Builtin_##name(      \
+      name##ArgumentsType args, Isolate* isolate) {        \
+    ASSERT(isolate == Isolate::Current());                 \
+    args.Verify();                                         \
+    return Builtin_Impl_##name(args, isolate);             \
+  }                                                        \
+  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
+      name##ArgumentsType args, Isolate* isolate)
 
 #else  // For release mode.
 
-#define BUILTIN(name)                                                \
-  static MaybeObject* Builtin_##name(name##ArgumentsType args)
+#define BUILTIN(name)                                      \
+  static MaybeObject* Builtin_##name(name##ArgumentsType args, Isolate* isolate)
 
 #endif
 
 
-static inline bool CalledAsConstructor() {
+static inline bool CalledAsConstructor(Isolate* isolate) {
 #ifdef DEBUG
   // Calculate the result using a full stack frame iterator and check
   // that the state of the stack is as we assume it to be in the
@@ -153,7 +153,7 @@ static inline bool CalledAsConstructor() {
   StackFrame* frame = it.frame();
   bool reference_result = frame->is_construct();
 #endif
-  Address fp = Top::c_entry_fp(Top::GetCurrentThread());
+  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
   // Because we know fp points to an exit frame we can use the relevant
   // part of ExitFrame::ComputeCallerState directly.
   const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
@@ -172,30 +172,30 @@ static inline bool CalledAsConstructor() {
 
 // ----------------------------------------------------------------------------
 
-
 BUILTIN(Illegal) {
   UNREACHABLE();
-  return Heap::undefined_value();  // Make compiler happy.
+  return isolate->heap()->undefined_value();  // Make compiler happy.
 }
 
 
 BUILTIN(EmptyFunction) {
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 BUILTIN(ArrayCodeGeneric) {
-  Counters::array_function_runtime.Increment();
+  Heap* heap = isolate->heap();
+  isolate->counters()->array_function_runtime()->Increment();
 
   JSArray* array;
-  if (CalledAsConstructor()) {
+  if (CalledAsConstructor(isolate)) {
     array = JSArray::cast(*args.receiver());
   } else {
     // Allocate the JS Array
     JSFunction* constructor =
-        Top::context()->global_context()->array_function();
+        isolate->context()->global_context()->array_function();
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateJSObject(constructor);
+    { MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     array = JSArray::cast(obj);
@@ -212,7 +212,7 @@ BUILTIN(ArrayCodeGeneric) {
       int len = Smi::cast(obj)->value();
       if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
         Object* obj;
-        { MaybeObject* maybe_obj = Heap::AllocateFixedArrayWithHoles(len);
+        { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
           if (!maybe_obj->ToObject(&obj)) return maybe_obj;
         }
         array->SetContent(FixedArray::cast(obj));
@@ -235,7 +235,7 @@ BUILTIN(ArrayCodeGeneric) {
   int number_of_elements = args.length() - 1;
   Smi* len = Smi::FromInt(number_of_elements);
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArrayWithHoles(len->value());
+  { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len->value());
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
 
@@ -255,77 +255,81 @@ BUILTIN(ArrayCodeGeneric) {
 }
 
 
-MUST_USE_RESULT static MaybeObject* AllocateJSArray() {
+MUST_USE_RESULT static MaybeObject* AllocateJSArray(Heap* heap) {
   JSFunction* array_function =
-      Top::context()->global_context()->array_function();
+      heap->isolate()->context()->global_context()->array_function();
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateJSObject(array_function);
+  { MaybeObject* maybe_result = heap->AllocateJSObject(array_function);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   return result;
 }
 
 
-MUST_USE_RESULT static MaybeObject* AllocateEmptyJSArray() {
+MUST_USE_RESULT static MaybeObject* AllocateEmptyJSArray(Heap* heap) {
   Object* result;
-  { MaybeObject* maybe_result = AllocateJSArray();
+  { MaybeObject* maybe_result = AllocateJSArray(heap);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSArray* result_array = JSArray::cast(result);
   result_array->set_length(Smi::FromInt(0));
-  result_array->set_elements(Heap::empty_fixed_array());
+  result_array->set_elements(heap->empty_fixed_array());
   return result_array;
 }
 
 
-static void CopyElements(AssertNoAllocation* no_gc,
+static void CopyElements(Heap* heap,
+                         AssertNoAllocation* no_gc,
                          FixedArray* dst,
                          int dst_index,
                          FixedArray* src,
                          int src_index,
                          int len) {
   ASSERT(dst != src);  // Use MoveElements instead.
-  ASSERT(dst->map() != Heap::fixed_cow_array_map());
+  ASSERT(dst->map() != HEAP->fixed_cow_array_map());
   ASSERT(len > 0);
   CopyWords(dst->data_start() + dst_index,
             src->data_start() + src_index,
             len);
   WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
   if (mode == UPDATE_WRITE_BARRIER) {
-    Heap::RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
+    heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
   }
 }
 
 
-static void MoveElements(AssertNoAllocation* no_gc,
+static void MoveElements(Heap* heap,
+                         AssertNoAllocation* no_gc,
                          FixedArray* dst,
                          int dst_index,
                          FixedArray* src,
                          int src_index,
                          int len) {
-  ASSERT(dst->map() != Heap::fixed_cow_array_map());
+  ASSERT(dst->map() != HEAP->fixed_cow_array_map());
   memmove(dst->data_start() + dst_index,
           src->data_start() + src_index,
           len * kPointerSize);
   WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
   if (mode == UPDATE_WRITE_BARRIER) {
-    Heap::RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
+    heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
   }
 }
 
 
-static void FillWithHoles(FixedArray* dst, int from, int to) {
-  ASSERT(dst->map() != Heap::fixed_cow_array_map());
-  MemsetPointer(dst->data_start() + from, Heap::the_hole_value(), to - from);
+static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
+  ASSERT(dst->map() != heap->fixed_cow_array_map());
+  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
 }
 
 
-static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
-  ASSERT(elms->map() != Heap::fixed_cow_array_map());
+static FixedArray* LeftTrimFixedArray(Heap* heap,
+                                      FixedArray* elms,
+                                      int to_trim) {
+  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
   // For now this trick is only applied to fixed arrays in new and paged space.
   // In large object space the object's start must coincide with chunk
   // and thus the trick is just not applicable.
-  ASSERT(!Heap::lo_space()->Contains(elms));
+  ASSERT(!HEAP->lo_space()->Contains(elms));
 
   STATIC_ASSERT(FixedArray::kMapOffset == 0);
   STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize);
@@ -336,7 +340,7 @@ static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
   const int len = elms->length();
 
   if (to_trim > FixedArray::kHeaderSize / kPointerSize &&
-      !Heap::new_space()->Contains(elms)) {
+      !heap->new_space()->Contains(elms)) {
     // If we are doing a big trim in old space then we zap the space that was
     // formerly part of the array so that the GC (aided by the card-based
     // remembered set) won't find pointers to new-space there.
@@ -349,9 +353,9 @@ static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
-  Heap::CreateFillerObjectAt(elms->address(), to_trim * kPointerSize);
+  heap->CreateFillerObjectAt(elms->address(), to_trim * kPointerSize);
 
-  former_start[to_trim] = Heap::fixed_array_map();
+  former_start[to_trim] = heap->fixed_array_map();
   former_start[to_trim + 1] = Smi::FromInt(len - to_trim);
 
   return FixedArray::cast(HeapObject::FromAddress(
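
For reference, the in-place trim above amounts to the following word-level relabeling (diagram is illustrative; LeftTrimFixedArray itself copies or moves no element payload):

// LeftTrimFixedArray(heap, elms, to_trim), in words of kPointerSize:
//
//   before:  | map | length = N | e0 | ... | e(N-1) |
//            ^ elms->address()
//
//   after:   | filler (to_trim words) | map | length = N - to_trim
//                                     ^ address of the returned FixedArray
//                                     | e(to_trim) | ... | e(N-1) |
//
// i.e. the returned array starts at elms->address() + to_trim * kPointerSize
// and keeps the tail of the original backing store.
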
@@ -359,20 +363,21 @@ static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) {
 }
 
 
-static bool ArrayPrototypeHasNoElements(Context* global_context,
+static bool ArrayPrototypeHasNoElements(Heap* heap,
+                                        Context* global_context,
                                         JSObject* array_proto) {
   // This method depends on non writability of Object and Array prototype
   // fields.
-  if (array_proto->elements() != Heap::empty_fixed_array()) return false;
+  if (array_proto->elements() != heap->empty_fixed_array()) return false;
   // Hidden prototype
   array_proto = JSObject::cast(array_proto->GetPrototype());
-  ASSERT(array_proto->elements() == Heap::empty_fixed_array());
+  ASSERT(array_proto->elements() == heap->empty_fixed_array());
   // Object.prototype
   Object* proto = array_proto->GetPrototype();
-  if (proto == Heap::null_value()) return false;
+  if (proto == heap->null_value()) return false;
   array_proto = JSObject::cast(proto);
   if (array_proto != global_context->initial_object_prototype()) return false;
-  if (array_proto->elements() != Heap::empty_fixed_array()) return false;
+  if (array_proto->elements() != heap->empty_fixed_array()) return false;
   ASSERT(array_proto->GetPrototype()->IsNull());
   return true;
 }
@@ -380,35 +385,38 @@ static bool ArrayPrototypeHasNoElements(Context* global_context,
 
 MUST_USE_RESULT
 static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
-    Object* receiver) {
+    Heap* heap, Object* receiver) {
   if (!receiver->IsJSArray()) return NULL;
   JSArray* array = JSArray::cast(receiver);
   HeapObject* elms = array->elements();
-  if (elms->map() == Heap::fixed_array_map()) return elms;
-  if (elms->map() == Heap::fixed_cow_array_map()) {
+  if (elms->map() == heap->fixed_array_map()) return elms;
+  if (elms->map() == heap->fixed_cow_array_map()) {
     return array->EnsureWritableFastElements();
   }
   return NULL;
 }
 
 
-static inline bool IsJSArrayFastElementMovingAllowed(JSArray* receiver) {
-  Context* global_context = Top::context()->global_context();
+static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
+                                                     JSArray* receiver) {
+  Context* global_context = heap->isolate()->context()->global_context();
   JSObject* array_proto =
       JSObject::cast(global_context->array_function()->prototype());
   return receiver->GetPrototype() == array_proto &&
-         ArrayPrototypeHasNoElements(global_context, array_proto);
+         ArrayPrototypeHasNoElements(heap, global_context, array_proto);
 }
 
 
 MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
+    Isolate* isolate,
     const char* name,
     BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
-  HandleScope handleScope;
+  HandleScope handleScope(isolate);
 
   Handle<Object> js_builtin =
-      GetProperty(Handle<JSObject>(Top::global_context()->builtins()),
-                  name);
+      GetProperty(Handle<JSObject>(
+          isolate->global_context()->builtins()),
+          name);
   ASSERT(js_builtin->IsJSFunction());
   Handle<JSFunction> function(Handle<JSFunction>::cast(js_builtin));
   ScopedVector<Object**> argv(args.length() - 1);
@@ -428,11 +436,14 @@ MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
 
 
 BUILTIN(ArrayPush) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   Object* elms_obj;
   { MaybeObject* maybe_elms_obj =
-        EnsureJSArrayWithWritableFastElements(receiver);
-    if (maybe_elms_obj == NULL) return CallJsBuiltin("ArrayPush", args);
+        EnsureJSArrayWithWritableFastElements(heap, receiver);
+    if (maybe_elms_obj == NULL) {
+      return CallJsBuiltin(isolate, "ArrayPush", args);
+    }
     if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
   }
   FixedArray* elms = FixedArray::cast(elms_obj);
@@ -453,16 +464,16 @@ BUILTIN(ArrayPush) {
     // New backing storage is needed.
     int capacity = new_length + (new_length >> 1) + 16;
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateUninitializedFixedArray(capacity);
+    { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     FixedArray* new_elms = FixedArray::cast(obj);
 
     AssertNoAllocation no_gc;
     if (len > 0) {
-      CopyElements(&no_gc, new_elms, 0, elms, 0, len);
+      CopyElements(heap, &no_gc, new_elms, 0, elms, 0, len);
     }
-    FillWithHoles(new_elms, new_length, capacity);
+    FillWithHoles(heap, new_elms, new_length, capacity);
 
     elms = new_elms;
     array->set_elements(elms);
@@ -482,18 +493,19 @@ BUILTIN(ArrayPush) {
 
 
 BUILTIN(ArrayPop) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   Object* elms_obj;
   { MaybeObject* maybe_elms_obj =
-        EnsureJSArrayWithWritableFastElements(receiver);
-    if (maybe_elms_obj == NULL) return CallJsBuiltin("ArrayPop", args);
+        EnsureJSArrayWithWritableFastElements(heap, receiver);
+    if (maybe_elms_obj == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
     if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
   }
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
 
   int len = Smi::cast(array->length())->value();
-  if (len == 0) return Heap::undefined_value();
+  if (len == 0) return heap->undefined_value();
 
   // Get top element
   MaybeObject* top = elms->get(len - 1);
@@ -514,38 +526,40 @@ BUILTIN(ArrayPop) {
 
 
 BUILTIN(ArrayShift) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   Object* elms_obj;
   { MaybeObject* maybe_elms_obj =
-        EnsureJSArrayWithWritableFastElements(receiver);
-    if (maybe_elms_obj == NULL) return CallJsBuiltin("ArrayShift", args);
+        EnsureJSArrayWithWritableFastElements(heap, receiver);
+    if (maybe_elms_obj == NULL)
+        return CallJsBuiltin(isolate, "ArrayShift", args);
     if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
   }
-  if (!IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
-    return CallJsBuiltin("ArrayShift", args);
+  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
+    return CallJsBuiltin(isolate, "ArrayShift", args);
   }
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
   ASSERT(array->HasFastElements());
 
   int len = Smi::cast(array->length())->value();
-  if (len == 0) return Heap::undefined_value();
+  if (len == 0) return heap->undefined_value();
 
   // Get first element
   Object* first = elms->get(0);
   if (first->IsTheHole()) {
-    first = Heap::undefined_value();
+    first = heap->undefined_value();
   }
 
-  if (!Heap::lo_space()->Contains(elms)) {
+  if (!heap->lo_space()->Contains(elms)) {
     // As elms still in the same space they used to be,
     // there is no need to update region dirty mark.
-    array->set_elements(LeftTrimFixedArray(elms, 1), SKIP_WRITE_BARRIER);
+    array->set_elements(LeftTrimFixedArray(heap, elms, 1), SKIP_WRITE_BARRIER);
   } else {
     // Shift the elements.
     AssertNoAllocation no_gc;
-    MoveElements(&no_gc, elms, 0, elms, 1, len - 1);
-    elms->set(len - 1, Heap::the_hole_value());
+    MoveElements(heap, &no_gc, elms, 0, elms, 1, len - 1);
+    elms->set(len - 1, heap->the_hole_value());
   }
 
   // Set the length.
@@ -556,15 +570,17 @@ BUILTIN(ArrayShift) {
 
 
 BUILTIN(ArrayUnshift) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   Object* elms_obj;
   { MaybeObject* maybe_elms_obj =
-        EnsureJSArrayWithWritableFastElements(receiver);
-    if (maybe_elms_obj == NULL) return CallJsBuiltin("ArrayUnshift", args);
+        EnsureJSArrayWithWritableFastElements(heap, receiver);
+    if (maybe_elms_obj == NULL)
+        return CallJsBuiltin(isolate, "ArrayUnshift", args);
     if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
   }
-  if (!IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
-    return CallJsBuiltin("ArrayUnshift", args);
+  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
+    return CallJsBuiltin(isolate, "ArrayUnshift", args);
   }
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
@@ -581,22 +597,22 @@ BUILTIN(ArrayUnshift) {
     // New backing storage is needed.
     int capacity = new_length + (new_length >> 1) + 16;
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateUninitializedFixedArray(capacity);
+    { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     FixedArray* new_elms = FixedArray::cast(obj);
 
     AssertNoAllocation no_gc;
     if (len > 0) {
-      CopyElements(&no_gc, new_elms, to_add, elms, 0, len);
+      CopyElements(heap, &no_gc, new_elms, to_add, elms, 0, len);
     }
-    FillWithHoles(new_elms, new_length, capacity);
+    FillWithHoles(heap, new_elms, new_length, capacity);
 
     elms = new_elms;
     array->set_elements(elms);
   } else {
     AssertNoAllocation no_gc;
-    MoveElements(&no_gc, elms, to_add, elms, 0, len);
+    MoveElements(heap, &no_gc, elms, to_add, elms, 0, len);
   }
 
   // Add the provided values.
@@ -613,14 +629,15 @@ BUILTIN(ArrayUnshift) {
 
 
 BUILTIN(ArraySlice) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   FixedArray* elms;
   int len = -1;
   if (receiver->IsJSArray()) {
     JSArray* array = JSArray::cast(receiver);
     if (!array->HasFastElements() ||
-        !IsJSArrayFastElementMovingAllowed(array)) {
-      return CallJsBuiltin("ArraySlice", args);
+        !IsJSArrayFastElementMovingAllowed(heap, array)) {
+      return CallJsBuiltin(isolate, "ArraySlice", args);
     }
 
     elms = FixedArray::cast(array->elements());
@@ -629,28 +646,28 @@ BUILTIN(ArraySlice) {
     // Array.slice(arguments, ...) is quite a common idiom (notably more
     // than 50% of invocations in Web apps).  Treat it in C++ as well.
     Map* arguments_map =
-        Top::context()->global_context()->arguments_boilerplate()->map();
+        isolate->context()->global_context()->arguments_boilerplate()->map();
 
     bool is_arguments_object_with_fast_elements =
         receiver->IsJSObject()
         && JSObject::cast(receiver)->map() == arguments_map
         && JSObject::cast(receiver)->HasFastElements();
     if (!is_arguments_object_with_fast_elements) {
-      return CallJsBuiltin("ArraySlice", args);
+      return CallJsBuiltin(isolate, "ArraySlice", args);
     }
     elms = FixedArray::cast(JSObject::cast(receiver)->elements());
     Object* len_obj = JSObject::cast(receiver)
         ->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
     if (!len_obj->IsSmi()) {
-      return CallJsBuiltin("ArraySlice", args);
+      return CallJsBuiltin(isolate, "ArraySlice", args);
     }
     len = Smi::cast(len_obj)->value();
     if (len > elms->length()) {
-      return CallJsBuiltin("ArraySlice", args);
+      return CallJsBuiltin(isolate, "ArraySlice", args);
     }
     for (int i = 0; i < len; i++) {
-      if (elms->get(i) == Heap::the_hole_value()) {
-        return CallJsBuiltin("ArraySlice", args);
+      if (elms->get(i) == heap->the_hole_value()) {
+        return CallJsBuiltin(isolate, "ArraySlice", args);
       }
     }
   }
@@ -667,14 +684,14 @@ BUILTIN(ArraySlice) {
     if (arg1->IsSmi()) {
       relative_start = Smi::cast(arg1)->value();
     } else if (!arg1->IsUndefined()) {
-      return CallJsBuiltin("ArraySlice", args);
+      return CallJsBuiltin(isolate, "ArraySlice", args);
     }
     if (n_arguments > 1) {
       Object* arg2 = args[2];
       if (arg2->IsSmi()) {
         relative_end = Smi::cast(arg2)->value();
       } else if (!arg2->IsUndefined()) {
-        return CallJsBuiltin("ArraySlice", args);
+        return CallJsBuiltin(isolate, "ArraySlice", args);
       }
     }
   }
@@ -690,23 +707,23 @@ BUILTIN(ArraySlice) {
   // Calculate the length of result array.
   int result_len = final - k;
   if (result_len <= 0) {
-    return AllocateEmptyJSArray();
+    return AllocateEmptyJSArray(heap);
   }
 
   Object* result;
-  { MaybeObject* maybe_result = AllocateJSArray();
+  { MaybeObject* maybe_result = AllocateJSArray(heap);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSArray* result_array = JSArray::cast(result);
 
   { MaybeObject* maybe_result =
-        Heap::AllocateUninitializedFixedArray(result_len);
+        heap->AllocateUninitializedFixedArray(result_len);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   FixedArray* result_elms = FixedArray::cast(result);
 
   AssertNoAllocation no_gc;
-  CopyElements(&no_gc, result_elms, 0, elms, k, result_len);
+  CopyElements(heap, &no_gc, result_elms, 0, elms, k, result_len);
 
   // Set elements.
   result_array->set_elements(result_elms);
@@ -718,15 +735,17 @@ BUILTIN(ArraySlice) {
 
 
 BUILTIN(ArraySplice) {
+  Heap* heap = isolate->heap();
   Object* receiver = *args.receiver();
   Object* elms_obj;
   { MaybeObject* maybe_elms_obj =
-        EnsureJSArrayWithWritableFastElements(receiver);
-    if (maybe_elms_obj == NULL) return CallJsBuiltin("ArraySplice", args);
+        EnsureJSArrayWithWritableFastElements(heap, receiver);
+    if (maybe_elms_obj == NULL)
+        return CallJsBuiltin(isolate, "ArraySplice", args);
     if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
   }
-  if (!IsJSArrayFastElementMovingAllowed(JSArray::cast(receiver))) {
-    return CallJsBuiltin("ArraySplice", args);
+  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
+    return CallJsBuiltin(isolate, "ArraySplice", args);
   }
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
@@ -742,7 +761,7 @@ BUILTIN(ArraySplice) {
     if (arg1->IsSmi()) {
       relative_start = Smi::cast(arg1)->value();
     } else if (!arg1->IsUndefined()) {
-      return CallJsBuiltin("ArraySplice", args);
+      return CallJsBuiltin(isolate, "ArraySplice", args);
     }
   }
   int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
@@ -764,7 +783,7 @@ BUILTIN(ArraySplice) {
       if (arg2->IsSmi()) {
         value = Smi::cast(arg2)->value();
       } else {
-        return CallJsBuiltin("ArraySplice", args);
+        return CallJsBuiltin(isolate, "ArraySplice", args);
       }
     }
     actual_delete_count = Min(Max(value, 0), len - actual_start);
@@ -773,27 +792,28 @@ BUILTIN(ArraySplice) {
   JSArray* result_array = NULL;
   if (actual_delete_count == 0) {
     Object* result;
-    { MaybeObject* maybe_result = AllocateEmptyJSArray();
+    { MaybeObject* maybe_result = AllocateEmptyJSArray(heap);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     result_array = JSArray::cast(result);
   } else {
     // Allocate result array.
     Object* result;
-    { MaybeObject* maybe_result = AllocateJSArray();
+    { MaybeObject* maybe_result = AllocateJSArray(heap);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     result_array = JSArray::cast(result);
 
     { MaybeObject* maybe_result =
-          Heap::AllocateUninitializedFixedArray(actual_delete_count);
+          heap->AllocateUninitializedFixedArray(actual_delete_count);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     FixedArray* result_elms = FixedArray::cast(result);
 
     AssertNoAllocation no_gc;
     // Fill newly created array.
-    CopyElements(&no_gc,
+    CopyElements(heap,
+                 &no_gc,
                  result_elms, 0,
                  elms, actual_start,
                  actual_delete_count);
@@ -811,7 +831,7 @@ BUILTIN(ArraySplice) {
 
   if (item_count < actual_delete_count) {
     // Shrink the array.
-    const bool trim_array = !Heap::lo_space()->Contains(elms) &&
+    const bool trim_array = !heap->lo_space()->Contains(elms) &&
       ((actual_start + item_count) <
           (len - actual_delete_count - actual_start));
     if (trim_array) {
@@ -822,15 +842,15 @@ BUILTIN(ArraySplice) {
         memmove(start + delta, start, actual_start * kPointerSize);
       }
 
-      elms = LeftTrimFixedArray(elms, delta);
+      elms = LeftTrimFixedArray(heap, elms, delta);
       array->set_elements(elms, SKIP_WRITE_BARRIER);
     } else {
       AssertNoAllocation no_gc;
-      MoveElements(&no_gc,
+      MoveElements(heap, &no_gc,
                    elms, actual_start + item_count,
                    elms, actual_start + actual_delete_count,
                    (len - actual_delete_count - actual_start));
-      FillWithHoles(elms, new_length, len);
+      FillWithHoles(heap, elms, new_length, len);
     }
   } else if (item_count > actual_delete_count) {
     // Currently fixed arrays cannot grow too big, so
@@ -843,7 +863,7 @@ BUILTIN(ArraySplice) {
       int capacity = new_length + (new_length >> 1) + 16;
       Object* obj;
       { MaybeObject* maybe_obj =
-            Heap::AllocateUninitializedFixedArray(capacity);
+            heap->AllocateUninitializedFixedArray(capacity);
         if (!maybe_obj->ToObject(&obj)) return maybe_obj;
       }
       FixedArray* new_elms = FixedArray::cast(obj);
@@ -851,22 +871,22 @@ BUILTIN(ArraySplice) {
       AssertNoAllocation no_gc;
       // Copy the part before actual_start as is.
       if (actual_start > 0) {
-        CopyElements(&no_gc, new_elms, 0, elms, 0, actual_start);
+        CopyElements(heap, &no_gc, new_elms, 0, elms, 0, actual_start);
       }
       const int to_copy = len - actual_delete_count - actual_start;
       if (to_copy > 0) {
-        CopyElements(&no_gc,
+        CopyElements(heap, &no_gc,
                      new_elms, actual_start + item_count,
                      elms, actual_start + actual_delete_count,
                      to_copy);
       }
-      FillWithHoles(new_elms, new_length, capacity);
+      FillWithHoles(heap, new_elms, new_length, capacity);
 
       elms = new_elms;
       array->set_elements(elms);
     } else {
       AssertNoAllocation no_gc;
-      MoveElements(&no_gc,
+      MoveElements(heap, &no_gc,
                    elms, actual_start + item_count,
                    elms, actual_start + actual_delete_count,
                    (len - actual_delete_count - actual_start));
@@ -887,11 +907,12 @@ BUILTIN(ArraySplice) {
 
 
 BUILTIN(ArrayConcat) {
-  Context* global_context = Top::context()->global_context();
+  Heap* heap = isolate->heap();
+  Context* global_context = isolate->context()->global_context();
   JSObject* array_proto =
       JSObject::cast(global_context->array_function()->prototype());
-  if (!ArrayPrototypeHasNoElements(global_context, array_proto)) {
-    return CallJsBuiltin("ArrayConcat", args);
+  if (!ArrayPrototypeHasNoElements(heap, global_context, array_proto)) {
+    return CallJsBuiltin(isolate, "ArrayConcat", args);
   }
 
   // Iterate through all the arguments performing checks
@@ -902,7 +923,7 @@ BUILTIN(ArrayConcat) {
     Object* arg = args[i];
     if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements()
         || JSArray::cast(arg)->GetPrototype() != array_proto) {
-      return CallJsBuiltin("ArrayConcat", args);
+      return CallJsBuiltin(isolate, "ArrayConcat", args);
     }
 
     int len = Smi::cast(JSArray::cast(arg)->length())->value();
@@ -915,23 +936,23 @@ BUILTIN(ArrayConcat) {
     ASSERT(result_len >= 0);
 
     if (result_len > FixedArray::kMaxLength) {
-      return CallJsBuiltin("ArrayConcat", args);
+      return CallJsBuiltin(isolate, "ArrayConcat", args);
     }
   }
 
   if (result_len == 0) {
-    return AllocateEmptyJSArray();
+    return AllocateEmptyJSArray(heap);
   }
 
   // Allocate result.
   Object* result;
-  { MaybeObject* maybe_result = AllocateJSArray();
+  { MaybeObject* maybe_result = AllocateJSArray(heap);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSArray* result_array = JSArray::cast(result);
 
   { MaybeObject* maybe_result =
-        Heap::AllocateUninitializedFixedArray(result_len);
+        heap->AllocateUninitializedFixedArray(result_len);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   FixedArray* result_elms = FixedArray::cast(result);
@@ -944,7 +965,7 @@ BUILTIN(ArrayConcat) {
     int len = Smi::cast(array->length())->value();
     if (len > 0) {
       FixedArray* elms = FixedArray::cast(array->elements());
-      CopyElements(&no_gc, result_elms, start_pos, elms, 0, len);
+      CopyElements(heap, &no_gc, result_elms, start_pos, elms, 0, len);
       start_pos += len;
     }
   }
@@ -964,29 +985,29 @@ BUILTIN(ArrayConcat) {
 
 BUILTIN(StrictArgumentsCallee) {
   HandleScope scope;
-  return Top::Throw(*Factory::NewTypeError("strict_arguments_callee",
-                                           HandleVector<Object>(NULL, 0)));
+  return isolate->Throw(*isolate->factory()->NewTypeError(
+      "strict_arguments_callee", HandleVector<Object>(NULL, 0)));
 }
 
 
 BUILTIN(StrictArgumentsCaller) {
   HandleScope scope;
-  return Top::Throw(*Factory::NewTypeError("strict_arguments_caller",
-                                           HandleVector<Object>(NULL, 0)));
+  return isolate->Throw(*isolate->factory()->NewTypeError(
+      "strict_arguments_caller", HandleVector<Object>(NULL, 0)));
 }
 
 
 BUILTIN(StrictFunctionCaller) {
   HandleScope scope;
-  return Top::Throw(*Factory::NewTypeError("strict_function_caller",
-                                            HandleVector<Object>(NULL, 0)));
+  return isolate->Throw(*isolate->factory()->NewTypeError(
+      "strict_function_caller", HandleVector<Object>(NULL, 0)));
 }
 
 
 BUILTIN(StrictFunctionArguments) {
   HandleScope scope;
-  return Top::Throw(*Factory::NewTypeError("strict_function_arguments",
-                                            HandleVector<Object>(NULL, 0)));
+  return isolate->Throw(*isolate->factory()->NewTypeError(
+      "strict_function_arguments", HandleVector<Object>(NULL, 0)));
 }
 
 
@@ -1000,7 +1021,8 @@ BUILTIN(StrictFunctionArguments) {
 // overwritten with undefined.  Arguments that do fit the expected
 // type are overwritten with the object in the prototype chain that
 // actually has that type.
-static inline Object* TypeCheck(int argc,
+static inline Object* TypeCheck(Heap* heap,
+                                int argc,
                                 Object** argv,
                                 FunctionTemplateInfo* info) {
   Object* recv = argv[0];
@@ -1012,12 +1034,12 @@ static inline Object* TypeCheck(int argc,
 
   Object* holder = recv;
   if (!recv_type->IsUndefined()) {
-    for (; holder != Heap::null_value(); holder = holder->GetPrototype()) {
+    for (; holder != heap->null_value(); holder = holder->GetPrototype()) {
       if (holder->IsInstanceOf(FunctionTemplateInfo::cast(recv_type))) {
         break;
       }
     }
-    if (holder == Heap::null_value()) return holder;
+    if (holder == heap->null_value()) return holder;
   }
   Object* args_obj = sig->args();
   // If there is no argument signature we're done
@@ -1030,13 +1052,13 @@ static inline Object* TypeCheck(int argc,
     if (argtype->IsUndefined()) continue;
     Object** arg = &argv[-1 - i];
     Object* current = *arg;
-    for (; current != Heap::null_value(); current = current->GetPrototype()) {
+    for (; current != heap->null_value(); current = current->GetPrototype()) {
       if (current->IsInstanceOf(FunctionTemplateInfo::cast(argtype))) {
         *arg = current;
         break;
       }
     }
-    if (current == Heap::null_value()) *arg = Heap::undefined_value();
+    if (current == heap->null_value()) *arg = heap->undefined_value();
   }
   return holder;
 }
@@ -1044,31 +1066,33 @@ static inline Object* TypeCheck(int argc,
 
 template <bool is_construct>
 MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
-    BuiltinArguments<NEEDS_CALLED_FUNCTION> args) {
-  ASSERT(is_construct == CalledAsConstructor());
+    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
+  ASSERT(is_construct == CalledAsConstructor(isolate));
+  Heap* heap = isolate->heap();
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<JSFunction> function = args.called_function();
   ASSERT(function->shared()->IsApiFunction());
 
   FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
   if (is_construct) {
-    Handle<FunctionTemplateInfo> desc(fun_data);
+    Handle<FunctionTemplateInfo> desc(fun_data, isolate);
     bool pending_exception = false;
-    Factory::ConfigureInstance(desc, Handle<JSObject>::cast(args.receiver()),
-                               &pending_exception);
-    ASSERT(Top::has_pending_exception() == pending_exception);
+    isolate->factory()->ConfigureInstance(
+        desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
+    ASSERT(isolate->has_pending_exception() == pending_exception);
     if (pending_exception) return Failure::Exception();
     fun_data = *desc;
   }
 
-  Object* raw_holder = TypeCheck(args.length(), &args[0], fun_data);
+  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
 
   if (raw_holder->IsNull()) {
     // This function cannot be called with the given receiver.  Abort!
     Handle<Object> obj =
-        Factory::NewTypeError("illegal_invocation", HandleVector(&function, 1));
-    return Top::Throw(*obj);
+        isolate->factory()->NewTypeError(
+            "illegal_invocation", HandleVector(&function, 1));
+    return isolate->Throw(*obj);
   }
 
   Object* raw_call_data = fun_data->call_code();
@@ -1080,10 +1104,10 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
     Object* data_obj = call_data->data();
     Object* result;
 
-    LOG(ApiObjectAccess("call", JSObject::cast(*args.receiver())));
+    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
     ASSERT(raw_holder->IsJSObject());
 
-    CustomArguments custom;
+    CustomArguments custom(isolate);
     v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
         data_obj, *function, raw_holder);
 
@@ -1096,17 +1120,18 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
     v8::Handle<v8::Value> value;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
-      ExternalCallbackScope call_scope(v8::ToCData<Address>(callback_obj));
+      VMState state(isolate, EXTERNAL);
+      ExternalCallbackScope call_scope(isolate,
+                                       v8::ToCData<Address>(callback_obj));
       value = callback(new_args);
     }
     if (value.IsEmpty()) {
-      result = Heap::undefined_value();
+      result = heap->undefined_value();
     } else {
       result = *reinterpret_cast<Object**>(*value);
     }
 
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!is_construct || result->IsJSObject()) return result;
   }
 
@@ -1115,12 +1140,12 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
 
 
 BUILTIN(HandleApiCall) {
-  return HandleApiCallHelper<false>(args);
+  return HandleApiCallHelper<false>(args, isolate);
 }
 
 
 BUILTIN(HandleApiCallConstruct) {
-  return HandleApiCallHelper<true>(args);
+  return HandleApiCallHelper<true>(args, isolate);
 }
 
 
@@ -1142,7 +1167,8 @@ static void VerifyTypeCheck(Handle<JSObject> object,
 
 
 BUILTIN(FastHandleApiCall) {
-  ASSERT(!CalledAsConstructor());
+  ASSERT(!CalledAsConstructor(isolate));
+  Heap* heap = isolate->heap();
   const bool is_construct = false;
 
   // We expect four more arguments: callback, function, call data, and holder.
@@ -1161,25 +1187,26 @@ BUILTIN(FastHandleApiCall) {
   VerifyTypeCheck(Utils::OpenHandle(*new_args.Holder()),
                   Utils::OpenHandle(*new_args.Callee()));
 #endif
-  HandleScope scope;
+  HandleScope scope(isolate);
   Object* result;
   v8::Handle<v8::Value> value;
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
-    ExternalCallbackScope call_scope(v8::ToCData<Address>(callback_obj));
+    VMState state(isolate, EXTERNAL);
+    ExternalCallbackScope call_scope(isolate,
+                                     v8::ToCData<Address>(callback_obj));
     v8::InvocationCallback callback =
         v8::ToCData<v8::InvocationCallback>(callback_obj);
 
     value = callback(new_args);
   }
   if (value.IsEmpty()) {
-    result = Heap::undefined_value();
+    result = heap->undefined_value();
   } else {
     result = *reinterpret_cast<Object**>(*value);
   }
 
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return result;
 }
 
@@ -1188,11 +1215,13 @@ BUILTIN(FastHandleApiCall) {
 // API. The object can be called as either a constructor (using new) or just as
 // a function (without new).
 MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
+    Isolate* isolate,
     bool is_construct_call,
     BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
   // Non-functions are never called as constructors. Even if this is an object
   // called as a constructor the delegate call is not a construct call.
-  ASSERT(!CalledAsConstructor());
+  ASSERT(!CalledAsConstructor(isolate));
+  Heap* heap = isolate->heap();
 
   Handle<Object> receiver = args.at<Object>(0);
 
@@ -1215,11 +1244,10 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
   // Get the data for the call and perform the callback.
   Object* result;
   {
-    HandleScope scope;
-
-    LOG(ApiObjectAccess("call non-function", obj));
+    HandleScope scope(isolate);
+    LOG(isolate, ApiObjectAccess("call non-function", obj));
 
-    CustomArguments custom;
+    CustomArguments custom(isolate);
     v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
         call_data->data(), constructor, obj);
     v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
@@ -1230,18 +1258,19 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
     v8::Handle<v8::Value> value;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
-      ExternalCallbackScope call_scope(v8::ToCData<Address>(callback_obj));
+      VMState state(isolate, EXTERNAL);
+      ExternalCallbackScope call_scope(isolate,
+                                       v8::ToCData<Address>(callback_obj));
       value = callback(new_args);
     }
     if (value.IsEmpty()) {
-      result = Heap::undefined_value();
+      result = heap->undefined_value();
     } else {
       result = *reinterpret_cast<Object**>(*value);
     }
   }
   // Check for exceptions and return result.
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return result;
 }
 
@@ -1249,14 +1278,14 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
 // Handle calls to non-function objects created through the API. This delegate
 // function is used when the call is a normal function call.
 BUILTIN(HandleApiCallAsFunction) {
-  return HandleApiCallAsFunctionOrConstructor(false, args);
+  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
 }
 
 
 // Handle calls to non-function objects created through the API. This delegate
 // function is used when the call is a construct call.
 BUILTIN(HandleApiCallAsConstructor) {
-  return HandleApiCallAsFunctionOrConstructor(true, args);
+  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
 }
 
 
@@ -1465,73 +1494,112 @@ static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
 }
 #endif
 
-Object* Builtins::builtins_[builtin_count] = { NULL, };
-const char* Builtins::names_[builtin_count] = { NULL, };
+
+Builtins::Builtins() : initialized_(false) {
+  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
+  memset(names_, 0, sizeof(names_[0]) * builtin_count);
+}
+
+
+Builtins::~Builtins() {
+}
+
 
 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
-  Address Builtins::c_functions_[cfunction_count] = {
-    BUILTIN_LIST_C(DEF_ENUM_C)
-  };
+Address const Builtins::c_functions_[cfunction_count] = {
+  BUILTIN_LIST_C(DEF_ENUM_C)
+};
 #undef DEF_ENUM_C
 
 #define DEF_JS_NAME(name, ignore) #name,
 #define DEF_JS_ARGC(ignore, argc) argc,
-const char* Builtins::javascript_names_[id_count] = {
+const char* const Builtins::javascript_names_[id_count] = {
   BUILTINS_LIST_JS(DEF_JS_NAME)
 };
 
-int Builtins::javascript_argc_[id_count] = {
+int const Builtins::javascript_argc_[id_count] = {
   BUILTINS_LIST_JS(DEF_JS_ARGC)
 };
 #undef DEF_JS_NAME
 #undef DEF_JS_ARGC
 
-static bool is_initialized = false;
-void Builtins::Setup(bool create_heap_objects) {
-  ASSERT(!is_initialized);
+struct BuiltinDesc {
+  byte* generator;
+  byte* c_code;
+  const char* s_name;  // name is only used for generating log information.
+  int name;
+  Code::Flags flags;
+  BuiltinExtraArguments extra_args;
+};
 
-  // Create a scope for the handles in the builtins.
-  HandleScope scope;
+class BuiltinFunctionTable {
+ public:
+  BuiltinFunctionTable() {
+    Builtins::InitBuiltinFunctionTable();
+  }
+
+  static const BuiltinDesc* functions() { return functions_; }
+
+ private:
+  static BuiltinDesc functions_[Builtins::builtin_count + 1];
+
+  friend class Builtins;
+};
 
-  struct BuiltinDesc {
-    byte* generator;
-    byte* c_code;
-    const char* s_name;  // name is only used for generating log information.
-    int name;
-    Code::Flags flags;
-    BuiltinExtraArguments extra_args;
-  };
-
-#define DEF_FUNCTION_PTR_C(name, extra_args) \
-    { FUNCTION_ADDR(Generate_Adaptor),            \
-      FUNCTION_ADDR(Builtin_##name),              \
-      #name,                                      \
-      c_##name,                                   \
-      Code::ComputeFlags(Code::BUILTIN),          \
-      extra_args                                  \
-    },
-
-#define DEF_FUNCTION_PTR_A(name, kind, state, extra)              \
-    { FUNCTION_ADDR(Generate_##name),                             \
-      NULL,                                                       \
-      #name,                                                      \
-      name,                                                       \
-      Code::ComputeFlags(Code::kind, NOT_IN_LOOP, state, extra),  \
-      NO_EXTRA_ARGUMENTS                                          \
-    },
-
-  // Define array of pointers to generators and C builtin functions.
-  static BuiltinDesc functions[] = {
-      BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
-      BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
-      BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
-      // Terminator:
-      { NULL, NULL, NULL, builtin_count, static_cast<Code::Flags>(0),
-        NO_EXTRA_ARGUMENTS }
-  };
+BuiltinDesc BuiltinFunctionTable::functions_[Builtins::builtin_count + 1];
+
+static const BuiltinFunctionTable builtin_function_table_init;
+
+// Define array of pointers to generators and C builtin functions.
+// We do this in a sort of roundabout way so that we can do the initialization
+// within the lexical scope of Builtins:: and within a context where
+// Code::Flags names a non-abstract type.
+void Builtins::InitBuiltinFunctionTable() {
+  BuiltinDesc* functions = BuiltinFunctionTable::functions_;
+  functions[builtin_count].generator = NULL;
+  functions[builtin_count].c_code = NULL;
+  functions[builtin_count].s_name = NULL;
+  functions[builtin_count].name = builtin_count;
+  functions[builtin_count].flags = static_cast<Code::Flags>(0);
+  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
+
+#define DEF_FUNCTION_PTR_C(aname, aextra_args)                         \
+    functions->generator = FUNCTION_ADDR(Generate_Adaptor);            \
+    functions->c_code = FUNCTION_ADDR(Builtin_##aname);                \
+    functions->s_name = #aname;                                        \
+    functions->name = c_##aname;                                       \
+    functions->flags = Code::ComputeFlags(Code::BUILTIN);              \
+    functions->extra_args = aextra_args;                               \
+    ++functions;
+
+#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)                       \
+    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
+    functions->c_code = NULL;                                               \
+    functions->s_name = #aname;                                             \
+    functions->name = aname;                                                \
+    functions->flags = Code::ComputeFlags(Code::kind,                       \
+                                          NOT_IN_LOOP,                      \
+                                          state,                            \
+                                          extra);                           \
+    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
+    ++functions;
+
+  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
+  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
+  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
 
 #undef DEF_FUNCTION_PTR_C
 #undef DEF_FUNCTION_PTR_A
+}
+
+void Builtins::Setup(bool create_heap_objects) {
+  ASSERT(!initialized_);
+  Heap* heap = Isolate::Current()->heap();
+
+  // Create a scope for the handles in the builtins.
+  HandleScope scope;
+
+  const BuiltinDesc* functions = BuiltinFunctionTable::functions();
 
   // For now we generate builtin adaptor code into a stack-allocated
   // buffer, before copying it into individual code objects.
@@ -1559,14 +1627,15 @@ void Builtins::Setup(bool create_heap_objects) {
         // This simplifies things because we don't need to retry.
         AlwaysAllocateScope __scope__;
         { MaybeObject* maybe_code =
-              Heap::CreateCode(desc, flags, masm.CodeObject());
+              heap->CreateCode(desc, flags, masm.CodeObject());
           if (!maybe_code->ToObject(&code)) {
             v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
           }
         }
       }
       // Log the event and add the code to the builtins array.
-      PROFILE(CodeCreateEvent(Logger::BUILTIN_TAG,
+      PROFILE(ISOLATE,
+              CodeCreateEvent(Logger::BUILTIN_TAG,
                               Code::cast(code),
                               functions[i].s_name));
       GDBJIT(AddCode(GDBJITInterface::BUILTIN,
@@ -1588,12 +1657,12 @@ void Builtins::Setup(bool create_heap_objects) {
   }
 
   // Mark as initialized.
-  is_initialized = true;
+  initialized_ = true;
 }
 
 
 void Builtins::TearDown() {
-  is_initialized = false;
+  initialized_ = false;
 }
 
 
@@ -1603,7 +1672,8 @@ void Builtins::IterateBuiltins(ObjectVisitor* v) {
 
 
 const char* Builtins::Lookup(byte* pc) {
-  if (is_initialized) {  // may be called during initialization (disassembler!)
+  // May be called during initialization (disassembler!)
+  if (initialized_) {
     for (int i = 0; i < builtin_count; i++) {
       Code* entry = Code::cast(builtins_[i]);
       if (entry->contains(pc)) {
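
The InitBuiltinFunctionTable() change above replaces the old in-function static array with a table that is filled exactly once, from the constructor of a file-static BuiltinFunctionTable object, so the initialization still happens inside Builtins' lexical scope where its private types are visible. A minimal standalone sketch of that trick follows; Registry, Table, Desc and the sample entries are hypothetical names for illustration, not V8 API:

    #include <cstdio>

    class Registry {
     public:
      static const int kCount = 3;
      struct Desc { const char* name; int id; };
      // Fills Table::entries_; being a member keeps access to Registry's types.
      static void InitTable();
    };

    class Table {
     public:
      Table() { Registry::InitTable(); }   // runs during static initialization
      static const Registry::Desc* entries() { return entries_; }
     private:
      static Registry::Desc entries_[Registry::kCount];
      friend class Registry;               // lets InitTable() write entries_
    };

    const int Registry::kCount;
    Registry::Desc Table::entries_[Registry::kCount];
    static const Table table_init;         // its constructor triggers InitTable()

    void Registry::InitTable() {
      Registry::Desc* d = Table::entries_;
      static const char* const kNames[kCount] = { "add", "sub", "mul" };
      for (int i = 0; i < kCount; ++i) { d->name = kNames[i]; d->id = i; ++d; }
    }

    int main() {
      for (int i = 0; i < Registry::kCount; ++i) {
        std::printf("%d %s\n", Table::entries()[i].id, Table::entries()[i].name);
      }
      return 0;
    }
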
index 106d9cb8f14fea1d14b344a88b038ae6166d31ed..4dd61117cd39b66442df70eab654384a940a3e2f 100644 (file: src/builtins.h)
@@ -240,21 +240,24 @@ enum BuiltinExtraArguments {
   V(APPLY_OVERFLOW, 1)
 
 
+class BuiltinFunctionTable;
 class ObjectVisitor;
 
 
-class Builtins : public AllStatic {
+class Builtins {
  public:
+  ~Builtins();
+
   // Generate all builtin code objects. Should be called once during
-  // VM initialization.
-  static void Setup(bool create_heap_objects);
-  static void TearDown();
+  // isolate initialization.
+  void Setup(bool create_heap_objects);
+  void TearDown();
 
   // Garbage collection support.
-  static void IterateBuiltins(ObjectVisitor* v);
+  void IterateBuiltins(ObjectVisitor* v);
 
   // Disassembler support.
-  static const char* Lookup(byte* pc);
+  const char* Lookup(byte* pc);
 
   enum Name {
 #define DEF_ENUM_C(name, ignore) name,
@@ -281,13 +284,13 @@ class Builtins : public AllStatic {
     id_count
   };
 
-  static Code* builtin(Name name) {
+  Code* builtin(Name name) {
     // Code::cast cannot be used here since we access builtins
     // during the marking phase of mark sweep. See IC::Clear.
     return reinterpret_cast<Code*>(builtins_[name]);
   }
 
-  static Address builtin_address(Name name) {
+  Address builtin_address(Name name) {
     return reinterpret_cast<Address>(&builtins_[name]);
   }
 
@@ -297,20 +300,24 @@ class Builtins : public AllStatic {
 
   static const char* GetName(JavaScript id) { return javascript_names_[id]; }
   static int GetArgumentsCount(JavaScript id) { return javascript_argc_[id]; }
-  static Handle<Code> GetCode(JavaScript id, bool* resolved);
+  Handle<Code> GetCode(JavaScript id, bool* resolved);
   static int NumberOfJavaScriptBuiltins() { return id_count; }
 
+  bool is_initialized() const { return initialized_; }
+
  private:
+  Builtins();
+
   // The external C++ functions called from the code.
-  static Address c_functions_[cfunction_count];
+  static Address const c_functions_[cfunction_count];
 
   // Note: These are always Code objects, but to conform with
   // IterateBuiltins() above which assumes Object**'s for the callback
   // function f, we use an Object* array here.
-  static Object* builtins_[builtin_count];
-  static const char* names_[builtin_count];
-  static const char* javascript_names_[id_count];
-  static int javascript_argc_[id_count];
+  Object* builtins_[builtin_count];
+  const char* names_[builtin_count];
+  static const char* const javascript_names_[id_count];
+  static int const javascript_argc_[id_count];
 
   static void Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
@@ -335,8 +342,16 @@ class Builtins : public AllStatic {
   static void Generate_ArrayConstructCode(MacroAssembler* masm);
 
   static void Generate_StringConstructCode(MacroAssembler* masm);
-
   static void Generate_OnStackReplacement(MacroAssembler* masm);
+
+  static void InitBuiltinFunctionTable();
+
+  bool initialized_;
+
+  friend class BuiltinFunctionTable;
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(Builtins);
 };
 
 } }  // namespace v8::internal
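
The builtins.h change above demotes Builtins from an AllStatic collection of globals to an instance that each Isolate owns, with a private constructor and friend class Isolate so nothing else can create one. A minimal sketch of that ownership shape (simplified and hypothetical; the real members and accessors differ):

    #include <cassert>

    class Isolate;

    class Builtins {
     public:
      void Setup() { initialized_ = true; }
      void TearDown() { initialized_ = false; }
      bool is_initialized() const { return initialized_; }
     private:
      Builtins() : initialized_(false) {}   // only Isolate may construct one
      bool initialized_;
      // Pre-C++11 style: declare but never define to forbid copying.
      Builtins(const Builtins&);
      void operator=(const Builtins&);
      friend class Isolate;
    };

    class Isolate {
     public:
      Builtins* builtins() { return &builtins_; }
     private:
      Builtins builtins_;                   // per-isolate instance, not a global
    };

    int main() {
      Isolate a;
      Isolate b;
      a.builtins()->Setup();
      assert(a.builtins()->is_initialized());
      assert(!b.builtins()->is_initialized());  // state is no longer shared
      a.builtins()->TearDown();
      return 0;
    }
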
index 3c3d940be7f0e48cce4ff71a13d2a9be9a501581..320fd6b5ea5b0df2446ad28d665f67b16e698c3c 100644 (file: src/checks.cc)
@@ -30,8 +30,8 @@
 #include "v8.h"
 
 #include "platform.h"
-#include "top.h"
 
+// TODO(isolates): is it necessary to lift this?
 static int fatal_error_handler_nesting_depth = 0;
 
 // Contains protection against recursive calls (faults while handling faults).
@@ -52,7 +52,7 @@ extern "C" void V8_Fatal(const char* file, int line, const char* format, ...) {
   if (fatal_error_handler_nesting_depth < 3) {
     if (i::FLAG_stack_trace_on_abort) {
       // Call this one twice on double fault
-      i::Top::PrintStack();
+      i::Isolate::Current()->PrintStack();
     }
   }
   i::OS::Abort();
index adba2a5c961a6d1cf6377672381096abb9b7d06e..cfe93096d7abaae06211fc103790795cf54adde5 100644 (file: src/code-stubs.cc)
@@ -37,9 +37,9 @@ namespace v8 {
 namespace internal {
 
 bool CodeStub::FindCodeInCache(Code** code_out) {
-  int index = Heap::code_stubs()->FindEntry(GetKey());
+  int index = HEAP->code_stubs()->FindEntry(GetKey());
   if (index != NumberDictionary::kNotFound) {
-    *code_out = Code::cast(Heap::code_stubs()->ValueAt(index));
+    *code_out = Code::cast(HEAP->code_stubs()->ValueAt(index));
     return true;
   }
   return false;
@@ -48,7 +48,7 @@ bool CodeStub::FindCodeInCache(Code** code_out) {
 
 void CodeStub::GenerateCode(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
-  Counters::code_stubs.Increment();
+  COUNTERS->code_stubs()->Increment();
 
   // Nested stubs are not allowed for leafs.
   AllowStubCallsScope allow_scope(masm, AllowsStubCalls());
@@ -62,9 +62,9 @@ void CodeStub::GenerateCode(MacroAssembler* masm) {
 void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) {
   code->set_major_key(MajorKey());
 
-  PROFILE(CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
+  PROFILE(ISOLATE, CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
   GDBJIT(AddCode(GDBJITInterface::STUB, GetName(), code));
-  Counters::total_stubs_code_size.Increment(code->instruction_size());
+  COUNTERS->total_stubs_code_size()->Increment(code->instruction_size());
 
 #ifdef ENABLE_DISASSEMBLER
   if (FLAG_print_code_stubs) {
@@ -101,23 +101,23 @@ Handle<Code> CodeStub::GetCode() {
         static_cast<Code::Kind>(GetCodeKind()),
         InLoop(),
         GetICState());
-    Handle<Code> new_object = Factory::NewCode(
+    Handle<Code> new_object = FACTORY->NewCode(
         desc, flags, masm.CodeObject(), NeedsImmovableCode());
     RecordCodeGeneration(*new_object, &masm);
     FinishCode(*new_object);
 
     // Update the dictionary and the root in Heap.
     Handle<NumberDictionary> dict =
-        Factory::DictionaryAtNumberPut(
-            Handle<NumberDictionary>(Heap::code_stubs()),
+        FACTORY->DictionaryAtNumberPut(
+            Handle<NumberDictionary>(HEAP->code_stubs()),
             GetKey(),
             new_object);
-    Heap::public_set_code_stubs(*dict);
+    HEAP->public_set_code_stubs(*dict);
 
     code = *new_object;
   }
 
-  ASSERT(!NeedsImmovableCode() || Heap::lo_space()->Contains(code));
+  ASSERT(!NeedsImmovableCode() || HEAP->lo_space()->Contains(code));
   return Handle<Code>(code);
 }
 
@@ -140,7 +140,7 @@ MaybeObject* CodeStub::TryGetCode() {
         GetICState());
     Object* new_object;
     { MaybeObject* maybe_new_object =
-          Heap::CreateCode(desc, flags, masm.CodeObject());
+          HEAP->CreateCode(desc, flags, masm.CodeObject());
       if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object;
     }
     code = Code::cast(new_object);
@@ -149,9 +149,9 @@ MaybeObject* CodeStub::TryGetCode() {
 
     // Try to update the code cache but do not fail if unable.
     MaybeObject* maybe_new_object =
-        Heap::code_stubs()->AtNumberPut(GetKey(), code);
+        HEAP->code_stubs()->AtNumberPut(GetKey(), code);
     if (maybe_new_object->ToObject(&new_object)) {
-      Heap::public_set_code_stubs(NumberDictionary::cast(new_object));
+      HEAP->public_set_code_stubs(NumberDictionary::cast(new_object));
     }
   }
 
@@ -202,7 +202,8 @@ void ICCompareStub::Generate(MacroAssembler* masm) {
 const char* InstanceofStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
 
   const char* args = "";
index 51841c2466866d03c6017fe538ba1b703f148906..5a9927295933b677dd76e940bd57bbb9170fff2a 100644 (file: src/codegen.cc)
@@ -61,9 +61,6 @@ Comment::~Comment() {
 #undef __
 
 
-CodeGenerator* CodeGeneratorScope::top_ = NULL;
-
-
 void CodeGenerator::ProcessDeferred() {
   while (!deferred_.is_empty()) {
     DeferredCode* code = deferred_.RemoveLast();
@@ -129,7 +126,7 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
   bool print_json_ast = false;
   const char* ftype;
 
-  if (Bootstrapper::IsActive()) {
+  if (Isolate::Current()->bootstrapper()->IsActive()) {
     print_source = FLAG_print_builtin_source;
     print_ast = FLAG_print_builtin_ast;
     print_json_ast = FLAG_print_builtin_json_ast;
@@ -181,10 +178,10 @@ Handle<Code> CodeGenerator::MakeCodeEpilogue(MacroAssembler* masm,
   // Allocate and install the code.
   CodeDesc desc;
   masm->GetCode(&desc);
-  Handle<Code> code = Factory::NewCode(desc, flags, masm->CodeObject());
+  Handle<Code> code = FACTORY->NewCode(desc, flags, masm->CodeObject());
 
   if (!code.is_null()) {
-    Counters::total_compiled_code_size.Increment(code->instruction_size());
+    COUNTERS->total_compiled_code_size()->Increment(code->instruction_size());
   }
   return code;
 }
@@ -192,7 +189,7 @@ Handle<Code> CodeGenerator::MakeCodeEpilogue(MacroAssembler* masm,
 
 void CodeGenerator::PrintCode(Handle<Code> code, CompilationInfo* info) {
 #ifdef ENABLE_DISASSEMBLER
-  bool print_code = Bootstrapper::IsActive()
+  bool print_code = Isolate::Current()->bootstrapper()->IsActive()
       ? FLAG_print_builtin_code
       : (FLAG_print_code || (info->IsOptimizing() && FLAG_print_opt_code));
   Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
@@ -238,7 +235,7 @@ bool CodeGenerator::MakeCode(CompilationInfo* info) {
   Handle<Script> script = info->script();
   if (!script->IsUndefined() && !script->source()->IsUndefined()) {
     int len = String::cast(script->source())->length();
-    Counters::total_old_codegen_source_size.Increment(len);
+    COUNTERS->total_old_codegen_source_size()->Increment(len);
   }
   if (FLAG_trace_codegen) {
     PrintF("Classic Compiler - ");
@@ -251,10 +248,10 @@ bool CodeGenerator::MakeCode(CompilationInfo* info) {
   masm.positions_recorder()->StartGDBJITLineInfoRecording();
 #endif
   CodeGenerator cgen(&masm);
-  CodeGeneratorScope scope(&cgen);
+  CodeGeneratorScope scope(Isolate::Current(), &cgen);
   cgen.Generate(info);
   if (cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
+    ASSERT(!Isolate::Current()->has_pending_exception());
     return false;
   }
 
@@ -279,12 +276,15 @@ bool CodeGenerator::MakeCode(CompilationInfo* info) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
+
+static Vector<const char> kRegexp = CStrVector("regexp");
+
+
 bool CodeGenerator::ShouldGenerateLog(Expression* type) {
   ASSERT(type != NULL);
-  if (!Logger::is_logging() && !CpuProfiler::is_profiling()) return false;
+  if (!LOGGER->is_logging() && !CpuProfiler::is_profiling()) return false;
   Handle<String> name = Handle<String>::cast(type->AsLiteral()->handle());
   if (FLAG_log_regexp) {
-    static Vector<const char> kRegexp = CStrVector("regexp");
     if (name->IsEqualTo(kRegexp))
       return true;
   }
@@ -317,7 +317,7 @@ void CodeGenerator::ProcessDeclarations(ZoneList<Declaration*>* declarations) {
   if (globals == 0) return;
 
   // Compute array of global variable and function declarations.
-  Handle<FixedArray> array = Factory::NewFixedArray(2 * globals, TENURED);
+  Handle<FixedArray> array = FACTORY->NewFixedArray(2 * globals, TENURED);
   for (int j = 0, i = 0; i < length; i++) {
     Declaration* node = declarations->at(i);
     Variable* var = node->proxy()->var();
@@ -374,7 +374,7 @@ const CodeGenerator::InlineFunctionGenerator
 bool CodeGenerator::CheckForInlineRuntimeCall(CallRuntime* node) {
   ZoneList<Expression*>* args = node->arguments();
   Handle<String> name = node->name();
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
   if (function != NULL && function->intrinsic_type == Runtime::INLINE) {
     int lookup_index = static_cast<int>(function->function_id) -
         static_cast<int>(Runtime::kFirstInlineFunction);
index 23b36f07a8f19e11f5449a198de989312df969f1..aa3199972349bc5de33f22640e898ae511247362 100644 (file: src/codegen.h)
@@ -92,26 +92,26 @@ namespace internal {
 // of active code generators.
 class CodeGeneratorScope BASE_EMBEDDED {
  public:
-  explicit CodeGeneratorScope(CodeGenerator* cgen) {
-    previous_ = top_;
-    top_ = cgen;
+  explicit CodeGeneratorScope(Isolate* isolate, CodeGenerator* cgen)
+      : isolate_(isolate) {
+    previous_ = isolate->current_code_generator();
+    isolate->set_current_code_generator(cgen);
   }
 
   ~CodeGeneratorScope() {
-    top_ = previous_;
+    isolate_->set_current_code_generator(previous_);
   }
 
-  static CodeGenerator* Current() {
-    ASSERT(top_ != NULL);
-    return top_;
+  static CodeGenerator* Current(Isolate* isolate) {
+    ASSERT(isolate->current_code_generator() != NULL);
+    return isolate->current_code_generator();
   }
 
  private:
-  static CodeGenerator* top_;
   CodeGenerator* previous_;
+  Isolate* isolate_;
 };
 
-
 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64
 
 // State of used registers in a virtual frame.
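
The codegen.h hunk above swaps CodeGeneratorScope's static top_ pointer for a slot on the isolate, but the save/restore discipline is unchanged. A standalone sketch of that RAII pattern (simplified; the Isolate accessors here are stand-ins):

    #include <cassert>
    #include <cstddef>

    class CodeGenerator;  // opaque in this sketch

    // Stand-in for the per-isolate slot that replaced the old static pointer.
    class Isolate {
     public:
      Isolate() : current_code_generator_(NULL) {}
      CodeGenerator* current_code_generator() { return current_code_generator_; }
      void set_current_code_generator(CodeGenerator* cgen) {
        current_code_generator_ = cgen;
      }
     private:
      CodeGenerator* current_code_generator_;
    };

    // RAII scope: installs a code generator on the isolate and restores the
    // previous one on destruction, so nested scopes unwind correctly.
    class CodeGeneratorScope {
     public:
      CodeGeneratorScope(Isolate* isolate, CodeGenerator* cgen)
          : isolate_(isolate), previous_(isolate->current_code_generator()) {
        isolate->set_current_code_generator(cgen);
      }
      ~CodeGeneratorScope() {
        isolate_->set_current_code_generator(previous_);
      }
     private:
      Isolate* isolate_;
      CodeGenerator* previous_;
    };

    int main() {
      Isolate isolate;
      CodeGenerator* outer = reinterpret_cast<CodeGenerator*>(0x1);
      CodeGenerator* inner = reinterpret_cast<CodeGenerator*>(0x2);
      {
        CodeGeneratorScope a(&isolate, outer);
        {
          CodeGeneratorScope b(&isolate, inner);
          assert(isolate.current_code_generator() == inner);
        }
        assert(isolate.current_code_generator() == outer);  // restored
      }
      assert(isolate.current_code_generator() == NULL);
      return 0;
    }
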
index cccb7a4f2163d9157e8908035fa2799fec1c7817..6e1de0ae9cb16aa68bc0351939b881875e1dc5fa 100644 (file: src/compilation-cache.cc)
@@ -33,8 +33,6 @@
 namespace v8 {
 namespace internal {
 
-// The number of sub caches covering the different types to cache.
-static const int kSubCacheCount = 4;
 
 // The number of generations for each sub cache.
 // The number of ScriptGenerations is carefully chosen based on histograms.
@@ -47,162 +45,32 @@ static const int kRegExpGenerations = 2;
 // Initial size of each compilation cache table allocated.
 static const int kInitialCacheSize = 64;
 
-// Index for the first generation in the cache.
-static const int kFirstGeneration = 0;
-
-// The compilation cache consists of several generational sub-caches which uses
-// this class as a base class. A sub-cache contains a compilation cache tables
-// for each generation of the sub-cache. Since the same source code string has
-// different compiled code for scripts and evals, we use separate sub-caches
-// for different compilation modes, to avoid retrieving the wrong result.
-class CompilationSubCache {
- public:
-  explicit CompilationSubCache(int generations): generations_(generations) {
-    tables_ = NewArray<Object*>(generations);
-  }
-
-  ~CompilationSubCache() { DeleteArray(tables_); }
-
-  // Get the compilation cache tables for a specific generation.
-  Handle<CompilationCacheTable> GetTable(int generation);
 
-  // Accessors for first generation.
-  Handle<CompilationCacheTable> GetFirstTable() {
-    return GetTable(kFirstGeneration);
+CompilationCache::CompilationCache()
+    : script_(kScriptGenerations),
+      eval_global_(kEvalGlobalGenerations),
+      eval_contextual_(kEvalContextualGenerations),
+      reg_exp_(kRegExpGenerations),
+      enabled_(true),
+      eager_optimizing_set_(NULL) {
+  CompilationSubCache* subcaches[kSubCacheCount] =
+    {&script_, &eval_global_, &eval_contextual_, &reg_exp_};
+  for (int i = 0; i < kSubCacheCount; ++i) {
+    subcaches_[i] = subcaches[i];
   }
-  void SetFirstTable(Handle<CompilationCacheTable> value) {
-    ASSERT(kFirstGeneration < generations_);
-    tables_[kFirstGeneration] = *value;
-  }
-
-  // Age the sub-cache by evicting the oldest generation and creating a new
-  // young generation.
-  void Age();
-
-  // GC support.
-  void Iterate(ObjectVisitor* v);
-  void IterateFunctions(ObjectVisitor* v);
-
-  // Clear this sub-cache evicting all its content.
-  void Clear();
-
-  // Remove given shared function info from sub-cache.
-  void Remove(Handle<SharedFunctionInfo> function_info);
-
-  // Number of generations in this sub-cache.
-  inline int generations() { return generations_; }
-
- private:
-  int generations_;  // Number of generations.
-  Object** tables_;  // Compilation cache tables - one for each generation.
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationSubCache);
-};
-
-
-// Sub-cache for scripts.
-class CompilationCacheScript : public CompilationSubCache {
- public:
-  explicit CompilationCacheScript(int generations)
-      : CompilationSubCache(generations) { }
-
-  Handle<SharedFunctionInfo> Lookup(Handle<String> source,
-                                    Handle<Object> name,
-                                    int line_offset,
-                                    int column_offset);
-  void Put(Handle<String> source, Handle<SharedFunctionInfo> function_info);
-
- private:
-  MUST_USE_RESULT MaybeObject* TryTablePut(
-      Handle<String> source, Handle<SharedFunctionInfo> function_info);
-
-  // Note: Returns a new hash table if operation results in expansion.
-  Handle<CompilationCacheTable> TablePut(
-      Handle<String> source, Handle<SharedFunctionInfo> function_info);
-
-  bool HasOrigin(Handle<SharedFunctionInfo> function_info,
-                 Handle<Object> name,
-                 int line_offset,
-                 int column_offset);
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheScript);
-};
-
-
-// Sub-cache for eval scripts.
-class CompilationCacheEval: public CompilationSubCache {
- public:
-  explicit CompilationCacheEval(int generations)
-      : CompilationSubCache(generations) { }
-
-  Handle<SharedFunctionInfo> Lookup(Handle<String> source,
-                                    Handle<Context> context,
-                                    StrictModeFlag strict_mode);
-
-  void Put(Handle<String> source,
-           Handle<Context> context,
-           Handle<SharedFunctionInfo> function_info);
-
- private:
-  MUST_USE_RESULT MaybeObject* TryTablePut(
-      Handle<String> source,
-      Handle<Context> context,
-      Handle<SharedFunctionInfo> function_info);
-
-
-  // Note: Returns a new hash table if operation results in expansion.
-  Handle<CompilationCacheTable> TablePut(
-      Handle<String> source,
-      Handle<Context> context,
-      Handle<SharedFunctionInfo> function_info);
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheEval);
-};
-
-
-// Sub-cache for regular expressions.
-class CompilationCacheRegExp: public CompilationSubCache {
- public:
-  explicit CompilationCacheRegExp(int generations)
-      : CompilationSubCache(generations) { }
-
-  Handle<FixedArray> Lookup(Handle<String> source, JSRegExp::Flags flags);
-
-  void Put(Handle<String> source,
-           JSRegExp::Flags flags,
-           Handle<FixedArray> data);
- private:
-  MUST_USE_RESULT MaybeObject* TryTablePut(Handle<String> source,
-                                           JSRegExp::Flags flags,
-                                           Handle<FixedArray> data);
-
-  // Note: Returns a new hash table if operation results in expansion.
-  Handle<CompilationCacheTable> TablePut(Handle<String> source,
-                                         JSRegExp::Flags flags,
-                                         Handle<FixedArray> data);
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheRegExp);
-};
-
-
-// Statically allocate all the sub-caches.
-static CompilationCacheScript script(kScriptGenerations);
-static CompilationCacheEval eval_global(kEvalGlobalGenerations);
-static CompilationCacheEval eval_contextual(kEvalContextualGenerations);
-static CompilationCacheRegExp reg_exp(kRegExpGenerations);
-static CompilationSubCache* subcaches[kSubCacheCount] =
-    {&script, &eval_global, &eval_contextual, &reg_exp};
+}
 
 
-// Current enable state of the compilation cache.
-static bool enabled = true;
-static inline bool IsEnabled() {
-  return FLAG_compilation_cache && enabled;
+CompilationCache::~CompilationCache() {
+  delete eager_optimizing_set_;
+  eager_optimizing_set_ = NULL;
 }
 
 
 static Handle<CompilationCacheTable> AllocateTable(int size) {
-  CALL_HEAP_FUNCTION(CompilationCacheTable::Allocate(size),
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate,
+                     CompilationCacheTable::Allocate(size),
                      CompilationCacheTable);
 }
 
@@ -221,7 +89,6 @@ Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
   return result;
 }
 
-
 void CompilationSubCache::Age() {
   // Age the generations implicitly killing off the oldest.
   for (int i = generations_ - 1; i > 0; i--) {
@@ -229,12 +96,12 @@ void CompilationSubCache::Age() {
   }
 
   // Set the first generation as unborn.
-  tables_[0] = Heap::undefined_value();
+  tables_[0] = HEAP->undefined_value();
 }
 
 
 void CompilationSubCache::IterateFunctions(ObjectVisitor* v) {
-  Object* undefined = Heap::raw_unchecked_undefined_value();
+  Object* undefined = HEAP->raw_unchecked_undefined_value();
   for (int i = 0; i < generations_; i++) {
     if (tables_[i] != undefined) {
       reinterpret_cast<CompilationCacheTable*>(tables_[i])->IterateElements(v);
@@ -249,7 +116,7 @@ void CompilationSubCache::Iterate(ObjectVisitor* v) {
 
 
 void CompilationSubCache::Clear() {
-  MemsetPointer(tables_, Heap::undefined_value(), generations_);
+  MemsetPointer(tables_, HEAP->undefined_value(), generations_);
 }
 
 
@@ -265,6 +132,13 @@ void CompilationSubCache::Remove(Handle<SharedFunctionInfo> function_info) {
 }
 
 
+CompilationCacheScript::CompilationCacheScript(int generations)
+     : CompilationSubCache(generations),
+       script_histogram_(NULL),
+       script_histogram_initialized_(false) {
+}
+
+
 // We only re-use a cached function for some script source code if the
 // script originates from the same place. This is to avoid issues
 // when reporting errors, etc.
@@ -320,15 +194,19 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
     }
   }
 
-  static void* script_histogram = StatsTable::CreateHistogram(
-      "V8.ScriptCache",
-      0,
-      kScriptGenerations,
-      kScriptGenerations + 1);
+  Isolate* isolate = Isolate::Current();
+  if (!script_histogram_initialized_) {
+    script_histogram_ = isolate->stats_table()->CreateHistogram(
+        "V8.ScriptCache",
+        0,
+        kScriptGenerations,
+        kScriptGenerations + 1);
+    script_histogram_initialized_ = true;
+  }
 
-  if (script_histogram != NULL) {
+  if (script_histogram_ != NULL) {
     // The level NUMBER_OF_SCRIPT_GENERATIONS is equivalent to a cache miss.
-    StatsTable::AddHistogramSample(script_histogram, generation);
+    isolate->stats_table()->AddHistogramSample(script_histogram_, generation);
   }
 
   // Once outside the manacles of the handle scope, we need to recheck
@@ -340,10 +218,10 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
     // If the script was found in a later generation, we promote it to
     // the first generation to let it survive longer in the cache.
     if (generation != 0) Put(source, shared);
-    Counters::compilation_cache_hits.Increment();
+    isolate->counters()->compilation_cache_hits()->Increment();
     return shared;
   } else {
-    Counters::compilation_cache_misses.Increment();
+    isolate->counters()->compilation_cache_misses()->Increment();
     return Handle<SharedFunctionInfo>::null();
   }
 }
@@ -360,7 +238,10 @@ MaybeObject* CompilationCacheScript::TryTablePut(
 Handle<CompilationCacheTable> CompilationCacheScript::TablePut(
     Handle<String> source,
     Handle<SharedFunctionInfo> function_info) {
-  CALL_HEAP_FUNCTION(TryTablePut(source, function_info), CompilationCacheTable);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate,
+                     TryTablePut(source, function_info),
+                     CompilationCacheTable);
 }
 
 
@@ -395,10 +276,10 @@ Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
     if (generation != 0) {
       Put(source, context, function_info);
     }
-    Counters::compilation_cache_hits.Increment();
+    COUNTERS->compilation_cache_hits()->Increment();
     return function_info;
   } else {
-    Counters::compilation_cache_misses.Increment();
+    COUNTERS->compilation_cache_misses()->Increment();
     return Handle<SharedFunctionInfo>::null();
   }
 }
@@ -417,7 +298,9 @@ Handle<CompilationCacheTable> CompilationCacheEval::TablePut(
     Handle<String> source,
     Handle<Context> context,
     Handle<SharedFunctionInfo> function_info) {
-  CALL_HEAP_FUNCTION(TryTablePut(source, context, function_info),
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate,
+                     TryTablePut(source, context, function_info),
                      CompilationCacheTable);
 }
 
@@ -451,10 +334,10 @@ Handle<FixedArray> CompilationCacheRegExp::Lookup(Handle<String> source,
     if (generation != 0) {
       Put(source, flags, data);
     }
-    Counters::compilation_cache_hits.Increment();
+    COUNTERS->compilation_cache_hits()->Increment();
     return data;
   } else {
-    Counters::compilation_cache_misses.Increment();
+    COUNTERS->compilation_cache_misses()->Increment();
     return Handle<FixedArray>::null();
   }
 }
@@ -473,7 +356,10 @@ Handle<CompilationCacheTable> CompilationCacheRegExp::TablePut(
     Handle<String> source,
     JSRegExp::Flags flags,
     Handle<FixedArray> data) {
-  CALL_HEAP_FUNCTION(TryTablePut(source, flags, data), CompilationCacheTable);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate,
+                     TryTablePut(source, flags, data),
+                     CompilationCacheTable);
 }
 
 
@@ -488,9 +374,9 @@ void CompilationCacheRegExp::Put(Handle<String> source,
 void CompilationCache::Remove(Handle<SharedFunctionInfo> function_info) {
   if (!IsEnabled()) return;
 
-  eval_global.Remove(function_info);
-  eval_contextual.Remove(function_info);
-  script.Remove(function_info);
+  eval_global_.Remove(function_info);
+  eval_contextual_.Remove(function_info);
+  script_.Remove(function_info);
 }
 
 
@@ -502,7 +388,7 @@ Handle<SharedFunctionInfo> CompilationCache::LookupScript(Handle<String> source,
     return Handle<SharedFunctionInfo>::null();
   }
 
-  return script.Lookup(source, name, line_offset, column_offset);
+  return script_.Lookup(source, name, line_offset, column_offset);
 }
 
 
@@ -517,9 +403,9 @@ Handle<SharedFunctionInfo> CompilationCache::LookupEval(
 
   Handle<SharedFunctionInfo> result;
   if (is_global) {
-    result = eval_global.Lookup(source, context, strict_mode);
+    result = eval_global_.Lookup(source, context, strict_mode);
   } else {
-    result = eval_contextual.Lookup(source, context, strict_mode);
+    result = eval_contextual_.Lookup(source, context, strict_mode);
   }
   return result;
 }
@@ -531,7 +417,7 @@ Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
     return Handle<FixedArray>::null();
   }
 
-  return reg_exp.Lookup(source, flags);
+  return reg_exp_.Lookup(source, flags);
 }
 
 
@@ -541,7 +427,7 @@ void CompilationCache::PutScript(Handle<String> source,
     return;
   }
 
-  script.Put(source, function_info);
+  script_.Put(source, function_info);
 }
 
 
@@ -555,9 +441,9 @@ void CompilationCache::PutEval(Handle<String> source,
 
   HandleScope scope;
   if (is_global) {
-    eval_global.Put(source, context, function_info);
+    eval_global_.Put(source, context, function_info);
   } else {
-    eval_contextual.Put(source, context, function_info);
+    eval_contextual_.Put(source, context, function_info);
   }
 }
 
@@ -570,7 +456,7 @@ void CompilationCache::PutRegExp(Handle<String> source,
     return;
   }
 
-  reg_exp.Put(source, flags, data);
+  reg_exp_.Put(source, flags, data);
 }
 
 
@@ -579,9 +465,11 @@ static bool SourceHashCompare(void* key1, void* key2) {
 }
 
 
-static HashMap* EagerOptimizingSet() {
-  static HashMap map(&SourceHashCompare);
-  return &map;
+HashMap* CompilationCache::EagerOptimizingSet() {
+  if (eager_optimizing_set_ == NULL) {
+    eager_optimizing_set_ = new HashMap(&SourceHashCompare);
+  }
+  return eager_optimizing_set_;
 }
 
 
@@ -615,38 +503,39 @@ void CompilationCache::ResetEagerOptimizingData() {
 
 void CompilationCache::Clear() {
   for (int i = 0; i < kSubCacheCount; i++) {
-    subcaches[i]->Clear();
+    subcaches_[i]->Clear();
   }
 }
 
+
 void CompilationCache::Iterate(ObjectVisitor* v) {
   for (int i = 0; i < kSubCacheCount; i++) {
-    subcaches[i]->Iterate(v);
+    subcaches_[i]->Iterate(v);
   }
 }
 
 
 void CompilationCache::IterateFunctions(ObjectVisitor* v) {
   for (int i = 0; i < kSubCacheCount; i++) {
-    subcaches[i]->IterateFunctions(v);
+    subcaches_[i]->IterateFunctions(v);
   }
 }
 
 
 void CompilationCache::MarkCompactPrologue() {
   for (int i = 0; i < kSubCacheCount; i++) {
-    subcaches[i]->Age();
+    subcaches_[i]->Age();
   }
 }
 
 
 void CompilationCache::Enable() {
-  enabled = true;
+  enabled_ = true;
 }
 
 
 void CompilationCache::Disable() {
-  enabled = false;
+  enabled_ = false;
   Clear();
 }
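
Two of the compilation-cache.cc changes above replace function-local statics with lazily created per-instance state: the V8.ScriptCache histogram and the eager-optimizing HashMap. Function-local statics are process-wide, so every isolate would otherwise share them. A minimal sketch of the lazily created member pattern, using a std::set stand-in rather than V8's HashMap:

    #include <cstddef>
    #include <set>
    #include <string>

    class Cache {
     public:
      Cache() : eager_set_(NULL) {}
      ~Cache() { delete eager_set_; }

      // Created on first use; each Cache (one per isolate) gets its own set.
      std::set<std::string>* EagerSet() {
        if (eager_set_ == NULL) eager_set_ = new std::set<std::string>();
        return eager_set_;
      }

     private:
      std::set<std::string>* eager_set_;
      // Not copyable: the raw owning pointer must not be shared.
      Cache(const Cache&);
      void operator=(const Cache&);
    };

    int main() {
      Cache a;
      Cache b;
      a.EagerSet()->insert("function f() {}");
      return (a.EagerSet()->count("function f() {}") == 1 &&
              b.EagerSet()->empty()) ? 0 : 1;
    }
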
 
index f779a23aacc6702d4575c73442ceb1d457119408..770d878b5b3428049cdb676a9cfb5be0e537e273 100644 (file: src/compilation-cache.h)
 namespace v8 {
 namespace internal {
 
+class HashMap;
+
+// The compilation cache consists of several generational sub-caches that use
+// this class as a base class. A sub-cache contains a compilation cache table
+// for each generation of the sub-cache. Since the same source code string has
+// different compiled code for scripts and evals, we use separate sub-caches
+// for different compilation modes, to avoid retrieving the wrong result.
+class CompilationSubCache {
+ public:
+  explicit CompilationSubCache(int generations): generations_(generations) {
+    tables_ = NewArray<Object*>(generations);
+  }
+
+  ~CompilationSubCache() { DeleteArray(tables_); }
+
+  // Index for the first generation in the cache.
+  static const int kFirstGeneration = 0;
+
+  // Get the compilation cache tables for a specific generation.
+  Handle<CompilationCacheTable> GetTable(int generation);
+
+  // Accessors for first generation.
+  Handle<CompilationCacheTable> GetFirstTable() {
+    return GetTable(kFirstGeneration);
+  }
+  void SetFirstTable(Handle<CompilationCacheTable> value) {
+    ASSERT(kFirstGeneration < generations_);
+    tables_[kFirstGeneration] = *value;
+  }
+
+  // Age the sub-cache by evicting the oldest generation and creating a new
+  // young generation.
+  void Age();
+
+  // GC support.
+  void Iterate(ObjectVisitor* v);
+  void IterateFunctions(ObjectVisitor* v);
+
+  // Clear this sub-cache evicting all its content.
+  void Clear();
+
+  // Remove given shared function info from sub-cache.
+  void Remove(Handle<SharedFunctionInfo> function_info);
+
+  // Number of generations in this sub-cache.
+  inline int generations() { return generations_; }
+
+ private:
+  int generations_;  // Number of generations.
+  Object** tables_;  // Compilation cache tables - one for each generation.
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationSubCache);
+};
+
+
+// Sub-cache for scripts.
+class CompilationCacheScript : public CompilationSubCache {
+ public:
+  explicit CompilationCacheScript(int generations);
+
+  Handle<SharedFunctionInfo> Lookup(Handle<String> source,
+                                    Handle<Object> name,
+                                    int line_offset,
+                                    int column_offset);
+  void Put(Handle<String> source, Handle<SharedFunctionInfo> function_info);
+
+ private:
+  MUST_USE_RESULT MaybeObject* TryTablePut(
+      Handle<String> source, Handle<SharedFunctionInfo> function_info);
+
+  // Note: Returns a new hash table if operation results in expansion.
+  Handle<CompilationCacheTable> TablePut(
+      Handle<String> source, Handle<SharedFunctionInfo> function_info);
+
+  bool HasOrigin(Handle<SharedFunctionInfo> function_info,
+                 Handle<Object> name,
+                 int line_offset,
+                 int column_offset);
+
+  void* script_histogram_;
+  bool script_histogram_initialized_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheScript);
+};
+
+
+// Sub-cache for eval scripts.
+class CompilationCacheEval: public CompilationSubCache {
+ public:
+  explicit CompilationCacheEval(int generations)
+      : CompilationSubCache(generations) { }
+
+  Handle<SharedFunctionInfo> Lookup(Handle<String> source,
+                                    Handle<Context> context,
+                                    StrictModeFlag strict_mode);
+
+  void Put(Handle<String> source,
+           Handle<Context> context,
+           Handle<SharedFunctionInfo> function_info);
+
+ private:
+  MUST_USE_RESULT MaybeObject* TryTablePut(
+      Handle<String> source,
+      Handle<Context> context,
+      Handle<SharedFunctionInfo> function_info);
+
+  // Note: Returns a new hash table if operation results in expansion.
+  Handle<CompilationCacheTable> TablePut(
+      Handle<String> source,
+      Handle<Context> context,
+      Handle<SharedFunctionInfo> function_info);
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheEval);
+};
+
+
+// Sub-cache for regular expressions.
+class CompilationCacheRegExp: public CompilationSubCache {
+ public:
+  explicit CompilationCacheRegExp(int generations)
+      : CompilationSubCache(generations) { }
+
+  Handle<FixedArray> Lookup(Handle<String> source, JSRegExp::Flags flags);
+
+  void Put(Handle<String> source,
+           JSRegExp::Flags flags,
+           Handle<FixedArray> data);
+ private:
+  MUST_USE_RESULT MaybeObject* TryTablePut(Handle<String> source,
+                                           JSRegExp::Flags flags,
+                                           Handle<FixedArray> data);
+
+  // Note: Returns a new hash table if operation results in expansion.
+  Handle<CompilationCacheTable> TablePut(Handle<String> source,
+                                         JSRegExp::Flags flags,
+                                         Handle<FixedArray> data);
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheRegExp);
+};
+
 
 // The compilation cache keeps shared function infos for compiled
 // scripts and evals. The shared function infos are looked up using
@@ -41,69 +181,94 @@ class CompilationCache {
   // Finds the script shared function info for a source
   // string. Returns an empty handle if the cache doesn't contain a
   // script for the given source string with the right origin.
-  static Handle<SharedFunctionInfo> LookupScript(Handle<String> source,
-                                                 Handle<Object> name,
-                                                 int line_offset,
-                                                 int column_offset);
+  Handle<SharedFunctionInfo> LookupScript(Handle<String> source,
+                                          Handle<Object> name,
+                                          int line_offset,
+                                          int column_offset);
 
   // Finds the shared function info for a source string for eval in a
   // given context.  Returns an empty handle if the cache doesn't
   // contain a script for the given source string.
-  static Handle<SharedFunctionInfo> LookupEval(Handle<String> source,
-                                               Handle<Context> context,
-                                               bool is_global,
-                                               StrictModeFlag strict_mode);
+  Handle<SharedFunctionInfo> LookupEval(Handle<String> source,
+                                        Handle<Context> context,
+                                        bool is_global,
+                                        StrictModeFlag strict_mode);
 
   // Returns the regexp data associated with the given regexp if it
   // is in cache, otherwise an empty handle.
-  static Handle<FixedArray> LookupRegExp(Handle<String> source,
-                                         JSRegExp::Flags flags);
+  Handle<FixedArray> LookupRegExp(Handle<String> source,
+                                  JSRegExp::Flags flags);
 
   // Associate the (source, kind) pair to the shared function
   // info. This may overwrite an existing mapping.
-  static void PutScript(Handle<String> source,
-                        Handle<SharedFunctionInfo> function_info);
+  void PutScript(Handle<String> source,
+                 Handle<SharedFunctionInfo> function_info);
 
   // Associate the (source, context->closure()->shared(), kind) triple
   // with the shared function info. This may overwrite an existing mapping.
-  static void PutEval(Handle<String> source,
-                      Handle<Context> context,
-                      bool is_global,
-                      Handle<SharedFunctionInfo> function_info);
+  void PutEval(Handle<String> source,
+               Handle<Context> context,
+               bool is_global,
+               Handle<SharedFunctionInfo> function_info);
 
   // Associate the (source, flags) pair to the given regexp data.
   // This may overwrite an existing mapping.
-  static void PutRegExp(Handle<String> source,
-                        JSRegExp::Flags flags,
-                        Handle<FixedArray> data);
+  void PutRegExp(Handle<String> source,
+                 JSRegExp::Flags flags,
+                 Handle<FixedArray> data);
 
   // Support for eager optimization tracking.
-  static bool ShouldOptimizeEagerly(Handle<JSFunction> function);
-  static void MarkForEagerOptimizing(Handle<JSFunction> function);
-  static void MarkForLazyOptimizing(Handle<JSFunction> function);
+  bool ShouldOptimizeEagerly(Handle<JSFunction> function);
+  void MarkForEagerOptimizing(Handle<JSFunction> function);
+  void MarkForLazyOptimizing(Handle<JSFunction> function);
 
   // Reset the eager optimization tracking data.
-  static void ResetEagerOptimizingData();
+  void ResetEagerOptimizingData();
 
   // Clear the cache - also used to initialize the cache at startup.
-  static void Clear();
+  void Clear();
 
   // Remove given shared function info from all caches.
-  static void Remove(Handle<SharedFunctionInfo> function_info);
+  void Remove(Handle<SharedFunctionInfo> function_info);
 
   // GC support.
-  static void Iterate(ObjectVisitor* v);
-  static void IterateFunctions(ObjectVisitor* v);
+  void Iterate(ObjectVisitor* v);
+  void IterateFunctions(ObjectVisitor* v);
 
   // Notify the cache that a mark-sweep garbage collection is about to
   // take place. This is used to retire entries from the cache to
   // avoid keeping them alive too long without using them.
-  static void MarkCompactPrologue();
+  void MarkCompactPrologue();
 
   // Enable/disable compilation cache. Used by debugger to disable compilation
   // cache during debugging to make sure new scripts are always compiled.
-  static void Enable();
-  static void Disable();
+  void Enable();
+  void Disable();
+ private:
+  CompilationCache();
+  ~CompilationCache();
+
+  HashMap* EagerOptimizingSet();
+
+  // The number of sub caches covering the different types to cache.
+  static const int kSubCacheCount = 4;
+
+  CompilationCacheScript script_;
+  CompilationCacheEval eval_global_;
+  CompilationCacheEval eval_contextual_;
+  CompilationCacheRegExp reg_exp_;
+  CompilationSubCache* subcaches_[kSubCacheCount];
+
+  // Current enable state of the compilation cache.
+  bool enabled_;
+
+  HashMap* eager_optimizing_set_;
+
+  bool IsEnabled() { return FLAG_compilation_cache && enabled_; }
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(CompilationCache);
 };
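
For reference, the generational behaviour behind the sub-cache declarations above: entries enter the youngest generation, are promoted back to it when a lookup hits, and are pushed one generation older on every mark-compact prologue via Age() until they fall off the end. A small sketch of just the aging step (a std::vector stands in for the array of CompilationCacheTable generations):

    #include <cassert>
    #include <string>
    #include <vector>

    class SubCache {
     public:
      explicit SubCache(int generations) : tables_(generations) {}

      void Put(const std::string& value) { tables_[0].push_back(value); }

      bool Contains(const std::string& value) const {
        for (size_t g = 0; g < tables_.size(); ++g)
          for (size_t i = 0; i < tables_[g].size(); ++i)
            if (tables_[g][i] == value) return true;
        return false;
      }

      // Age: generation i takes over generation i-1; the first generation starts
      // empty ("unborn"), mirroring tables_[0] = undefined_value in the source.
      void Age() {
        for (size_t i = tables_.size() - 1; i > 0; --i) tables_[i] = tables_[i - 1];
        tables_[0].clear();
      }

     private:
      std::vector<std::vector<std::string> > tables_;
    };

    int main() {
      SubCache scripts(2);            // kScriptGenerations is 5 in the source
      scripts.Put("function f(){}");
      scripts.Age();                  // survives one collection in generation 1
      assert(scripts.Contains("function f(){}"));
      scripts.Age();                  // second aging evicts it
      assert(!scripts.Contains("function f(){}"));
      return 0;
    }
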
 
 
index cfba4b22ba2fdf65519036fb45106e10e2ae8b0a..85448dd5b603c7c5e3b0500c668acdf83bbfb326 100755 (executable: src/compiler.cc)
@@ -51,7 +51,8 @@ namespace internal {
 
 
 CompilationInfo::CompilationInfo(Handle<Script> script)
-    : flags_(0),
+    : isolate_(script->GetIsolate()),
+      flags_(0),
       function_(NULL),
       scope_(NULL),
       script_(script),
@@ -64,7 +65,8 @@ CompilationInfo::CompilationInfo(Handle<Script> script)
 
 
 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
-    : flags_(IsLazy::encode(true)),
+    : isolate_(shared_info->GetIsolate()),
+      flags_(IsLazy::encode(true)),
       function_(NULL),
       scope_(NULL),
       shared_info_(shared_info),
@@ -78,7 +80,8 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
 
 
 CompilationInfo::CompilationInfo(Handle<JSFunction> closure)
-    : flags_(IsLazy::encode(true)),
+    : isolate_(closure->GetIsolate()),
+      flags_(IsLazy::encode(true)),
       function_(NULL),
       scope_(NULL),
       closure_(closure),
@@ -121,10 +124,11 @@ void CompilationInfo::DisableOptimization() {
 // break points has actually been set.
 static bool AlwaysFullCompiler() {
 #ifdef ENABLE_DEBUGGER_SUPPORT
+  Isolate* isolate = Isolate::Current();
   if (V8::UseCrankshaft()) {
-    return FLAG_always_full_compiler || Debug::has_break_points();
+    return FLAG_always_full_compiler || isolate->debug()->has_break_points();
   } else {
-    return FLAG_always_full_compiler || Debugger::IsDebuggerActive();
+    return FLAG_always_full_compiler || isolate->debugger()->IsDebuggerActive();
   }
 #else
   return FLAG_always_full_compiler;
@@ -172,7 +176,8 @@ static void AbortAndDisable(CompilationInfo* info) {
   ASSERT(code->kind() == Code::FUNCTION);
   code->set_optimizable(false);
   info->SetCode(code);
-  CompilationCache::MarkForLazyOptimizing(info->closure());
+  Isolate* isolate = code->GetIsolate();
+  isolate->compilation_cache()->MarkForLazyOptimizing(info->closure());
   if (FLAG_trace_opt) {
     PrintF("[disabled optimization for: ");
     info->closure()->PrintName();
@@ -287,7 +292,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
   HGraphBuilder builder(info, &oracle);
   HPhase phase(HPhase::kTotal);
   HGraph* graph = builder.CreateGraph();
-  if (Top::has_pending_exception()) {
+  if (info->isolate()->has_pending_exception()) {
     info->SetCode(Handle<Code>::null());
     return false;
   }
@@ -365,11 +370,12 @@ bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
 static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
 
-  PostponeInterruptsScope postpone;
+  Isolate* isolate = info->isolate();
+  PostponeInterruptsScope postpone(isolate);
 
-  ASSERT(!i::Top::global_context().is_null());
+  ASSERT(!isolate->global_context().is_null());
   Handle<Script> script = info->script();
-  script->set_context_data((*i::Top::global_context())->data());
+  script->set_context_data((*isolate->global_context())->data());
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   if (info->is_eval()) {
@@ -382,15 +388,16 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
       if (!it.done()) {
         script->set_eval_from_shared(
             JSFunction::cast(it.frame()->function())->shared());
+        Code* code = it.frame()->LookupCode(isolate);
         int offset = static_cast<int>(
-            it.frame()->pc() - it.frame()->code()->instruction_start());
+            it.frame()->pc() - code->instruction_start());
         script->set_eval_from_instructions_offset(Smi::FromInt(offset));
       }
     }
   }
 
   // Notify debugger
-  Debugger::OnBeforeCompile(script);
+  isolate->debugger()->OnBeforeCompile(script);
 #endif
 
   // Only allow non-global compiles for eval.
@@ -402,22 +409,22 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
   // rest of the function into account to avoid overlap with the
   // parsing statistics.
   HistogramTimer* rate = info->is_eval()
-      ? &Counters::compile_eval
-      : &Counters::compile;
+      ? COUNTERS->compile_eval()
+      : COUNTERS->compile();
   HistogramTimerScope timer(rate);
 
   // Compile the code.
   FunctionLiteral* lit = info->function();
-  LiveEditFunctionTracker live_edit_tracker(lit);
+  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
   if (!MakeCode(info)) {
-    Top::StackOverflow();
+    isolate->StackOverflow();
     return Handle<SharedFunctionInfo>::null();
   }
 
   // Allocate function.
   ASSERT(!info->code().is_null());
   Handle<SharedFunctionInfo> result =
-      Factory::NewSharedFunctionInfo(
+      isolate->factory()->NewSharedFunctionInfo(
           lit->name(),
           lit->materialized_literal_count(),
           info->code(),
@@ -427,7 +434,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
   Compiler::SetFunctionInfo(result, lit, true, script);
 
   if (script->name()->IsString()) {
-    PROFILE(CodeCreateEvent(
+    PROFILE(isolate, CodeCreateEvent(
         info->is_eval()
             ? Logger::EVAL_TAG
             : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
@@ -438,13 +445,13 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
                    script,
                    info->code()));
   } else {
-    PROFILE(CodeCreateEvent(
+    PROFILE(isolate, CodeCreateEvent(
         info->is_eval()
             ? Logger::EVAL_TAG
             : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
         *info->code(),
         *result,
-        Heap::empty_string()));
+        isolate->heap()->empty_string()));
     GDBJIT(AddCode(Handle<String>(), script, info->code()));
   }
 
@@ -455,7 +462,8 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Notify debugger
-  Debugger::OnAfterCompile(script, Debugger::NO_AFTER_COMPILE_FLAGS);
+  isolate->debugger()->OnAfterCompile(
+      script, Debugger::NO_AFTER_COMPILE_FLAGS);
 #endif
 
   live_edit_tracker.RecordFunctionInfo(result, lit);
@@ -472,20 +480,23 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
                                              ScriptDataImpl* input_pre_data,
                                              Handle<Object> script_data,
                                              NativesFlag natives) {
+  Isolate* isolate = Isolate::Current();
   int source_length = source->length();
-  Counters::total_load_size.Increment(source_length);
-  Counters::total_compile_size.Increment(source_length);
+  COUNTERS->total_load_size()->Increment(source_length);
+  COUNTERS->total_compile_size()->Increment(source_length);
 
   // The VM is in the COMPILER state until exiting this function.
-  VMState state(COMPILER);
+  VMState state(isolate, COMPILER);
+
+  CompilationCache* compilation_cache = isolate->compilation_cache();
 
   // Do a lookup in the compilation cache but not for extensions.
   Handle<SharedFunctionInfo> result;
   if (extension == NULL) {
-    result = CompilationCache::LookupScript(source,
-                                            script_name,
-                                            line_offset,
-                                            column_offset);
+    result = compilation_cache->LookupScript(source,
+                                             script_name,
+                                             line_offset,
+                                             column_offset);
   }
 
   if (result.is_null()) {
@@ -511,7 +522,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
     }
 
     // Create a script object describing the script to be compiled.
-    Handle<Script> script = Factory::NewScript(source);
+    Handle<Script> script = FACTORY->NewScript(source);
     if (natives == NATIVES_CODE) {
       script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
     }
@@ -521,7 +532,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
       script->set_column_offset(Smi::FromInt(column_offset));
     }
 
-    script->set_data(script_data.is_null() ? Heap::undefined_value()
+    script->set_data(script_data.is_null() ? HEAP->undefined_value()
                                            : *script_data);
 
     // Compile the function and add it to the cache.
@@ -532,7 +543,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
     if (natives == NATIVES_CODE) info.MarkAsAllowingNativesSyntax();
     result = MakeFunctionInfo(&info);
     if (extension == NULL && !result.is_null()) {
-      CompilationCache::PutScript(source, result);
+      compilation_cache->PutScript(source, result);
     }
 
     // Get rid of the pre-parsing data (if necessary).
@@ -541,7 +552,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
     }
   }
 
-  if (result.is_null()) Top::ReportPendingMessages();
+  if (result.is_null()) isolate->ReportPendingMessages();
   return result;
 }
 
@@ -550,24 +561,26 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
                                                  Handle<Context> context,
                                                  bool is_global,
                                                  StrictModeFlag strict_mode) {
+  Isolate* isolate = source->GetIsolate();
   int source_length = source->length();
-  Counters::total_eval_size.Increment(source_length);
-  Counters::total_compile_size.Increment(source_length);
+  isolate->counters()->total_eval_size()->Increment(source_length);
+  isolate->counters()->total_compile_size()->Increment(source_length);
 
   // The VM is in the COMPILER state until exiting this function.
-  VMState state(COMPILER);
+  VMState state(isolate, COMPILER);
 
   // Do a lookup in the compilation cache; if the entry is not there, invoke
   // the compiler and add the result to the cache.
   Handle<SharedFunctionInfo> result;
-  result = CompilationCache::LookupEval(source,
-                                        context,
-                                        is_global,
-                                        strict_mode);
+  CompilationCache* compilation_cache = isolate->compilation_cache();
+  result = compilation_cache->LookupEval(source,
+                                         context,
+                                         is_global,
+                                         strict_mode);
 
   if (result.is_null()) {
     // Create a script object describing the script to be compiled.
-    Handle<Script> script = Factory::NewScript(source);
+    Handle<Script> script = isolate->factory()->NewScript(source);
     CompilationInfo info(script);
     info.MarkAsEval();
     if (is_global) info.MarkAsGlobal();
@@ -575,11 +588,12 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
     info.SetCallingContext(context);
     result = MakeFunctionInfo(&info);
     if (!result.is_null()) {
+      CompilationCache* compilation_cache = isolate->compilation_cache();
       // If caller is strict mode, the result must be strict as well,
       // but not the other way around. Consider:
       // eval("'use strict'; ...");
       ASSERT(strict_mode == kNonStrictMode || result->strict_mode());
-      CompilationCache::PutEval(source, context, is_global, result);
+      compilation_cache->PutEval(source, context, is_global, result);
     }
   }
 
@@ -591,25 +605,26 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
 
   // The VM is in the COMPILER state until exiting this function.
-  VMState state(COMPILER);
+  VMState state(info->isolate(), COMPILER);
 
-  PostponeInterruptsScope postpone;
+  Isolate* isolate = info->isolate();
+  PostponeInterruptsScope postpone(isolate);
 
   Handle<SharedFunctionInfo> shared = info->shared_info();
   int compiled_size = shared->end_position() - shared->start_position();
-  Counters::total_compile_size.Increment(compiled_size);
+  isolate->counters()->total_compile_size()->Increment(compiled_size);
 
   // Generate the AST for the lazily compiled function.
   if (ParserApi::Parse(info)) {
     // Measure how long it takes to do the lazy compilation; only take the
     // rest of the function into account to avoid overlap with the lazy
     // parsing statistics.
-    HistogramTimerScope timer(&Counters::compile_lazy);
+    HistogramTimerScope timer(isolate->counters()->compile_lazy());
 
     // Compile the code.
     if (!MakeCode(info)) {
-      if (!Top::has_pending_exception()) {
-        Top::StackOverflow();
+      if (!isolate->has_pending_exception()) {
+        isolate->StackOverflow();
       }
     } else {
       ASSERT(!info->code().is_null());
@@ -654,12 +669,14 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
           // If we're asked to always optimize, we compile the optimized
           // version of the function right away - unless the debugger is
           // active as it makes no sense to compile optimized code then.
-          if (FLAG_always_opt && !Debug::has_break_points()) {
+          if (FLAG_always_opt &&
+              !Isolate::Current()->debug()->has_break_points()) {
             CompilationInfo optimized(function);
             optimized.SetOptimizing(AstNode::kNoNumber);
             return CompileLazy(&optimized);
-          } else if (CompilationCache::ShouldOptimizeEagerly(function)) {
-            RuntimeProfiler::OptimizeSoon(*function);
+          } else if (isolate->compilation_cache()->ShouldOptimizeEagerly(
+              function)) {
+            isolate->runtime_profiler()->OptimizeSoon(*function);
           }
         }
       }
@@ -680,20 +697,21 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
   info.SetFunction(literal);
   info.SetScope(literal->scope());
 
-  LiveEditFunctionTracker live_edit_tracker(literal);
+  LiveEditFunctionTracker live_edit_tracker(info.isolate(), literal);
   // Determine if the function can be lazily compiled. This is necessary to
   // allow some of our builtin JS files to be lazily compiled. These
   // builtins cannot be handled lazily by the parser, since we have to know
   // if a function uses the special natives syntax, which is something the
   // parser records.
   bool allow_lazy = literal->AllowsLazyCompilation() &&
-      !LiveEditFunctionTracker::IsActive();
+      !LiveEditFunctionTracker::IsActive(info.isolate());
 
   Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
 
   // Generate code
   if (FLAG_lazy && allow_lazy) {
-    Handle<Code> code(Builtins::builtin(Builtins::LazyCompile));
+    Handle<Code> code(
+        info.isolate()->builtins()->builtin(Builtins::LazyCompile));
     info.SetCode(code);
   } else {
     if (V8::UseCrankshaft()) {
@@ -728,7 +746,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
 
   // Create a shared function info object.
   Handle<SharedFunctionInfo> result =
-      Factory::NewSharedFunctionInfo(literal->name(),
+      FACTORY->NewSharedFunctionInfo(literal->name(),
                                      literal->materialized_literal_count(),
                                      info.code(),
                                      scope_info);
@@ -780,20 +798,23 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
   // Log the code generation. If source information is available include
   // script name and line number. Check explicitly whether logging is
   // enabled as finding the line number is not free.
-  if (Logger::is_logging() || CpuProfiler::is_profiling()) {
+  if (info->isolate()->logger()->is_logging() || CpuProfiler::is_profiling()) {
     Handle<Script> script = info->script();
     Handle<Code> code = info->code();
-    if (*code == Builtins::builtin(Builtins::LazyCompile)) return;
+    if (*code == info->isolate()->builtins()->builtin(Builtins::LazyCompile))
+      return;
     if (script->name()->IsString()) {
       int line_num = GetScriptLineNumber(script, shared->start_position()) + 1;
       USE(line_num);
-      PROFILE(CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
+      PROFILE(info->isolate(),
+              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                               *code,
                               *shared,
                               String::cast(script->name()),
                               line_num));
     } else {
-      PROFILE(CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
+      PROFILE(info->isolate(),
+              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                               *code,
                               *shared,
                               shared->DebugName()));
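
For context, the src/compiler.cc hunks above are one instance of the pattern this merge applies everywhere: the former static classes (Top, Counters, Factory, Heap, CompilationCache, Debugger) become members of Isolate, reached either through an explicit Isolate* or through the COUNTERS/FACTORY/HEAP shortcut macros, which, judging from their use here, resolve against Isolate::Current(). A minimal sketch of the before/after shape, using names from the diff (the wrapper function itself is hypothetical):

  // Hypothetical wrapper showing the isolate-threading pattern.
  static Handle<SharedFunctionInfo> LookupOrCompileScript(Isolate* isolate,
                                                          Handle<String> source) {
    // Before: Counters::total_compile_size.Increment(...) on a static class.
    isolate->counters()->total_compile_size()->Increment(source->length());

    // Before: CompilationCache::LookupScript(...) on a static class.
    CompilationCache* cache = isolate->compilation_cache();
    Handle<SharedFunctionInfo> result =
        cache->LookupScript(source, Handle<Object>(), 0, 0);
    if (result.is_null()) {
      // ... compile via MakeFunctionInfo(), then cache->PutScript(source, result) ...
    }
    return result;
  }
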
index 92ec9ed6c4ff40cac5dbd507481187cef00d738f..a66c54010e5e74b6d9680c06366dd31a3857693a 100644 (file)
@@ -46,6 +46,10 @@ class CompilationInfo BASE_EMBEDDED {
   explicit CompilationInfo(Handle<SharedFunctionInfo> shared_info);
   explicit CompilationInfo(Handle<JSFunction> closure);
 
+  Isolate* isolate() {
+    ASSERT(Isolate::Current() == isolate_);
+    return isolate_;
+  }
   bool is_lazy() const { return (flags_ & IsLazy::mask()) != 0; }
   bool is_eval() const { return (flags_ & IsEval::mask()) != 0; }
   bool is_global() const { return (flags_ & IsGlobal::mask()) != 0; }
@@ -142,6 +146,8 @@ class CompilationInfo BASE_EMBEDDED {
   }
 
  private:
+  Isolate* isolate_;
+
   // Compilation mode.
   // BASE is generated by the full codegen, optionally prepared for bailouts.
   // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
@@ -286,8 +292,9 @@ class CompilationZoneScope : public ZoneScope {
   explicit CompilationZoneScope(ZoneScopeMode mode) : ZoneScope(mode) { }
   virtual ~CompilationZoneScope() {
     if (ShouldDeleteOnExit()) {
-      FrameElement::ClearConstantList();
-      Result::ClearConstantList();
+      Isolate* isolate = Isolate::Current();
+      isolate->frame_element_constant_list()->Clear();
+      isolate->result_constant_list()->Clear();
     }
   }
 };
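
CompilationInfo now caches the isolate it was created on, and the accessor's ASSERT catches an info object leaking onto the wrong isolate in debug builds. Compiler code that already holds an info can therefore avoid repeated Isolate::Current() TLS lookups; the shape used throughout compiler.cc above is roughly:

  // Typical use inside the compiler once a CompilationInfo is in hand:
  Isolate* isolate = info->isolate();         // ASSERTs it matches Current()
  VMState state(isolate, COMPILER);           // per-isolate VM state tracking
  PostponeInterruptsScope postpone(isolate);  // per-isolate stack guard
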
index 3ad72a16b50c3a81d06e777300dc2630549c6512..520f3dde2444e4d87e5817ddb6f3e54c27edbcc1 100644 (file)
@@ -55,7 +55,7 @@ Context* Context::global_context() {
 
   // During bootstrapping, the global object might not be set and we
   // have to search the context chain to find the global context.
-  ASSERT(Bootstrapper::IsActive());
+  ASSERT(Isolate::Current()->bootstrapper()->IsActive());
   Context* current = this;
   while (!current->IsGlobalContext()) {
     JSFunction* closure = JSFunction::cast(current->closure());
@@ -76,7 +76,8 @@ void Context::set_global_proxy(JSObject* object) {
 
 Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
                                int* index_, PropertyAttributes* attributes) {
-  Handle<Context> context(this);
+  Isolate* isolate = GetIsolate();
+  Handle<Context> context(this, isolate);
 
   bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0;
   *index_ = -1;
@@ -97,7 +98,8 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
 
     // check extension/with object
     if (context->has_extension()) {
-      Handle<JSObject> extension = Handle<JSObject>(context->extension());
+      Handle<JSObject> extension = Handle<JSObject>(context->extension(),
+                                                    isolate);
       // Context extension objects need to behave as if they have no
       // prototype.  So even if we want to follow prototype chains, we
       // need to only do a local lookup for context extension objects.
@@ -122,7 +124,7 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
 
       // check non-parameter locals in context
       Handle<SerializedScopeInfo> scope_info(
-          context->closure()->shared()->scope_info());
+          context->closure()->shared()->scope_info(), isolate);
       Variable::Mode mode;
       int index = scope_info->ContextSlotIndex(*name, &mode);
       ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
@@ -155,11 +157,12 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
       int param_index = scope_info->ParameterIndex(*name);
       if (param_index >= 0) {
         // slot found.
-        int index =
-            scope_info->ContextSlotIndex(Heap::arguments_shadow_symbol(), NULL);
+        int index = scope_info->ContextSlotIndex(
+            isolate->heap()->arguments_shadow_symbol(), NULL);
         ASSERT(index >= 0);  // arguments must exist and be in the heap context
-        Handle<JSObject> arguments(JSObject::cast(context->get(index)));
-        ASSERT(arguments->HasLocalProperty(Heap::length_symbol()));
+        Handle<JSObject> arguments(JSObject::cast(context->get(index)),
+                                   isolate);
+        ASSERT(arguments->HasLocalProperty(isolate->heap()->length_symbol()));
         if (FLAG_trace_contexts) {
           PrintF("=> found parameter %d in arguments object\n", param_index);
         }
@@ -188,9 +191,10 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
     if (context->IsGlobalContext()) {
       follow_context_chain = false;
     } else if (context->is_function_context()) {
-      context = Handle<Context>(Context::cast(context->closure()->context()));
+      context = Handle<Context>(Context::cast(context->closure()->context()),
+                                isolate);
     } else {
-      context = Handle<Context>(context->previous());
+      context = Handle<Context>(context->previous(), isolate);
     }
   } while (follow_context_chain);
 
@@ -252,7 +256,7 @@ void Context::AddOptimizedFunction(JSFunction* function) {
 
   // Check that the context belongs to the weak global contexts list.
   bool found = false;
-  Object* context = Heap::global_contexts_list();
+  Object* context = GetHeap()->global_contexts_list();
   while (!context->IsUndefined()) {
     if (context == this) {
       found = true;
@@ -281,7 +285,7 @@ void Context::RemoveOptimizedFunction(JSFunction* function) {
       } else {
         prev->set_next_function_link(element_function->next_function_link());
       }
-      element_function->set_next_function_link(Heap::undefined_value());
+      element_function->set_next_function_link(GetHeap()->undefined_value());
       return;
     }
     prev = element_function;
@@ -298,7 +302,7 @@ Object* Context::OptimizedFunctionsListHead() {
 
 
 void Context::ClearOptimizedFunctions() {
-  set(OPTIMIZED_FUNCTIONS_LIST, Heap::undefined_value());
+  set(OPTIMIZED_FUNCTIONS_LIST, GetHeap()->undefined_value());
 }
 
 
@@ -306,14 +310,17 @@ void Context::ClearOptimizedFunctions() {
 bool Context::IsBootstrappingOrContext(Object* object) {
   // During bootstrapping we allow all objects to pass as
   // contexts. This is necessary to fix circular dependencies.
-  return Bootstrapper::IsActive() || object->IsContext();
+  return Isolate::Current()->bootstrapper()->IsActive() || object->IsContext();
 }
 
 
 bool Context::IsBootstrappingOrGlobalObject(Object* object) {
   // During bootstrapping we allow all objects to pass as global
   // objects. This is necessary to fix circular dependencies.
-  return Bootstrapper::IsActive() || object->IsGlobalObject();
+  Isolate* isolate = Isolate::Current();
+  return isolate->heap()->gc_state() != Heap::NOT_IN_GC ||
+      isolate->bootstrapper()->IsActive() ||
+      object->IsGlobalObject();
 }
 #endif
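
In the src/contexts.cc hunks, the one-argument Handle<T>(ptr) constructor (which must locate the current isolate on its own) is replaced by the two-argument form that is told which isolate's handle scope to allocate in, and the Heap:: statics become GetHeap()/isolate->heap() lookups. A small sketch of the pattern, using a hypothetical helper:

  // Hypothetical helper: wrap a context's extension object in a handle that
  // is explicitly allocated on the context's own isolate.
  static Handle<JSObject> ExtensionHandle(Context* context) {
    Isolate* isolate = context->GetIsolate();
    ASSERT(context->has_extension());
    return Handle<JSObject>(context->extension(), isolate);
  }
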
 
index 23412cb117d34ed54b1fe15712ddb80bde4ee22f..e46619ec4c136fab8c004b891eb93b9b9320f6a0 100644 (file)
@@ -268,8 +268,7 @@ class Context: public FixedArray {
 
   GlobalObject* global() {
     Object* result = get(GLOBAL_INDEX);
-    ASSERT(Heap::gc_state() != Heap::NOT_IN_GC ||
-           IsBootstrappingOrGlobalObject(result));
+    ASSERT(IsBootstrappingOrGlobalObject(result));
     return reinterpret_cast<GlobalObject*>(result);
   }
   void set_global(GlobalObject* global) { set(GLOBAL_INDEX, global); }
@@ -288,14 +287,10 @@ class Context: public FixedArray {
   bool is_function_context() { return unchecked_previous() == NULL; }
 
   // Tells whether the global context is marked with out of memory.
-  bool has_out_of_memory() {
-    return global_context()->out_of_memory() == Heap::true_value();
-  }
+  inline bool has_out_of_memory();
 
   // Mark the global context with out of memory.
-  void mark_out_of_memory() {
-    global_context()->set_out_of_memory(Heap::true_value());
-  }
+  inline void mark_out_of_memory();
 
   // The exception holder is the object used as a with object in
   // the implementation of a catch block.
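
The two accessors above lose their in-class bodies because they can no longer reach the heap through a static Heap::true_value(); they become plain inline declarations whose definitions (presumably in an -inl.h header, not part of this hunk) go through the owning heap. Sketched under that assumption:

  // Sketch of where the moved definitions plausibly end up (not shown in this diff).
  bool Context::has_out_of_memory() {
    return global_context()->out_of_memory() ==
        GetIsolate()->heap()->true_value();
  }

  void Context::mark_out_of_memory() {
    global_context()->set_out_of_memory(GetIsolate()->heap()->true_value());
  }
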
index a348235d6689e4467ea293d01c78543ed327b36f..c3d7bdfa643db3d42c0dd7818588aabb05b95ba6 100644 (file)
@@ -109,9 +109,11 @@ static const double JUNK_STRING_VALUE = OS::nan_value();
 
 // Returns true if a nonspace found and false if the end has reached.
 template <class Iterator, class EndMark>
-static inline bool AdvanceToNonspace(Iterator* current, EndMark end) {
+static inline bool AdvanceToNonspace(ScannerConstants* scanner_constants,
+                                     Iterator* current,
+                                     EndMark end) {
   while (*current != end) {
-    if (!ScannerConstants::kIsWhiteSpace.get(**current)) return true;
+    if (!scanner_constants->IsWhiteSpace(**current)) return true;
     ++*current;
   }
   return false;
@@ -132,7 +134,8 @@ static double SignedZero(bool negative) {
 
 // Parsing integers with radix 2, 4, 8, 16, 32. Assumes current != end.
 template <int radix_log_2, class Iterator, class EndMark>
-static double InternalStringToIntDouble(Iterator current,
+static double InternalStringToIntDouble(ScannerConstants* scanner_constants,
+                                        Iterator current,
                                         EndMark end,
                                         bool negative,
                                         bool allow_trailing_junk) {
@@ -157,7 +160,8 @@ static double InternalStringToIntDouble(Iterator current,
     } else if (radix > 10 && *current >= 'A' && *current < 'A' + radix - 10) {
       digit = static_cast<char>(*current) - 'A' + 10;
     } else {
-      if (allow_trailing_junk || !AdvanceToNonspace(&current, end)) {
+      if (allow_trailing_junk ||
+          !AdvanceToNonspace(scanner_constants, &current, end)) {
         break;
       } else {
         return JUNK_STRING_VALUE;
@@ -188,7 +192,8 @@ static double InternalStringToIntDouble(Iterator current,
         exponent += radix_log_2;
       }
 
-      if (!allow_trailing_junk && AdvanceToNonspace(&current, end)) {
+      if (!allow_trailing_junk &&
+          AdvanceToNonspace(scanner_constants, &current, end)) {
         return JUNK_STRING_VALUE;
       }
 
@@ -232,11 +237,16 @@ static double InternalStringToIntDouble(Iterator current,
 
 
 template <class Iterator, class EndMark>
-static double InternalStringToInt(Iterator current, EndMark end, int radix) {
+static double InternalStringToInt(ScannerConstants* scanner_constants,
+                                  Iterator current,
+                                  EndMark end,
+                                  int radix) {
   const bool allow_trailing_junk = true;
   const double empty_string_val = JUNK_STRING_VALUE;
 
-  if (!AdvanceToNonspace(&current, end)) return empty_string_val;
+  if (!AdvanceToNonspace(scanner_constants, &current, end)) {
+    return empty_string_val;
+  }
 
   bool negative = false;
   bool leading_zero = false;
@@ -244,10 +254,14 @@ static double InternalStringToInt(Iterator current, EndMark end, int radix) {
   if (*current == '+') {
     // Ignore leading sign; skip following spaces.
     ++current;
-    if (!AdvanceToNonspace(&current, end)) return JUNK_STRING_VALUE;
+    if (!AdvanceToNonspace(scanner_constants, &current, end)) {
+      return JUNK_STRING_VALUE;
+    }
   } else if (*current == '-') {
     ++current;
-    if (!AdvanceToNonspace(&current, end)) return JUNK_STRING_VALUE;
+    if (!AdvanceToNonspace(scanner_constants, &current, end)) {
+      return JUNK_STRING_VALUE;
+    }
     negative = true;
   }
 
@@ -298,21 +312,21 @@ static double InternalStringToInt(Iterator current, EndMark end, int radix) {
     switch (radix) {
       case 2:
         return InternalStringToIntDouble<1>(
-                   current, end, negative, allow_trailing_junk);
+            scanner_constants, current, end, negative, allow_trailing_junk);
       case 4:
         return InternalStringToIntDouble<2>(
-                   current, end, negative, allow_trailing_junk);
+            scanner_constants, current, end, negative, allow_trailing_junk);
       case 8:
         return InternalStringToIntDouble<3>(
-                   current, end, negative, allow_trailing_junk);
+            scanner_constants, current, end, negative, allow_trailing_junk);
 
       case 16:
         return InternalStringToIntDouble<4>(
-                   current, end, negative, allow_trailing_junk);
+            scanner_constants, current, end, negative, allow_trailing_junk);
 
       case 32:
         return InternalStringToIntDouble<5>(
-                   current, end, negative, allow_trailing_junk);
+            scanner_constants, current, end, negative, allow_trailing_junk);
       default:
         UNREACHABLE();
     }
@@ -337,7 +351,8 @@ static double InternalStringToInt(Iterator current, EndMark end, int radix) {
       if (current == end) break;
     }
 
-    if (!allow_trailing_junk && AdvanceToNonspace(&current, end)) {
+    if (!allow_trailing_junk &&
+        AdvanceToNonspace(scanner_constants, &current, end)) {
       return JUNK_STRING_VALUE;
     }
 
@@ -402,7 +417,8 @@ static double InternalStringToInt(Iterator current, EndMark end, int radix) {
     v = v * multiplier + part;
   } while (!done);
 
-  if (!allow_trailing_junk && AdvanceToNonspace(&current, end)) {
+  if (!allow_trailing_junk &&
+      AdvanceToNonspace(scanner_constants, &current, end)) {
     return JUNK_STRING_VALUE;
   }
 
@@ -416,7 +432,8 @@ static double InternalStringToInt(Iterator current, EndMark end, int radix) {
 // 2. *current - gets the current character in the sequence.
 // 3. ++current (advances the position).
 template <class Iterator, class EndMark>
-static double InternalStringToDouble(Iterator current,
+static double InternalStringToDouble(ScannerConstants* scanner_constants,
+                                     Iterator current,
                                      EndMark end,
                                      int flags,
                                      double empty_string_val) {
@@ -428,7 +445,9 @@ static double InternalStringToDouble(Iterator current,
   // 'parsing_done'.
   // 4. 'current' is not dereferenced after the 'parsing_done' label.
   // 5. Code before 'parsing_done' may rely on 'current != end'.
-  if (!AdvanceToNonspace(&current, end)) return empty_string_val;
+  if (!AdvanceToNonspace(scanner_constants, &current, end)) {
+    return empty_string_val;
+  }
 
   const bool allow_trailing_junk = (flags & ALLOW_TRAILING_JUNK) != 0;
 
@@ -463,7 +482,8 @@ static double InternalStringToDouble(Iterator current,
       return JUNK_STRING_VALUE;
     }
 
-    if (!allow_trailing_junk && AdvanceToNonspace(&current, end)) {
+    if (!allow_trailing_junk &&
+        AdvanceToNonspace(scanner_constants, &current, end)) {
       return JUNK_STRING_VALUE;
     }
 
@@ -485,7 +505,8 @@ static double InternalStringToDouble(Iterator current,
         return JUNK_STRING_VALUE;  // "0x".
       }
 
-      return InternalStringToIntDouble<4>(current,
+      return InternalStringToIntDouble<4>(scanner_constants,
+                                          current,
                                           end,
                                           negative,
                                           allow_trailing_junk);
@@ -621,7 +642,8 @@ static double InternalStringToDouble(Iterator current,
     exponent += (sign == '-' ? -num : num);
   }
 
-  if (!allow_trailing_junk && AdvanceToNonspace(&current, end)) {
+  if (!allow_trailing_junk &&
+      AdvanceToNonspace(scanner_constants, &current, end)) {
     return JUNK_STRING_VALUE;
   }
 
@@ -629,7 +651,8 @@ static double InternalStringToDouble(Iterator current,
   exponent += insignificant_digits;
 
   if (octal) {
-    return InternalStringToIntDouble<3>(buffer,
+    return InternalStringToIntDouble<3>(scanner_constants,
+                                        buffer,
                                         buffer + buffer_pos,
                                         negative,
                                         allow_trailing_junk);
@@ -649,18 +672,23 @@ static double InternalStringToDouble(Iterator current,
 
 
 double StringToDouble(String* str, int flags, double empty_string_val) {
+  ScannerConstants* scanner_constants =
+      Isolate::Current()->scanner_constants();
   StringShape shape(str);
   if (shape.IsSequentialAscii()) {
     const char* begin = SeqAsciiString::cast(str)->GetChars();
     const char* end = begin + str->length();
-    return InternalStringToDouble(begin, end, flags, empty_string_val);
+    return InternalStringToDouble(scanner_constants, begin, end, flags,
+                                  empty_string_val);
   } else if (shape.IsSequentialTwoByte()) {
     const uc16* begin = SeqTwoByteString::cast(str)->GetChars();
     const uc16* end = begin + str->length();
-    return InternalStringToDouble(begin, end, flags, empty_string_val);
+    return InternalStringToDouble(scanner_constants, begin, end, flags,
+                                  empty_string_val);
   } else {
     StringInputBuffer buffer(str);
-    return InternalStringToDouble(StringInputBufferIterator(&buffer),
+    return InternalStringToDouble(scanner_constants,
+                                  StringInputBufferIterator(&buffer),
                                   StringInputBufferIterator::EndMarker(),
                                   flags,
                                   empty_string_val);
@@ -669,18 +697,21 @@ double StringToDouble(String* str, int flags, double empty_string_val) {
 
 
 double StringToInt(String* str, int radix) {
+  ScannerConstants* scanner_constants =
+      Isolate::Current()->scanner_constants();
   StringShape shape(str);
   if (shape.IsSequentialAscii()) {
     const char* begin = SeqAsciiString::cast(str)->GetChars();
     const char* end = begin + str->length();
-    return InternalStringToInt(begin, end, radix);
+    return InternalStringToInt(scanner_constants, begin, end, radix);
   } else if (shape.IsSequentialTwoByte()) {
     const uc16* begin = SeqTwoByteString::cast(str)->GetChars();
     const uc16* end = begin + str->length();
-    return InternalStringToInt(begin, end, radix);
+    return InternalStringToInt(scanner_constants, begin, end, radix);
   } else {
     StringInputBuffer buffer(str);
-    return InternalStringToInt(StringInputBufferIterator(&buffer),
+    return InternalStringToInt(scanner_constants,
+                               StringInputBufferIterator(&buffer),
                                StringInputBufferIterator::EndMarker(),
                                radix);
   }
@@ -688,16 +719,22 @@ double StringToInt(String* str, int radix) {
 
 
 double StringToDouble(const char* str, int flags, double empty_string_val) {
+  ScannerConstants* scanner_constants =
+      Isolate::Current()->scanner_constants();
   const char* end = str + StrLength(str);
-  return InternalStringToDouble(str, end, flags, empty_string_val);
+  return InternalStringToDouble(scanner_constants, str, end, flags,
+                                empty_string_val);
 }
 
 
 double StringToDouble(Vector<const char> str,
                       int flags,
                       double empty_string_val) {
+  ScannerConstants* scanner_constants =
+      Isolate::Current()->scanner_constants();
   const char* end = str.start() + str.length();
-  return InternalStringToDouble(str.start(), end, flags, empty_string_val);
+  return InternalStringToDouble(scanner_constants, str.start(), end, flags,
+                                empty_string_val);
 }
 
 
@@ -1066,4 +1103,23 @@ char* DoubleToRadixCString(double value, int radix) {
 }
 
 
+static Mutex* dtoa_lock_one = OS::CreateMutex();
+static Mutex* dtoa_lock_zero = OS::CreateMutex();
+
+
 } }  // namespace v8::internal
+
+
+extern "C" {
+void ACQUIRE_DTOA_LOCK(int n) {
+  ASSERT(n == 0 || n == 1);
+  (n == 0 ? v8::internal::dtoa_lock_zero : v8::internal::dtoa_lock_one)->Lock();
+}
+
+
+void FREE_DTOA_LOCK(int n) {
+  ASSERT(n == 0 || n == 1);
+  (n == 0 ? v8::internal::dtoa_lock_zero : v8::internal::dtoa_lock_one)->
+      Unlock();
+}
+}
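
Two independent isolate-related changes land in conversions.cc: the whitespace table used when skipping junk is now the per-isolate ScannerConstants object, threaded explicitly through every internal helper instead of the old static ScannerConstants::kIsWhiteSpace bitmap, and the dtoa library's ACQUIRE_DTOA_LOCK/FREE_DTOA_LOCK hooks are backed by real mutexes so number conversion is safe with several isolates on different threads. The public entry points keep their signatures and fetch the table themselves, so callers stay unchanged; for example:

  // Unchanged call site: the isolate-specific whitespace classification is
  // looked up inside StringToDouble via Isolate::Current()->scanner_constants().
  double ParseLenient(Vector<const char> chars) {   // illustrative wrapper only
    return StringToDouble(chars, ALLOW_TRAILING_JUNK, OS::nan_value());
  }
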
index 239a5f7a0cb6885b70a21bd1abb1d0f491a39fa4..faad6d409ade1cdb7acac4d2c1f72488d0136d28 100644 (file)
 #include "v8.h"
 
 #include "counters.h"
+#include "isolate.h"
 #include "platform.h"
 
 namespace v8 {
 namespace internal {
 
-CounterLookupCallback StatsTable::lookup_function_ = NULL;
-CreateHistogramCallback StatsTable::create_histogram_function_ = NULL;
-AddHistogramSampleCallback StatsTable::add_histogram_sample_function_ = NULL;
+StatsTable::StatsTable()
+    : lookup_function_(NULL),
+      create_histogram_function_(NULL),
+      add_histogram_sample_function_(NULL) {}
+
+
+int* StatsCounter::FindLocationInStatsTable() const {
+  return Isolate::Current()->stats_table()->FindLocation(name_);
+}
+
 
 // Start the timer.
 void StatsCounterTimer::Start() {
@@ -71,8 +79,15 @@ void HistogramTimer::Stop() {
 
     // Compute the delta between start and stop, in milliseconds.
     int milliseconds = static_cast<int>(stop_time_ - start_time_) / 1000;
-    StatsTable::AddHistogramSample(histogram_, milliseconds);
+    Isolate::Current()->stats_table()->
+        AddHistogramSample(histogram_, milliseconds);
   }
 }
 
+
+void* HistogramTimer::CreateHistogram() const {
+  return Isolate::Current()->stats_table()->
+      CreateHistogram(name_, 0, 10000, 50);
+}
+
 } }  // namespace v8::internal
index 048fdaabf2996faa9e895c994a3daa8996713468..6498a0242f9246a1d638bcd55f620971bf135740 100644 (file)
@@ -38,27 +38,27 @@ namespace internal {
 // counters for monitoring.  Counters can be looked up and
 // manipulated by name.
 
-class StatsTable : public AllStatic {
+class StatsTable {
  public:
   // Register an application-defined function where
   // counters can be looked up.
-  static void SetCounterFunction(CounterLookupCallback f) {
+  void SetCounterFunction(CounterLookupCallback f) {
     lookup_function_ = f;
   }
 
   // Register an application-defined function to create
   // a histogram for passing to the AddHistogramSample function
-  static void SetCreateHistogramFunction(CreateHistogramCallback f) {
+  void SetCreateHistogramFunction(CreateHistogramCallback f) {
     create_histogram_function_ = f;
   }
 
   // Register an application-defined function to add a sample
   // to a histogram created with CreateHistogram function
-  static void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
+  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
     add_histogram_sample_function_ = f;
   }
 
-  static bool HasCounterFunction() {
+  bool HasCounterFunction() const {
     return lookup_function_ != NULL;
   }
 
@@ -68,7 +68,7 @@ class StatsTable : public AllStatic {
   // may receive a different location to store its counter.
   // The return value must not be cached and re-used across
   // threads, although a single thread is free to cache it.
-  static int* FindLocation(const char* name) {
+  int* FindLocation(const char* name) {
     if (!lookup_function_) return NULL;
     return lookup_function_(name);
   }
@@ -78,25 +78,31 @@ class StatsTable : public AllStatic {
   // function. min and max define the expected minimum and maximum
   // sample values. buckets is the maximum number of buckets
   // that the samples will be grouped into.
-  static void* CreateHistogram(const char* name,
-                               int min,
-                               int max,
-                               size_t buckets) {
+  void* CreateHistogram(const char* name,
+                        int min,
+                        int max,
+                        size_t buckets) {
     if (!create_histogram_function_) return NULL;
     return create_histogram_function_(name, min, max, buckets);
   }
 
   // Add a sample to a histogram created with the CreateHistogram
   // function.
-  static void AddHistogramSample(void* histogram, int sample) {
+  void AddHistogramSample(void* histogram, int sample) {
     if (!add_histogram_sample_function_) return;
     return add_histogram_sample_function_(histogram, sample);
   }
 
  private:
-  static CounterLookupCallback lookup_function_;
-  static CreateHistogramCallback create_histogram_function_;
-  static AddHistogramSampleCallback add_histogram_sample_function_;
+  StatsTable();
+
+  CounterLookupCallback lookup_function_;
+  CreateHistogramCallback create_histogram_function_;
+  AddHistogramSampleCallback add_histogram_sample_function_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(StatsTable);
 };
 
 // StatsCounters are dynamically created values which can be tracked in
@@ -166,9 +172,12 @@ struct StatsCounter {
     if (lookup_done_)
       return ptr_;
     lookup_done_ = true;
-    ptr_ = StatsTable::FindLocation(name_);
+    ptr_ = FindLocationInStatsTable();
     return ptr_;
   }
+
+ private:
+  int* FindLocationInStatsTable() const;
 };
 
 // StatsCounterTimer t = { { L"t:foo", NULL, false }, 0, 0 };
@@ -216,10 +225,13 @@ struct HistogramTimer {
   void* GetHistogram() {
     if (!lookup_done_) {
       lookup_done_ = true;
-      histogram_ = StatsTable::CreateHistogram(name_, 0, 10000, 50);
+      histogram_ = CreateHistogram();
     }
     return histogram_;
   }
+
+ private:
+  void* CreateHistogram() const;
 };
 
 // Helper class for scoping a HistogramTimer.
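
StatsTable stops being an AllStatic bag of function pointers and becomes an object owned by the isolate (hence the private constructor plus friend class Isolate), while the lazy lookups in StatsCounter and HistogramTimer route through Isolate::Current()->stats_table(). An embedder-side sketch, assuming the public V8::SetCounterFunction entry point now forwards to the current isolate's table:

  #include <string.h>  // for strcmp; embedder-side code, not part of V8 itself

  // Hypothetical embedder counter backend: a fixed pool of named slots.
  static struct CounterSlot { const char* name; int value; } slots[256];
  static int slots_used = 0;

  static int* LookupCounter(const char* name) {
    for (int i = 0; i < slots_used; i++) {
      if (strcmp(slots[i].name, name) == 0) return &slots[i].value;
    }
    if (slots_used == 256) return NULL;   // NULL leaves the counter disabled
    slots[slots_used].name = name;        // counter names are static strings
    return &slots[slots_used++].value;
  }

  // With this patch the callback lands on the per-isolate table, presumably via
  //   Isolate::Current()->stats_table()->SetCounterFunction(LookupCounter);
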
index f82d8ee97ef8e35a662a41431d6472ebf5254795..c0a2c2b4ffe4ae9e1826c6e29416c1e81abafcf0 100644 (file)
@@ -46,8 +46,9 @@ static const int kTickSamplesBufferChunkSize = 64*KB;
 static const int kTickSamplesBufferChunksCount = 16;
 
 
-ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)
-    : Thread("v8:ProfEvntProc"),
+ProfilerEventsProcessor::ProfilerEventsProcessor(Isolate* isolate,
+                                                 ProfileGenerator* generator)
+    : Thread(isolate, "v8:ProfEvntProc"),
       generator_(generator),
       running_(true),
       ticks_buffer_(sizeof(TickSampleEventRecord),
@@ -183,7 +184,7 @@ void ProfilerEventsProcessor::RegExpCodeCreateEvent(
 void ProfilerEventsProcessor::AddCurrentStack() {
   TickSampleEventRecord record;
   TickSample* sample = &record.sample;
-  sample->state = Top::current_vm_state();
+  sample->state = Isolate::Current()->current_vm_state();
   sample->pc = reinterpret_cast<Address>(sample);  // Not NULL.
   sample->tos = NULL;
   sample->frames_count = 0;
@@ -272,57 +273,58 @@ void ProfilerEventsProcessor::Run() {
 }
 
 
-CpuProfiler* CpuProfiler::singleton_ = NULL;
-Atomic32 CpuProfiler::is_profiling_ = false;
-
 void CpuProfiler::StartProfiling(const char* title) {
-  ASSERT(singleton_ != NULL);
-  singleton_->StartCollectingProfile(title);
+  ASSERT(Isolate::Current()->cpu_profiler() != NULL);
+  Isolate::Current()->cpu_profiler()->StartCollectingProfile(title);
 }
 
 
 void CpuProfiler::StartProfiling(String* title) {
-  ASSERT(singleton_ != NULL);
-  singleton_->StartCollectingProfile(title);
+  ASSERT(Isolate::Current()->cpu_profiler() != NULL);
+  Isolate::Current()->cpu_profiler()->StartCollectingProfile(title);
 }
 
 
 CpuProfile* CpuProfiler::StopProfiling(const char* title) {
-  return is_profiling() ? singleton_->StopCollectingProfile(title) : NULL;
+  return is_profiling() ?
+      Isolate::Current()->cpu_profiler()->StopCollectingProfile(title) : NULL;
 }
 
 
 CpuProfile* CpuProfiler::StopProfiling(Object* security_token, String* title) {
   return is_profiling() ?
-      singleton_->StopCollectingProfile(security_token, title) : NULL;
+      Isolate::Current()->cpu_profiler()->StopCollectingProfile(
+          security_token, title) : NULL;
 }
 
 
 int CpuProfiler::GetProfilesCount() {
-  ASSERT(singleton_ != NULL);
+  ASSERT(Isolate::Current()->cpu_profiler() != NULL);
   // The count of profiles doesn't depend on a security token.
-  return singleton_->profiles_->Profiles(
+  return Isolate::Current()->cpu_profiler()->profiles_->Profiles(
       TokenEnumerator::kNoSecurityToken)->length();
 }
 
 
 CpuProfile* CpuProfiler::GetProfile(Object* security_token, int index) {
-  ASSERT(singleton_ != NULL);
-  const int token = singleton_->token_enumerator_->GetTokenId(security_token);
-  return singleton_->profiles_->Profiles(token)->at(index);
+  ASSERT(Isolate::Current()->cpu_profiler() != NULL);
+  CpuProfiler* profiler = Isolate::Current()->cpu_profiler();
+  const int token = profiler->token_enumerator_->GetTokenId(security_token);
+  return profiler->profiles_->Profiles(token)->at(index);
 }
 
 
 CpuProfile* CpuProfiler::FindProfile(Object* security_token, unsigned uid) {
-  ASSERT(singleton_ != NULL);
-  const int token = singleton_->token_enumerator_->GetTokenId(security_token);
-  return singleton_->profiles_->GetProfile(token, uid);
+  ASSERT(Isolate::Current()->cpu_profiler() != NULL);
+  CpuProfiler* profiler = Isolate::Current()->cpu_profiler();
+  const int token = profiler->token_enumerator_->GetTokenId(security_token);
+  return profiler->profiles_->GetProfile(token, uid);
 }
 
 
-TickSample* CpuProfiler::TickSampleEvent() {
-  if (CpuProfiler::is_profiling()) {
-    return singleton_->processor_->TickSampleEvent();
+TickSample* CpuProfiler::TickSampleEvent(Isolate* isolate) {
+  if (CpuProfiler::is_profiling(isolate)) {
+    return isolate->cpu_profiler()->processor_->TickSampleEvent();
   } else {
     return NULL;
   }
@@ -330,24 +332,24 @@ TickSample* CpuProfiler::TickSampleEvent() {
 
 
 void CpuProfiler::CallbackEvent(String* name, Address entry_point) {
-  singleton_->processor_->CallbackCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CallbackCreateEvent(
       Logger::CALLBACK_TAG, CodeEntry::kEmptyNamePrefix, name, entry_point);
 }
 
 
 void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
                            Code* code, const char* comment) {
-  singleton_->processor_->CodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CodeCreateEvent(
       tag, comment, code->address(), code->ExecutableSize());
 }
 
 
 void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
                            Code* code, String* name) {
-  singleton_->processor_->CodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CodeCreateEvent(
       tag,
       name,
-      Heap::empty_string(),
+      HEAP->empty_string(),
       v8::CpuProfileNode::kNoLineNumberInfo,
       code->address(),
       code->ExecutableSize(),
@@ -359,10 +361,10 @@ void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
                                   Code* code,
                                   SharedFunctionInfo* shared,
                                   String* name) {
-  singleton_->processor_->CodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CodeCreateEvent(
       tag,
       name,
-      Heap::empty_string(),
+      HEAP->empty_string(),
       v8::CpuProfileNode::kNoLineNumberInfo,
       code->address(),
       code->ExecutableSize(),
@@ -374,7 +376,7 @@ void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
                                   Code* code,
                                   SharedFunctionInfo* shared,
                                   String* source, int line) {
-  singleton_->processor_->CodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CodeCreateEvent(
       tag,
       shared->DebugName(),
       source,
@@ -387,7 +389,7 @@ void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
 
 void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
                            Code* code, int args_count) {
-  singleton_->processor_->CodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CodeCreateEvent(
       tag,
       args_count,
       code->address(),
@@ -396,28 +398,29 @@ void CpuProfiler::CodeCreateEvent(Logger::LogEventsAndTags tag,
 
 
 void CpuProfiler::CodeMoveEvent(Address from, Address to) {
-  singleton_->processor_->CodeMoveEvent(from, to);
+  Isolate::Current()->cpu_profiler()->processor_->CodeMoveEvent(from, to);
 }
 
 
 void CpuProfiler::CodeDeleteEvent(Address from) {
-  singleton_->processor_->CodeDeleteEvent(from);
+  Isolate::Current()->cpu_profiler()->processor_->CodeDeleteEvent(from);
 }
 
 
 void CpuProfiler::SharedFunctionInfoMoveEvent(Address from, Address to) {
-  singleton_->processor_->SharedFunctionInfoMoveEvent(from, to);
+  CpuProfiler* profiler = Isolate::Current()->cpu_profiler();
+  profiler->processor_->SharedFunctionInfoMoveEvent(from, to);
 }
 
 
 void CpuProfiler::GetterCallbackEvent(String* name, Address entry_point) {
-  singleton_->processor_->CallbackCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CallbackCreateEvent(
       Logger::CALLBACK_TAG, "get ", name, entry_point);
 }
 
 
 void CpuProfiler::RegExpCodeCreateEvent(Code* code, String* source) {
-  singleton_->processor_->RegExpCodeCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->RegExpCodeCreateEvent(
       Logger::REG_EXP_TAG,
       "RegExp: ",
       source,
@@ -427,7 +430,7 @@ void CpuProfiler::RegExpCodeCreateEvent(Code* code, String* source) {
 
 
 void CpuProfiler::SetterCallbackEvent(String* name, Address entry_point) {
-  singleton_->processor_->CallbackCreateEvent(
+  Isolate::Current()->cpu_profiler()->processor_->CallbackCreateEvent(
       Logger::CALLBACK_TAG, "set ", name, entry_point);
 }
 
@@ -437,7 +440,8 @@ CpuProfiler::CpuProfiler()
       next_profile_uid_(1),
       token_enumerator_(new TokenEnumerator()),
       generator_(NULL),
-      processor_(NULL) {
+      processor_(NULL),
+      is_profiling_(false) {
 }
 
 
@@ -463,25 +467,25 @@ void CpuProfiler::StartCollectingProfile(String* title) {
 void CpuProfiler::StartProcessorIfNotStarted() {
   if (processor_ == NULL) {
     // Disable logging when using the new implementation.
-    saved_logging_nesting_ = Logger::logging_nesting_;
-    Logger::logging_nesting_ = 0;
+    saved_logging_nesting_ = LOGGER->logging_nesting_;
+    LOGGER->logging_nesting_ = 0;
     generator_ = new ProfileGenerator(profiles_);
-    processor_ = new ProfilerEventsProcessor(generator_);
+    processor_ = new ProfilerEventsProcessor(Isolate::Current(), generator_);
     NoBarrier_Store(&is_profiling_, true);
     processor_->Start();
     // Enumerate stuff we already have in the heap.
-    if (Heap::HasBeenSetup()) {
+    if (HEAP->HasBeenSetup()) {
       if (!FLAG_prof_browser_mode) {
         bool saved_log_code_flag = FLAG_log_code;
         FLAG_log_code = true;
-        Logger::LogCodeObjects();
+        LOGGER->LogCodeObjects();
         FLAG_log_code = saved_log_code_flag;
       }
-      Logger::LogCompiledFunctions();
-      Logger::LogAccessorCallbacks();
+      LOGGER->LogCompiledFunctions();
+      LOGGER->LogAccessorCallbacks();
     }
     // Enable stack sampling.
-    Sampler* sampler = reinterpret_cast<Sampler*>(Logger::ticker_);
+    Sampler* sampler = reinterpret_cast<Sampler*>(LOGGER->ticker_);
     if (!sampler->IsActive()) sampler->Start();
     sampler->IncreaseProfilingDepth();
   }
@@ -514,7 +518,7 @@ CpuProfile* CpuProfiler::StopCollectingProfile(Object* security_token,
 
 void CpuProfiler::StopProcessorIfLastProfile(const char* title) {
   if (profiles_->IsLastProfile(title)) {
-    Sampler* sampler = reinterpret_cast<Sampler*>(Logger::ticker_);
+    Sampler* sampler = reinterpret_cast<Sampler*>(LOGGER->ticker_);
     sampler->DecreaseProfilingDepth();
     sampler->Stop();
     processor_->Stop();
@@ -524,7 +528,7 @@ void CpuProfiler::StopProcessorIfLastProfile(const char* title) {
     processor_ = NULL;
     NoBarrier_Store(&is_profiling_, false);
     generator_ = NULL;
-    Logger::logging_nesting_ = saved_logging_nesting_;
+    LOGGER->logging_nesting_ = saved_logging_nesting_;
   }
 }
 
@@ -537,8 +541,9 @@ namespace internal {
 
 void CpuProfiler::Setup() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (singleton_ == NULL) {
-    singleton_ = new CpuProfiler();
+  Isolate* isolate = Isolate::Current();
+  if (isolate->cpu_profiler() == NULL) {
+    isolate->set_cpu_profiler(new CpuProfiler());
   }
 #endif
 }
@@ -546,10 +551,11 @@ void CpuProfiler::Setup() {
 
 void CpuProfiler::TearDown() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (singleton_ != NULL) {
-    delete singleton_;
+  Isolate* isolate = Isolate::Current();
+  if (isolate->cpu_profiler() != NULL) {
+    delete isolate->cpu_profiler();
   }
-  singleton_ = NULL;
+  isolate->set_cpu_profiler(NULL);
 #endif
 }
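
cpu-profiler.cc drops the process-wide singleton_/is_profiling_ pair: each isolate owns its CpuProfiler and the hot-path check is_profiling(isolate) reads an atomic flag on that instance. The stack sampler side consequently has to say which isolate it is sampling; roughly:

  // Sketch of a sampler-side tick, assuming the sampler already knows which
  // isolate it interrupted (it can no longer ask a global profiler).
  static void RecordTick(Isolate* isolate) {
    TickSample* sample = CpuProfiler::TickSampleEvent(isolate);
    if (sample == NULL) return;                    // profiling is off here
    sample->state = isolate->current_vm_state();
    // ... fill in pc/sp and the stack frames of the interrupted thread ...
  }
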
 
index fc4dfb73ec58ef80bf0bf2c3846349ae2ff7a640..9485d0c19709cc97c5c5728283424a541142f61f 100644 (file)
@@ -133,7 +133,8 @@ class TickSampleEventRecord BASE_EMBEDDED {
 // methods called by event producers: VM and stack sampler threads.
 class ProfilerEventsProcessor : public Thread {
  public:
-  explicit ProfilerEventsProcessor(ProfileGenerator* generator);
+  explicit ProfilerEventsProcessor(Isolate* isolate,
+                                   ProfileGenerator* generator);
   virtual ~ProfilerEventsProcessor() {}
 
   // Thread control.
@@ -196,21 +197,23 @@ class ProfilerEventsProcessor : public Thread {
 } }  // namespace v8::internal
 
 
-#define PROFILE(Call)                                  \
-  LOG(Call);                                           \
+#define PROFILE(isolate, Call)                         \
+  LOG(isolate, Call);                                  \
   do {                                                 \
     if (v8::internal::CpuProfiler::is_profiling()) {   \
       v8::internal::CpuProfiler::Call;                 \
     }                                                  \
   } while (false)
 #else
-#define PROFILE(Call) LOG(Call)
+#define PROFILE(isolate, Call) LOG(isolate, Call)
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 
 namespace v8 {
 namespace internal {
 
+
+// TODO(isolates): isolatify this class.
 class CpuProfiler {
  public:
   static void Setup();
@@ -226,7 +229,7 @@ class CpuProfiler {
   static CpuProfile* FindProfile(Object* security_token, unsigned uid);
 
   // Invoked from stack sampler (thread or signal handler.)
-  static TickSample* TickSampleEvent();
+  static TickSample* TickSampleEvent(Isolate* isolate);
 
   // Must be called via PROFILE macro, otherwise will crash when
   // profiling is not enabled.
@@ -253,8 +256,15 @@ class CpuProfiler {
   static void SetterCallbackEvent(String* name, Address entry_point);
   static void SharedFunctionInfoMoveEvent(Address from, Address to);
 
+  // TODO(isolates): this doesn't have to use atomics anymore.
+
   static INLINE(bool is_profiling()) {
-    return NoBarrier_Load(&is_profiling_);
+    return is_profiling(Isolate::Current());
+  }
+
+  static INLINE(bool is_profiling(Isolate* isolate)) {
+    CpuProfiler* profiler = isolate->cpu_profiler();
+    return profiler != NULL && NoBarrier_Load(&profiler->is_profiling_);
   }
 
  private:
@@ -273,9 +283,7 @@ class CpuProfiler {
   ProfileGenerator* generator_;
   ProfilerEventsProcessor* processor_;
   int saved_logging_nesting_;
-
-  static CpuProfiler* singleton_;
-  static Atomic32 is_profiling_;
+  Atomic32 is_profiling_;
 
 #else
   static INLINE(bool is_profiling()) { return false; }
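
Correspondingly, the PROFILE macro grows an isolate parameter that it forwards to LOG, and call sites that already hold an isolate can use the cheaper two-argument is_profiling(isolate) guard before building expensive event data, mirroring the shape of Compiler::RecordFunctionCompilation earlier in this change:

  // Guard expensive event preparation on the known isolate, then emit:
  if (CpuProfiler::is_profiling(isolate)) {
    PROFILE(isolate, CodeCreateEvent(tag, *code, *shared, shared->DebugName()));
  }
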
index 8a3886c676b7524e0a6f99daf92165965f1c094c..3df869348b4a6bcb63d955a86e3bed968e95b01a 100644 (file)
@@ -159,7 +159,7 @@ void HandleDebugEvent(DebugEvent event,
 
 
 void RunRemoteDebugger(int port) {
-  RemoteDebugger debugger(port);
+  RemoteDebugger debugger(i::Isolate::Current(), port);
   debugger.Run();
 }
 
@@ -186,11 +186,11 @@ void RemoteDebugger::Run() {
   }
 
   // Start the receiver thread.
-  ReceiverThread receiver(this);
+  ReceiverThread receiver(isolate_, this);
   receiver.Start();
 
   // Start the keyboard thread.
-  KeyboardThread keyboard(this);
+  KeyboardThread keyboard(isolate_, this);
   keyboard.Start();
   PrintPrompt();
 
index 4e33e6f4c4527a8429d9cc92cbc47a39221b853f..ceb9e363e0ed36578d2bb8bc854326bd18904691 100644 (file)
@@ -53,11 +53,11 @@ class ReceiverThread;
 // Remote debugging class.
 class RemoteDebugger {
  public:
-  explicit RemoteDebugger(int port)
+  RemoteDebugger(i::Isolate* isolate, int port)
       : port_(port),
         event_access_(i::OS::CreateMutex()),
         event_available_(i::OS::CreateSemaphore(0)),
-        head_(NULL), tail_(NULL) {}
+        head_(NULL), tail_(NULL), isolate_(isolate) {}
   void Run();
 
   // Handle events from the subordinate threads.
@@ -89,6 +89,7 @@ class RemoteDebugger {
   i::Semaphore* event_available_;
   RemoteDebuggerEvent* head_;
   RemoteDebuggerEvent* tail_;
+  i::Isolate* isolate_;
 
   friend class ReceiverThread;
 };
@@ -97,8 +98,8 @@ class RemoteDebugger {
 // Thread reading from debugged V8 instance.
 class ReceiverThread: public i::Thread {
  public:
-  explicit ReceiverThread(RemoteDebugger* remote_debugger)
-      : Thread("d8:ReceiverThrd"),
+  ReceiverThread(i::Isolate* isolate, RemoteDebugger* remote_debugger)
+      : Thread(isolate, "d8:ReceiverThrd"),
         remote_debugger_(remote_debugger) {}
   ~ReceiverThread() {}
 
@@ -112,8 +113,8 @@ class ReceiverThread: public i::Thread {
 // Thread reading keyboard input.
 class KeyboardThread: public i::Thread {
  public:
-  explicit KeyboardThread(RemoteDebugger* remote_debugger)
-      : Thread("d8:KeyboardThrd"),
+  explicit KeyboardThread(i::Isolate* isolate, RemoteDebugger* remote_debugger)
+      : Thread(isolate, "d8:KeyboardThrd"),
         remote_debugger_(remote_debugger) {}
   ~KeyboardThread() {}
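
A mechanical consequence visible in the d8 debugger classes: i::Thread now takes the isolate it belongs to as its first constructor argument, so every Thread subclass forwards one. A minimal sketch of a new worker thread under this constructor:

  // Hypothetical example thread; not part of this change.
  class PrintThread : public i::Thread {
   public:
    explicit PrintThread(i::Isolate* isolate)
        : Thread(isolate, "d8:PrintThrd") {}
    virtual void Run() { i::PrintF("hello from a per-isolate thread\n"); }
  };
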
 
index 349ec90410eff0305a589c8e6fbe2ba363a99e7a..7de82b759f3b75b93390d1c6c41ef1318fe7468c 100644 (file)
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -29,6 +29,8 @@
 #include <stdlib.h>
 #include <errno.h>
 
+#include "v8.h"
+
 #include "d8.h"
 #include "d8-debug.h"
 #include "debug.h"
@@ -441,24 +443,25 @@ void Shell::Initialize() {
 
   i::JSArguments js_args = i::FLAG_js_arguments;
   i::Handle<i::FixedArray> arguments_array =
-      i::Factory::NewFixedArray(js_args.argc());
+      FACTORY->NewFixedArray(js_args.argc());
   for (int j = 0; j < js_args.argc(); j++) {
     i::Handle<i::String> arg =
-        i::Factory::NewStringFromUtf8(i::CStrVector(js_args[j]));
+        FACTORY->NewStringFromUtf8(i::CStrVector(js_args[j]));
     arguments_array->set(j, *arg);
   }
   i::Handle<i::JSArray> arguments_jsarray =
-      i::Factory::NewJSArrayWithElements(arguments_array);
+      FACTORY->NewJSArrayWithElements(arguments_array);
   global_template->Set(String::New("arguments"),
                        Utils::ToLocal(arguments_jsarray));
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Install the debugger object in the utility scope
-  i::Debug::Load();
-  i::Handle<i::JSObject> debug
-      = i::Handle<i::JSObject>(i::Debug::debug_context()->global());
+  i::Debug* debug = i::Isolate::Current()->debug();
+  debug->Load();
+  i::Handle<i::JSObject> js_debug
+      = i::Handle<i::JSObject>(debug->debug_context()->global());
   utility_context_->Global()->Set(String::New("$debug"),
-                                  Utils::ToLocal(debug));
+                                  Utils::ToLocal(js_debug));
 #endif
 
   // Run the d8 shell utility script in the utility context
@@ -490,7 +493,7 @@ void Shell::Initialize() {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Set the security token of the debug context to allow access.
-  i::Debug::debug_context()->set_security_token(i::Heap::undefined_value());
+  debug->debug_context()->set_security_token(HEAP->undefined_value());
 
   // Start the debugger agent if requested.
   if (i::FLAG_debugger_agent) {
@@ -606,8 +609,8 @@ void Shell::RunShell() {
 
 class ShellThread : public i::Thread {
  public:
-  ShellThread(int no, i::Vector<const char> files)
-    : Thread("d8:ShellThread"),
+  ShellThread(i::Isolate* isolate, int no, i::Vector<const char> files)
+    : Thread(isolate, "d8:ShellThread"),
       no_(no), files_(files) { }
   virtual void Run();
  private:
@@ -739,7 +742,8 @@ int Shell::Main(int argc, char* argv[]) {
         const char* files = ReadChars(argv[++i], &size);
         if (files == NULL) return 1;
         ShellThread* thread =
-            new ShellThread(threads.length(),
+            new ShellThread(i::Isolate::Current(),
+                            threads.length(),
                             i::Vector<const char>(files, size));
         thread->Start();
         threads.Add(thread);
index 79d760f5a4267433f41c7847482ac9e3acefcdf7..573d7d80fc6c4b03b00a45dfd2fdaccaa889118f 100644 (file)
@@ -90,7 +90,7 @@ class BitVector: public ZoneObject {
   explicit BitVector(int length)
       : length_(length),
         data_length_(SizeFor(length)),
-        data_(Zone::NewArray<uint32_t>(data_length_)) {
+        data_(ZONE->NewArray<uint32_t>(data_length_)) {
     ASSERT(length > 0);
     Clear();
   }
@@ -98,7 +98,7 @@ class BitVector: public ZoneObject {
   BitVector(const BitVector& other)
       : length_(other.length()),
         data_length_(SizeFor(length_)),
-        data_(Zone::NewArray<uint32_t>(data_length_)) {
+        data_(ZONE->NewArray<uint32_t>(data_length_)) {
     CopyFrom(other);
   }
 
@@ -237,7 +237,7 @@ class SparseSet: public ZoneObject {
 
   explicit SparseSet(int universe_size)
       : dense_(4),
-        sparse_(Zone::NewArray<int>(universe_size)) {
+        sparse_(ZONE->NewArray<int>(universe_size)) {
 #ifdef DEBUG
     size_ = universe_size;
     iterator_count_ = 0;
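
The BitVector and SparseSet hunks above swap the static Zone::NewArray allocator for the ZONE accessor, which presumably resolves to the zone of the current isolate. The allocation pattern for zone-backed containers then looks roughly like this (an illustrative sketch, not the actual header):

    // Sketch: per-isolate zone allocation for a fixed-size scratch buffer.
    // The storage is reclaimed wholesale when the owning zone is torn down.
    class ScratchBits : public ZoneObject {
     public:
      explicit ScratchBits(int length)
          : length_(length),
            data_(ZONE->NewArray<uint32_t>(length)) {  // zone of the current isolate
        for (int i = 0; i < length_; i++) data_[i] = 0;
      }
     private:
      int length_;
      uint32_t* data_;
    };
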
index 40e56f3024be49155b19ddc4cfb7d5ef602ad124..51109ee9d7ff02eec66d26d8926dfae0d8f0ee6e 100644 (file)
--- a/src/dateparser.h
+++ b/src/dateparser.h
@@ -70,7 +70,8 @@ class DateParser : public AllStatic {
     explicit InputReader(Vector<Char> s)
         : index_(0),
           buffer_(s),
-          has_read_number_(false) {
+          has_read_number_(false),
+          scanner_constants_(Isolate::Current()->scanner_constants()) {
       Next();
     }
 
@@ -121,7 +122,7 @@ class DateParser : public AllStatic {
     }
 
     bool SkipWhiteSpace() {
-      if (ScannerConstants::kIsWhiteSpace.get(ch_)) {
+      if (scanner_constants_->IsWhiteSpace(ch_)) {
         Next();
         return true;
       }
@@ -157,6 +158,7 @@ class DateParser : public AllStatic {
     Vector<Char> buffer_;
     bool has_read_number_;
     uint32_t ch_;
+    ScannerConstants* scanner_constants_;
   };
 
   enum KeywordType { INVALID, MONTH_NAME, TIME_ZONE_NAME, AM_PM };
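
The DateParser::InputReader change illustrates the caching convention used on hot paths throughout this merge: the isolate-local helper (here ScannerConstants) is fetched once via Isolate::Current() in the constructor and kept in a member, rather than being looked up for every character. In sketch form (the class below is hypothetical; only scanner_constants() and IsWhiteSpace() are taken from the hunks above):

    // Sketch: cache an isolate-local helper to avoid a TLS lookup per call.
    class CharClassifier {
     public:
      CharClassifier()
          : scanner_constants_(Isolate::Current()->scanner_constants()) {}
      bool IsSpace(uint32_t ch) {
        return scanner_constants_->IsWhiteSpace(ch);  // no Isolate::Current() here
      }
     private:
      ScannerConstants* scanner_constants_;
    };
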
index 6901079b9b060d6d85af83b8faff8fbb2c31c0c1..498b88ac16dfc6d24e888091bbcd7b712c4f2797 100644 (file)
--- a/src/debug-agent.cc
+++ b/src/debug-agent.cc
@@ -38,11 +38,11 @@ namespace internal {
 // Public V8 debugger API message handler function. This function just delegates
 // to the debugger agent through its data parameter.
 void DebuggerAgentMessageHandler(const v8::Debug::Message& message) {
-  DebuggerAgent::instance_->DebuggerMessage(message);
+  DebuggerAgent* agent = Isolate::Current()->debugger_agent_instance();
+  ASSERT(agent != NULL);
+  agent->DebuggerMessage(message);
 }
 
-// static
-DebuggerAgent* DebuggerAgent::instance_ = NULL;
 
 // Debugger agent main thread.
 void DebuggerAgent::Run() {
@@ -102,20 +102,21 @@ void DebuggerAgent::WaitUntilListening() {
   listening_->Wait();
 }
 
+static const char* kCreateSessionMessage =
+    "Remote debugging session already active\r\n";
+
 void DebuggerAgent::CreateSession(Socket* client) {
   ScopedLock with(session_access_);
 
   // If another session is already established terminate this one.
   if (session_ != NULL) {
-    static const char* message = "Remote debugging session already active\r\n";
-
-    client->Send(message, StrLength(message));
+    client->Send(kCreateSessionMessage, StrLength(kCreateSessionMessage));
     delete client;
     return;
   }
 
   // Create a new session and hook up the debug message handler.
-  session_ = new DebuggerAgentSession(this, client);
+  session_ = new DebuggerAgentSession(isolate(), this, client);
   v8::Debug::SetMessageHandler2(DebuggerAgentMessageHandler);
   session_->Start();
 }
@@ -224,8 +225,8 @@ void DebuggerAgentSession::Shutdown() {
 }
 
 
-const char* DebuggerAgentUtil::kContentLength = "Content-Length";
-int DebuggerAgentUtil::kContentLengthSize =
+const char* const DebuggerAgentUtil::kContentLength = "Content-Length";
+const int DebuggerAgentUtil::kContentLengthSize =
     StrLength(kContentLength);
 
 
index 4cedb8318725306f32b6ea8e7a64e8afb35d8b2c..a25002e0a7f6e5c829cd0173329c1c47f45771bb 100644 (file)
--- a/src/debug-agent.h
+++ b/src/debug-agent.h
@@ -43,18 +43,18 @@ class DebuggerAgentSession;
 // handles connection from a remote debugger.
 class DebuggerAgent: public Thread {
  public:
-  explicit DebuggerAgent(const char* name, int port)
-      : Thread(name),
+  DebuggerAgent(Isolate* isolate, const char* name, int port)
+      : Thread(isolate, name),
         name_(StrDup(name)), port_(port),
         server_(OS::CreateSocket()), terminate_(false),
         session_access_(OS::CreateMutex()), session_(NULL),
         terminate_now_(OS::CreateSemaphore(0)),
         listening_(OS::CreateSemaphore(0)) {
-    ASSERT(instance_ == NULL);
-    instance_ = this;
+    ASSERT(Isolate::Current()->debugger_agent_instance() == NULL);
+    Isolate::Current()->set_debugger_agent_instance(this);
   }
   ~DebuggerAgent() {
-     instance_ = NULL;
+     Isolate::Current()->set_debugger_agent_instance(NULL);
      delete server_;
   }
 
@@ -77,8 +77,6 @@ class DebuggerAgent: public Thread {
   Semaphore* terminate_now_;  // Semaphore to signal termination.
   Semaphore* listening_;
 
-  static DebuggerAgent* instance_;
-
   friend class DebuggerAgentSession;
   friend void DebuggerAgentMessageHandler(const v8::Debug::Message& message);
 
@@ -90,8 +88,8 @@ class DebuggerAgent: public Thread {
 // debugger and sends debugger events/responses to the remote debugger.
 class DebuggerAgentSession: public Thread {
  public:
-  DebuggerAgentSession(DebuggerAgent* agent, Socket* client)
-      : Thread("v8:DbgAgntSessn"),
+  DebuggerAgentSession(Isolate* isolate, DebuggerAgent* agent, Socket* client)
+      : Thread(isolate, "v8:DbgAgntSessn"),
         agent_(agent), client_(client) {}
 
   void DebuggerMessage(Vector<uint16_t> message);
@@ -112,8 +110,8 @@ class DebuggerAgentSession: public Thread {
 // Utility methods factored out to be used by the D8 shell as well.
 class DebuggerAgentUtil {
  public:
-  static const char* kContentLength;
-  static int kContentLengthSize;
+  static const char* const kContentLength;
+  static const int kContentLengthSize;
 
   static SmartPointer<char> ReceiveMessage(const Socket* conn);
   static bool SendConnectMessage(const Socket* conn,
index 8ae47b9a3820fa2ee71e7486dea404d7a505734b..10b8b4a9a6d57b5a1b443db1439e418c1c1c2291 100644 (file)
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -51,6 +51,26 @@ namespace v8 {
 namespace internal {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
+
+
+Debug::Debug(Isolate* isolate)
+    : has_break_points_(false),
+      script_cache_(NULL),
+      debug_info_list_(NULL),
+      disable_break_(false),
+      break_on_exception_(false),
+      break_on_uncaught_exception_(false),
+      debug_break_return_(NULL),
+      debug_break_slot_(NULL),
+      isolate_(isolate) {
+  memset(registers_, 0, sizeof(JSCallerSavedBuffer));
+}
+
+
+Debug::~Debug() {
+}
+
+
 static void PrintLn(v8::Local<v8::Value> value) {
   v8::Local<v8::String> s = value->ToString();
   ScopedVector<char> data(s->Length() + 1);
@@ -64,22 +84,28 @@ static void PrintLn(v8::Local<v8::Value> value) {
 
 
 static Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind) {
-  CALL_HEAP_FUNCTION(StubCache::ComputeCallDebugBreak(argc, kind), Code);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(
+      isolate,
+      isolate->stub_cache()->ComputeCallDebugBreak(argc, kind),
+      Code);
 }
 
 
-static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
+static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
+  Isolate* isolate = Isolate::Current();
   CALL_HEAP_FUNCTION(
-      StubCache::ComputeCallDebugPrepareStepIn(argc, kind), Code);
+      isolate,
+      isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind),
+      Code);
 }
 
 
-static v8::Handle<v8::Context> GetDebugEventContext() {
-  Handle<Context> context = Debug::debugger_entry()->GetContext();
-  // Top::context() may have been NULL when "script collected" event occured.
-  if (*context == NULL) {
-    return v8::Local<v8::Context>();
-  }
+static v8::Handle<v8::Context> GetDebugEventContext(Isolate* isolate) {
+  Handle<Context> context = isolate->debug()->debugger_entry()->GetContext();
+  // Isolate::context() may have been NULL when "script collected" event
+  // occurred.
+  if (context.is_null()) return v8::Local<v8::Context>();
   Handle<Context> global_context(context->global_context());
   return v8::Utils::ToLocal(global_context);
 }
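
The GetDebugEventContext rewrite above also changes how the "no context" case is detected: the old code compared *context against NULL, which reads the pointer stored in the handle's slot, while the new code asks the handle itself via context.is_null(). With the usual single-location handle layout the two checks differ roughly as in this simplified sketch (not the actual handles header):

    // Simplified sketch of the two checks on a handle.
    template <typename T>
    class Handle {
     public:
      bool is_null() const { return location_ == NULL; }  // handle was never assigned
      T* operator*() const { return *location_; }         // pointer stored in the slot
     private:
      T** location_;
    };

    // Old test:  *context == NULL   -> a slot exists but holds a NULL Context*.
    // New test:  context.is_null()  -> the handle was never given a slot at all.
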
@@ -535,11 +561,6 @@ void BreakLocationIterator::RinfoNext() {
 }
 
 
-bool Debug::has_break_points_ = false;
-ScriptCache* Debug::script_cache_ = NULL;
-DebugInfoListNode* Debug::debug_info_list_ = NULL;
-
-
 // Threading support.
 void Debug::ThreadInit() {
   thread_local_.break_count_ = 0;
@@ -552,16 +573,13 @@ void Debug::ThreadInit() {
   thread_local_.step_into_fp_ = 0;
   thread_local_.step_out_fp_ = 0;
   thread_local_.after_break_target_ = 0;
+  // TODO(isolates): frames_are_dropped_?
   thread_local_.debugger_entry_ = NULL;
   thread_local_.pending_interrupts_ = 0;
   thread_local_.restarter_frame_function_pointer_ = NULL;
 }
 
 
-JSCallerSavedBuffer Debug::registers_;
-Debug::ThreadLocal Debug::thread_local_;
-
-
 char* Debug::ArchiveDebug(char* storage) {
   char* to = storage;
   memcpy(to, reinterpret_cast<char*>(&thread_local_), sizeof(ThreadLocal));
@@ -584,7 +602,7 @@ char* Debug::RestoreDebug(char* storage) {
 
 
 int Debug::ArchiveSpacePerThread() {
-  return sizeof(ThreadLocal) + sizeof(registers_);
+  return sizeof(ThreadLocal) + sizeof(JSCallerSavedBuffer);
 }
 
 
@@ -614,22 +632,8 @@ Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
 const int Debug::kFrameDropperFrameSize = 4;
 
 
-
-
-
-// Default break enabled.
-bool Debug::disable_break_ = false;
-
-// Default call debugger on uncaught exception.
-bool Debug::break_on_exception_ = false;
-bool Debug::break_on_uncaught_exception_ = false;
-
-Handle<Context> Debug::debug_context_ = Handle<Context>();
-Code* Debug::debug_break_return_ = NULL;
-Code* Debug::debug_break_slot_ = NULL;
-
-
 void ScriptCache::Add(Handle<Script> script) {
+  Isolate* isolate = Isolate::Current();
   // Create an entry in the hash map for the script.
   int id = Smi::cast(script->id())->value();
   HashMap::Entry* entry =
@@ -642,15 +646,18 @@ void ScriptCache::Add(Handle<Script> script) {
   // Globalize the script object, make it weak and use the location of the
   // global handle as the value in the hash map.
   Handle<Script> script_ =
-      Handle<Script>::cast((GlobalHandles::Create(*script)));
-  GlobalHandles::MakeWeak(reinterpret_cast<Object**>(script_.location()),
-                          this, ScriptCache::HandleWeakScript);
+      Handle<Script>::cast(
+          (isolate->global_handles()->Create(*script)));
+  isolate->global_handles()->MakeWeak(
+      reinterpret_cast<Object**>(script_.location()),
+      this,
+      ScriptCache::HandleWeakScript);
   entry->value = script_.location();
 }
 
 
 Handle<FixedArray> ScriptCache::GetScripts() {
-  Handle<FixedArray> instances = Factory::NewFixedArray(occupancy());
+  Handle<FixedArray> instances = FACTORY->NewFixedArray(occupancy());
   int count = 0;
   for (HashMap::Entry* entry = Start(); entry != NULL; entry = Next(entry)) {
     ASSERT(entry->value != NULL);
@@ -664,21 +671,23 @@ Handle<FixedArray> ScriptCache::GetScripts() {
 
 
 void ScriptCache::ProcessCollectedScripts() {
+  Isolate* isolate = Isolate::Current();
   for (int i = 0; i < collected_scripts_.length(); i++) {
-    Debugger::OnScriptCollected(collected_scripts_[i]);
+    isolate->debugger()->OnScriptCollected(collected_scripts_[i]);
   }
   collected_scripts_.Clear();
 }
 
 
 void ScriptCache::Clear() {
+  Isolate* isolate = Isolate::Current();
   // Iterate the script cache to get rid of all the weak handles.
   for (HashMap::Entry* entry = Start(); entry != NULL; entry = Next(entry)) {
     ASSERT(entry != NULL);
     Object** location = reinterpret_cast<Object**>(entry->value);
     ASSERT((*location)->IsScript());
-    GlobalHandles::ClearWeakness(location);
-    GlobalHandles::Destroy(location);
+    isolate->global_handles()->ClearWeakness(location);
+    isolate->global_handles()->Destroy(location);
   }
   // Clear the content of the hash map.
   HashMap::Clear();
@@ -708,17 +717,18 @@ void Debug::Setup(bool create_heap_objects) {
   if (create_heap_objects) {
     // Get code to handle debug break on return.
     debug_break_return_ =
-        Builtins::builtin(Builtins::Return_DebugBreak);
+        Isolate::Current()->builtins()->builtin(Builtins::Return_DebugBreak);
     ASSERT(debug_break_return_->IsCode());
     // Get code to handle debug break in debug break slots.
     debug_break_slot_ =
-        Builtins::builtin(Builtins::Slot_DebugBreak);
+        Isolate::Current()->builtins()->builtin(Builtins::Slot_DebugBreak);
     ASSERT(debug_break_slot_->IsCode());
   }
 }
 
 
 void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
+  Debug* debug = Isolate::Current()->debug();
   DebugInfoListNode* node = reinterpret_cast<DebugInfoListNode*>(data);
   // We need to clear all breakpoints associated with the function to restore
   // original code and avoid patching the code twice later because
@@ -726,9 +736,9 @@ void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
   // Runtime::FindSharedFunctionInfoInScript.
   BreakLocationIterator it(node->debug_info(), ALL_BREAK_LOCATIONS);
   it.ClearAllDebugBreak();
-  RemoveDebugInfo(node->debug_info());
+  debug->RemoveDebugInfo(node->debug_info());
 #ifdef DEBUG
-  node = Debug::debug_info_list_;
+  node = debug->debug_info_list_;
   while (node != NULL) {
     ASSERT(node != reinterpret_cast<DebugInfoListNode*>(data));
     node = node->next();
@@ -738,15 +748,20 @@ void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
 
 
 DebugInfoListNode::DebugInfoListNode(DebugInfo* debug_info): next_(NULL) {
+  Isolate* isolate = Isolate::Current();
   // Globalize the request debug info object and make it weak.
-  debug_info_ = Handle<DebugInfo>::cast((GlobalHandles::Create(debug_info)));
-  GlobalHandles::MakeWeak(reinterpret_cast<Object**>(debug_info_.location()),
-                          this, Debug::HandleWeakDebugInfo);
+  debug_info_ = Handle<DebugInfo>::cast(
+      (isolate->global_handles()->Create(debug_info)));
+  isolate->global_handles()->MakeWeak(
+      reinterpret_cast<Object**>(debug_info_.location()),
+      this,
+      Debug::HandleWeakDebugInfo);
 }
 
 
 DebugInfoListNode::~DebugInfoListNode() {
-  GlobalHandles::Destroy(reinterpret_cast<Object**>(debug_info_.location()));
+  Isolate::Current()->global_handles()->Destroy(
+      reinterpret_cast<Object**>(debug_info_.location()));
 }
 
 
@@ -759,9 +774,10 @@ bool Debug::CompileDebuggerScript(int index) {
   }
 
   // Find source and name for the requested script.
-  Handle<String> source_code = Bootstrapper::NativesSourceLookup(index);
+  Handle<String> source_code =
+      Isolate::Current()->bootstrapper()->NativesSourceLookup(index);
   Vector<const char> name = Natives::GetScriptName(index);
-  Handle<String> script_name = Factory::NewStringFromAscii(name);
+  Handle<String> script_name = FACTORY->NewStringFromAscii(name);
 
   // Compile the script.
   Handle<SharedFunctionInfo> function_info;
@@ -773,16 +789,16 @@ bool Debug::CompileDebuggerScript(int index) {
 
   // Silently ignore stack overflows during compilation.
   if (function_info.is_null()) {
-    ASSERT(Top::has_pending_exception());
-    Top::clear_pending_exception();
+    ASSERT(Isolate::Current()->has_pending_exception());
+    Isolate::Current()->clear_pending_exception();
     return false;
   }
 
   // Execute the shared function in the debugger context.
-  Handle<Context> context = Top::global_context();
+  Handle<Context> context = Isolate::Current()->global_context();
   bool caught_exception = false;
   Handle<JSFunction> function =
-      Factory::NewFunctionFromSharedFunctionInfo(function_info, context);
+      FACTORY->NewFunctionFromSharedFunctionInfo(function_info, context);
   Handle<Object> result =
       Execution::TryCall(function, Handle<Object>(context->global()),
                          0, NULL, &caught_exception);
@@ -807,38 +823,43 @@ bool Debug::Load() {
   // Return if debugger is already loaded.
   if (IsLoaded()) return true;
 
+  Isolate* isolate = Isolate::Current();
+
   // Bail out if we're already in the process of compiling the native
   // JavaScript source code for the debugger.
-  if (Debugger::compiling_natives() || Debugger::is_loading_debugger())
+  if (isolate->debugger()->compiling_natives() ||
+      isolate->debugger()->is_loading_debugger())
     return false;
-  Debugger::set_loading_debugger(true);
+  isolate->debugger()->set_loading_debugger(true);
 
   // Disable breakpoints and interrupts while compiling and running the
   // debugger scripts including the context creation code.
   DisableBreak disable(true);
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate);
 
   // Create the debugger context.
   HandleScope scope;
   Handle<Context> context =
-      Bootstrapper::CreateEnvironment(Handle<Object>::null(),
-                                      v8::Handle<ObjectTemplate>(),
-                                      NULL);
+      isolate->bootstrapper()->CreateEnvironment(
+          Handle<Object>::null(),
+          v8::Handle<ObjectTemplate>(),
+          NULL);
 
   // Use the debugger context.
-  SaveContext save;
-  Top::set_context(*context);
+  SaveContext save(isolate);
+  isolate->set_context(*context);
 
   // Expose the builtins object in the debugger context.
-  Handle<String> key = Factory::LookupAsciiSymbol("builtins");
+  Handle<String> key = FACTORY->LookupAsciiSymbol("builtins");
   Handle<GlobalObject> global = Handle<GlobalObject>(context->global());
   RETURN_IF_EMPTY_HANDLE_VALUE(
+      isolate,
       SetProperty(global, key, Handle<Object>(global->builtins()),
                   NONE, kNonStrictMode),
       false);
 
   // Compile the JavaScript for the debugger in the debugger context.
-  Debugger::set_compiling_natives(true);
+  isolate->debugger()->set_compiling_natives(true);
   bool caught_exception =
       !CompileDebuggerScript(Natives::GetIndex("mirror")) ||
       !CompileDebuggerScript(Natives::GetIndex("debug"));
@@ -848,11 +869,11 @@ bool Debug::Load() {
         !CompileDebuggerScript(Natives::GetIndex("liveedit"));
   }
 
-  Debugger::set_compiling_natives(false);
+  isolate->debugger()->set_compiling_natives(false);
 
   // Make sure we mark the debugger as not loading before we might
   // return.
-  Debugger::set_loading_debugger(false);
+  isolate->debugger()->set_loading_debugger(false);
 
   // Check for caught exceptions.
   if (caught_exception) return false;
@@ -874,7 +895,8 @@ void Debug::Unload() {
   DestroyScriptCache();
 
   // Clear debugger context global handle.
-  GlobalHandles::Destroy(reinterpret_cast<Object**>(debug_context_.location()));
+  Isolate::Current()->global_handles()->Destroy(
+      reinterpret_cast<Object**>(debug_context_.location()));
   debug_context_ = Handle<Context>();
 }
 
@@ -892,30 +914,35 @@ void Debug::Iterate(ObjectVisitor* v) {
 }
 
 
-Object* Debug::Break(Arguments args) {
+// This remains a static method so that generated code can call it.
+Object* Debug::Break(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+
+  Debug* debug = isolate->debug();
+  Heap* heap = isolate->heap();
   HandleScope scope;
   ASSERT(args.length() == 0);
 
-  thread_local_.frame_drop_mode_ = FRAMES_UNTOUCHED;
+  debug->thread_local_.frame_drop_mode_ = FRAMES_UNTOUCHED;
 
   // Get the top-most JavaScript frame.
   JavaScriptFrameIterator it;
   JavaScriptFrame* frame = it.frame();
 
   // Just continue if breaks are disabled or debugger cannot be loaded.
-  if (disable_break() || !Load()) {
-    SetAfterBreakTarget(frame);
-    return Heap::undefined_value();
+  if (debug->disable_break() || !debug->Load()) {
+    debug->SetAfterBreakTarget(frame);
+    return heap->undefined_value();
   }
 
   // Enter the debugger.
   EnterDebugger debugger;
   if (debugger.FailedToEnter()) {
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
 
   // Postpone interrupt during breakpoint processing.
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate);
 
   // Get the debug info (create it if it does not exist).
   Handle<SharedFunctionInfo> shared =
@@ -928,71 +955,75 @@ Object* Debug::Break(Arguments args) {
   break_location_iterator.FindBreakLocationFromAddress(frame->pc());
 
   // Check whether step next reached a new statement.
-  if (!StepNextContinue(&break_location_iterator, frame)) {
+  if (!debug->StepNextContinue(&break_location_iterator, frame)) {
     // Decrease steps left if performing multiple steps.
-    if (thread_local_.step_count_ > 0) {
-      thread_local_.step_count_--;
+    if (debug->thread_local_.step_count_ > 0) {
+      debug->thread_local_.step_count_--;
     }
   }
 
   // If there are one or more real break points check whether any of these are
   // triggered.
-  Handle<Object> break_points_hit(Heap::undefined_value());
+  Handle<Object> break_points_hit(heap->undefined_value());
   if (break_location_iterator.HasBreakPoint()) {
     Handle<Object> break_point_objects =
         Handle<Object>(break_location_iterator.BreakPointObjects());
-    break_points_hit = CheckBreakPoints(break_point_objects);
+    break_points_hit = debug->CheckBreakPoints(break_point_objects);
   }
 
   // If step out is active skip everything until the frame where we need to step
   // out to is reached, unless a real breakpoint is hit.
-  if (Debug::StepOutActive() && frame->fp() != Debug::step_out_fp() &&
+  if (debug->StepOutActive() && frame->fp() != debug->step_out_fp() &&
       break_points_hit->IsUndefined() ) {
       // Step count should always be 0 for StepOut.
-      ASSERT(thread_local_.step_count_ == 0);
+      ASSERT(debug->thread_local_.step_count_ == 0);
   } else if (!break_points_hit->IsUndefined() ||
-             (thread_local_.last_step_action_ != StepNone &&
-              thread_local_.step_count_ == 0)) {
+             (debug->thread_local_.last_step_action_ != StepNone &&
+              debug->thread_local_.step_count_ == 0)) {
     // Notify debugger if a real break point is triggered or if performing
     // single stepping with no more steps to perform. Otherwise do another step.
 
     // Clear all current stepping setup.
-    ClearStepping();
+    debug->ClearStepping();
 
     // Notify the debug event listeners.
-    Debugger::OnDebugBreak(break_points_hit, false);
-  } else if (thread_local_.last_step_action_ != StepNone) {
+    isolate->debugger()->OnDebugBreak(break_points_hit, false);
+  } else if (debug->thread_local_.last_step_action_ != StepNone) {
     // Hold on to last step action as it is cleared by the call to
     // ClearStepping.
-    StepAction step_action = thread_local_.last_step_action_;
-    int step_count = thread_local_.step_count_;
+    StepAction step_action = debug->thread_local_.last_step_action_;
+    int step_count = debug->thread_local_.step_count_;
 
     // Clear all current stepping setup.
-    ClearStepping();
+    debug->ClearStepping();
 
     // Set up for the remaining steps.
-    PrepareStep(step_action, step_count);
+    debug->PrepareStep(step_action, step_count);
   }
 
-  if (thread_local_.frame_drop_mode_ == FRAMES_UNTOUCHED) {
-    SetAfterBreakTarget(frame);
-  } else if (thread_local_.frame_drop_mode_ == FRAME_DROPPED_IN_IC_CALL) {
+  if (debug->thread_local_.frame_drop_mode_ == FRAMES_UNTOUCHED) {
+    debug->SetAfterBreakTarget(frame);
+  } else if (debug->thread_local_.frame_drop_mode_ ==
+      FRAME_DROPPED_IN_IC_CALL) {
     // We must have been calling IC stub. Do not go there anymore.
-    Code* plain_return = Builtins::builtin(Builtins::PlainReturn_LiveEdit);
-    thread_local_.after_break_target_ = plain_return->entry();
-  } else if (thread_local_.frame_drop_mode_ ==
+    Code* plain_return =
+        Isolate::Current()->builtins()->builtin(Builtins::PlainReturn_LiveEdit);
+    debug->thread_local_.after_break_target_ = plain_return->entry();
+  } else if (debug->thread_local_.frame_drop_mode_ ==
       FRAME_DROPPED_IN_DEBUG_SLOT_CALL) {
     // Debug break slot stub does not return normally, instead it manually
     // cleans the stack and jumps. We should patch the jump address.
-    Code* plain_return = Builtins::builtin(Builtins::FrameDropper_LiveEdit);
-    thread_local_.after_break_target_ = plain_return->entry();
-  } else if (thread_local_.frame_drop_mode_ == FRAME_DROPPED_IN_DIRECT_CALL) {
+    Code* plain_return = Isolate::Current()->builtins()->builtin(
+        Builtins::FrameDropper_LiveEdit);
+    debug->thread_local_.after_break_target_ = plain_return->entry();
+  } else if (debug->thread_local_.frame_drop_mode_ ==
+      FRAME_DROPPED_IN_DIRECT_CALL) {
     // Nothing to do, after_break_target is not used here.
   } else {
     UNREACHABLE();
   }
 
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
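
Debug::Break stays a static function because generated code calls into it directly, but it now receives the isolate through the runtime calling-convention macros instead of reaching for globals. Judging purely from how they are used above, RUNTIME_CALLING_CONVENTION and RUNTIME_GET_ISOLATE presumably expand to something like the following (a guess for orientation, not the actual definitions):

    // Hypothetical expansion of the runtime calling-convention macros.
    #define RUNTIME_CALLING_CONVENTION Arguments args, Isolate* isolate
    #define RUNTIME_GET_ISOLATE ASSERT(isolate == Isolate::Current())

    // After expansion the new entry point would read roughly:
    //   Object* Debug::Break(Arguments args, Isolate* isolate) {
    //     ASSERT(isolate == Isolate::Current());
    //     Debug* debug = isolate->debug();
    //     ...
    //   }
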
 
 
@@ -1007,7 +1038,7 @@ Handle<Object> Debug::CheckBreakPoints(Handle<Object> break_point_objects) {
   ASSERT(!break_point_objects->IsUndefined());
   if (break_point_objects->IsFixedArray()) {
     Handle<FixedArray> array(FixedArray::cast(*break_point_objects));
-    break_points_hit = Factory::NewFixedArray(array->length());
+    break_points_hit = FACTORY->NewFixedArray(array->length());
     for (int i = 0; i < array->length(); i++) {
       Handle<Object> o(array->get(i));
       if (CheckBreakPoint(o)) {
@@ -1015,7 +1046,7 @@ Handle<Object> Debug::CheckBreakPoints(Handle<Object> break_point_objects) {
       }
     }
   } else {
-    break_points_hit = Factory::NewFixedArray(1);
+    break_points_hit = FACTORY->NewFixedArray(1);
     if (CheckBreakPoint(break_point_objects)) {
       break_points_hit->set(break_points_hit_count++, *break_point_objects);
     }
@@ -1023,10 +1054,10 @@ Handle<Object> Debug::CheckBreakPoints(Handle<Object> break_point_objects) {
 
   // Return undefined if no break points were triggered.
   if (break_points_hit_count == 0) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
   // Return break points hit as a JSArray.
-  Handle<JSArray> result = Factory::NewJSArrayWithElements(break_points_hit);
+  Handle<JSArray> result = FACTORY->NewJSArrayWithElements(break_points_hit);
   result->set_length(Smi::FromInt(break_points_hit_count));
   return result;
 }
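
Most of the remaining churn in debug.cc is the mechanical substitution Factory::X -> FACTORY->X and Heap::X -> HEAP->X. The uppercase names presumably stand for the corresponding objects of the isolate that is current on the calling thread, along the lines of (illustrative; the real definitions live in the isolate header):

    // Hypothetical shape of the convenience accessors used throughout this file.
    #define HEAP    (v8::internal::Isolate::Current()->heap())
    #define FACTORY (v8::internal::Isolate::Current()->factory())
    #define ZONE    (v8::internal::Isolate::Current()->zone())
    #define LOGGER  (v8::internal::Isolate::Current()->logger())

    // Example from the hunk above: building the array of triggered break points
    //   Handle<FixedArray> hits = FACTORY->NewFixedArray(array->length());
    // allocates in the current isolate's heap rather than in a process-wide one.
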
@@ -1041,14 +1072,14 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) {
 
   // Get the function IsBreakPointTriggered (defined in debug-debugger.js).
   Handle<String> is_break_point_triggered_symbol =
-      Factory::LookupAsciiSymbol("IsBreakPointTriggered");
+      FACTORY->LookupAsciiSymbol("IsBreakPointTriggered");
   Handle<JSFunction> check_break_point =
     Handle<JSFunction>(JSFunction::cast(
         debug_context()->global()->GetPropertyNoExceptionThrown(
             *is_break_point_triggered_symbol)));
 
   // Get the break id as an object.
-  Handle<Object> break_id = Factory::NewNumberFromInt(Debug::break_id());
+  Handle<Object> break_id = FACTORY->NewNumberFromInt(Debug::break_id());
 
   // Call IsBreakPointTriggered.
   bool caught_exception = false;
@@ -1058,8 +1089,7 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) {
     reinterpret_cast<Object**>(break_point_object.location())
   };
   Handle<Object> result = Execution::TryCall(check_break_point,
-                                             Top::builtins(), argc, argv,
-                                             &caught_exception);
+      Isolate::Current()->js_builtins_object(), argc, argv, &caught_exception);
 
   // If exception or non boolean result handle as not triggered
   if (caught_exception || !result->IsBoolean()) {
@@ -1067,7 +1097,8 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) {
   }
 
   // Return whether the break point is triggered.
-  return *result == Heap::true_value();
+  ASSERT(!result.is_null());
+  return (*result)->IsTrue();
 }
 
 
@@ -1366,8 +1397,9 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
       // Reverse lookup required as the minor key cannot be retrieved
       // from the code object.
       Handle<Object> obj(
-          Heap::code_stubs()->SlowReverseLookup(*call_function_stub));
-      ASSERT(*obj != Heap::undefined_value());
+          HEAP->code_stubs()->SlowReverseLookup(*call_function_stub));
+      ASSERT(!obj.is_null());
+      ASSERT(!(*obj)->IsUndefined());
       ASSERT(obj->IsSmi());
       // Get the STUB key and extract major and minor key.
       uint32_t key = Smi::cast(*obj)->value();
@@ -1485,18 +1517,22 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
         return ComputeCallDebugBreak(code->arguments_count(), code->kind());
 
       case Code::LOAD_IC:
-        return Handle<Code>(Builtins::builtin(Builtins::LoadIC_DebugBreak));
+        return Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::LoadIC_DebugBreak));
 
       case Code::STORE_IC:
-        return Handle<Code>(Builtins::builtin(Builtins::StoreIC_DebugBreak));
+        return Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::StoreIC_DebugBreak));
 
       case Code::KEYED_LOAD_IC:
         return Handle<Code>(
-            Builtins::builtin(Builtins::KeyedLoadIC_DebugBreak));
+            Isolate::Current()->builtins()->builtin(
+                Builtins::KeyedLoadIC_DebugBreak));
 
       case Code::KEYED_STORE_IC:
         return Handle<Code>(
-            Builtins::builtin(Builtins::KeyedStoreIC_DebugBreak));
+            Isolate::Current()->builtins()->builtin(
+                Builtins::KeyedStoreIC_DebugBreak));
 
       default:
         UNREACHABLE();
@@ -1504,13 +1540,15 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
   }
   if (RelocInfo::IsConstructCall(mode)) {
     Handle<Code> result =
-        Handle<Code>(Builtins::builtin(Builtins::ConstructCall_DebugBreak));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::ConstructCall_DebugBreak));
     return result;
   }
   if (code->kind() == Code::STUB) {
     ASSERT(code->major_key() == CodeStub::CallFunction);
     Handle<Code> result =
-        Handle<Code>(Builtins::builtin(Builtins::StubNoRegisters_DebugBreak));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::StubNoRegisters_DebugBreak));
     return result;
   }
 
@@ -1522,13 +1560,13 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
 // Simple function for returning the source positions for active break points.
 Handle<Object> Debug::GetSourceBreakLocations(
     Handle<SharedFunctionInfo> shared) {
-  if (!HasDebugInfo(shared)) return Handle<Object>(Heap::undefined_value());
+  if (!HasDebugInfo(shared)) return Handle<Object>(HEAP->undefined_value());
   Handle<DebugInfo> debug_info = GetDebugInfo(shared);
   if (debug_info->GetBreakPointCount() == 0) {
-    return Handle<Object>(Heap::undefined_value());
+    return Handle<Object>(HEAP->undefined_value());
   }
   Handle<FixedArray> locations =
-      Factory::NewFixedArray(debug_info->GetBreakPointCount());
+      FACTORY->NewFixedArray(debug_info->GetBreakPointCount());
   int count = 0;
   for (int i = 0; i < debug_info->break_points()->length(); i++) {
     if (!debug_info->break_points()->get(i)->IsUndefined()) {
@@ -1574,13 +1612,13 @@ void Debug::HandleStepIn(Handle<JSFunction> function,
 
   // Flood the function with one-shot break points if it is called from where
   // step into was requested.
-  if (fp == Debug::step_in_fp()) {
+  if (fp == step_in_fp()) {
     // Don't allow step into functions in the native context.
     if (!function->IsBuiltin()) {
       if (function->shared()->code() ==
-          Builtins::builtin(Builtins::FunctionApply) ||
+          Isolate::Current()->builtins()->builtin(Builtins::FunctionApply) ||
           function->shared()->code() ==
-          Builtins::builtin(Builtins::FunctionCall)) {
+          Isolate::Current()->builtins()->builtin(Builtins::FunctionCall)) {
         // Handle function.apply and function.call separately to flood the
         // function to be called and not the code for Builtins::FunctionApply or
         // Builtins::FunctionCall. The receiver of call/apply is the target
@@ -1674,7 +1712,7 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) {
   }
 
   // Create the debug info object.
-  Handle<DebugInfo> debug_info = Factory::NewDebugInfo(shared);
+  Handle<DebugInfo> debug_info = FACTORY->NewDebugInfo(shared);
 
   // Add debug info to the list.
   DebugInfoListNode* node = new DebugInfoListNode(*debug_info);
@@ -1701,7 +1739,7 @@ void Debug::RemoveDebugInfo(Handle<DebugInfo> debug_info) {
       } else {
         prev->set_next(current->next());
       }
-      current->debug_info()->shared()->set_debug_info(Heap::undefined_value());
+      current->debug_info()->shared()->set_debug_info(HEAP->undefined_value());
       delete current;
 
       // If there are no more debug info objects there are no more break
@@ -1733,7 +1771,7 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
   Handle<Code> original_code(debug_info->original_code());
 #ifdef DEBUG
   // Get the code which is actually executing.
-  Handle<Code> frame_code(frame->code());
+  Handle<Code> frame_code(frame->LookupCode(Isolate::Current()));
   ASSERT(frame_code.is_identical_to(code));
 #endif
 
@@ -1815,7 +1853,7 @@ bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
   Handle<Code> code(debug_info->code());
 #ifdef DEBUG
   // Get the code which is actually executing.
-  Handle<Code> frame_code(frame->code());
+  Handle<Code> frame_code(frame->LookupCode(Isolate::Current()));
   ASSERT(frame_code.is_identical_to(code));
 #endif
 
@@ -1846,19 +1884,19 @@ void Debug::FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
 
 
 bool Debug::IsDebugGlobal(GlobalObject* global) {
-  return IsLoaded() && global == Debug::debug_context()->global();
+  return IsLoaded() && global == debug_context()->global();
 }
 
 
 void Debug::ClearMirrorCache() {
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate_);
   HandleScope scope;
-  ASSERT(Top::context() == *Debug::debug_context());
+  ASSERT(Isolate::Current()->context() == *Debug::debug_context());
 
   // Clear the mirror cache.
   Handle<String> function_name =
-      Factory::LookupSymbol(CStrVector("ClearMirrorCache"));
-  Handle<Object> fun(Top::global()->GetPropertyNoExceptionThrown(
+      FACTORY->LookupSymbol(CStrVector("ClearMirrorCache"));
+  Handle<Object> fun(Isolate::Current()->global()->GetPropertyNoExceptionThrown(
       *function_name));
   ASSERT(fun->IsJSFunction());
   bool caught_exception;
@@ -1875,8 +1913,8 @@ void Debug::CreateScriptCache() {
   // Perform two GCs to get rid of all unreferenced scripts. The first GC gets
   // rid of all the cached script wrappers and the second gets rid of the
   // scripts which are no longer referenced.
-  Heap::CollectAllGarbage(false);
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   ASSERT(script_cache_ == NULL);
   script_cache_ = new ScriptCache();
@@ -1919,12 +1957,12 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
   // If the script cache is not active just return an empty array.
   ASSERT(script_cache_ != NULL);
   if (script_cache_ == NULL) {
-    Factory::NewFixedArray(0);
+    FACTORY->NewFixedArray(0);
   }
 
   // Perform GC to get unreferenced scripts evicted from the cache before
   // returning the content.
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   // Get the scripts from the cache.
   return script_cache_->GetScripts();
@@ -1939,51 +1977,65 @@ void Debug::AfterGarbageCollection() {
 }
 
 
-Mutex* Debugger::debugger_access_ = OS::CreateMutex();
-Handle<Object> Debugger::event_listener_ = Handle<Object>();
-Handle<Object> Debugger::event_listener_data_ = Handle<Object>();
-bool Debugger::compiling_natives_ = false;
-bool Debugger::is_loading_debugger_ = false;
-bool Debugger::never_unload_debugger_ = false;
-v8::Debug::MessageHandler2 Debugger::message_handler_ = NULL;
-bool Debugger::debugger_unload_pending_ = false;
-v8::Debug::HostDispatchHandler Debugger::host_dispatch_handler_ = NULL;
-Mutex* Debugger::dispatch_handler_access_ = OS::CreateMutex();
-v8::Debug::DebugMessageDispatchHandler
-    Debugger::debug_message_dispatch_handler_ = NULL;
-MessageDispatchHelperThread* Debugger::message_dispatch_helper_thread_ = NULL;
-int Debugger::host_dispatch_micros_ = 100 * 1000;
-DebuggerAgent* Debugger::agent_ = NULL;
-LockingCommandMessageQueue Debugger::command_queue_(kQueueInitialSize);
-Semaphore* Debugger::command_received_ = OS::CreateSemaphore(0);
-LockingCommandMessageQueue Debugger::event_command_queue_(kQueueInitialSize);
+Debugger::Debugger()
+    : debugger_access_(OS::CreateMutex()),
+      event_listener_(Handle<Object>()),
+      event_listener_data_(Handle<Object>()),
+      compiling_natives_(false),
+      is_loading_debugger_(false),
+      never_unload_debugger_(false),
+      message_handler_(NULL),
+      debugger_unload_pending_(false),
+      host_dispatch_handler_(NULL),
+      dispatch_handler_access_(OS::CreateMutex()),
+      debug_message_dispatch_handler_(NULL),
+      message_dispatch_helper_thread_(NULL),
+      host_dispatch_micros_(100 * 1000),
+      agent_(NULL),
+      command_queue_(kQueueInitialSize),
+      command_received_(OS::CreateSemaphore(0)),
+      event_command_queue_(kQueueInitialSize) {
+}
+
+
+Debugger::~Debugger() {
+  delete debugger_access_;
+  debugger_access_ = 0;
+  delete dispatch_handler_access_;
+  dispatch_handler_access_ = 0;
+  delete command_received_;
+  command_received_ = 0;
+}
 
 
 Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name,
                                       int argc, Object*** argv,
                                       bool* caught_exception) {
-  ASSERT(Top::context() == *Debug::debug_context());
+  ASSERT(Isolate::Current() == isolate_);
+  ASSERT(isolate_->context() == *isolate_->debug()->debug_context());
 
   // Create the execution state object.
-  Handle<String> constructor_str = Factory::LookupSymbol(constructor_name);
-  Handle<Object> constructor(Top::global()->GetPropertyNoExceptionThrown(
-      *constructor_str));
+  Handle<String> constructor_str = FACTORY->LookupSymbol(constructor_name);
+  Handle<Object> constructor(
+      isolate_->global()->GetPropertyNoExceptionThrown(*constructor_str));
   ASSERT(constructor->IsJSFunction());
   if (!constructor->IsJSFunction()) {
     *caught_exception = true;
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
   Handle<Object> js_object = Execution::TryCall(
       Handle<JSFunction>::cast(constructor),
-      Handle<JSObject>(Debug::debug_context()->global()), argc, argv,
-      caught_exception);
+      Handle<JSObject>(isolate_->debug()->debug_context()->global()),
+      argc, argv, caught_exception);
   return js_object;
 }
 
 
 Handle<Object> Debugger::MakeExecutionState(bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the execution state object.
-  Handle<Object> break_id = Factory::NewNumberFromInt(Debug::break_id());
+  Handle<Object> break_id = FACTORY->NewNumberFromInt(
+      isolate_->debug()->break_id());
   const int argc = 1;
   Object** argv[argc] = { break_id.location() };
   return MakeJSObject(CStrVector("MakeExecutionState"),
@@ -1994,6 +2046,7 @@ Handle<Object> Debugger::MakeExecutionState(bool* caught_exception) {
 Handle<Object> Debugger::MakeBreakEvent(Handle<Object> exec_state,
                                         Handle<Object> break_points_hit,
                                         bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the new break event object.
   const int argc = 2;
   Object** argv[argc] = { exec_state.location(),
@@ -2009,12 +2062,13 @@ Handle<Object> Debugger::MakeExceptionEvent(Handle<Object> exec_state,
                                             Handle<Object> exception,
                                             bool uncaught,
                                             bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the new exception event object.
   const int argc = 3;
   Object** argv[argc] = { exec_state.location(),
                           exception.location(),
-                          uncaught ? Factory::true_value().location() :
-                                     Factory::false_value().location()};
+                          uncaught ? FACTORY->true_value().location() :
+                                     FACTORY->false_value().location()};
   return MakeJSObject(CStrVector("MakeExceptionEvent"),
                       argc, argv, caught_exception);
 }
@@ -2022,6 +2076,7 @@ Handle<Object> Debugger::MakeExceptionEvent(Handle<Object> exec_state,
 
 Handle<Object> Debugger::MakeNewFunctionEvent(Handle<Object> function,
                                               bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the new function event object.
   const int argc = 1;
   Object** argv[argc] = { function.location() };
@@ -2033,14 +2088,15 @@ Handle<Object> Debugger::MakeNewFunctionEvent(Handle<Object> function,
 Handle<Object> Debugger::MakeCompileEvent(Handle<Script> script,
                                           bool before,
                                           bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the compile event object.
   Handle<Object> exec_state = MakeExecutionState(caught_exception);
   Handle<Object> script_wrapper = GetScriptWrapper(script);
   const int argc = 3;
   Object** argv[argc] = { exec_state.location(),
                           script_wrapper.location(),
-                          before ? Factory::true_value().location() :
-                                   Factory::false_value().location() };
+                          before ? FACTORY->true_value().location() :
+                                   FACTORY->false_value().location() };
 
   return MakeJSObject(CStrVector("MakeCompileEvent"),
                       argc,
@@ -2051,6 +2107,7 @@ Handle<Object> Debugger::MakeCompileEvent(Handle<Script> script,
 
 Handle<Object> Debugger::MakeScriptCollectedEvent(int id,
                                                   bool* caught_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // Create the script collected event object.
   Handle<Object> exec_state = MakeExecutionState(caught_exception);
   Handle<Object> id_object = Handle<Smi>(Smi::FromInt(id));
@@ -2065,20 +2122,21 @@ Handle<Object> Debugger::MakeScriptCollectedEvent(int id,
 
 
 void Debugger::OnException(Handle<Object> exception, bool uncaught) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Bail out based on state or if there is no listener for this event
-  if (Debug::InDebugger()) return;
+  if (isolate_->debug()->InDebugger()) return;
   if (!Debugger::EventActive(v8::Exception)) return;
 
   // Bail out if exception breaks are not active
   if (uncaught) {
     // Uncaught exceptions are reported by either flags.
-    if (!(Debug::break_on_uncaught_exception() ||
-          Debug::break_on_exception())) return;
+    if (!(isolate_->debug()->break_on_uncaught_exception() ||
+          isolate_->debug()->break_on_exception())) return;
   } else {
     // Caught exceptions are only reported if break-on-exception is active.
-    if (!Debug::break_on_exception()) return;
+    if (!isolate_->debug()->break_on_exception()) return;
   }
 
   // Enter the debugger.
@@ -2086,7 +2144,7 @@ void Debugger::OnException(Handle<Object> exception, bool uncaught) {
   if (debugger.FailedToEnter()) return;
 
   // Clear all current stepping setup.
-  Debug::ClearStepping();
+  isolate_->debug()->ClearStepping();
   // Create the event data object.
   bool caught_exception = false;
   Handle<Object> exec_state = MakeExecutionState(&caught_exception);
@@ -2108,16 +2166,17 @@ void Debugger::OnException(Handle<Object> exception, bool uncaught) {
 
 void Debugger::OnDebugBreak(Handle<Object> break_points_hit,
                             bool auto_continue) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Debugger has already been entered by caller.
-  ASSERT(Top::context() == *Debug::debug_context());
+  ASSERT(isolate_->context() == *isolate_->debug()->debug_context());
 
   // Bail out if there is no listener for this event
   if (!Debugger::EventActive(v8::Break)) return;
 
   // Debugger must be entered in advance.
-  ASSERT(Top::context() == *Debug::debug_context());
+  ASSERT(Isolate::Current()->context() == *isolate_->debug()->debug_context());
 
   // Create the event data object.
   bool caught_exception = false;
@@ -2140,10 +2199,11 @@ void Debugger::OnDebugBreak(Handle<Object> break_points_hit,
 
 
 void Debugger::OnBeforeCompile(Handle<Script> script) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Bail out based on state or if there is no listener for this event
-  if (Debug::InDebugger()) return;
+  if (isolate_->debug()->InDebugger()) return;
   if (compiling_natives()) return;
   if (!EventActive(v8::BeforeCompile)) return;
 
@@ -2169,10 +2229,11 @@ void Debugger::OnBeforeCompile(Handle<Script> script) {
 // Handle debugger actions when a new script is compiled.
 void Debugger::OnAfterCompile(Handle<Script> script,
                               AfterCompileFlags after_compile_flags) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Add the newly compiled script to the script cache.
-  Debug::AddScriptToScriptCache(script);
+  isolate_->debug()->AddScriptToScriptCache(script);
 
   // No more to do if not debugging.
   if (!IsDebuggerActive()) return;
@@ -2181,7 +2242,7 @@ void Debugger::OnAfterCompile(Handle<Script> script,
   if (compiling_natives()) return;
 
   // Store whether in debugger before entering debugger.
-  bool in_debugger = Debug::InDebugger();
+  bool in_debugger = isolate_->debug()->InDebugger();
 
   // Enter the debugger.
   EnterDebugger debugger;
@@ -2192,9 +2253,9 @@ void Debugger::OnAfterCompile(Handle<Script> script,
 
   // Get the function UpdateScriptBreakPoints (defined in debug-debugger.js).
   Handle<String> update_script_break_points_symbol =
-      Factory::LookupAsciiSymbol("UpdateScriptBreakPoints");
+      FACTORY->LookupAsciiSymbol("UpdateScriptBreakPoints");
   Handle<Object> update_script_break_points =
-      Handle<Object>(Debug::debug_context()->global()->
+      Handle<Object>(isolate_->debug()->debug_context()->global()->
           GetPropertyNoExceptionThrown(*update_script_break_points_symbol));
   if (!update_script_break_points->IsJSFunction()) {
     return;
@@ -2211,7 +2272,7 @@ void Debugger::OnAfterCompile(Handle<Script> script,
   Object** argv[argc] = { reinterpret_cast<Object**>(wrapper.location()) };
   Handle<Object> result = Execution::TryCall(
       Handle<JSFunction>::cast(update_script_break_points),
-      Top::builtins(), argc, argv,
+      Isolate::Current()->js_builtins_object(), argc, argv,
       &caught_exception);
   if (caught_exception) {
     return;
@@ -2236,6 +2297,7 @@ void Debugger::OnAfterCompile(Handle<Script> script,
 
 
 void Debugger::OnScriptCollected(int id) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // No more to do if not debugging.
@@ -2265,11 +2327,12 @@ void Debugger::OnScriptCollected(int id) {
 void Debugger::ProcessDebugEvent(v8::DebugEvent event,
                                  Handle<JSObject> event_data,
                                  bool auto_continue) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Clear any pending debug break if this is a real break.
   if (!auto_continue) {
-    Debug::clear_interrupt_pending(DEBUGBREAK);
+    isolate_->debug()->clear_interrupt_pending(DEBUGBREAK);
   }
 
   // Create the execution state.
@@ -2340,6 +2403,7 @@ void Debugger::CallJSEventCallback(v8::DebugEvent event,
                                    Handle<Object> exec_state,
                                    Handle<Object> event_data) {
   ASSERT(event_listener_->IsJSFunction());
+  ASSERT(Isolate::Current() == isolate_);
   Handle<JSFunction> fun(Handle<JSFunction>::cast(event_listener_));
 
   // Invoke the JavaScript debug event listener.
@@ -2349,25 +2413,28 @@ void Debugger::CallJSEventCallback(v8::DebugEvent event,
                           Handle<Object>::cast(event_data).location(),
                           event_listener_data_.location() };
   bool caught_exception = false;
-  Execution::TryCall(fun, Top::global(), argc, argv, &caught_exception);
+  Execution::TryCall(fun, isolate_->global(), argc, argv, &caught_exception);
   // Silently ignore exceptions from debug event listeners.
 }
 
 
 Handle<Context> Debugger::GetDebugContext() {
-    never_unload_debugger_ = true;
-    EnterDebugger debugger;
-    return Debug::debug_context();
+  ASSERT(Isolate::Current() == isolate_);
+  never_unload_debugger_ = true;
+  EnterDebugger debugger;
+  return isolate_->debug()->debug_context();
 }
 
 
 void Debugger::UnloadDebugger() {
+  ASSERT(Isolate::Current() == isolate_);
+
   // Make sure that there are no breakpoints left.
-  Debug::ClearAllBreakPoints();
+  isolate_->debug()->ClearAllBreakPoints();
 
   // Unload the debugger if feasible.
   if (!never_unload_debugger_) {
-    Debug::Unload();
+    isolate_->debug()->Unload();
   }
 
   // Clear the flag indicating that the debugger should be unloaded.
@@ -2379,9 +2446,10 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
                                     Handle<JSObject> exec_state,
                                     Handle<JSObject> event_data,
                                     bool auto_continue) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
-  if (!Debug::Load()) return;
+  if (!isolate_->debug()->Load()) return;
 
   // Process the individual events.
   bool sendEventMessage = false;
@@ -2410,8 +2478,8 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
   // The debug command interrupt flag might have been set when the command was
   // added. It should be enough to clear the flag only once while we are in the
   // debugger.
-  ASSERT(Debug::InDebugger());
-  StackGuard::Continue(DEBUGCOMMAND);
+  ASSERT(isolate_->debug()->InDebugger());
+  isolate_->stack_guard()->Continue(DEBUGCOMMAND);
 
   // Notify the debugger that a debug event has occurred unless auto continue is
   // active in which case no event is sent.
@@ -2474,7 +2542,7 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
 
     // Get the command from the queue.
     CommandMessage command = command_queue_.Get();
-    Logger::DebugTag("Got request from command queue, in interactive loop.");
+    LOGGER->DebugTag("Got request from command queue, in interactive loop.");
     if (!Debugger::IsDebuggerActive()) {
       // Delete command text and user data.
       command.Dispose();
@@ -2548,17 +2616,18 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
 
 void Debugger::SetEventListener(Handle<Object> callback,
                                 Handle<Object> data) {
+  ASSERT(Isolate::Current() == isolate_);
   HandleScope scope;
 
   // Clear the global handles for the event listener and the event listener data
   // object.
   if (!event_listener_.is_null()) {
-    GlobalHandles::Destroy(
+    isolate_->global_handles()->Destroy(
         reinterpret_cast<Object**>(event_listener_.location()));
     event_listener_ = Handle<Object>();
   }
   if (!event_listener_data_.is_null()) {
-    GlobalHandles::Destroy(
+    isolate_->global_handles()->Destroy(
         reinterpret_cast<Object**>(event_listener_data_.location()));
     event_listener_data_ = Handle<Object>();
   }
@@ -2566,11 +2635,13 @@ void Debugger::SetEventListener(Handle<Object> callback,
   // If there is a new debug event listener register it together with its data
   // object.
   if (!callback->IsUndefined() && !callback->IsNull()) {
-    event_listener_ = Handle<Object>::cast(GlobalHandles::Create(*callback));
+    event_listener_ = Handle<Object>::cast(
+        isolate_->global_handles()->Create(*callback));
     if (data.is_null()) {
-      data = Factory::undefined_value();
+      data = FACTORY->undefined_value();
     }
-    event_listener_data_ = Handle<Object>::cast(GlobalHandles::Create(*data));
+    event_listener_data_ = Handle<Object>::cast(
+        isolate_->global_handles()->Create(*data));
   }
 
   ListenersChanged();
@@ -2578,6 +2649,7 @@ void Debugger::SetEventListener(Handle<Object> callback,
 
 
 void Debugger::SetMessageHandler(v8::Debug::MessageHandler2 handler) {
+  ASSERT(Isolate::Current() == isolate_);
   ScopedLock with(debugger_access_);
 
   message_handler_ = handler;
@@ -2585,7 +2657,7 @@ void Debugger::SetMessageHandler(v8::Debug::MessageHandler2 handler) {
   if (handler == NULL) {
     // Send an empty command to the debugger if in a break to make JavaScript
     // run again if the debugger is closed.
-    if (Debug::InDebugger()) {
+    if (isolate_->debug()->InDebugger()) {
       ProcessCommand(Vector<const uint16_t>::empty());
     }
   }
@@ -2593,12 +2665,13 @@ void Debugger::SetMessageHandler(v8::Debug::MessageHandler2 handler) {
 
 
 void Debugger::ListenersChanged() {
+  Isolate* isolate = Isolate::Current();
   if (IsDebuggerActive()) {
     // Disable the compilation cache when the debugger is active.
-    CompilationCache::Disable();
+    isolate->compilation_cache()->Disable();
     debugger_unload_pending_ = false;
   } else {
-    CompilationCache::Enable();
+    isolate->compilation_cache()->Enable();
     // Unload the debugger if event listener and message handler cleared.
     // Schedule this for later, because we may be in non-V8 thread.
     debugger_unload_pending_ = true;
@@ -2608,6 +2681,7 @@ void Debugger::ListenersChanged() {
 
 void Debugger::SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
                                       int period) {
+  ASSERT(Isolate::Current() == isolate_);
   host_dispatch_handler_ = handler;
   host_dispatch_micros_ = period * 1000;
 }
@@ -2615,11 +2689,12 @@ void Debugger::SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
 
 void Debugger::SetDebugMessageDispatchHandler(
     v8::Debug::DebugMessageDispatchHandler handler, bool provide_locker) {
+  ASSERT(Isolate::Current() == isolate_);
   ScopedLock with(dispatch_handler_access_);
   debug_message_dispatch_handler_ = handler;
 
   if (provide_locker && message_dispatch_helper_thread_ == NULL) {
-    message_dispatch_helper_thread_ = new MessageDispatchHelperThread;
+    message_dispatch_helper_thread_ = new MessageDispatchHelperThread(isolate_);
     message_dispatch_helper_thread_->Start();
   }
 }
@@ -2628,6 +2703,7 @@ void Debugger::SetDebugMessageDispatchHandler(
 // Calls the registered debug message handler. This callback is part of the
 // public API.
 void Debugger::InvokeMessageHandler(MessageImpl message) {
+  ASSERT(Isolate::Current() == isolate_);
   ScopedLock with(debugger_access_);
 
   if (message_handler_ != NULL) {
@@ -2642,18 +2718,19 @@ void Debugger::InvokeMessageHandler(MessageImpl message) {
 // by the API client thread.
 void Debugger::ProcessCommand(Vector<const uint16_t> command,
                               v8::Debug::ClientData* client_data) {
+  ASSERT(Isolate::Current() == isolate_);
   // Need to cast away const.
   CommandMessage message = CommandMessage::New(
       Vector<uint16_t>(const_cast<uint16_t*>(command.start()),
                        command.length()),
       client_data);
-  Logger::DebugTag("Put command on command_queue.");
+  LOGGER->DebugTag("Put command on command_queue.");
   command_queue_.Put(message);
   command_received_->Signal();
 
   // Set the debug command break flag to have the command processed.
-  if (!Debug::InDebugger()) {
-    StackGuard::DebugCommand();
+  if (!isolate_->debug()->InDebugger()) {
+    isolate_->stack_guard()->DebugCommand();
   }
 
   MessageDispatchHelperThread* dispatch_thread;
@@ -2671,22 +2748,25 @@ void Debugger::ProcessCommand(Vector<const uint16_t> command,
 
 
 bool Debugger::HasCommands() {
+  ASSERT(Isolate::Current() == isolate_);
   return !command_queue_.IsEmpty();
 }
 
 
 void Debugger::EnqueueDebugCommand(v8::Debug::ClientData* client_data) {
+  ASSERT(Isolate::Current() == isolate_);
   CommandMessage message = CommandMessage::New(Vector<uint16_t>(), client_data);
   event_command_queue_.Put(message);
 
   // Set the debug command break flag to have the command processed.
-  if (!Debug::InDebugger()) {
-    StackGuard::DebugCommand();
+  if (!isolate_->debug()->InDebugger()) {
+    isolate_->stack_guard()->DebugCommand();
   }
 }
 
 
 bool Debugger::IsDebuggerActive() {
+  ASSERT(Isolate::Current() == isolate_);
   ScopedLock with(debugger_access_);
 
   return message_handler_ != NULL || !event_listener_.is_null();
@@ -2696,27 +2776,28 @@ bool Debugger::IsDebuggerActive() {
 Handle<Object> Debugger::Call(Handle<JSFunction> fun,
                               Handle<Object> data,
                               bool* pending_exception) {
+  ASSERT(Isolate::Current() == isolate_);
   // When calling functions in the debugger, prevent it from being unloaded.
   Debugger::never_unload_debugger_ = true;
 
   // Enter the debugger.
   EnterDebugger debugger;
   if (debugger.FailedToEnter()) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
 
   // Create the execution state.
   bool caught_exception = false;
   Handle<Object> exec_state = MakeExecutionState(&caught_exception);
   if (caught_exception) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
 
   static const int kArgc = 2;
   Object** argv[kArgc] = { exec_state.location(), data.location() };
   Handle<Object> result = Execution::Call(
       fun,
-      Handle<Object>(Debug::debug_context_->global_proxy()),
+      Handle<Object>(isolate_->debug()->debug_context_->global_proxy()),
       kArgc,
       argv,
       pending_exception);
@@ -2731,6 +2812,7 @@ static void StubMessageHandler2(const v8::Debug::Message& message) {
 
 bool Debugger::StartAgent(const char* name, int port,
                           bool wait_for_connection) {
+  ASSERT(Isolate::Current() == isolate_);
   if (wait_for_connection) {
     // Suspend V8 if it is already running or set V8 to suspend whenever
     // it starts.
@@ -2744,7 +2826,7 @@ bool Debugger::StartAgent(const char* name, int port,
 
   if (Socket::Setup()) {
     if (agent_ == NULL) {
-      agent_ = new DebuggerAgent(name, port);
+      agent_ = new DebuggerAgent(isolate_, name, port);
       agent_->Start();
     }
     return true;
@@ -2755,6 +2837,7 @@ bool Debugger::StartAgent(const char* name, int port,
 
 
 void Debugger::StopAgent() {
+  ASSERT(Isolate::Current() == isolate_);
   if (agent_ != NULL) {
     agent_->Shutdown();
     agent_->Join();
@@ -2765,12 +2848,14 @@ void Debugger::StopAgent() {
 
 
 void Debugger::WaitForAgent() {
+  ASSERT(Isolate::Current() == isolate_);
   if (agent_ != NULL)
     agent_->WaitUntilListening();
 }
 
 
 void Debugger::CallMessageDispatchHandler() {
+  ASSERT(Isolate::Current() == isolate_);
   v8::Debug::DebugMessageDispatchHandler handler;
   {
     ScopedLock with(dispatch_handler_access_);
@@ -2874,10 +2959,11 @@ v8::Handle<v8::String> MessageImpl::GetJSON() const {
 
 
 v8::Handle<v8::Context> MessageImpl::GetEventContext() const {
-  v8::Handle<v8::Context> context = GetDebugEventContext();
-  // Top::context() may be NULL when "script collected" event occures.
+  Isolate* isolate = Isolate::Current();
+  v8::Handle<v8::Context> context = GetDebugEventContext(isolate);
+  // Isolate::context() may be NULL when "script collected" event occurs.
   ASSERT(!context.IsEmpty() || event_ == v8::ScriptCollected);
-  return GetDebugEventContext();
+  return GetDebugEventContext(isolate);
 }
 
 
@@ -2914,7 +3000,7 @@ v8::Handle<v8::Object> EventDetailsImpl::GetEventData() const {
 
 
 v8::Handle<v8::Context> EventDetailsImpl::GetEventContext() const {
-  return GetDebugEventContext();
+  return GetDebugEventContext(Isolate::Current());
 }
 
 
@@ -3023,7 +3109,7 @@ bool LockingCommandMessageQueue::IsEmpty() const {
 CommandMessage LockingCommandMessageQueue::Get() {
   ScopedLock sl(lock_);
   CommandMessage result = queue_.Get();
-  Logger::DebugEvent("Get", result.text());
+  LOGGER->DebugEvent("Get", result.text());
   return result;
 }
 
@@ -3031,7 +3117,7 @@ CommandMessage LockingCommandMessageQueue::Get() {
 void LockingCommandMessageQueue::Put(const CommandMessage& message) {
   ScopedLock sl(lock_);
   queue_.Put(message);
-  Logger::DebugEvent("Put", message.text());
+  LOGGER->DebugEvent("Put", message.text());
 }
 
 
@@ -3041,8 +3127,8 @@ void LockingCommandMessageQueue::Clear() {
 }
 
 
-MessageDispatchHelperThread::MessageDispatchHelperThread()
-    : Thread("v8:MsgDispHelpr"),
+MessageDispatchHelperThread::MessageDispatchHelperThread(Isolate* isolate)
+    : Thread(isolate, "v8:MsgDispHelpr"),
       sem_(OS::CreateSemaphore(0)), mutex_(OS::CreateMutex()),
       already_signalled_(false) {
 }
@@ -3075,7 +3161,7 @@ void MessageDispatchHelperThread::Run() {
     }
     {
       Locker locker;
-      Debugger::CallMessageDispatchHandler();
+      Isolate::Current()->debugger()->CallMessageDispatchHandler();
     }
   }
 }
index 85c4d534ff797899b5936f0860656848ea3b4fd6..d5125956a0e96a03294d60cee1bd144121cbd77e 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_DEBUG_H_
 #define V8_DEBUG_H_
 
+#include "arguments.h"
 #include "assembler.h"
 #include "debug-agent.h"
 #include "execution.h"
@@ -210,7 +211,6 @@ class DebugInfoListNode {
   DebugInfoListNode* next_;
 };
 
-
 // This class contains the debugger support. The main purpose is to handle
 // setting break points in the code.
 //
@@ -220,33 +220,33 @@ class DebugInfoListNode {
 // DebugInfo.
 class Debug {
  public:
-  static void Setup(bool create_heap_objects);
-  static bool Load();
-  static void Unload();
-  static bool IsLoaded() { return !debug_context_.is_null(); }
-  static bool InDebugger() { return thread_local_.debugger_entry_ != NULL; }
-  static void PreemptionWhileInDebugger();
-  static void Iterate(ObjectVisitor* v);
-
-  static Object* Break(Arguments args);
-  static void SetBreakPoint(Handle<SharedFunctionInfo> shared,
-                            Handle<Object> break_point_object,
-                            int* source_position);
-  static void ClearBreakPoint(Handle<Object> break_point_object);
-  static void ClearAllBreakPoints();
-  static void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
-  static void FloodHandlerWithOneShot();
-  static void ChangeBreakOnException(ExceptionBreakType type, bool enable);
-  static bool IsBreakOnException(ExceptionBreakType type);
-  static void PrepareStep(StepAction step_action, int step_count);
-  static void ClearStepping();
-  static bool StepNextContinue(BreakLocationIterator* break_location_iterator,
-                               JavaScriptFrame* frame);
+  void Setup(bool create_heap_objects);
+  bool Load();
+  void Unload();
+  bool IsLoaded() { return !debug_context_.is_null(); }
+  bool InDebugger() { return thread_local_.debugger_entry_ != NULL; }
+  void PreemptionWhileInDebugger();
+  void Iterate(ObjectVisitor* v);
+
+  static Object* Break(RUNTIME_CALLING_CONVENTION);
+  void SetBreakPoint(Handle<SharedFunctionInfo> shared,
+                     Handle<Object> break_point_object,
+                     int* source_position);
+  void ClearBreakPoint(Handle<Object> break_point_object);
+  void ClearAllBreakPoints();
+  void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
+  void FloodHandlerWithOneShot();
+  void ChangeBreakOnException(ExceptionBreakType type, bool enable);
+  bool IsBreakOnException(ExceptionBreakType type);
+  void PrepareStep(StepAction step_action, int step_count);
+  void ClearStepping();
+  bool StepNextContinue(BreakLocationIterator* break_location_iterator,
+                        JavaScriptFrame* frame);
   static Handle<DebugInfo> GetDebugInfo(Handle<SharedFunctionInfo> shared);
   static bool HasDebugInfo(Handle<SharedFunctionInfo> shared);
 
   // Returns whether the operation succeeded.
-  static bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared);
+  bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared);
 
   // Returns true if the current stub call is patched to call the debugger.
   static bool IsDebugBreak(Address addr);
@@ -266,66 +266,66 @@ class Debug {
       Handle<SharedFunctionInfo> shared);
 
   // Getter for the debug_context.
-  inline static Handle<Context> debug_context() { return debug_context_; }
+  inline Handle<Context> debug_context() { return debug_context_; }
 
   // Check whether a global object is the debug global object.
-  static bool IsDebugGlobal(GlobalObject* global);
+  bool IsDebugGlobal(GlobalObject* global);
 
   // Check whether this frame is just about to return.
-  static bool IsBreakAtReturn(JavaScriptFrame* frame);
+  bool IsBreakAtReturn(JavaScriptFrame* frame);
 
   // Fast check to see if any break points are active.
-  inline static bool has_break_points() { return has_break_points_; }
+  inline bool has_break_points() { return has_break_points_; }
 
-  static void NewBreak(StackFrame::Id break_frame_id);
-  static void SetBreak(StackFrame::Id break_frame_id, int break_id);
-  static StackFrame::Id break_frame_id() {
+  void NewBreak(StackFrame::Id break_frame_id);
+  void SetBreak(StackFrame::Id break_frame_id, int break_id);
+  StackFrame::Id break_frame_id() {
     return thread_local_.break_frame_id_;
   }
-  static int break_id() { return thread_local_.break_id_; }
+  int break_id() { return thread_local_.break_id_; }
 
-  static bool StepInActive() { return thread_local_.step_into_fp_ != 0; }
-  static void HandleStepIn(Handle<JSFunction> function,
-                           Handle<Object> holder,
-                           Address fp,
-                           bool is_constructor);
-  static Address step_in_fp() { return thread_local_.step_into_fp_; }
-  static Address* step_in_fp_addr() { return &thread_local_.step_into_fp_; }
+  bool StepInActive() { return thread_local_.step_into_fp_ != 0; }
+  void HandleStepIn(Handle<JSFunction> function,
+                    Handle<Object> holder,
+                    Address fp,
+                    bool is_constructor);
+  Address step_in_fp() { return thread_local_.step_into_fp_; }
+  Address* step_in_fp_addr() { return &thread_local_.step_into_fp_; }
 
-  static bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }
-  static Address step_out_fp() { return thread_local_.step_out_fp_; }
+  bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }
+  Address step_out_fp() { return thread_local_.step_out_fp_; }
 
-  static EnterDebugger* debugger_entry() {
+  EnterDebugger* debugger_entry() {
     return thread_local_.debugger_entry_;
   }
-  static void set_debugger_entry(EnterDebugger* entry) {
+  void set_debugger_entry(EnterDebugger* entry) {
     thread_local_.debugger_entry_ = entry;
   }
 
   // Check whether any of the specified interrupts are pending.
-  static bool is_interrupt_pending(InterruptFlag what) {
+  bool is_interrupt_pending(InterruptFlag what) {
     return (thread_local_.pending_interrupts_ & what) != 0;
   }
 
   // Set specified interrupts as pending.
-  static void set_interrupts_pending(InterruptFlag what) {
+  void set_interrupts_pending(InterruptFlag what) {
     thread_local_.pending_interrupts_ |= what;
   }
 
   // Clear specified interrupts from pending.
-  static void clear_interrupt_pending(InterruptFlag what) {
+  void clear_interrupt_pending(InterruptFlag what) {
     thread_local_.pending_interrupts_ &= ~static_cast<int>(what);
   }
 
   // Getter and setter for the disable break state.
-  static bool disable_break() { return disable_break_; }
-  static void set_disable_break(bool disable_break) {
+  bool disable_break() { return disable_break_; }
+  void set_disable_break(bool disable_break) {
     disable_break_ = disable_break;
   }
 
   // Getters for the current exception break state.
-  static bool break_on_exception() { return break_on_exception_; }
-  static bool break_on_uncaught_exception() {
+  bool break_on_exception() { return break_on_exception_; }
+  bool break_on_uncaught_exception() {
     return break_on_uncaught_exception_;
   }
 
@@ -337,34 +337,35 @@ class Debug {
   };
 
   // Support for setting the address to jump to when returning from break point.
-  static Address* after_break_target_address() {
+  Address* after_break_target_address() {
     return reinterpret_cast<Address*>(&thread_local_.after_break_target_);
   }
-  static Address* restarter_frame_function_pointer_address() {
+  Address* restarter_frame_function_pointer_address() {
     Object*** address = &thread_local_.restarter_frame_function_pointer_;
     return reinterpret_cast<Address*>(address);
   }
 
   // Support for saving/restoring registers when handling debug break calls.
-  static Object** register_address(int r) {
+  Object** register_address(int r) {
     return &registers_[r];
   }
 
   // Access to the debug break on return code.
-  static Code* debug_break_return() { return debug_break_return_; }
-  static Code** debug_break_return_address() {
+  Code* debug_break_return() { return debug_break_return_; }
+  Code** debug_break_return_address() {
     return &debug_break_return_;
   }
 
   // Access to the debug break in debug break slot code.
-  static Code* debug_break_slot() { return debug_break_slot_; }
-  static Code** debug_break_slot_address() {
+  Code* debug_break_slot() { return debug_break_slot_; }
+  Code** debug_break_slot_address() {
     return &debug_break_slot_;
   }
 
   static const int kEstimatedNofDebugInfoEntries = 16;
   static const int kEstimatedNofBreakPointsInFunction = 16;
 
+  // Passed to MakeWeak.
   static void HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data);
 
   friend class Debugger;
@@ -372,22 +373,22 @@ class Debug {
   friend void CheckDebuggerUnloaded(bool check_functions);  // In test-debug.cc
 
   // Threading support.
-  static char* ArchiveDebug(char* to);
-  static char* RestoreDebug(char* from);
+  char* ArchiveDebug(char* to);
+  char* RestoreDebug(char* from);
   static int ArchiveSpacePerThread();
-  static void FreeThreadResources() { }
+  void FreeThreadResources() { }
 
   // Mirror cache handling.
-  static void ClearMirrorCache();
+  void ClearMirrorCache();
 
   // Script cache handling.
-  static void CreateScriptCache();
-  static void DestroyScriptCache();
-  static void AddScriptToScriptCache(Handle<Script> script);
-  static Handle<FixedArray> GetLoadedScripts();
+  void CreateScriptCache();
+  void DestroyScriptCache();
+  void AddScriptToScriptCache(Handle<Script> script);
+  Handle<FixedArray> GetLoadedScripts();
 
   // Garbage collection notifications.
-  static void AfterGarbageCollection();
+  void AfterGarbageCollection();
 
   // Code generator routines.
   static void GenerateSlot(MacroAssembler* masm);
@@ -424,7 +425,7 @@ class Debug {
     FRAME_DROPPED_IN_DIRECT_CALL
   };
 
-  static void FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
+  void FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
                                     FrameDropMode mode,
                                     Object** restarter_frame_function_pointer);
 
@@ -445,35 +446,38 @@ class Debug {
   static const bool kFrameDropperSupported;
 
  private:
+  explicit Debug(Isolate* isolate);
+  ~Debug();
+
   static bool CompileDebuggerScript(int index);
-  static void ClearOneShot();
-  static void ActivateStepIn(StackFrame* frame);
-  static void ClearStepIn();
-  static void ActivateStepOut(StackFrame* frame);
-  static void ClearStepOut();
-  static void ClearStepNext();
+  void ClearOneShot();
+  void ActivateStepIn(StackFrame* frame);
+  void ClearStepIn();
+  void ActivateStepOut(StackFrame* frame);
+  void ClearStepOut();
+  void ClearStepNext();
   // Returns whether the compile succeeded.
-  static void RemoveDebugInfo(Handle<DebugInfo> debug_info);
-  static void SetAfterBreakTarget(JavaScriptFrame* frame);
-  static Handle<Object> CheckBreakPoints(Handle<Object> break_point);
-  static bool CheckBreakPoint(Handle<Object> break_point_object);
+  void RemoveDebugInfo(Handle<DebugInfo> debug_info);
+  void SetAfterBreakTarget(JavaScriptFrame* frame);
+  Handle<Object> CheckBreakPoints(Handle<Object> break_point);
+  bool CheckBreakPoint(Handle<Object> break_point_object);
 
   // Global handle to debug context where all the debugger JavaScript code is
   // loaded.
-  static Handle<Context> debug_context_;
+  Handle<Context> debug_context_;
 
   // Boolean state indicating whether any break points are set.
-  static bool has_break_points_;
+  bool has_break_points_;
 
   // Cache of all scripts in the heap.
-  static ScriptCache* script_cache_;
+  ScriptCache* script_cache_;
 
   // List of active debug info objects.
-  static DebugInfoListNode* debug_info_list_;
+  DebugInfoListNode* debug_info_list_;
 
-  static bool disable_break_;
-  static bool break_on_exception_;
-  static bool break_on_uncaught_exception_;
+  bool disable_break_;
+  bool break_on_exception_;
+  bool break_on_uncaught_exception_;
 
   // Per-thread data.
   class ThreadLocal {
@@ -526,15 +530,19 @@ class Debug {
   };
 
   // Storage location for registers when handling debug break calls
-  static JSCallerSavedBuffer registers_;
-  static ThreadLocal thread_local_;
-  static void ThreadInit();
+  JSCallerSavedBuffer registers_;
+  ThreadLocal thread_local_;
+  void ThreadInit();
 
   // Code to call for handling debug break on return.
-  static Code* debug_break_return_;
+  Code* debug_break_return_;
 
   // Code to call for handling debug break in debug break slots.
-  static Code* debug_break_slot_;
+  Code* debug_break_slot_;
+
+  Isolate* isolate_;
+
+  friend class Isolate;
 
   DISALLOW_COPY_AND_ASSIGN(Debug);
 };
@@ -680,95 +688,97 @@ class LockingCommandMessageQueue BASE_EMBEDDED {
 
 class Debugger {
  public:
-  static void DebugRequest(const uint16_t* json_request, int length);
-
-  static Handle<Object> MakeJSObject(Vector<const char> constructor_name,
-                                     int argc, Object*** argv,
-                                     bool* caught_exception);
-  static Handle<Object> MakeExecutionState(bool* caught_exception);
-  static Handle<Object> MakeBreakEvent(Handle<Object> exec_state,
-                                       Handle<Object> break_points_hit,
-                                       bool* caught_exception);
-  static Handle<Object> MakeExceptionEvent(Handle<Object> exec_state,
-                                           Handle<Object> exception,
-                                           bool uncaught,
-                                           bool* caught_exception);
-  static Handle<Object> MakeNewFunctionEvent(Handle<Object> func,
-                                             bool* caught_exception);
-  static Handle<Object> MakeCompileEvent(Handle<Script> script,
-                                         bool before,
-                                         bool* caught_exception);
-  static Handle<Object> MakeScriptCollectedEvent(int id,
-                                                 bool* caught_exception);
-  static void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue);
-  static void OnException(Handle<Object> exception, bool uncaught);
-  static void OnBeforeCompile(Handle<Script> script);
+  ~Debugger();
+
+  void DebugRequest(const uint16_t* json_request, int length);
+
+  Handle<Object> MakeJSObject(Vector<const char> constructor_name,
+                              int argc, Object*** argv,
+                              bool* caught_exception);
+  Handle<Object> MakeExecutionState(bool* caught_exception);
+  Handle<Object> MakeBreakEvent(Handle<Object> exec_state,
+                                Handle<Object> break_points_hit,
+                                bool* caught_exception);
+  Handle<Object> MakeExceptionEvent(Handle<Object> exec_state,
+                                    Handle<Object> exception,
+                                    bool uncaught,
+                                    bool* caught_exception);
+  Handle<Object> MakeNewFunctionEvent(Handle<Object> func,
+                                      bool* caught_exception);
+  Handle<Object> MakeCompileEvent(Handle<Script> script,
+                                  bool before,
+                                  bool* caught_exception);
+  Handle<Object> MakeScriptCollectedEvent(int id,
+                                          bool* caught_exception);
+  void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue);
+  void OnException(Handle<Object> exception, bool uncaught);
+  void OnBeforeCompile(Handle<Script> script);
 
   enum AfterCompileFlags {
     NO_AFTER_COMPILE_FLAGS,
     SEND_WHEN_DEBUGGING
   };
-  static void OnAfterCompile(Handle<Script> script,
-                             AfterCompileFlags after_compile_flags);
-  static void OnNewFunction(Handle<JSFunction> fun);
-  static void OnScriptCollected(int id);
-  static void ProcessDebugEvent(v8::DebugEvent event,
-                                Handle<JSObject> event_data,
-                                bool auto_continue);
-  static void NotifyMessageHandler(v8::DebugEvent event,
-                                   Handle<JSObject> exec_state,
-                                   Handle<JSObject> event_data,
-                                   bool auto_continue);
-  static void SetEventListener(Handle<Object> callback, Handle<Object> data);
-  static void SetMessageHandler(v8::Debug::MessageHandler2 handler);
-  static void SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
-                                     int period);
-  static void SetDebugMessageDispatchHandler(
+  void OnAfterCompile(Handle<Script> script,
+                      AfterCompileFlags after_compile_flags);
+  void OnNewFunction(Handle<JSFunction> fun);
+  void OnScriptCollected(int id);
+  void ProcessDebugEvent(v8::DebugEvent event,
+                         Handle<JSObject> event_data,
+                         bool auto_continue);
+  void NotifyMessageHandler(v8::DebugEvent event,
+                            Handle<JSObject> exec_state,
+                            Handle<JSObject> event_data,
+                            bool auto_continue);
+  void SetEventListener(Handle<Object> callback, Handle<Object> data);
+  void SetMessageHandler(v8::Debug::MessageHandler2 handler);
+  void SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
+                              int period);
+  void SetDebugMessageDispatchHandler(
       v8::Debug::DebugMessageDispatchHandler handler,
       bool provide_locker);
 
   // Invoke the message handler function.
-  static void InvokeMessageHandler(MessageImpl message);
+  void InvokeMessageHandler(MessageImpl message);
 
   // Add a debugger command to the command queue.
-  static void ProcessCommand(Vector<const uint16_t> command,
-                             v8::Debug::ClientData* client_data = NULL);
+  void ProcessCommand(Vector<const uint16_t> command,
+                      v8::Debug::ClientData* client_data = NULL);
 
   // Check whether there are commands in the command queue.
-  static bool HasCommands();
+  bool HasCommands();
 
   // Enqueue a debugger command to the command queue for event listeners.
-  static void EnqueueDebugCommand(v8::Debug::ClientData* client_data = NULL);
+  void EnqueueDebugCommand(v8::Debug::ClientData* client_data = NULL);
 
-  static Handle<Object> Call(Handle<JSFunction> fun,
-                             Handle<Object> data,
-                             bool* pending_exception);
+  Handle<Object> Call(Handle<JSFunction> fun,
+                      Handle<Object> data,
+                      bool* pending_exception);
 
   // Start the debugger agent listening on the provided port.
-  static bool StartAgent(const char* name, int port,
-                         bool wait_for_connection = false);
+  bool StartAgent(const char* name, int port,
+                  bool wait_for_connection = false);
 
   // Stop the debugger agent.
-  static void StopAgent();
+  void StopAgent();
 
   // Blocks until the agent has started listening for connections
-  static void WaitForAgent();
+  void WaitForAgent();
 
-  static void CallMessageDispatchHandler();
+  void CallMessageDispatchHandler();
 
-  static Handle<Context> GetDebugContext();
+  Handle<Context> GetDebugContext();
 
   // Unload the debugger if possible. Only called when no debugger is currently
   // active.
-  static void UnloadDebugger();
+  void UnloadDebugger();
   friend void ForceUnloadDebugger();  // In test-debug.cc
 
-  inline static bool EventActive(v8::DebugEvent event) {
+  inline bool EventActive(v8::DebugEvent event) {
     ScopedLock with(debugger_access_);
 
     // Check whether the message handler has been cleared.
     if (debugger_unload_pending_) {
-      if (Debug::debugger_entry() == NULL) {
+      if (isolate_->debug()->debugger_entry() == NULL) {
         UnloadDebugger();
       }
     }
@@ -786,52 +796,58 @@ class Debugger {
     return !compiling_natives_ && Debugger::IsDebuggerActive();
   }
 
-  static void set_compiling_natives(bool compiling_natives) {
+  void set_compiling_natives(bool compiling_natives) {
     Debugger::compiling_natives_ = compiling_natives;
   }
-  static bool compiling_natives() { return Debugger::compiling_natives_; }
-  static void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
-  static bool is_loading_debugger() { return Debugger::is_loading_debugger_; }
+  bool compiling_natives() const { return compiling_natives_; }
+  void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
+  bool is_loading_debugger() const { return is_loading_debugger_; }
 
-  static bool IsDebuggerActive();
+  bool IsDebuggerActive();
 
  private:
-  static void CallEventCallback(v8::DebugEvent event,
-                                Handle<Object> exec_state,
-                                Handle<Object> event_data,
-                                v8::Debug::ClientData* client_data);
-  static void CallCEventCallback(v8::DebugEvent event,
-                                 Handle<Object> exec_state,
-                                 Handle<Object> event_data,
-                                 v8::Debug::ClientData* client_data);
-  static void CallJSEventCallback(v8::DebugEvent event,
-                                  Handle<Object> exec_state,
-                                  Handle<Object> event_data);
-  static void ListenersChanged();
-
-  static Mutex* debugger_access_;  // Mutex guarding debugger variables.
-  static Handle<Object> event_listener_;  // Global handle to listener.
-  static Handle<Object> event_listener_data_;
-  static bool compiling_natives_;  // Are we compiling natives?
-  static bool is_loading_debugger_;  // Are we loading the debugger?
-  static bool never_unload_debugger_;  // Can we unload the debugger?
-  static v8::Debug::MessageHandler2 message_handler_;
-  static bool debugger_unload_pending_;  // Was message handler cleared?
-  static v8::Debug::HostDispatchHandler host_dispatch_handler_;
-  static Mutex* dispatch_handler_access_;  // Mutex guarding dispatch handler.
-  static v8::Debug::DebugMessageDispatchHandler debug_message_dispatch_handler_;
-  static MessageDispatchHelperThread* message_dispatch_helper_thread_;
-  static int host_dispatch_micros_;
-
-  static DebuggerAgent* agent_;
+  Debugger();
+
+  void CallEventCallback(v8::DebugEvent event,
+                         Handle<Object> exec_state,
+                         Handle<Object> event_data,
+                         v8::Debug::ClientData* client_data);
+  void CallCEventCallback(v8::DebugEvent event,
+                          Handle<Object> exec_state,
+                          Handle<Object> event_data,
+                          v8::Debug::ClientData* client_data);
+  void CallJSEventCallback(v8::DebugEvent event,
+                           Handle<Object> exec_state,
+                           Handle<Object> event_data);
+  void ListenersChanged();
+
+  Mutex* debugger_access_;  // Mutex guarding debugger variables.
+  Handle<Object> event_listener_;  // Global handle to listener.
+  Handle<Object> event_listener_data_;
+  bool compiling_natives_;  // Are we compiling natives?
+  bool is_loading_debugger_;  // Are we loading the debugger?
+  bool never_unload_debugger_;  // Can we unload the debugger?
+  v8::Debug::MessageHandler2 message_handler_;
+  bool debugger_unload_pending_;  // Was message handler cleared?
+  v8::Debug::HostDispatchHandler host_dispatch_handler_;
+  Mutex* dispatch_handler_access_;  // Mutex guarding dispatch handler.
+  v8::Debug::DebugMessageDispatchHandler debug_message_dispatch_handler_;
+  MessageDispatchHelperThread* message_dispatch_helper_thread_;
+  int host_dispatch_micros_;
+
+  DebuggerAgent* agent_;
 
   static const int kQueueInitialSize = 4;
-  static LockingCommandMessageQueue command_queue_;
-  static Semaphore* command_received_;  // Signaled for each command received.
+  LockingCommandMessageQueue command_queue_;
+  Semaphore* command_received_;  // Signaled for each command received.
+  LockingCommandMessageQueue event_command_queue_;
 
-  static LockingCommandMessageQueue event_command_queue_;
+  Isolate* isolate_;
 
   friend class EnterDebugger;
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(Debugger);
 };
 
 
@@ -842,38 +858,44 @@ class Debugger {
 class EnterDebugger BASE_EMBEDDED {
  public:
   EnterDebugger()
-      : prev_(Debug::debugger_entry()),
-        has_js_frames_(!it_.done()) {
-    ASSERT(prev_ != NULL || !Debug::is_interrupt_pending(PREEMPT));
-    ASSERT(prev_ != NULL || !Debug::is_interrupt_pending(DEBUGBREAK));
+      : isolate_(Isolate::Current()),
+        prev_(isolate_->debug()->debugger_entry()),
+        has_js_frames_(!it_.done()),
+        save_(isolate_) {
+    Debug* debug = isolate_->debug();
+    ASSERT(prev_ != NULL || !debug->is_interrupt_pending(PREEMPT));
+    ASSERT(prev_ != NULL || !debug->is_interrupt_pending(DEBUGBREAK));
 
     // Link recursive debugger entry.
-    Debug::set_debugger_entry(this);
+    debug->set_debugger_entry(this);
 
     // Store the previous break id and frame id.
-    break_id_ = Debug::break_id();
-    break_frame_id_ = Debug::break_frame_id();
+    break_id_ = debug->break_id();
+    break_frame_id_ = debug->break_frame_id();
 
     // Create the new break info. If there are no JavaScript frames there is no
     // break frame id.
     if (has_js_frames_) {
-      Debug::NewBreak(it_.frame()->id());
+      debug->NewBreak(it_.frame()->id());
     } else {
-      Debug::NewBreak(StackFrame::NO_ID);
+      debug->NewBreak(StackFrame::NO_ID);
     }
 
     // Make sure that debugger is loaded and enter the debugger context.
-    load_failed_ = !Debug::Load();
+    load_failed_ = !debug->Load();
     if (!load_failed_) {
       // NOTE the member variable save which saves the previous context before
       // this change.
-      Top::set_context(*Debug::debug_context());
+      isolate_->set_context(*debug->debug_context());
     }
   }
 
   ~EnterDebugger() {
+    ASSERT(Isolate::Current() == isolate_);
+    Debug* debug = isolate_->debug();
+
     // Restore to the previous break state.
-    Debug::SetBreak(break_frame_id_, break_id_);
+    debug->SetBreak(break_frame_id_, break_id_);
 
     // Check for leaving the debugger.
     if (prev_ == NULL) {
@@ -881,43 +903,43 @@ class EnterDebugger BASE_EMBEDDED {
       // pending exception as clearing the mirror cache calls back into
       // JavaScript. This can happen if the v8::Debug::Call is used in which
       // case the exception should end up in the calling code.
-      if (!Top::has_pending_exception()) {
+      if (!isolate_->has_pending_exception()) {
         // Try to avoid any pending debug break breaking in the clear mirror
         // cache JavaScript code.
-        if (StackGuard::IsDebugBreak()) {
-          Debug::set_interrupts_pending(DEBUGBREAK);
-          StackGuard::Continue(DEBUGBREAK);
+        if (isolate_->stack_guard()->IsDebugBreak()) {
+          debug->set_interrupts_pending(DEBUGBREAK);
+          isolate_->stack_guard()->Continue(DEBUGBREAK);
         }
-        Debug::ClearMirrorCache();
+        debug->ClearMirrorCache();
       }
 
       // Request preemption and debug break when leaving the last debugger entry
       // if any of these were recorded while debugging.
-      if (Debug::is_interrupt_pending(PREEMPT)) {
+      if (debug->is_interrupt_pending(PREEMPT)) {
         // This re-scheduling of preemption is to avoid starvation in some
         // debugging scenarios.
-        Debug::clear_interrupt_pending(PREEMPT);
-        StackGuard::Preempt();
+        debug->clear_interrupt_pending(PREEMPT);
+        isolate_->stack_guard()->Preempt();
       }
-      if (Debug::is_interrupt_pending(DEBUGBREAK)) {
-        Debug::clear_interrupt_pending(DEBUGBREAK);
-        StackGuard::DebugBreak();
+      if (debug->is_interrupt_pending(DEBUGBREAK)) {
+        debug->clear_interrupt_pending(DEBUGBREAK);
+        isolate_->stack_guard()->DebugBreak();
       }
 
       // If there are commands in the queue when leaving the debugger, request
       // that these commands be processed.
-      if (Debugger::HasCommands()) {
-        StackGuard::DebugCommand();
+      if (isolate_->debugger()->HasCommands()) {
+        isolate_->stack_guard()->DebugCommand();
       }
 
       // If leaving the debugger with the debugger no longer active unload it.
-      if (!Debugger::IsDebuggerActive()) {
-        Debugger::UnloadDebugger();
+      if (!isolate_->debugger()->IsDebuggerActive()) {
+        isolate_->debugger()->UnloadDebugger();
       }
     }
 
     // Leaving this debugger entry.
-    Debug::set_debugger_entry(prev_);
+    debug->set_debugger_entry(prev_);
   }
 
   // Check whether the debugger could be entered.
@@ -930,6 +952,7 @@ class EnterDebugger BASE_EMBEDDED {
   inline Handle<Context> GetContext() { return save_.context(); }
 
  private:
+  Isolate* isolate_;
   EnterDebugger* prev_;  // Previous debugger entry if entered recursively.
   JavaScriptFrameIterator it_;
   const bool has_js_frames_;  // Were there any JavaScript frames?
@@ -943,15 +966,17 @@ class EnterDebugger BASE_EMBEDDED {
 // Stack allocated class for disabling break.
 class DisableBreak BASE_EMBEDDED {
  public:
-  explicit DisableBreak(bool disable_break)  {
-    prev_disable_break_ = Debug::disable_break();
-    Debug::set_disable_break(disable_break);
+  explicit DisableBreak(bool disable_break) : isolate_(Isolate::Current()) {
+    prev_disable_break_ = isolate_->debug()->disable_break();
+    isolate_->debug()->set_disable_break(disable_break);
   }
   ~DisableBreak() {
-    Debug::set_disable_break(prev_disable_break_);
+    ASSERT(Isolate::Current() == isolate_);
+    isolate_->debug()->set_disable_break(prev_disable_break_);
   }
 
  private:
+  Isolate* isolate_;
   // The previous state of the disable break used to restore the value when this
   // object is destructed.
   bool prev_disable_break_;
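
DisableBreak is a stack-allocated guard: the constructor captures the isolate's current disable-break flag and overrides it, and the destructor (which now asserts it runs on the same isolate) restores the saved value. A minimal usage sketch, where the work inside the scope is hypothetical:

    {
      DisableBreak disable_break(true);  // suppress debug breaks in this scope
      RunInternalHousekeeping();         // hypothetical work that must not break
    }  // destructor restores the previous disable_break state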
@@ -976,17 +1001,18 @@ class Debug_Address {
     return Debug_Address(Debug::k_restarter_frame_function_pointer);
   }
 
-  Address address() const {
+  Address address(Isolate* isolate) const {
+    Debug* debug = isolate->debug();
     switch (id_) {
       case Debug::k_after_break_target_address:
-        return reinterpret_cast<Address>(Debug::after_break_target_address());
+        return reinterpret_cast<Address>(debug->after_break_target_address());
       case Debug::k_debug_break_return_address:
-        return reinterpret_cast<Address>(Debug::debug_break_return_address());
+        return reinterpret_cast<Address>(debug->debug_break_return_address());
       case Debug::k_debug_break_slot_address:
-        return reinterpret_cast<Address>(Debug::debug_break_slot_address());
+        return reinterpret_cast<Address>(debug->debug_break_slot_address());
       case Debug::k_restarter_frame_function_pointer:
         return reinterpret_cast<Address>(
-            Debug::restarter_frame_function_pointer_address());
+            debug->restarter_frame_function_pointer_address());
       default:
         UNREACHABLE();
         return NULL;
@@ -1002,7 +1028,7 @@ class Debug_Address {
 // to do this via v8::Debug::HostDispatchHandler
 class MessageDispatchHelperThread: public Thread {
  public:
-  MessageDispatchHelperThread();
+  explicit MessageDispatchHelperThread(Isolate* isolate);
   ~MessageDispatchHelperThread();
 
   void Schedule();
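
Taken together, the header changes turn Debug and Debugger from collections of static members into ordinary objects owned by the Isolate (note the new isolate_ fields and the friend class Isolate declarations). Call sites migrate from Debug::Foo() to isolate->debug()->Foo(); a hedged sketch of the new shape, where the arguments are hypothetical and only methods declared above are used:

    // Old form: Debug::EnsureDebugInfo(shared); Debug::SetBreakPoint(...);
    static void SetBreakpointViaIsolate(Handle<SharedFunctionInfo> shared,
                                        Handle<Object> break_point,
                                        int* source_position) {
      Debug* debug = Isolate::Current()->debug();
      if (debug->EnsureDebugInfo(shared)) {  // returns whether it succeeded
        debug->SetBreakPoint(shared, break_point, source_position);
      }
    }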
index 6b5123365cf5496710bab1c9cc51c36de6372f50..4372af03dee4941f303e9f79054e014fcf9712d2 100644 (file)
 namespace v8 {
 namespace internal {
 
-LargeObjectChunk* Deoptimizer::eager_deoptimization_entry_code_ = NULL;
-LargeObjectChunk* Deoptimizer::lazy_deoptimization_entry_code_ = NULL;
-Deoptimizer* Deoptimizer::current_ = NULL;
-DeoptimizingCodeListNode* Deoptimizer::deoptimizing_code_list_ = NULL;
+DeoptimizerData::DeoptimizerData() {
+  eager_deoptimization_entry_code_ = NULL;
+  lazy_deoptimization_entry_code_ = NULL;
+  current_ = NULL;
+  deoptimizing_code_list_ = NULL;
+}
 
 
+DeoptimizerData::~DeoptimizerData() {
+  if (eager_deoptimization_entry_code_ != NULL) {
+    eager_deoptimization_entry_code_->Free(EXECUTABLE);
+    eager_deoptimization_entry_code_ = NULL;
+  }
+  if (lazy_deoptimization_entry_code_ != NULL) {
+    lazy_deoptimization_entry_code_->Free(EXECUTABLE);
+    lazy_deoptimization_entry_code_ = NULL;
+  }
+}
+
 Deoptimizer* Deoptimizer::New(JSFunction* function,
                               BailoutType type,
                               unsigned bailout_id,
                               Address from,
-                              int fp_to_sp_delta) {
-  Deoptimizer* deoptimizer =
-      new Deoptimizer(function, type, bailout_id, from, fp_to_sp_delta);
-  ASSERT(current_ == NULL);
-  current_ = deoptimizer;
+                              int fp_to_sp_delta,
+                              Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
+                                             function,
+                                             type,
+                                             bailout_id,
+                                             from,
+                                             fp_to_sp_delta);
+  ASSERT(isolate->deoptimizer_data()->current_ == NULL);
+  isolate->deoptimizer_data()->current_ = deoptimizer;
   return deoptimizer;
 }
 
 
-Deoptimizer* Deoptimizer::Grab() {
-  Deoptimizer* result = current_;
+Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  Deoptimizer* result = isolate->deoptimizer_data()->current_;
   ASSERT(result != NULL);
   result->DeleteFrameDescriptions();
-  current_ = NULL;
+  isolate->deoptimizer_data()->current_ = NULL;
   return result;
 }
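
New() and Grab() now take the isolate explicitly (while still asserting that it matches Isolate::Current()), and the pending deoptimizer is parked in the isolate's DeoptimizerData instead of a static current_. A hedged sketch of a caller; the argument values are made up and only signatures introduced in this hunk are used:

    // Entry side (reached from generated code): create the Deoptimizer and
    // stash it on isolate->deoptimizer_data()->current_.
    Isolate* isolate = Isolate::Current();
    Deoptimizer* deoptimizer = Deoptimizer::New(function,          // hypothetical
                                                Deoptimizer::EAGER,
                                                bailout_id,
                                                from_address,
                                                fp_to_sp_delta,
                                                isolate);
    Deoptimizer::ComputeOutputFrames(deoptimizer, isolate);

    // Exit side: take the deoptimizer back, clearing the per-isolate slot.
    Deoptimizer* grabbed = Deoptimizer::Grab(isolate);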
 
@@ -155,7 +175,7 @@ void Deoptimizer::VisitAllOptimizedFunctions(
   AssertNoAllocation no_allocation;
 
   // Run through the list of all global contexts and deoptimize.
-  Object* global = Heap::global_contexts_list();
+  Object* global = Isolate::Current()->heap()->global_contexts_list();
   while (!global->IsUndefined()) {
     VisitAllOptimizedFunctionsForGlobalObject(Context::cast(global)->global(),
                                               visitor);
@@ -170,7 +190,7 @@ void Deoptimizer::HandleWeakDeoptimizedCode(
       reinterpret_cast<DeoptimizingCodeListNode*>(data);
   RemoveDeoptimizingCode(*node->code());
 #ifdef DEBUG
-  node = Deoptimizer::deoptimizing_code_list_;
+  node = Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_;
   while (node != NULL) {
     ASSERT(node != reinterpret_cast<DeoptimizingCodeListNode*>(data));
     node = node->next();
@@ -179,17 +199,20 @@ void Deoptimizer::HandleWeakDeoptimizedCode(
 }
 
 
-void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
+void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer,
+                                      Isolate* isolate) {
   deoptimizer->DoComputeOutputFrames();
 }
 
 
-Deoptimizer::Deoptimizer(JSFunction* function,
+Deoptimizer::Deoptimizer(Isolate* isolate,
+                         JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta)
-    : function_(function),
+    : isolate_(isolate),
+      function_(function),
       bailout_id_(bailout_id),
       bailout_type_(type),
       from_(from),
@@ -228,7 +251,7 @@ Deoptimizer::Deoptimizer(JSFunction* function,
     ASSERT(optimized_code_->kind() == Code::OPTIMIZED_FUNCTION);
     ASSERT(!optimized_code_->contains(from));
   }
-  ASSERT(Heap::allow_allocation(false));
+  ASSERT(HEAP->allow_allocation(false));
   unsigned size = ComputeInputFrameSize();
   input_ = new(size) FrameDescription(size, function);
 }
@@ -249,7 +272,7 @@ void Deoptimizer::DeleteFrameDescriptions() {
   delete[] output_;
   input_ = NULL;
   output_ = NULL;
-  ASSERT(!Heap::allow_allocation(true));
+  ASSERT(!HEAP->allow_allocation(true));
 }
 
 
@@ -257,16 +280,17 @@ Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
   ASSERT(id >= 0);
   if (id >= kNumberOfEntries) return NULL;
   LargeObjectChunk* base = NULL;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   if (type == EAGER) {
-    if (eager_deoptimization_entry_code_ == NULL) {
-      eager_deoptimization_entry_code_ = CreateCode(type);
+    if (data->eager_deoptimization_entry_code_ == NULL) {
+      data->eager_deoptimization_entry_code_ = CreateCode(type);
     }
-    base = eager_deoptimization_entry_code_;
+    base = data->eager_deoptimization_entry_code_;
   } else {
-    if (lazy_deoptimization_entry_code_ == NULL) {
-      lazy_deoptimization_entry_code_ = CreateCode(type);
+    if (data->lazy_deoptimization_entry_code_ == NULL) {
+      data->lazy_deoptimization_entry_code_ = CreateCode(type);
     }
-    base = lazy_deoptimization_entry_code_;
+    base = data->lazy_deoptimization_entry_code_;
   }
   return
       static_cast<Address>(base->GetStartAddress()) + (id * table_entry_size_);
@@ -275,10 +299,11 @@ Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
 
 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
   LargeObjectChunk* base = NULL;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   if (type == EAGER) {
-    base = eager_deoptimization_entry_code_;
+    base = data->eager_deoptimization_entry_code_;
   } else {
-    base = lazy_deoptimization_entry_code_;
+    base = data->lazy_deoptimization_entry_code_;
   }
   if (base == NULL ||
       addr < base->GetStartAddress() ||
@@ -292,23 +317,6 @@ int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
 }
 
 
-void Deoptimizer::Setup() {
-  // Do nothing yet.
-}
-
-
-void Deoptimizer::TearDown() {
-  if (eager_deoptimization_entry_code_ != NULL) {
-    eager_deoptimization_entry_code_->Free(EXECUTABLE);
-    eager_deoptimization_entry_code_ = NULL;
-  }
-  if (lazy_deoptimization_entry_code_ != NULL) {
-    lazy_deoptimization_entry_code_->Free(EXECUTABLE);
-    lazy_deoptimization_entry_code_ = NULL;
-  }
-}
-
-
 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
                                unsigned id,
                                SharedFunctionInfo* shared) {
@@ -335,9 +343,10 @@ int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
 }
 
 
-int Deoptimizer::GetDeoptimizedCodeCount() {
+int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
   int length = 0;
-  DeoptimizingCodeListNode* node = Deoptimizer::deoptimizing_code_list_;
+  DeoptimizingCodeListNode* node =
+      isolate->deoptimizer_data()->deoptimizing_code_list_;
   while (node != NULL) {
     length++;
     node = node->next();
@@ -445,7 +454,7 @@ void Deoptimizer::InsertHeapNumberValue(JavaScriptFrame* frame,
   int tos_index = stack_index + extra_slot_count;
   int index = (frame->ComputeExpressionsCount() - 1) - tos_index;
   if (FLAG_trace_deopt) PrintF("Allocating a new heap number: %e\n", val);
-  Handle<Object> num = Factory::NewNumber(val);
+  Handle<Object> num = isolate_->factory()->NewNumber(val);
   frame->SetExpression(index, *num);
 }
 
@@ -625,10 +634,11 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
                output_[frame_index]->GetTop() + output_offset,
                output_offset);
-        Heap::arguments_marker()->ShortPrint();
+        isolate_->heap()->arguments_marker()->ShortPrint();
         PrintF(" ; arguments object\n");
       }
-      intptr_t value = reinterpret_cast<intptr_t>(Heap::arguments_marker());
+      intptr_t value = reinterpret_cast<intptr_t>(
+          isolate_->heap()->arguments_marker());
       output_[frame_index]->SetFrameSlot(output_offset, value);
       return;
     }
@@ -939,7 +949,8 @@ LargeObjectChunk* Deoptimizer::CreateCode(BailoutType type) {
 
 
 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) {
-  DeoptimizingCodeListNode* node = Deoptimizer::deoptimizing_code_list_;
+  DeoptimizingCodeListNode* node =
+      Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_;
   while (node != NULL) {
     if (node->code()->contains(addr)) return *node->code();
     node = node->next();
@@ -949,15 +960,16 @@ Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) {
 
 
 void Deoptimizer::RemoveDeoptimizingCode(Code* code) {
-  ASSERT(deoptimizing_code_list_ != NULL);
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  ASSERT(data->deoptimizing_code_list_ != NULL);
   // Run through the code objects to find this one and remove it.
   DeoptimizingCodeListNode* prev = NULL;
-  DeoptimizingCodeListNode* current = deoptimizing_code_list_;
+  DeoptimizingCodeListNode* current = data->deoptimizing_code_list_;
   while (current != NULL) {
     if (*current->code() == code) {
       // Unlink from list. If prev is NULL we are looking at the first element.
       if (prev == NULL) {
-        deoptimizing_code_list_ = current->next();
+        data->deoptimizing_code_list_ = current->next();
       } else {
         prev->set_next(current->next());
       }
@@ -1044,7 +1056,8 @@ int32_t TranslationIterator::Next() {
 
 Handle<ByteArray> TranslationBuffer::CreateByteArray() {
   int length = contents_.length();
-  Handle<ByteArray> result = Factory::NewByteArray(length, TENURED);
+  Handle<ByteArray> result =
+      Isolate::Current()->factory()->NewByteArray(length, TENURED);
   memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
   return result;
 }
@@ -1167,16 +1180,18 @@ const char* Translation::StringFor(Opcode opcode) {
 
 
 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   // Globalize the code object and make it weak.
-  code_ = Handle<Code>::cast((GlobalHandles::Create(code)));
-  GlobalHandles::MakeWeak(reinterpret_cast<Object**>(code_.location()),
-                          this,
-                          Deoptimizer::HandleWeakDeoptimizedCode);
+  code_ = Handle<Code>::cast(global_handles->Create(code));
+  global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
+                           this,
+                           Deoptimizer::HandleWeakDeoptimizedCode);
 }
 
 
 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
-  GlobalHandles::Destroy(reinterpret_cast<Object**>(code_.location()));
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+  global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
 }
 
 
index 1d4f4770f9762465b49c95f4e35dcddd969b6d31..ef27646179db0f89f3c31980474594a99db60676 100644 (file)
@@ -93,6 +93,31 @@ class OptimizedFunctionVisitor BASE_EMBEDDED {
 };
 
 
+class Deoptimizer;
+
+
+class DeoptimizerData {
+ public:
+  DeoptimizerData();
+  ~DeoptimizerData();
+
+ private:
+  LargeObjectChunk* eager_deoptimization_entry_code_;
+  LargeObjectChunk* lazy_deoptimization_entry_code_;
+  Deoptimizer* current_;
+
+  // List of deoptimized code which still have references from active stack
+  // frames. These code objects are needed by the deoptimizer when deoptimizing
+  // a frame for which the code object for the function has been
+  // changed from the code present when deoptimizing was done.
+  DeoptimizingCodeListNode* deoptimizing_code_list_;
+
+  friend class Deoptimizer;
+
+  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
+};
+
+
 class Deoptimizer : public Malloced {
  public:
   enum BailoutType {
@@ -107,8 +132,9 @@ class Deoptimizer : public Malloced {
                           BailoutType type,
                           unsigned bailout_id,
                           Address from,
-                          int fp_to_sp_delta);
-  static Deoptimizer* Grab();
+                          int fp_to_sp_delta,
+                          Isolate* isolate);
+  static Deoptimizer* Grab(Isolate* isolate);
 
   // Deoptimize the function now. Its current optimized code will never be run
   // again and any activations of the optimized code will get deoptimized when
@@ -159,7 +185,7 @@ class Deoptimizer : public Malloced {
 
   void InsertHeapNumberValues(int index, JavaScriptFrame* frame);
 
-  static void ComputeOutputFrames(Deoptimizer* deoptimizer);
+  static void ComputeOutputFrames(Deoptimizer* deoptimizer, Isolate* isolate);
 
   static Address GetDeoptimizationEntry(int id, BailoutType type);
   static int GetDeoptimizationId(Address addr, BailoutType type);
@@ -167,9 +193,6 @@ class Deoptimizer : public Malloced {
                            unsigned node_id,
                            SharedFunctionInfo* shared);
 
-  static void Setup();
-  static void TearDown();
-
   // Code generation support.
   static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
   static int output_count_offset() {
@@ -177,7 +200,7 @@ class Deoptimizer : public Malloced {
   }
   static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
 
-  static int GetDeoptimizedCodeCount();
+  static int GetDeoptimizedCodeCount(Isolate* isolate);
 
   static const int kNotDeoptimizationEntry = -1;
 
@@ -218,7 +241,8 @@ class Deoptimizer : public Malloced {
  private:
   static const int kNumberOfEntries = 4096;
 
-  Deoptimizer(JSFunction* function,
+  Deoptimizer(Isolate* isolate,
+              JSFunction* function,
               BailoutType type,
               unsigned bailout_id,
               Address from,
@@ -264,16 +288,7 @@ class Deoptimizer : public Malloced {
   static Code* FindDeoptimizingCodeFromAddress(Address addr);
   static void RemoveDeoptimizingCode(Code* code);
 
-  static LargeObjectChunk* eager_deoptimization_entry_code_;
-  static LargeObjectChunk* lazy_deoptimization_entry_code_;
-  static Deoptimizer* current_;
-
-  // List of deoptimized code which still have references from active stack
-  // frames. These code objects are needed by the deoptimizer when deoptimizing
-  // a frame for which the code object for the function function has been
-  // changed from the code present when deoptimizing was done.
-  static DeoptimizingCodeListNode* deoptimizing_code_list_;
-
+  Isolate* isolate_;
   JSFunction* function_;
   Code* optimized_code_;
   unsigned bailout_id_;
index 6ecd1c8f3a9da214a2844eb669ac6d5bc6b66085..f7f2d4120281897ea0fd7f15841ab13c23b0b778 100644 (file)
@@ -44,6 +44,9 @@ class NameConverter {
   virtual const char* NameOfAddress(byte* addr) const;
   virtual const char* NameOfConstant(byte* addr) const;
   virtual const char* NameInCode(byte* addr) const;
+
+ protected:
+  v8::internal::EmbeddedVector<char, 128> tmp_buffer_;
 };
 
 
index 243abf079ceded133616181eb9ddb4532edc371d..d142ef6af1623aab7d59ffa756c9433f1767730c 100644 (file)
@@ -65,24 +65,24 @@ class V8NameConverter: public disasm::NameConverter {
   Code* code() const { return code_; }
  private:
   Code* code_;
+
+  EmbeddedVector<char, 128> v8_buffer_;
 };
 
 
 const char* V8NameConverter::NameOfAddress(byte* pc) const {
-  static v8::internal::EmbeddedVector<char, 128> buffer;
-
-  const char* name = Builtins::Lookup(pc);
+  const char* name = Isolate::Current()->builtins()->Lookup(pc);
   if (name != NULL) {
-    OS::SNPrintF(buffer, "%s  (%p)", name, pc);
-    return buffer.start();
+    OS::SNPrintF(v8_buffer_, "%s  (%p)", name, pc);
+    return v8_buffer_.start();
   }
 
   if (code_ != NULL) {
     int offs = static_cast<int>(pc - code_->instruction_start());
     // print as code offset, if it seems reasonable
     if (0 <= offs && offs < code_->instruction_size()) {
-      OS::SNPrintF(buffer, "%d  (%p)", offs, pc);
-      return buffer.start();
+      OS::SNPrintF(v8_buffer_, "%d  (%p)", offs, pc);
+      return v8_buffer_.start();
     }
   }
 
@@ -115,6 +115,7 @@ static int DecodeIt(FILE* f,
   NoHandleAllocation ha;
   AssertNoAllocation no_alloc;
   ExternalReferenceEncoder ref_encoder;
+  Heap* heap = HEAP;
 
   v8::internal::EmbeddedVector<char, 128> decode_buffer;
   v8::internal::EmbeddedVector<char, kOutBufferSize> out_buffer;
@@ -256,8 +257,8 @@ static int DecodeIt(FILE* f,
         } else if (kind == Code::STUB) {
           // Reverse lookup required as the minor key cannot be retrieved
           // from the code object.
-          Object* obj = Heap::code_stubs()->SlowReverseLookup(code);
-          if (obj != Heap::undefined_value()) {
+          Object* obj = heap->code_stubs()->SlowReverseLookup(code);
+          if (obj != heap->undefined_value()) {
             ASSERT(obj->IsSmi());
             // Get the STUB key and extract major and minor key.
             uint32_t key = Smi::cast(obj)->value();
index de8f0a4661544e5fb6ac9aa7c9467ea4afb4a459..98c8b680053fa78dbffe97ae1628b1232e0892c3 100644 (file)
@@ -42,14 +42,39 @@ namespace v8 {
 namespace internal {
 
 
+StackGuard::StackGuard()
+    : isolate_(NULL) {
+}
+
+
+void StackGuard::set_interrupt_limits(const ExecutionAccess& lock) {
+  ASSERT(isolate_ != NULL);
+  // Ignore attempts to interrupt when interrupts are postponed.
+  if (should_postpone_interrupts(lock)) return;
+  thread_local_.jslimit_ = kInterruptLimit;
+  thread_local_.climit_ = kInterruptLimit;
+  isolate_->heap()->SetStackLimits();
+}
+
+
+void StackGuard::reset_limits(const ExecutionAccess& lock) {
+  ASSERT(isolate_ != NULL);
+  thread_local_.jslimit_ = thread_local_.real_jslimit_;
+  thread_local_.climit_ = thread_local_.real_climit_;
+  isolate_->heap()->SetStackLimits();
+}
+
+
 static Handle<Object> Invoke(bool construct,
                              Handle<JSFunction> func,
                              Handle<Object> receiver,
                              int argc,
                              Object*** args,
                              bool* has_pending_exception) {
+  Isolate* isolate = func->GetIsolate();
+
   // Entering JavaScript.
-  VMState state(JS);
+  VMState state(isolate, JS);
 
   // Placeholder for return value.
   MaybeObject* value = reinterpret_cast<Object*>(kZapValue);
@@ -85,7 +110,7 @@ static Handle<Object> Invoke(bool construct,
   {
     // Save and restore context around invocation and block the
     // allocation of handles without explicit handle scopes.
-    SaveContext save;
+    SaveContext save(isolate);
     NoHandleAllocation na;
     JSEntryFunction entry = FUNCTION_CAST<JSEntryFunction>(code->entry());
 
@@ -103,20 +128,20 @@ static Handle<Object> Invoke(bool construct,
 
   // Update the pending exception flag and return the value.
   *has_pending_exception = value->IsException();
-  ASSERT(*has_pending_exception == Top::has_pending_exception());
+  ASSERT(*has_pending_exception == Isolate::Current()->has_pending_exception());
   if (*has_pending_exception) {
-    Top::ReportPendingMessages();
-    if (Top::pending_exception() == Failure::OutOfMemoryException()) {
-      if (!HandleScopeImplementer::instance()->ignore_out_of_memory()) {
+    isolate->ReportPendingMessages();
+    if (isolate->pending_exception() == Failure::OutOfMemoryException()) {
+      if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) {
         V8::FatalProcessOutOfMemory("JS", true);
       }
     }
     return Handle<Object>();
   } else {
-    Top::clear_pending_message();
+    isolate->clear_pending_message();
   }
 
-  return Handle<Object>(value->ToObjectUnchecked());
+  return Handle<Object>(value->ToObjectUnchecked(), isolate);
 }
 
 
@@ -131,7 +156,8 @@ Handle<Object> Execution::Call(Handle<JSFunction> func,
 
 Handle<Object> Execution::New(Handle<JSFunction> func, int argc,
                               Object*** args, bool* pending_exception) {
-  return Invoke(true, func, Top::global(), argc, args, pending_exception);
+  return Invoke(true, func, Isolate::Current()->global(), argc, args,
+                pending_exception);
 }
 
 
@@ -153,18 +179,20 @@ Handle<Object> Execution::TryCall(Handle<JSFunction> func,
 
   if (*caught_exception) {
     ASSERT(catcher.HasCaught());
-    ASSERT(Top::has_pending_exception());
-    ASSERT(Top::external_caught_exception());
-    if (Top::pending_exception() == Heap::termination_exception()) {
-      result = Factory::termination_exception();
+    Isolate* isolate = Isolate::Current();
+    ASSERT(isolate->has_pending_exception());
+    ASSERT(isolate->external_caught_exception());
+    if (isolate->pending_exception() ==
+        isolate->heap()->termination_exception()) {
+      result = isolate->factory()->termination_exception();
     } else {
       result = v8::Utils::OpenHandle(*catcher.Exception());
     }
-    Top::OptionalRescheduleException(true);
+    isolate->OptionalRescheduleException(true);
   }
 
-  ASSERT(!Top::has_pending_exception());
-  ASSERT(!Top::external_caught_exception());
+  ASSERT(!Isolate::Current()->has_pending_exception());
+  ASSERT(!Isolate::Current()->external_caught_exception());
   return result;
 }
 
@@ -178,7 +206,7 @@ Handle<Object> Execution::GetFunctionDelegate(Handle<Object> object) {
   // Regular expressions can be called as functions in both Firefox
   // and Safari so we allow it too.
   if (object->IsJSRegExp()) {
-    Handle<String> exec = Factory::exec_symbol();
+    Handle<String> exec = FACTORY->exec_symbol();
     // TODO(lrn): Bug 617.  We should use the default function here, not the
     // one on the RegExp object.
     Object* exec_function;
@@ -186,7 +214,7 @@ Handle<Object> Execution::GetFunctionDelegate(Handle<Object> object) {
       // This can lose an exception, but the alternative is to put a failure
       // object in a handle, which is not GC safe.
       if (!maybe_exec_function->ToObject(&exec_function)) {
-        return Factory::undefined_value();
+        return FACTORY->undefined_value();
       }
     }
     return Handle<Object>(exec_function);
@@ -197,10 +225,10 @@ Handle<Object> Execution::GetFunctionDelegate(Handle<Object> object) {
   if (object->IsHeapObject() &&
       HeapObject::cast(*object)->map()->has_instance_call_handler()) {
     return Handle<JSFunction>(
-        Top::global_context()->call_as_function_delegate());
+        Isolate::Current()->global_context()->call_as_function_delegate());
   }
 
-  return Factory::undefined_value();
+  return FACTORY->undefined_value();
 }
 
 
@@ -215,26 +243,22 @@ Handle<Object> Execution::GetConstructorDelegate(Handle<Object> object) {
   if (object->IsHeapObject() &&
       HeapObject::cast(*object)->map()->has_instance_call_handler()) {
     return Handle<JSFunction>(
-        Top::global_context()->call_as_constructor_delegate());
+        Isolate::Current()->global_context()->call_as_constructor_delegate());
   }
 
-  return Factory::undefined_value();
+  return FACTORY->undefined_value();
 }
 
 
-// Static state for stack guards.
-StackGuard::ThreadLocal StackGuard::thread_local_;
-
-
 bool StackGuard::IsStackOverflow() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return (thread_local_.jslimit_ != kInterruptLimit &&
           thread_local_.climit_ != kInterruptLimit);
 }
 
 
 void StackGuard::EnableInterrupts() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   if (has_pending_interrupts(access)) {
     set_interrupt_limits(access);
   }
@@ -242,7 +266,7 @@ void StackGuard::EnableInterrupts() {
 
 
 void StackGuard::SetStackLimit(uintptr_t limit) {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   // If the current limits are special (e.g. due to a pending interrupt) then
   // leave them alone.
   uintptr_t jslimit = SimulatorStack::JsLimitFromCLimit(limit);
@@ -258,92 +282,92 @@ void StackGuard::SetStackLimit(uintptr_t limit) {
 
 
 void StackGuard::DisableInterrupts() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   reset_limits(access);
 }
 
 
 bool StackGuard::IsInterrupted() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & INTERRUPT;
 }
 
 
 void StackGuard::Interrupt() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   thread_local_.interrupt_flags_ |= INTERRUPT;
   set_interrupt_limits(access);
 }
 
 
 bool StackGuard::IsPreempted() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & PREEMPT;
 }
 
 
 void StackGuard::Preempt() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   thread_local_.interrupt_flags_ |= PREEMPT;
   set_interrupt_limits(access);
 }
 
 
 bool StackGuard::IsTerminateExecution() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & TERMINATE;
 }
 
 
 void StackGuard::TerminateExecution() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   thread_local_.interrupt_flags_ |= TERMINATE;
   set_interrupt_limits(access);
 }
 
 
 bool StackGuard::IsRuntimeProfilerTick() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & RUNTIME_PROFILER_TICK;
 }
 
 
 void StackGuard::RequestRuntimeProfilerTick() {
   // Ignore calls if we're not optimizing or if we can't get the lock.
-  if (FLAG_opt && ExecutionAccess::TryLock()) {
+  if (FLAG_opt && ExecutionAccess::TryLock(isolate_)) {
     thread_local_.interrupt_flags_ |= RUNTIME_PROFILER_TICK;
     if (thread_local_.postpone_interrupts_nesting_ == 0) {
       thread_local_.jslimit_ = thread_local_.climit_ = kInterruptLimit;
-      Heap::SetStackLimits();
+      isolate_->heap()->SetStackLimits();
     }
-    ExecutionAccess::Unlock();
+    ExecutionAccess::Unlock(isolate_);
   }
 }
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 bool StackGuard::IsDebugBreak() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & DEBUGBREAK;
 }
 
 
 void StackGuard::DebugBreak() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   thread_local_.interrupt_flags_ |= DEBUGBREAK;
   set_interrupt_limits(access);
 }
 
 
 bool StackGuard::IsDebugCommand() {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   return thread_local_.interrupt_flags_ & DEBUGCOMMAND;
 }
 
 
 void StackGuard::DebugCommand() {
   if (FLAG_debugger_auto_break) {
-    ExecutionAccess access;
+    ExecutionAccess access(isolate_);
     thread_local_.interrupt_flags_ |= DEBUGCOMMAND;
     set_interrupt_limits(access);
   }
@@ -351,7 +375,7 @@ void StackGuard::DebugCommand() {
 #endif
 
 void StackGuard::Continue(InterruptFlag after_what) {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   thread_local_.interrupt_flags_ &= ~static_cast<int>(after_what);
   if (!should_postpone_interrupts(access) && !has_pending_interrupts(access)) {
     reset_limits(access);
@@ -359,36 +383,34 @@ void StackGuard::Continue(InterruptFlag after_what) {
 }
 
 
-int StackGuard::ArchiveSpacePerThread() {
-  return sizeof(ThreadLocal);
-}
-
-
 char* StackGuard::ArchiveStackGuard(char* to) {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   memcpy(to, reinterpret_cast<char*>(&thread_local_), sizeof(ThreadLocal));
   ThreadLocal blank;
+
+  // Set the stack limits using the old thread_local_.
+  // TODO(isolates): This was the old semantics of constructing a ThreadLocal
+  //                 (as the ctor called SetStackLimits, which looked at the
+  //                 current thread_local_ from StackGuard)-- but is this
+  //                 really what was intended?
+  isolate_->heap()->SetStackLimits();
   thread_local_ = blank;
+
   return to + sizeof(ThreadLocal);
 }
 
 
 char* StackGuard::RestoreStackGuard(char* from) {
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
   memcpy(reinterpret_cast<char*>(&thread_local_), from, sizeof(ThreadLocal));
-  Heap::SetStackLimits();
+  isolate_->heap()->SetStackLimits();
   return from + sizeof(ThreadLocal);
 }
 
 
-static internal::Thread::LocalStorageKey stack_limit_key =
-    internal::Thread::CreateThreadLocalKey();
-
-
 void StackGuard::FreeThreadResources() {
-  Thread::SetThreadLocal(
-      stack_limit_key,
-      reinterpret_cast<void*>(thread_local_.real_climit_));
+  Isolate::CurrentPerIsolateThreadData()->set_stack_limit(
+      thread_local_.real_climit_);
 }
 
 
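Taken together, ArchiveStackGuard/RestoreStackGuard/FreeThreadResources above drop the file-static Thread::LocalStorageKey and route the saved stack limit through the isolate's per-thread data. The round trip, assembled from the lines above (sketch; both statements live inside StackGuard member functions):

  // In FreeThreadResources: stash the real C++ limit for this thread.
  Isolate::CurrentPerIsolateThreadData()->set_stack_limit(thread_local_.real_climit_);
  // In InitThread: pick it up again; 0 means nothing was stored.
  uintptr_t stored_limit = Isolate::CurrentPerIsolateThreadData()->stack_limit();
  if (stored_limit != 0) StackGuard::SetStackLimit(stored_limit);
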
@@ -400,11 +422,11 @@ void StackGuard::ThreadLocal::Clear() {
   nesting_ = 0;
   postpone_interrupts_nesting_ = 0;
   interrupt_flags_ = 0;
-  Heap::SetStackLimits();
 }
 
 
-void StackGuard::ThreadLocal::Initialize() {
+bool StackGuard::ThreadLocal::Initialize() {
+  bool should_set_stack_limits = false;
   if (real_climit_ == kIllegalLimit) {
     // Takes the address of the limit variable in order to find out where
     // the top of stack is right now.
@@ -415,37 +437,41 @@ void StackGuard::ThreadLocal::Initialize() {
     jslimit_ = SimulatorStack::JsLimitFromCLimit(limit);
     real_climit_ = limit;
     climit_ = limit;
-    Heap::SetStackLimits();
+    should_set_stack_limits = true;
   }
   nesting_ = 0;
   postpone_interrupts_nesting_ = 0;
   interrupt_flags_ = 0;
+  return should_set_stack_limits;
 }
 
 
 void StackGuard::ClearThread(const ExecutionAccess& lock) {
   thread_local_.Clear();
+  isolate_->heap()->SetStackLimits();
 }
 
 
 void StackGuard::InitThread(const ExecutionAccess& lock) {
-  thread_local_.Initialize();
-  void* stored_limit = Thread::GetThreadLocal(stack_limit_key);
+  if (thread_local_.Initialize()) isolate_->heap()->SetStackLimits();
+  uintptr_t stored_limit =
+      Isolate::CurrentPerIsolateThreadData()->stack_limit();
   // You should hold the ExecutionAccess lock when you call this.
-  if (stored_limit != NULL) {
-    StackGuard::SetStackLimit(reinterpret_cast<intptr_t>(stored_limit));
+  if (stored_limit != 0) {
+    StackGuard::SetStackLimit(stored_limit);
   }
 }
 
 
 // --- C a l l s   t o   n a t i v e s ---
 
-#define RETURN_NATIVE_CALL(name, argc, argv, has_pending_exception) \
-  do {                                                              \
-    Object** args[argc] = argv;                                     \
-    ASSERT(has_pending_exception != NULL);                          \
-    return Call(Top::name##_fun(), Top::builtins(), argc, args,     \
-                has_pending_exception);                             \
+#define RETURN_NATIVE_CALL(name, argc, argv, has_pending_exception)            \
+  do {                                                                         \
+    Object** args[argc] = argv;                                                \
+    ASSERT(has_pending_exception != NULL);                                     \
+    return Call(Isolate::Current()->name##_fun(),                              \
+                Isolate::Current()->js_builtins_object(), argc, args,          \
+                has_pending_exception);                                        \
   } while (false)
 
 
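Expanded by hand, a call like the NewDate one below now resolves both the native helper and the builtins object through the current isolate; roughly (do/while wrapper dropped, create_date_fun() follows from the name##_fun() paste):

  Object** args[1] = { time_obj.location() };
  ASSERT(exc != NULL);
  return Call(Isolate::Current()->create_date_fun(),
              Isolate::Current()->js_builtins_object(), 1, args, exc);
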
@@ -461,7 +487,7 @@ Handle<Object> Execution::ToBoolean(Handle<Object> obj) {
     double value = obj->Number();
     result = !((value == 0) || isnan(value));
   }
-  return Handle<Object>(Heap::ToBoolean(result));
+  return Handle<Object>(HEAP->ToBoolean(result));
 }
 
 
@@ -502,7 +528,7 @@ Handle<Object> Execution::ToInt32(Handle<Object> obj, bool* exc) {
 
 
 Handle<Object> Execution::NewDate(double time, bool* exc) {
-  Handle<Object> time_obj = Factory::NewNumber(time);
+  Handle<Object> time_obj = FACTORY->NewNumber(time);
   RETURN_NATIVE_CALL(create_date, 1, { time_obj.location() }, exc);
 }
 
@@ -513,11 +539,10 @@ Handle<Object> Execution::NewDate(double time, bool* exc) {
 Handle<JSRegExp> Execution::NewJSRegExp(Handle<String> pattern,
                                         Handle<String> flags,
                                         bool* exc) {
+  Handle<JSFunction> function = Handle<JSFunction>(
+      pattern->GetIsolate()->global_context()->regexp_function());
   Handle<Object> re_obj = RegExpImpl::CreateRegExpLiteral(
-      Handle<JSFunction>(Top::global_context()->regexp_function()),
-      pattern,
-      flags,
-      exc);
+      function, pattern, flags, exc);
   if (*exc) return Handle<JSRegExp>();
   return Handle<JSRegExp>::cast(re_obj);
 }
@@ -526,17 +551,18 @@ Handle<JSRegExp> Execution::NewJSRegExp(Handle<String> pattern,
 Handle<Object> Execution::CharAt(Handle<String> string, uint32_t index) {
   int int_index = static_cast<int>(index);
   if (int_index < 0 || int_index >= string->length()) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
 
   Handle<Object> char_at =
-      GetProperty(Top::builtins(), Factory::char_at_symbol());
+      GetProperty(Isolate::Current()->js_builtins_object(),
+                  FACTORY->char_at_symbol());
   if (!char_at->IsJSFunction()) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
 
   bool caught_exception;
-  Handle<Object> index_object = Factory::NewNumberFromInt(int_index);
+  Handle<Object> index_object = FACTORY->NewNumberFromInt(int_index);
   Object** index_arg[] = { index_object.location() };
   Handle<Object> result = TryCall(Handle<JSFunction>::cast(char_at),
                                   string,
@@ -544,7 +570,7 @@ Handle<Object> Execution::CharAt(Handle<String> string, uint32_t index) {
                                   index_arg,
                                   &caught_exception);
   if (caught_exception) {
-    return Factory::undefined_value();
+    return FACTORY->undefined_value();
   }
   return result;
 }
@@ -554,13 +580,15 @@ Handle<JSFunction> Execution::InstantiateFunction(
     Handle<FunctionTemplateInfo> data, bool* exc) {
   // Fast case: see if the function has already been instantiated
   int serial_number = Smi::cast(data->serial_number())->value();
-  Object* elm = Top::global_context()->function_cache()->
-      GetElementNoExceptionThrown(serial_number);
+  Object* elm =
+      Isolate::Current()->global_context()->function_cache()->
+          GetElementNoExceptionThrown(serial_number);
   if (elm->IsJSFunction()) return Handle<JSFunction>(JSFunction::cast(elm));
   // The function has not yet been instantiated in this context; do it.
   Object** args[1] = { Handle<Object>::cast(data).location() };
   Handle<Object> result =
-      Call(Top::instantiate_fun(), Top::builtins(), 1, args, exc);
+      Call(Isolate::Current()->instantiate_fun(),
+           Isolate::Current()->js_builtins_object(), 1, args, exc);
   if (*exc) return Handle<JSFunction>::null();
   return Handle<JSFunction>::cast(result);
 }
@@ -588,7 +616,8 @@ Handle<JSObject> Execution::InstantiateObject(Handle<ObjectTemplateInfo> data,
   } else {
     Object** args[1] = { Handle<Object>::cast(data).location() };
     Handle<Object> result =
-        Call(Top::instantiate_fun(), Top::builtins(), 1, args, exc);
+        Call(Isolate::Current()->instantiate_fun(),
+             Isolate::Current()->js_builtins_object(), 1, args, exc);
     if (*exc) return Handle<JSObject>::null();
     return Handle<JSObject>::cast(result);
   }
@@ -599,7 +628,8 @@ void Execution::ConfigureInstance(Handle<Object> instance,
                                   Handle<Object> instance_template,
                                   bool* exc) {
   Object** args[2] = { instance.location(), instance_template.location() };
-  Execution::Call(Top::configure_instance_fun(), Top::builtins(), 2, args, exc);
+  Execution::Call(Isolate::Current()->configure_instance_fun(),
+                  Isolate::Current()->js_builtins_object(), 2, args, exc);
 }
 
 
@@ -613,50 +643,57 @@ Handle<String> Execution::GetStackTraceLine(Handle<Object> recv,
                           pos.location(),
                           is_global.location() };
   bool caught_exception = false;
-  Handle<Object> result = TryCall(Top::get_stack_trace_line_fun(),
-                                  Top::builtins(), argc, args,
-                                  &caught_exception);
-  if (caught_exception || !result->IsString()) return Factory::empty_symbol();
+  Handle<Object> result =
+      TryCall(Isolate::Current()->get_stack_trace_line_fun(),
+              Isolate::Current()->js_builtins_object(), argc, args,
+              &caught_exception);
+  if (caught_exception || !result->IsString()) return FACTORY->empty_symbol();
   return Handle<String>::cast(result);
 }
 
 
 static Object* RuntimePreempt() {
+  Isolate* isolate = Isolate::Current();
+
   // Clear the preempt request flag.
-  StackGuard::Continue(PREEMPT);
+  isolate->stack_guard()->Continue(PREEMPT);
 
   ContextSwitcher::PreemptionReceived();
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (Debug::InDebugger()) {
+  if (isolate->debug()->InDebugger()) {
     // If currently in the debugger, don't do any actual preemption but record
     // that preemption occurred while in the debugger.
-    Debug::PreemptionWhileInDebugger();
+    isolate->debug()->PreemptionWhileInDebugger();
   } else {
     // Perform preemption.
     v8::Unlocker unlocker;
     Thread::YieldCPU();
   }
 #else
-  // Perform preemption.
-  v8::Unlocker unlocker;
-  Thread::YieldCPU();
+  { // NOLINT
+    // Perform preemption.
+    v8::Unlocker unlocker;
+    Thread::YieldCPU();
+  }
 #endif
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 Object* Execution::DebugBreakHelper() {
+  Isolate* isolate = Isolate::Current();
+
   // Just continue if breaks are disabled.
-  if (Debug::disable_break()) {
-    return Heap::undefined_value();
+  if (isolate->debug()->disable_break()) {
+    return isolate->heap()->undefined_value();
   }
 
   // Ignore debug break during bootstrapping.
-  if (Bootstrapper::IsActive()) {
-    return Heap::undefined_value();
+  if (isolate->bootstrapper()->IsActive()) {
+    return isolate->heap()->undefined_value();
   }
 
   {
@@ -666,32 +703,33 @@ Object* Execution::DebugBreakHelper() {
     if (fun && fun->IsJSFunction()) {
       // Don't stop in builtin functions.
       if (JSFunction::cast(fun)->IsBuiltin()) {
-        return Heap::undefined_value();
+        return isolate->heap()->undefined_value();
       }
       GlobalObject* global = JSFunction::cast(fun)->context()->global();
       // Don't stop in debugger functions.
-      if (Debug::IsDebugGlobal(global)) {
-        return Heap::undefined_value();
+      if (isolate->debug()->IsDebugGlobal(global)) {
+        return isolate->heap()->undefined_value();
       }
     }
   }
 
   // Collect the break state before clearing the flags.
   bool debug_command_only =
-      StackGuard::IsDebugCommand() && !StackGuard::IsDebugBreak();
+      isolate->stack_guard()->IsDebugCommand() &&
+      !isolate->stack_guard()->IsDebugBreak();
 
   // Clear the debug break request flag.
-  StackGuard::Continue(DEBUGBREAK);
+  isolate->stack_guard()->Continue(DEBUGBREAK);
 
   ProcessDebugMesssages(debug_command_only);
 
   // Return to continue execution.
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 void Execution::ProcessDebugMesssages(bool debug_command_only) {
   // Clear the debug command request flag.
-  StackGuard::Continue(DEBUGCOMMAND);
+  Isolate::Current()->stack_guard()->Continue(DEBUGCOMMAND);
 
   HandleScope scope;
   // Enter the debugger. Just continue if we fail to enter the debugger.
@@ -702,34 +740,37 @@ void Execution::ProcessDebugMesssages(bool debug_command_only) {
 
   // Notify the debug event listeners. Indicate auto continue if the break was
   // a debug command break.
-  Debugger::OnDebugBreak(Factory::undefined_value(), debug_command_only);
+  Isolate::Current()->debugger()->OnDebugBreak(FACTORY->undefined_value(),
+                                               debug_command_only);
 }
 
 
 #endif
 
 MaybeObject* Execution::HandleStackGuardInterrupt() {
-  Counters::stack_interrupts.Increment();
-  if (StackGuard::IsRuntimeProfilerTick()) {
-    Counters::runtime_profiler_ticks.Increment();
-    StackGuard::Continue(RUNTIME_PROFILER_TICK);
-    RuntimeProfiler::OptimizeNow();
+  Isolate* isolate = Isolate::Current();
+  StackGuard* stack_guard = isolate->stack_guard();
+  isolate->counters()->stack_interrupts()->Increment();
+  if (stack_guard->IsRuntimeProfilerTick()) {
+    isolate->counters()->runtime_profiler_ticks()->Increment();
+    stack_guard->Continue(RUNTIME_PROFILER_TICK);
+    isolate->runtime_profiler()->OptimizeNow();
   }
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (StackGuard::IsDebugBreak() || StackGuard::IsDebugCommand()) {
+  if (stack_guard->IsDebugBreak() || stack_guard->IsDebugCommand()) {
     DebugBreakHelper();
   }
 #endif
-  if (StackGuard::IsPreempted()) RuntimePreempt();
-  if (StackGuard::IsTerminateExecution()) {
-    StackGuard::Continue(TERMINATE);
-    return Top::TerminateExecution();
+  if (stack_guard->IsPreempted()) RuntimePreempt();
+  if (stack_guard->IsTerminateExecution()) {
+    stack_guard->Continue(TERMINATE);
+    return isolate->TerminateExecution();
   }
-  if (StackGuard::IsInterrupted()) {
-    StackGuard::Continue(INTERRUPT);
-    return Top::StackOverflow();
+  if (stack_guard->IsInterrupted()) {
+    stack_guard->Continue(INTERRUPT);
+    return isolate->StackOverflow();
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 } }  // namespace v8::internal
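The limit machinery itself is unchanged: a pending interrupt lowers jslimit_/climit_ to kInterruptLimit, the next stack check therefore fails, and control reaches HandleStackGuardInterrupt, which now dispatches entirely through the isolate. A simplified, hypothetical check site (CheckStackAndInterrupts is not actual V8 code):

  MaybeObject* CheckStackAndInterrupts(Isolate* isolate) {
    // Approximate the stack pointer by taking the address of a local.
    uintptr_t approx_sp = reinterpret_cast<uintptr_t>(&approx_sp);
    if (approx_sp < isolate->stack_guard()->climit()) {
      // Either a genuine overflow or a limit lowered by an interrupt request.
      return Execution::HandleStackGuardInterrupt();
    }
    return isolate->heap()->undefined_value();
  }
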
index cb07807c06c98b4a77548742ac8c40ea497b4169..d4b80d274e24d34b96f1c9cee1caa9bc66b6da8a 100644 (file)
@@ -146,71 +146,74 @@ class Execution : public AllStatic {
 
 
 class ExecutionAccess;
+class Isolate;
 
 
 // StackGuard contains the handling of the limits that are used to limit the
 // number of nested invocations of JavaScript and the stack size used in each
 // invocation.
-class StackGuard : public AllStatic {
+class StackGuard {
  public:
   // Pass the address beyond which the stack should not grow.  The stack
   // is assumed to grow downwards.
-  static void SetStackLimit(uintptr_t limit);
+  void SetStackLimit(uintptr_t limit);
 
   // Threading support.
-  static char* ArchiveStackGuard(char* to);
-  static char* RestoreStackGuard(char* from);
-  static int ArchiveSpacePerThread();
-  static void FreeThreadResources();
+  char* ArchiveStackGuard(char* to);
+  char* RestoreStackGuard(char* from);
+  static int ArchiveSpacePerThread() { return sizeof(ThreadLocal); }
+  void FreeThreadResources();
   // Sets up the default stack guard for this thread if it has not
   // already been set up.
-  static void InitThread(const ExecutionAccess& lock);
+  void InitThread(const ExecutionAccess& lock);
   // Clears the stack guard for this thread so it does not look as if
   // it has been set up.
-  static void ClearThread(const ExecutionAccess& lock);
-
-  static bool IsStackOverflow();
-  static bool IsPreempted();
-  static void Preempt();
-  static bool IsInterrupted();
-  static void Interrupt();
-  static bool IsTerminateExecution();
-  static void TerminateExecution();
-  static bool IsRuntimeProfilerTick();
-  static void RequestRuntimeProfilerTick();
+  void ClearThread(const ExecutionAccess& lock);
+
+  bool IsStackOverflow();
+  bool IsPreempted();
+  void Preempt();
+  bool IsInterrupted();
+  void Interrupt();
+  bool IsTerminateExecution();
+  void TerminateExecution();
+  bool IsRuntimeProfilerTick();
+  void RequestRuntimeProfilerTick();
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  static bool IsDebugBreak();
-  static void DebugBreak();
-  static bool IsDebugCommand();
-  static void DebugCommand();
+  bool IsDebugBreak();
+  void DebugBreak();
+  bool IsDebugCommand();
+  void DebugCommand();
 #endif
-  static void Continue(InterruptFlag after_what);
+  void Continue(InterruptFlag after_what);
 
   // This provides an asynchronous read of the stack limits for the current
   // thread.  There are no locks protecting this, but it is assumed that you
   // have the global V8 lock if you are using multiple V8 threads.
-  static uintptr_t climit() {
+  uintptr_t climit() {
     return thread_local_.climit_;
   }
-  static uintptr_t real_climit() {
+  uintptr_t real_climit() {
     return thread_local_.real_climit_;
   }
-  static uintptr_t jslimit() {
+  uintptr_t jslimit() {
     return thread_local_.jslimit_;
   }
-  static uintptr_t real_jslimit() {
+  uintptr_t real_jslimit() {
     return thread_local_.real_jslimit_;
   }
-  static Address address_of_jslimit() {
+  Address address_of_jslimit() {
     return reinterpret_cast<Address>(&thread_local_.jslimit_);
   }
-  static Address address_of_real_jslimit() {
+  Address address_of_real_jslimit() {
     return reinterpret_cast<Address>(&thread_local_.real_jslimit_);
   }
 
  private:
+  StackGuard();
+
   // You should hold the ExecutionAccess lock when calling this method.
-  static bool has_pending_interrupts(const ExecutionAccess& lock) {
+  bool has_pending_interrupts(const ExecutionAccess& lock) {
     // Sanity check: We shouldn't be asking about pending interrupts
     // unless we're not postponing them anymore.
     ASSERT(!should_postpone_interrupts(lock));
@@ -218,30 +221,20 @@ class StackGuard : public AllStatic {
   }
 
   // You should hold the ExecutionAccess lock when calling this method.
-  static bool should_postpone_interrupts(const ExecutionAccess& lock) {
+  bool should_postpone_interrupts(const ExecutionAccess& lock) {
     return thread_local_.postpone_interrupts_nesting_ > 0;
   }
 
   // You should hold the ExecutionAccess lock when calling this method.
-  static void set_interrupt_limits(const ExecutionAccess& lock) {
-    // Ignore attempts to interrupt when interrupts are postponed.
-    if (should_postpone_interrupts(lock)) return;
-    thread_local_.jslimit_ = kInterruptLimit;
-    thread_local_.climit_ = kInterruptLimit;
-    Heap::SetStackLimits();
-  }
+  inline void set_interrupt_limits(const ExecutionAccess& lock);
 
   // Reset limits to actual values, for example after handling an interrupt.
   // You should hold the ExecutionAccess lock when calling this method.
-  static void reset_limits(const ExecutionAccess& lock) {
-    thread_local_.jslimit_ = thread_local_.real_jslimit_;
-    thread_local_.climit_ = thread_local_.real_climit_;
-    Heap::SetStackLimits();
-  }
+  inline void reset_limits(const ExecutionAccess& lock);
 
   // Enable or disable interrupts.
-  static void EnableInterrupts();
-  static void DisableInterrupts();
+  void EnableInterrupts();
+  void DisableInterrupts();
 
 #ifdef V8_TARGET_ARCH_X64
   static const uintptr_t kInterruptLimit = V8_UINT64_C(0xfffffffffffffffe);
@@ -256,9 +249,11 @@ class StackGuard : public AllStatic {
     ThreadLocal() { Clear(); }
     // You should hold the ExecutionAccess lock when you call Initialize or
     // Clear.
-    void Initialize();
     void Clear();
 
+    // Returns true if the heap's stack limits should be set, false if not.
+    bool Initialize();
+
     // The stack limit is split into a JavaScript and a C++ stack limit. These
     // two are the same except when running on a simulator where the C++ and
     // JavaScript stacks are separate. Each of the two stack limits has two
@@ -278,45 +273,19 @@ class StackGuard : public AllStatic {
     int interrupt_flags_;
   };
 
-  static ThreadLocal thread_local_;
+  // TODO(isolates): Technically this could be calculated directly from a
+  //                 pointer to StackGuard.
+  Isolate* isolate_;
+  ThreadLocal thread_local_;
 
+  friend class Isolate;
   friend class StackLimitCheck;
   friend class PostponeInterruptsScope;
-};
-
 
-// Support for checking for stack-overflows in C++ code.
-class StackLimitCheck BASE_EMBEDDED {
- public:
-  bool HasOverflowed() const {
-    // Stack has overflowed in C++ code only if stack pointer exceeds the C++
-    // stack guard and the limits are not set to interrupt values.
-    // TODO(214): Stack overflows are ignored if a interrupt is pending. This
-    // code should probably always use the initial C++ limit.
-    return (reinterpret_cast<uintptr_t>(this) < StackGuard::climit()) &&
-           StackGuard::IsStackOverflow();
-  }
+  DISALLOW_COPY_AND_ASSIGN(StackGuard);
 };
 
 
-// Support for temporarily postponing interrupts. When the outermost
-// postpone scope is left the interrupts will be re-enabled and any
-// interrupts that occurred while in the scope will be taken into
-// account.
-class PostponeInterruptsScope BASE_EMBEDDED {
- public:
-  PostponeInterruptsScope() {
-    StackGuard::thread_local_.postpone_interrupts_nesting_++;
-    StackGuard::DisableInterrupts();
-  }
-
-  ~PostponeInterruptsScope() {
-    if (--StackGuard::thread_local_.postpone_interrupts_nesting_ == 0) {
-      StackGuard::EnableInterrupts();
-    }
-  }
-};
-
 } }  // namespace v8::internal
 
 #endif  // V8_EXECUTION_H_
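With StackGuard no longer AllStatic, every former StackGuard::X() call goes through the owning isolate, as the execution.cc hunks already show. A usage sketch (the value of limit is a placeholder):

  Isolate* isolate = Isolate::Current();
  isolate->stack_guard()->SetStackLimit(limit);    // was StackGuard::SetStackLimit(limit)
  if (isolate->stack_guard()->IsTerminateExecution()) {
    isolate->stack_guard()->Continue(TERMINATE);   // was StackGuard::Continue(TERMINATE)
  }
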
index 8b4bdbd88e4553b15360d466812f0d98ce498f1f..b3f83fe98dfdb9f5d6183d3585345cc44f6487c4 100644 (file)
@@ -100,7 +100,7 @@ v8::Handle<v8::Value> ExternalizeStringExtension::Externalize(
         data, string->length());
     result = string->MakeExternal(resource);
     if (result && !string->IsSymbol()) {
-      i::ExternalStringTable::AddString(*string);
+      HEAP->external_string_table()->AddString(*string);
     }
     if (!result) delete resource;
   } else {
@@ -110,7 +110,7 @@ v8::Handle<v8::Value> ExternalizeStringExtension::Externalize(
         data, string->length());
     result = string->MakeExternal(resource);
     if (result && !string->IsSymbol()) {
-      i::ExternalStringTable::AddString(*string);
+      HEAP->external_string_table()->AddString(*string);
     }
     if (!result) delete resource;
   }
index 63daa05b5b4cdca70bd20c3e6847a0a984e266db..3740c27aa80594d9ac702d28935f2780aeefee11 100644 (file)
@@ -45,7 +45,7 @@ v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
   if (args.Length() >= 1 && args[0]->IsBoolean()) {
     compact = args[0]->BooleanValue();
   }
-  Heap::CollectAllGarbage(compact);
+  HEAP->CollectAllGarbage(compact);
   return v8::Undefined();
 }
 
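The HEAP and FACTORY macros used in these extensions (and in execution.cc above) stand in for the old Heap::/Factory:: statics. Presumably they are defined in the new isolate header roughly as follows (shown only to make the call sites readable; the exact definitions are not part of this section):

  #define HEAP (v8::internal::Isolate::Current()->heap())
  #define FACTORY (v8::internal::Isolate::Current()->factory())
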
index 1ac501f395f307eed22fe58548905a308813ddb1..d798c3ede65b34d4dd860aeafc0e9ebe28e26a89 100644 (file)
@@ -41,35 +41,43 @@ namespace internal {
 
 Handle<FixedArray> Factory::NewFixedArray(int size, PretenureFlag pretenure) {
   ASSERT(0 <= size);
-  CALL_HEAP_FUNCTION(Heap::AllocateFixedArray(size, pretenure), FixedArray);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFixedArray(size, pretenure),
+      FixedArray);
 }
 
 
 Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size,
                                                    PretenureFlag pretenure) {
   ASSERT(0 <= size);
-  CALL_HEAP_FUNCTION(Heap::AllocateFixedArrayWithHoles(size, pretenure),
-                     FixedArray);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFixedArrayWithHoles(size, pretenure),
+      FixedArray);
 }
 
 
 Handle<StringDictionary> Factory::NewStringDictionary(int at_least_space_for) {
   ASSERT(0 <= at_least_space_for);
-  CALL_HEAP_FUNCTION(StringDictionary::Allocate(at_least_space_for),
+  CALL_HEAP_FUNCTION(isolate(),
+                     StringDictionary::Allocate(at_least_space_for),
                      StringDictionary);
 }
 
 
 Handle<NumberDictionary> Factory::NewNumberDictionary(int at_least_space_for) {
   ASSERT(0 <= at_least_space_for);
-  CALL_HEAP_FUNCTION(NumberDictionary::Allocate(at_least_space_for),
+  CALL_HEAP_FUNCTION(isolate(),
+                     NumberDictionary::Allocate(at_least_space_for),
                      NumberDictionary);
 }
 
 
 Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
   ASSERT(0 <= number_of_descriptors);
-  CALL_HEAP_FUNCTION(DescriptorArray::Allocate(number_of_descriptors),
+  CALL_HEAP_FUNCTION(isolate(),
+                     DescriptorArray::Allocate(number_of_descriptors),
                      DescriptorArray);
 }
 
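Every factory method now passes isolate() as the first argument to CALL_HEAP_FUNCTION. The macro itself is outside this section; spelled out for one method it amounts to roughly the following (NewFixedArraySketch is a hypothetical name, and the real macro retries the allocation after a GC instead of returning an empty handle):

  Handle<FixedArray> NewFixedArraySketch(Isolate* isolate, int size) {
    MaybeObject* maybe = isolate->heap()->AllocateFixedArray(size, NOT_TENURED);
    Object* obj = NULL;
    if (!maybe->ToObject(&obj)) return Handle<FixedArray>();  // retry/GC elided
    return Handle<FixedArray>(FixedArray::cast(obj), isolate);
  }
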
@@ -78,7 +86,8 @@ Handle<DeoptimizationInputData> Factory::NewDeoptimizationInputData(
     int deopt_entry_count,
     PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
-  CALL_HEAP_FUNCTION(DeoptimizationInputData::Allocate(deopt_entry_count,
+  CALL_HEAP_FUNCTION(isolate(),
+                     DeoptimizationInputData::Allocate(deopt_entry_count,
                                                        pretenure),
                      DeoptimizationInputData);
 }
@@ -88,7 +97,8 @@ Handle<DeoptimizationOutputData> Factory::NewDeoptimizationOutputData(
     int deopt_entry_count,
     PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
-  CALL_HEAP_FUNCTION(DeoptimizationOutputData::Allocate(deopt_entry_count,
+  CALL_HEAP_FUNCTION(isolate(),
+                     DeoptimizationOutputData::Allocate(deopt_entry_count,
                                                         pretenure),
                      DeoptimizationOutputData);
 }
@@ -96,96 +106,137 @@ Handle<DeoptimizationOutputData> Factory::NewDeoptimizationOutputData(
 
 // Symbols are created in the old generation (data space).
 Handle<String> Factory::LookupSymbol(Vector<const char> string) {
-  CALL_HEAP_FUNCTION(Heap::LookupSymbol(string), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->LookupSymbol(string),
+                     String);
 }
 
 Handle<String> Factory::LookupAsciiSymbol(Vector<const char> string) {
-  CALL_HEAP_FUNCTION(Heap::LookupAsciiSymbol(string), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->LookupAsciiSymbol(string),
+                     String);
 }
 
 Handle<String> Factory::LookupTwoByteSymbol(Vector<const uc16> string) {
-  CALL_HEAP_FUNCTION(Heap::LookupTwoByteSymbol(string), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->LookupTwoByteSymbol(string),
+                     String);
 }
 
 
 Handle<String> Factory::NewStringFromAscii(Vector<const char> string,
                                            PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateStringFromAscii(string, pretenure), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateStringFromAscii(string, pretenure),
+      String);
 }
 
 Handle<String> Factory::NewStringFromUtf8(Vector<const char> string,
                                           PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateStringFromUtf8(string, pretenure), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateStringFromUtf8(string, pretenure),
+      String);
 }
 
 
 Handle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string,
                                              PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateStringFromTwoByte(string, pretenure),
-                     String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateStringFromTwoByte(string, pretenure),
+      String);
 }
 
 
 Handle<String> Factory::NewRawAsciiString(int length,
                                           PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateRawAsciiString(length, pretenure), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateRawAsciiString(length, pretenure),
+      String);
 }
 
 
 Handle<String> Factory::NewRawTwoByteString(int length,
                                             PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateRawTwoByteString(length, pretenure), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateRawTwoByteString(length, pretenure),
+      String);
 }
 
 
 Handle<String> Factory::NewConsString(Handle<String> first,
                                       Handle<String> second) {
-  CALL_HEAP_FUNCTION(Heap::AllocateConsString(*first, *second), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateConsString(*first, *second),
+                     String);
 }
 
 
 Handle<String> Factory::NewSubString(Handle<String> str,
                                      int begin,
                                      int end) {
-  CALL_HEAP_FUNCTION(str->SubString(begin, end), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     str->SubString(begin, end),
+                     String);
 }
 
 
 Handle<String> Factory::NewExternalStringFromAscii(
     ExternalAsciiString::Resource* resource) {
-  CALL_HEAP_FUNCTION(Heap::AllocateExternalStringFromAscii(resource), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateExternalStringFromAscii(resource),
+      String);
 }
 
 
 Handle<String> Factory::NewExternalStringFromTwoByte(
     ExternalTwoByteString::Resource* resource) {
-  CALL_HEAP_FUNCTION(Heap::AllocateExternalStringFromTwoByte(resource), String);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateExternalStringFromTwoByte(resource),
+      String);
 }
 
 
 Handle<Context> Factory::NewGlobalContext() {
-  CALL_HEAP_FUNCTION(Heap::AllocateGlobalContext(), Context);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateGlobalContext(),
+      Context);
 }
 
 
 Handle<Context> Factory::NewFunctionContext(int length,
                                             Handle<JSFunction> closure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateFunctionContext(length, *closure), Context);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFunctionContext(length, *closure),
+      Context);
 }
 
 
 Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                         Handle<JSObject> extension,
                                         bool is_catch_context) {
-  CALL_HEAP_FUNCTION(Heap::AllocateWithContext(*previous,
-                                               *extension,
-                                               is_catch_context),
-                     Context);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateWithContext(*previous,
+                                             *extension,
+                                             is_catch_context),
+      Context);
 }
 
 
 Handle<Struct> Factory::NewStruct(InstanceType type) {
-  CALL_HEAP_FUNCTION(Heap::AllocateStruct(type), Struct);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateStruct(type),
+      Struct);
 }
 
 
@@ -200,34 +251,35 @@ Handle<AccessorInfo> Factory::NewAccessorInfo() {
 Handle<Script> Factory::NewScript(Handle<String> source) {
   // Generate id for this script.
   int id;
-  if (Heap::last_script_id()->IsUndefined()) {
+  Heap* heap = isolate()->heap();
+  if (heap->last_script_id()->IsUndefined()) {
     // Script ids start from one.
     id = 1;
   } else {
     // Increment id, wrap when positive smi is exhausted.
-    id = Smi::cast(Heap::last_script_id())->value();
+    id = Smi::cast(heap->last_script_id())->value();
     id++;
     if (!Smi::IsValid(id)) {
       id = 0;
     }
   }
-  Heap::SetLastScriptId(Smi::FromInt(id));
+  heap->SetLastScriptId(Smi::FromInt(id));
 
   // Create and initialize script object.
-  Handle<Proxy> wrapper = Factory::NewProxy(0, TENURED);
+  Handle<Proxy> wrapper = NewProxy(0, TENURED);
   Handle<Script> script = Handle<Script>::cast(NewStruct(SCRIPT_TYPE));
   script->set_source(*source);
-  script->set_name(Heap::undefined_value());
-  script->set_id(Heap::last_script_id());
+  script->set_name(heap->undefined_value());
+  script->set_id(heap->last_script_id());
   script->set_line_offset(Smi::FromInt(0));
   script->set_column_offset(Smi::FromInt(0));
-  script->set_data(Heap::undefined_value());
-  script->set_context_data(Heap::undefined_value());
+  script->set_data(heap->undefined_value());
+  script->set_context_data(heap->undefined_value());
   script->set_type(Smi::FromInt(Script::TYPE_NORMAL));
   script->set_compilation_type(Smi::FromInt(Script::COMPILATION_TYPE_HOST));
   script->set_wrapper(*wrapper);
-  script->set_line_ends(Heap::undefined_value());
-  script->set_eval_from_shared(Heap::undefined_value());
+  script->set_line_ends(heap->undefined_value());
+  script->set_eval_from_shared(heap->undefined_value());
   script->set_eval_from_instructions_offset(Smi::FromInt(0));
 
   return script;
@@ -235,7 +287,9 @@ Handle<Script> Factory::NewScript(Handle<String> source) {
 
 
 Handle<Proxy> Factory::NewProxy(Address addr, PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateProxy(addr, pretenure), Proxy);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateProxy(addr, pretenure),
+                     Proxy);
 }
 
 
@@ -246,7 +300,10 @@ Handle<Proxy> Factory::NewProxy(const AccessorDescriptor* desc) {
 
 Handle<ByteArray> Factory::NewByteArray(int length, PretenureFlag pretenure) {
   ASSERT(0 <= length);
-  CALL_HEAP_FUNCTION(Heap::AllocateByteArray(length, pretenure), ByteArray);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateByteArray(length, pretenure),
+      ByteArray);
 }
 
 
@@ -255,32 +312,43 @@ Handle<ExternalArray> Factory::NewExternalArray(int length,
                                                 void* external_pointer,
                                                 PretenureFlag pretenure) {
   ASSERT(0 <= length);
-  CALL_HEAP_FUNCTION(Heap::AllocateExternalArray(length,
-                                                 array_type,
-                                                 external_pointer,
-                                                 pretenure), ExternalArray);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateExternalArray(length,
+                                               array_type,
+                                               external_pointer,
+                                               pretenure),
+      ExternalArray);
 }
 
 
 Handle<JSGlobalPropertyCell> Factory::NewJSGlobalPropertyCell(
     Handle<Object> value) {
-  CALL_HEAP_FUNCTION(Heap::AllocateJSGlobalPropertyCell(*value),
-                     JSGlobalPropertyCell);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSGlobalPropertyCell(*value),
+      JSGlobalPropertyCell);
 }
 
 
 Handle<Map> Factory::NewMap(InstanceType type, int instance_size) {
-  CALL_HEAP_FUNCTION(Heap::AllocateMap(type, instance_size), Map);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateMap(type, instance_size),
+      Map);
 }
 
 
 Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
-  CALL_HEAP_FUNCTION(Heap::AllocateFunctionPrototype(*function), JSObject);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFunctionPrototype(*function),
+      JSObject);
 }
 
 
 Handle<Map> Factory::CopyMapDropDescriptors(Handle<Map> src) {
-  CALL_HEAP_FUNCTION(src->CopyDropDescriptors(), Map);
+  CALL_HEAP_FUNCTION(isolate(), src->CopyDropDescriptors(), Map);
 }
 
 
@@ -310,27 +378,27 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
 
 
 Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
-  CALL_HEAP_FUNCTION(src->CopyDropTransitions(), Map);
+  CALL_HEAP_FUNCTION(isolate(), src->CopyDropTransitions(), Map);
 }
 
 
 Handle<Map> Factory::GetFastElementsMap(Handle<Map> src) {
-  CALL_HEAP_FUNCTION(src->GetFastElementsMap(), Map);
+  CALL_HEAP_FUNCTION(isolate(), src->GetFastElementsMap(), Map);
 }
 
 
 Handle<Map> Factory::GetSlowElementsMap(Handle<Map> src) {
-  CALL_HEAP_FUNCTION(src->GetSlowElementsMap(), Map);
+  CALL_HEAP_FUNCTION(isolate(), src->GetSlowElementsMap(), Map);
 }
 
 
 Handle<Map> Factory::NewExternalArrayElementsMap(Handle<Map> src) {
-  CALL_HEAP_FUNCTION(src->NewExternalArrayElementsMap(), Map);
+  CALL_HEAP_FUNCTION(isolate(), src->NewExternalArrayElementsMap(), Map);
 }
 
 
 Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
-  CALL_HEAP_FUNCTION(array->Copy(), FixedArray);
+  CALL_HEAP_FUNCTION(isolate(), array->Copy(), FixedArray);
 }
 
 
@@ -338,10 +406,12 @@ Handle<JSFunction> Factory::BaseNewFunctionFromSharedFunctionInfo(
     Handle<SharedFunctionInfo> function_info,
     Handle<Map> function_map,
     PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateFunction(*function_map,
-                                            *function_info,
-                                            Heap::the_hole_value(),
-                                            pretenure),
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFunction(*function_map,
+                                          *function_info,
+                                          isolate()->heap()->the_hole_value(),
+                                          pretenure),
                      JSFunction);
 }
 
@@ -353,14 +423,13 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
   Handle<JSFunction> result = BaseNewFunctionFromSharedFunctionInfo(
       function_info,
       function_info->strict_mode()
-          ? Top::strict_mode_function_map()
-          : Top::function_map(),
+          ? isolate()->strict_mode_function_map()
+          : isolate()->function_map(),
       pretenure);
 
   result->set_context(*context);
   int number_of_literals = function_info->num_literals();
-  Handle<FixedArray> literals =
-      Factory::NewFixedArray(number_of_literals, pretenure);
+  Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure);
   if (number_of_literals > 0) {
     // Store the object, regexp and array functions in the literals
     // array prefix.  These functions will be used when creating
@@ -369,7 +438,7 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
                   context->global_context());
   }
   result->set_literals(*literals);
-  result->set_next_function_link(Heap::undefined_value());
+  result->set_next_function_link(isolate()->heap()->undefined_value());
 
   if (V8::UseCrankshaft() &&
       FLAG_always_opt &&
@@ -384,23 +453,32 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
 
 Handle<Object> Factory::NewNumber(double value,
                                   PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::NumberFromDouble(value, pretenure), Object);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->NumberFromDouble(value, pretenure), Object);
 }
 
 
 Handle<Object> Factory::NewNumberFromInt(int value) {
-  CALL_HEAP_FUNCTION(Heap::NumberFromInt32(value), Object);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->NumberFromInt32(value), Object);
 }
 
 
 Handle<Object> Factory::NewNumberFromUint(uint32_t value) {
-  CALL_HEAP_FUNCTION(Heap::NumberFromUint32(value), Object);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->NumberFromUint32(value), Object);
 }
 
 
 Handle<JSObject> Factory::NewNeanderObject() {
-  CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(Heap::neander_map()),
-                     JSObject);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObjectFromMap(
+          isolate()->heap()->neander_map()),
+      JSObject);
 }
 
 
@@ -450,11 +528,11 @@ Handle<Object> Factory::NewReferenceError(Handle<String> message) {
 Handle<Object> Factory::NewError(const char* maker, const char* type,
     Vector< Handle<Object> > args) {
   v8::HandleScope scope;  // Instantiate a closeable HandleScope for EscapeFrom.
-  Handle<FixedArray> array = Factory::NewFixedArray(args.length());
+  Handle<FixedArray> array = NewFixedArray(args.length());
   for (int i = 0; i < args.length(); i++) {
     array->set(i, *args[i]);
   }
-  Handle<JSArray> object = Factory::NewJSArrayWithElements(array);
+  Handle<JSArray> object = NewJSArrayWithElements(array);
   Handle<Object> result = NewError(maker, type, object);
   return result.EscapeFrom(&scope);
 }
@@ -475,15 +553,15 @@ Handle<Object> Factory::NewError(const char* type,
 Handle<Object> Factory::NewError(const char* maker,
                                  const char* type,
                                  Handle<JSArray> args) {
-  Handle<String> make_str = Factory::LookupAsciiSymbol(maker);
-  Handle<Object> fun_obj(Top::builtins()->GetPropertyNoExceptionThrown(
-      *make_str));
+  Handle<String> make_str = LookupAsciiSymbol(maker);
+  Handle<Object> fun_obj(
+      isolate()->js_builtins_object()->GetPropertyNoExceptionThrown(*make_str));
   // If the builtins haven't been properly configured yet this error
   // constructor may not have been defined.  Bail out.
   if (!fun_obj->IsJSFunction())
-    return Factory::undefined_value();
+    return undefined_value();
   Handle<JSFunction> fun = Handle<JSFunction>::cast(fun_obj);
-  Handle<Object> type_obj = Factory::LookupAsciiSymbol(type);
+  Handle<Object> type_obj = LookupAsciiSymbol(type);
   Object** argv[2] = { type_obj.location(),
                        Handle<Object>::cast(args).location() };
 
@@ -491,10 +569,7 @@ Handle<Object> Factory::NewError(const char* maker,
   // running the factory method, use the exception as the result.
   bool caught_exception;
   Handle<Object> result = Execution::TryCall(fun,
-                                             Top::builtins(),
-                                             2,
-                                             argv,
-                                             &caught_exception);
+      isolate()->js_builtins_object(), 2, argv, &caught_exception);
   return result;
 }
 
@@ -506,21 +581,17 @@ Handle<Object> Factory::NewError(Handle<String> message) {
 
 Handle<Object> Factory::NewError(const char* constructor,
                                  Handle<String> message) {
-  Handle<String> constr = Factory::LookupAsciiSymbol(constructor);
-  Handle<JSFunction> fun =
-      Handle<JSFunction>(
-          JSFunction::cast(
-              Top::builtins()->GetPropertyNoExceptionThrown(*constr)));
+  Handle<String> constr = LookupAsciiSymbol(constructor);
+  Handle<JSFunction> fun = Handle<JSFunction>(
+      JSFunction::cast(isolate()->js_builtins_object()->
+                       GetPropertyNoExceptionThrown(*constr)));
   Object** argv[1] = { Handle<Object>::cast(message).location() };
 
   // Invoke the JavaScript factory method. If an exception is thrown while
   // running the factory method, use the exception as the result.
   bool caught_exception;
   Handle<Object> result = Execution::TryCall(fun,
-                                             Top::builtins(),
-                                             1,
-                                             argv,
-                                             &caught_exception);
+      isolate()->js_builtins_object(), 1, argv, &caught_exception);
   return result;
 }
 
@@ -581,8 +652,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
   // property that refers to the function.
   SetPrototypeProperty(function, prototype);
   // Currently safe because it is only invoked from Genesis.
-  SetLocalPropertyNoThrow(
-      prototype, Factory::constructor_symbol(), function, DONT_ENUM);
+  SetLocalPropertyNoThrow(prototype, constructor_symbol(), function, DONT_ENUM);
   return function;
 }
 
@@ -603,17 +673,24 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc,
                               Code::Flags flags,
                               Handle<Object> self_ref,
                               bool immovable) {
-  CALL_HEAP_FUNCTION(Heap::CreateCode(desc, flags, self_ref, immovable), Code);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->CreateCode(
+                         desc, flags, self_ref, immovable),
+                     Code);
 }
 
 
 Handle<Code> Factory::CopyCode(Handle<Code> code) {
-  CALL_HEAP_FUNCTION(Heap::CopyCode(*code), Code);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->CopyCode(*code),
+                     Code);
 }
 
 
 Handle<Code> Factory::CopyCode(Handle<Code> code, Vector<byte> reloc_info) {
-  CALL_HEAP_FUNCTION(Heap::CopyCode(*code, reloc_info), Code);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->CopyCode(*code, reloc_info),
+                     Code);
 }
 
 
@@ -634,13 +711,15 @@ Handle<DescriptorArray> Factory::CopyAppendProxyDescriptor(
     Handle<String> key,
     Handle<Object> value,
     PropertyAttributes attributes) {
-  CALL_HEAP_FUNCTION(DoCopyInsert(*array, *key, *value, attributes),
+  CALL_HEAP_FUNCTION(isolate(),
+                     DoCopyInsert(*array, *key, *value, attributes),
                      DescriptorArray);
 }
 
 
 Handle<String> Factory::SymbolFromString(Handle<String> value) {
-  CALL_HEAP_FUNCTION(Heap::LookupSymbol(*value), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->LookupSymbol(*value), String);
 }
 
 
@@ -705,35 +784,43 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors(
 
 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
                                       PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(Heap::AllocateJSObject(*constructor, pretenure), JSObject);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObject(*constructor, pretenure), JSObject);
 }
 
 
 Handle<GlobalObject> Factory::NewGlobalObject(
     Handle<JSFunction> constructor) {
-  CALL_HEAP_FUNCTION(Heap::AllocateGlobalObject(*constructor),
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateGlobalObject(*constructor),
                      GlobalObject);
 }
 
 
 
 Handle<JSObject> Factory::NewJSObjectFromMap(Handle<Map> map) {
-  CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(*map, NOT_TENURED),
-                     JSObject);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObjectFromMap(*map, NOT_TENURED),
+      JSObject);
 }
 
 
 Handle<JSArray> Factory::NewJSArray(int capacity,
                                     PretenureFlag pretenure) {
-  Handle<JSObject> obj = NewJSObject(Top::array_function(), pretenure);
-  CALL_HEAP_FUNCTION(Handle<JSArray>::cast(obj)->Initialize(capacity), JSArray);
+  Handle<JSObject> obj = NewJSObject(isolate()->array_function(), pretenure);
+  CALL_HEAP_FUNCTION(isolate(),
+                     Handle<JSArray>::cast(obj)->Initialize(capacity),
+                     JSArray);
 }
 
 
 Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArray> elements,
                                                 PretenureFlag pretenure) {
   Handle<JSArray> result =
-      Handle<JSArray>::cast(NewJSObject(Top::array_function(), pretenure));
+      Handle<JSArray>::cast(NewJSObject(isolate()->array_function(),
+                                        pretenure));
   result->SetContent(*elements);
   return result;
 }
@@ -767,24 +854,27 @@ Handle<JSMessageObject> Factory::NewJSMessageObject(
     Handle<Object> script,
     Handle<Object> stack_trace,
     Handle<Object> stack_frames) {
-  CALL_HEAP_FUNCTION(Heap::AllocateJSMessageObject(*type,
-                                                   *arguments,
-                                                   start_position,
-                                                   end_position,
-                                                   *script,
-                                                   *stack_trace,
-                                                   *stack_frames),
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateJSMessageObject(*type,
+                         *arguments,
+                         start_position,
+                         end_position,
+                         *script,
+                         *stack_trace,
+                         *stack_frames),
                      JSMessageObject);
 }
 
 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(Handle<String> name) {
-  CALL_HEAP_FUNCTION(Heap::AllocateSharedFunctionInfo(*name),
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateSharedFunctionInfo(*name),
                      SharedFunctionInfo);
 }
 
 
 Handle<String> Factory::NumberToString(Handle<Object> number) {
-  CALL_HEAP_FUNCTION(Heap::NumberToString(*number), String);
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->NumberToString(*number), String);
 }
 
 
@@ -792,24 +882,28 @@ Handle<NumberDictionary> Factory::DictionaryAtNumberPut(
     Handle<NumberDictionary> dictionary,
     uint32_t key,
     Handle<Object> value) {
-  CALL_HEAP_FUNCTION(dictionary->AtNumberPut(key, *value), NumberDictionary);
+  CALL_HEAP_FUNCTION(isolate(),
+                     dictionary->AtNumberPut(key, *value),
+                     NumberDictionary);
 }
 
 
 Handle<JSFunction> Factory::NewFunctionHelper(Handle<String> name,
                                               Handle<Object> prototype) {
   Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name);
-  CALL_HEAP_FUNCTION(Heap::AllocateFunction(*Top::function_map(),
-                                            *function_share,
-                                            *prototype),
-                     JSFunction);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateFunction(*isolate()->function_map(),
+                                          *function_share,
+                                          *prototype),
+      JSFunction);
 }
 
 
 Handle<JSFunction> Factory::NewFunction(Handle<String> name,
                                         Handle<Object> prototype) {
   Handle<JSFunction> fun = NewFunctionHelper(name, prototype);
-  fun->set_context(Top::context()->global_context());
+  fun->set_context(isolate()->context()->global_context());
   return fun;
 }
 
@@ -819,9 +913,10 @@ Handle<JSFunction> Factory::NewFunctionWithoutPrototypeHelper(
     StrictModeFlag strict_mode) {
   Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name);
   Handle<Map> map = strict_mode == kStrictMode
-      ? Top::strict_mode_function_without_prototype_map()
-      : Top::function_without_prototype_map();
-  CALL_HEAP_FUNCTION(Heap::AllocateFunction(
+      ? isolate()->strict_mode_function_without_prototype_map()
+      : isolate()->function_without_prototype_map();
+  CALL_HEAP_FUNCTION(isolate(),
+                     isolate()->heap()->AllocateFunction(
                          *map,
                          *function_share,
                          *the_hole_value()),
@@ -833,19 +928,19 @@ Handle<JSFunction> Factory::NewFunctionWithoutPrototype(
     Handle<String> name,
     StrictModeFlag strict_mode) {
   Handle<JSFunction> fun = NewFunctionWithoutPrototypeHelper(name, strict_mode);
-  fun->set_context(Top::context()->global_context());
+  fun->set_context(isolate()->context()->global_context());
   return fun;
 }
 
 
 Handle<Object> Factory::ToObject(Handle<Object> object) {
-  CALL_HEAP_FUNCTION(object->ToObject(), Object);
+  CALL_HEAP_FUNCTION(isolate(), object->ToObject(), Object);
 }
 
 
 Handle<Object> Factory::ToObject(Handle<Object> object,
                                  Handle<Context> global_context) {
-  CALL_HEAP_FUNCTION(object->ToObject(*global_context), Object);
+  CALL_HEAP_FUNCTION(isolate(), object->ToObject(*global_context), Object);
 }
 
 
@@ -862,13 +957,13 @@ Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
   // debug info object to avoid allocation while setting up the debug info
   // object.
   Handle<FixedArray> break_points(
-      Factory::NewFixedArray(Debug::kEstimatedNofBreakPointsInFunction));
+      NewFixedArray(Debug::kEstimatedNofBreakPointsInFunction));
 
   // Create and set up the debug info object. Debug info contains the
   // function, a copy of the original code, the executing code and an
   // initial fixed array for active break points.
   Handle<DebugInfo> debug_info =
-      Handle<DebugInfo>::cast(Factory::NewStruct(DEBUG_INFO_TYPE));
+      Handle<DebugInfo>::cast(NewStruct(DEBUG_INFO_TYPE));
   debug_info->set_shared(*shared);
   debug_info->set_original_code(*original_code);
   debug_info->set_code(*code);
@@ -884,15 +979,19 @@ Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
 
 Handle<JSObject> Factory::NewArgumentsObject(Handle<Object> callee,
                                              int length) {
-  CALL_HEAP_FUNCTION(Heap::AllocateArgumentsObject(*callee, length), JSObject);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateArgumentsObject(*callee, length), JSObject);
 }
 
 
 Handle<JSFunction> Factory::CreateApiFunction(
     Handle<FunctionTemplateInfo> obj, ApiInstanceType instance_type) {
-  Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::HandleApiCall));
+  Handle<Code> code = Handle<Code>(isolate()->builtins()->builtin(
+      Builtins::HandleApiCall));
   Handle<Code> construct_stub =
-      Handle<Code>(Builtins::builtin(Builtins::JSConstructStubApi));
+      Handle<Code>(isolate()->builtins()->builtin(
+          Builtins::JSConstructStubApi));
 
   int internal_field_count = 0;
   if (!obj->instance_template()->IsUndefined()) {
@@ -924,11 +1023,11 @@ Handle<JSFunction> Factory::CreateApiFunction(
   ASSERT(type != INVALID_TYPE);
 
   Handle<JSFunction> result =
-      Factory::NewFunction(Factory::empty_symbol(),
-                           type,
-                           instance_size,
-                           code,
-                           true);
+      NewFunction(Factory::empty_symbol(),
+                  type,
+                  instance_size,
+                  code,
+                  true);
   // Set class name.
   Handle<Object> class_name = Handle<Object>(obj->class_name());
   if (class_name->IsString()) {
@@ -976,7 +1075,7 @@ Handle<JSFunction> Factory::CreateApiFunction(
   while (true) {
     Handle<Object> props = Handle<Object>(obj->property_accessors());
     if (!props->IsUndefined()) {
-      array = Factory::CopyAppendCallbackDescriptors(array, props);
+      array = CopyAppendCallbackDescriptors(array, props);
     }
     Handle<Object> parent = Handle<Object>(obj->parent_template());
     if (parent->IsUndefined()) break;
@@ -992,7 +1091,8 @@ Handle<JSFunction> Factory::CreateApiFunction(
 
 
 Handle<MapCache> Factory::NewMapCache(int at_least_space_for) {
-  CALL_HEAP_FUNCTION(MapCache::Allocate(at_least_space_for), MapCache);
+  CALL_HEAP_FUNCTION(isolate(),
+                     MapCache::Allocate(at_least_space_for), MapCache);
 }
 
 
@@ -1012,7 +1112,8 @@ MUST_USE_RESULT static MaybeObject* UpdateMapCacheWith(Context* context,
 Handle<MapCache> Factory::AddToMapCache(Handle<Context> context,
                                         Handle<FixedArray> keys,
                                         Handle<Map> map) {
-  CALL_HEAP_FUNCTION(UpdateMapCacheWith(*context, *keys, *map), MapCache);
+  CALL_HEAP_FUNCTION(isolate(),
+                     UpdateMapCacheWith(*context, *keys, *map), MapCache);
 }
 
 
@@ -1061,8 +1162,8 @@ void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
   store->set(JSRegExp::kTagIndex, Smi::FromInt(type));
   store->set(JSRegExp::kSourceIndex, *source);
   store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags.value()));
-  store->set(JSRegExp::kIrregexpASCIICodeIndex, Heap::the_hole_value());
-  store->set(JSRegExp::kIrregexpUC16CodeIndex, Heap::the_hole_value());
+  store->set(JSRegExp::kIrregexpASCIICodeIndex, HEAP->the_hole_value());
+  store->set(JSRegExp::kIrregexpUC16CodeIndex, HEAP->the_hole_value());
   store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::FromInt(0));
   store->set(JSRegExp::kIrregexpCaptureCountIndex,
              Smi::FromInt(capture_count));
index 1b9d94028a74cbe99d1d2a21e55605335940c0a1..d9d5ec4af4a5793e4b631f5d3391a6c3941795f9 100644 (file)
@@ -29,6 +29,7 @@
 #define V8_FACTORY_H_
 
 #include "globals.h"
+#include "handles.h"
 #include "heap.h"
 
 namespace v8 {
@@ -36,34 +37,34 @@ namespace internal {
 
 // Interface for handle based allocation.
 
-class Factory : public AllStatic {
+class Factory {
  public:
   // Allocate a new fixed array with undefined entries.
-  static Handle<FixedArray> NewFixedArray(
+  Handle<FixedArray> NewFixedArray(
       int size,
       PretenureFlag pretenure = NOT_TENURED);
 
   // Allocate a new fixed array with non-existing entries (the hole).
-  static Handle<FixedArray> NewFixedArrayWithHoles(
+  Handle<FixedArray> NewFixedArrayWithHoles(
       int size,
       PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<NumberDictionary> NewNumberDictionary(int at_least_space_for);
+  Handle<NumberDictionary> NewNumberDictionary(int at_least_space_for);
 
-  static Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
+  Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
 
-  static Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
-  static Handle<DeoptimizationInputData> NewDeoptimizationInputData(
+  Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
+  Handle<DeoptimizationInputData> NewDeoptimizationInputData(
       int deopt_entry_count,
       PretenureFlag pretenure);
-  static Handle<DeoptimizationOutputData> NewDeoptimizationOutputData(
+  Handle<DeoptimizationOutputData> NewDeoptimizationOutputData(
       int deopt_entry_count,
       PretenureFlag pretenure);
 
-  static Handle<String> LookupSymbol(Vector<const char> str);
-  static Handle<String> LookupAsciiSymbol(Vector<const char> str);
-  static Handle<String> LookupTwoByteSymbol(Vector<const uc16> str);
-  static Handle<String> LookupAsciiSymbol(const char* str) {
+  Handle<String> LookupSymbol(Vector<const char> str);
+  Handle<String> LookupAsciiSymbol(Vector<const char> str);
+  Handle<String> LookupTwoByteSymbol(Vector<const uc16> str);
+  Handle<String> LookupAsciiSymbol(const char* str) {
     return LookupSymbol(CStrVector(str));
   }
 
@@ -90,234 +91,234 @@ class Factory : public AllStatic {
   //     two byte.
   //
   // ASCII strings are pretenured when used as keys in the SourceCodeCache.
-  static Handle<String> NewStringFromAscii(
+  Handle<String> NewStringFromAscii(
       Vector<const char> str,
       PretenureFlag pretenure = NOT_TENURED);
 
   // UTF8 strings are pretenured when used for regexp literal patterns and
   // flags in the parser.
-  static Handle<String> NewStringFromUtf8(
+  Handle<String> NewStringFromUtf8(
       Vector<const char> str,
       PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<String> NewStringFromTwoByte(
+  Handle<String> NewStringFromTwoByte(
       Vector<const uc16> str,
       PretenureFlag pretenure = NOT_TENURED);
 
   // Allocates and partially initializes an ASCII or TwoByte String. The
   // characters of the string are uninitialized. Currently used in regexp code
   // only, where they are pretenured.
-  static Handle<String> NewRawAsciiString(
+  Handle<String> NewRawAsciiString(
       int length,
       PretenureFlag pretenure = NOT_TENURED);
-  static Handle<String> NewRawTwoByteString(
+  Handle<String> NewRawTwoByteString(
       int length,
       PretenureFlag pretenure = NOT_TENURED);
 
   // Create a new cons string object which consists of a pair of strings.
-  static Handle<String> NewConsString(Handle<String> first,
-                                      Handle<String> second);
+  Handle<String> NewConsString(Handle<String> first,
+                               Handle<String> second);
 
   // Create a new string object which holds a substring of a string.
-  static Handle<String> NewSubString(Handle<String> str,
-                                     int begin,
-                                     int end);
+  Handle<String> NewSubString(Handle<String> str,
+                              int begin,
+                              int end);
 
   // Creates a new external String object.  There are two String encodings
   // in the system: ASCII and two byte.  Unlike other String types, it does
   // not make sense to have a UTF-8 factory function for external strings,
   // because we cannot change the underlying buffer.
-  static Handle<String> NewExternalStringFromAscii(
+  Handle<String> NewExternalStringFromAscii(
       ExternalAsciiString::Resource* resource);
-  static Handle<String> NewExternalStringFromTwoByte(
+  Handle<String> NewExternalStringFromTwoByte(
       ExternalTwoByteString::Resource* resource);
 
   // Create a global (but otherwise uninitialized) context.
-  static Handle<Context> NewGlobalContext();
+  Handle<Context> NewGlobalContext();
 
   // Create a function context.
-  static Handle<Context> NewFunctionContext(int length,
-                                            Handle<JSFunction> closure);
+  Handle<Context> NewFunctionContext(int length,
+                                     Handle<JSFunction> closure);
 
   // Create a 'with' context.
-  static Handle<Context> NewWithContext(Handle<Context> previous,
-                                        Handle<JSObject> extension,
-                                        bool is_catch_context);
+  Handle<Context> NewWithContext(Handle<Context> previous,
+                                 Handle<JSObject> extension,
+                                 bool is_catch_context);
 
   // Return the Symbol matching the passed in string.
-  static Handle<String> SymbolFromString(Handle<String> value);
+  Handle<String> SymbolFromString(Handle<String> value);
 
   // Allocate a new struct.  The struct is pretenured (allocated directly in
   // the old generation).
-  static Handle<Struct> NewStruct(InstanceType type);
+  Handle<Struct> NewStruct(InstanceType type);
 
-  static Handle<AccessorInfo> NewAccessorInfo();
+  Handle<AccessorInfo> NewAccessorInfo();
 
-  static Handle<Script> NewScript(Handle<String> source);
+  Handle<Script> NewScript(Handle<String> source);
 
   // Proxies are pretenured when allocated by the bootstrapper.
-  static Handle<Proxy> NewProxy(Address addr,
-                                PretenureFlag pretenure = NOT_TENURED);
+  Handle<Proxy> NewProxy(Address addr,
+                         PretenureFlag pretenure = NOT_TENURED);
 
   // Allocate a new proxy.  The proxy is pretenured (allocated directly in
   // the old generation).
-  static Handle<Proxy> NewProxy(const AccessorDescriptor* proxy);
+  Handle<Proxy> NewProxy(const AccessorDescriptor* proxy);
 
-  static Handle<ByteArray> NewByteArray(int length,
-                                        PretenureFlag pretenure = NOT_TENURED);
+  Handle<ByteArray> NewByteArray(int length,
+                                 PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<ExternalArray> NewExternalArray(
+  Handle<ExternalArray> NewExternalArray(
       int length,
       ExternalArrayType array_type,
       void* external_pointer,
       PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<JSGlobalPropertyCell> NewJSGlobalPropertyCell(
+  Handle<JSGlobalPropertyCell> NewJSGlobalPropertyCell(
       Handle<Object> value);
 
-  static Handle<Map> NewMap(InstanceType type, int instance_size);
+  Handle<Map> NewMap(InstanceType type, int instance_size);
 
-  static Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
+  Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
 
-  static Handle<Map> CopyMapDropDescriptors(Handle<Map> map);
+  Handle<Map> CopyMapDropDescriptors(Handle<Map> map);
 
   // Copy the map adding more inobject properties if possible without
   // overflowing the instance size.
-  static Handle<Map> CopyMap(Handle<Map> map, int extra_inobject_props);
+  Handle<Map> CopyMap(Handle<Map> map, int extra_inobject_props);
 
-  static Handle<Map> CopyMapDropTransitions(Handle<Map> map);
+  Handle<Map> CopyMapDropTransitions(Handle<Map> map);
 
-  static Handle<Map> GetFastElementsMap(Handle<Map> map);
+  Handle<Map> GetFastElementsMap(Handle<Map> map);
 
-  static Handle<Map> GetSlowElementsMap(Handle<Map> map);
+  Handle<Map> GetSlowElementsMap(Handle<Map> map);
 
-  static Handle<Map> NewExternalArrayElementsMap(Handle<Map> map);
+  Handle<Map> NewExternalArrayElementsMap(Handle<Map> map);
 
-  static Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
+  Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
 
   // Numbers (e.g., literals) are pretenured by the parser.
-  static Handle<Object> NewNumber(double value,
-                                  PretenureFlag pretenure = NOT_TENURED);
+  Handle<Object> NewNumber(double value,
+                           PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<Object> NewNumberFromInt(int value);
-  static Handle<Object> NewNumberFromUint(uint32_t value);
+  Handle<Object> NewNumberFromInt(int value);
+  Handle<Object> NewNumberFromUint(uint32_t value);
 
   // These objects are used by the api to create env-independent data
   // structures in the heap.
-  static Handle<JSObject> NewNeanderObject();
+  Handle<JSObject> NewNeanderObject();
 
-  static Handle<JSObject> NewArgumentsObject(Handle<Object> callee, int length);
+  Handle<JSObject> NewArgumentsObject(Handle<Object> callee, int length);
 
   // JS objects are pretenured when allocated by the bootstrapper and
   // runtime.
-  static Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
-                                      PretenureFlag pretenure = NOT_TENURED);
+  Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
+                               PretenureFlag pretenure = NOT_TENURED);
 
   // Global objects are pretenured.
-  static Handle<GlobalObject> NewGlobalObject(Handle<JSFunction> constructor);
+  Handle<GlobalObject> NewGlobalObject(Handle<JSFunction> constructor);
 
   // JS objects are pretenured when allocated by the bootstrapper and
   // runtime.
-  static Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
+  Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
 
   // JS arrays are pretenured when allocated by the parser.
-  static Handle<JSArray> NewJSArray(int capacity,
-                                    PretenureFlag pretenure = NOT_TENURED);
+  Handle<JSArray> NewJSArray(int capacity,
+                             PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<JSArray> NewJSArrayWithElements(
+  Handle<JSArray> NewJSArrayWithElements(
       Handle<FixedArray> elements,
       PretenureFlag pretenure = NOT_TENURED);
 
-  static Handle<JSFunction> NewFunction(Handle<String> name,
-                                        Handle<Object> prototype);
+  Handle<JSFunction> NewFunction(Handle<String> name,
+                                 Handle<Object> prototype);
 
-  static Handle<JSFunction> NewFunctionWithoutPrototype(
+  Handle<JSFunction> NewFunctionWithoutPrototype(
       Handle<String> name,
       StrictModeFlag strict_mode);
 
-  static Handle<JSFunction> NewFunction(Handle<Object> super, bool is_global);
+  Handle<JSFunction> NewFunction(Handle<Object> super, bool is_global);
 
-  static Handle<JSFunction> BaseNewFunctionFromSharedFunctionInfo(
+  Handle<JSFunction> BaseNewFunctionFromSharedFunctionInfo(
       Handle<SharedFunctionInfo> function_info,
       Handle<Map> function_map,
       PretenureFlag pretenure);
 
-  static Handle<JSFunction> NewFunctionFromSharedFunctionInfo(
+  Handle<JSFunction> NewFunctionFromSharedFunctionInfo(
       Handle<SharedFunctionInfo> function_info,
       Handle<Context> context,
       PretenureFlag pretenure = TENURED);
 
-  static Handle<Code> NewCode(const CodeDesc& desc,
-                              Code::Flags flags,
-                              Handle<Object> self_reference,
-                              bool immovable = false);
+  Handle<Code> NewCode(const CodeDesc& desc,
+                       Code::Flags flags,
+                       Handle<Object> self_reference,
+                       bool immovable = false);
 
-  static Handle<Code> CopyCode(Handle<Code> code);
+  Handle<Code> CopyCode(Handle<Code> code);
 
-  static Handle<Code> CopyCode(Handle<Code> code, Vector<byte> reloc_info);
+  Handle<Code> CopyCode(Handle<Code> code, Vector<byte> reloc_info);
 
-  static Handle<Object> ToObject(Handle<Object> object);
-  static Handle<Object> ToObject(Handle<Object> object,
-                                 Handle<Context> global_context);
+  Handle<Object> ToObject(Handle<Object> object);
+  Handle<Object> ToObject(Handle<Object> object,
+                          Handle<Context> global_context);
 
   // Interface for creating error objects.
 
-  static Handle<Object> NewError(const char* maker, const char* type,
-                                 Handle<JSArray> args);
-  static Handle<Object> NewError(const char* maker, const char* type,
-                                 Vector< Handle<Object> > args);
-  static Handle<Object> NewError(const char* type,
-                                 Vector< Handle<Object> > args);
-  static Handle<Object> NewError(Handle<String> message);
-  static Handle<Object> NewError(const char* constructor,
-                                 Handle<String> message);
+  Handle<Object> NewError(const char* maker, const char* type,
+                          Handle<JSArray> args);
+  Handle<Object> NewError(const char* maker, const char* type,
+                          Vector< Handle<Object> > args);
+  Handle<Object> NewError(const char* type,
+                          Vector< Handle<Object> > args);
+  Handle<Object> NewError(Handle<String> message);
+  Handle<Object> NewError(const char* constructor,
+                          Handle<String> message);
 
-  static Handle<Object> NewTypeError(const char* type,
-                                     Vector< Handle<Object> > args);
-  static Handle<Object> NewTypeError(Handle<String> message);
+  Handle<Object> NewTypeError(const char* type,
+                              Vector< Handle<Object> > args);
+  Handle<Object> NewTypeError(Handle<String> message);
 
-  static Handle<Object> NewRangeError(const char* type,
-                                      Vector< Handle<Object> > args);
-  static Handle<Object> NewRangeError(Handle<String> message);
+  Handle<Object> NewRangeError(const char* type,
+                               Vector< Handle<Object> > args);
+  Handle<Object> NewRangeError(Handle<String> message);
 
-  static Handle<Object> NewSyntaxError(const char* type, Handle<JSArray> args);
-  static Handle<Object> NewSyntaxError(Handle<String> message);
+  Handle<Object> NewSyntaxError(const char* type, Handle<JSArray> args);
+  Handle<Object> NewSyntaxError(Handle<String> message);
 
-  static Handle<Object> NewReferenceError(const char* type,
-                                          Vector< Handle<Object> > args);
-  static Handle<Object> NewReferenceError(Handle<String> message);
+  Handle<Object> NewReferenceError(const char* type,
+                                   Vector< Handle<Object> > args);
+  Handle<Object> NewReferenceError(Handle<String> message);
 
-  static Handle<Object> NewEvalError(const char* type,
-                                     Vector< Handle<Object> > args);
+  Handle<Object> NewEvalError(const char* type,
+                              Vector< Handle<Object> > args);
 
 
-  static Handle<JSFunction> NewFunction(Handle<String> name,
-                                        InstanceType type,
-                                        int instance_size,
-                                        Handle<Code> code,
-                                        bool force_initial_map);
+  Handle<JSFunction> NewFunction(Handle<String> name,
+                                 InstanceType type,
+                                 int instance_size,
+                                 Handle<Code> code,
+                                 bool force_initial_map);
 
-  static Handle<JSFunction> NewFunction(Handle<Map> function_map,
+  Handle<JSFunction> NewFunction(Handle<Map> function_map,
       Handle<SharedFunctionInfo> shared, Handle<Object> prototype);
 
 
-  static Handle<JSFunction> NewFunctionWithPrototype(Handle<String> name,
-                                                     InstanceType type,
-                                                     int instance_size,
-                                                     Handle<JSObject> prototype,
-                                                     Handle<Code> code,
-                                                     bool force_initial_map);
+  Handle<JSFunction> NewFunctionWithPrototype(Handle<String> name,
+                                              InstanceType type,
+                                              int instance_size,
+                                              Handle<JSObject> prototype,
+                                              Handle<Code> code,
+                                              bool force_initial_map);
 
-  static Handle<JSFunction> NewFunctionWithoutPrototype(Handle<String> name,
-                                                        Handle<Code> code);
+  Handle<JSFunction> NewFunctionWithoutPrototype(Handle<String> name,
+                                                 Handle<Code> code);
 
-  static Handle<DescriptorArray> CopyAppendProxyDescriptor(
+  Handle<DescriptorArray> CopyAppendProxyDescriptor(
       Handle<DescriptorArray> array,
       Handle<String> key,
       Handle<Object> value,
       PropertyAttributes attributes);
 
-  static Handle<String> NumberToString(Handle<Object> number);
+  Handle<String> NumberToString(Handle<Object> number);
 
   enum ApiInstanceType {
     JavaScriptObject,
@@ -325,47 +326,47 @@ class Factory : public AllStatic {
     OuterGlobalObject
   };
 
-  static Handle<JSFunction> CreateApiFunction(
+  Handle<JSFunction> CreateApiFunction(
       Handle<FunctionTemplateInfo> data,
       ApiInstanceType type = JavaScriptObject);
 
-  static Handle<JSFunction> InstallMembers(Handle<JSFunction> function);
+  Handle<JSFunction> InstallMembers(Handle<JSFunction> function);
 
   // Installs interceptors on the instance.  'desc' is a function template,
   // and instance is an object instance created by the function of this
   // function template.
-  static void ConfigureInstance(Handle<FunctionTemplateInfo> desc,
-                                Handle<JSObject> instance,
-                                bool* pending_exception);
+  void ConfigureInstance(Handle<FunctionTemplateInfo> desc,
+                         Handle<JSObject> instance,
+                         bool* pending_exception);
 
 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
-  static inline Handle<type> name() {                                          \
+  inline Handle<type> name() {                                                 \
     return Handle<type>(BitCast<type**>(                                       \
-        &Heap::roots_[Heap::k##camel_name##RootIndex]));                       \
+        &isolate()->heap()->roots_[Heap::k##camel_name##RootIndex]));          \
   }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
-#define SYMBOL_ACCESSOR(name, str) \
-  static inline Handle<String> name() {                                        \
+#define SYMBOL_ACCESSOR(name, str)                                             \
+  inline Handle<String> name() {                                               \
     return Handle<String>(BitCast<String**>(                                   \
-        &Heap::roots_[Heap::k##name##RootIndex]));                             \
+        &isolate()->heap()->roots_[Heap::k##name##RootIndex]));                \
   }
   SYMBOL_LIST(SYMBOL_ACCESSOR)
 #undef SYMBOL_ACCESSOR
 
-  static Handle<String> hidden_symbol() {
-    return Handle<String>(&Heap::hidden_symbol_);
+  Handle<String> hidden_symbol() {
+    return Handle<String>(&isolate()->heap()->hidden_symbol_);
   }
 
-  static Handle<SharedFunctionInfo> NewSharedFunctionInfo(
+  Handle<SharedFunctionInfo> NewSharedFunctionInfo(
       Handle<String> name,
       int number_of_literals,
       Handle<Code> code,
       Handle<SerializedScopeInfo> scope_info);
-  static Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
+  Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
 
-  static Handle<JSMessageObject> NewJSMessageObject(
+  Handle<JSMessageObject> NewJSMessageObject(
       Handle<String> type,
       Handle<JSArray> arguments,
       int start_position,
@@ -374,55 +375,57 @@ class Factory : public AllStatic {
       Handle<Object> stack_trace,
       Handle<Object> stack_frames);
 
-  static Handle<NumberDictionary> DictionaryAtNumberPut(
+  Handle<NumberDictionary> DictionaryAtNumberPut(
       Handle<NumberDictionary>,
       uint32_t key,
       Handle<Object> value);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  static Handle<DebugInfo> NewDebugInfo(Handle<SharedFunctionInfo> shared);
+  Handle<DebugInfo> NewDebugInfo(Handle<SharedFunctionInfo> shared);
 #endif
 
   // Return a map using the map cache in the global context.
   // The key is an ordered set of property names.
-  static Handle<Map> ObjectLiteralMapFromCache(Handle<Context> context,
-                                               Handle<FixedArray> keys);
+  Handle<Map> ObjectLiteralMapFromCache(Handle<Context> context,
+                                        Handle<FixedArray> keys);
 
   // Creates a new FixedArray that holds the data associated with the
   // atom regexp and stores it in the regexp.
-  static void SetRegExpAtomData(Handle<JSRegExp> regexp,
-                                JSRegExp::Type type,
-                                Handle<String> source,
-                                JSRegExp::Flags flags,
-                                Handle<Object> match_pattern);
+  void SetRegExpAtomData(Handle<JSRegExp> regexp,
+                         JSRegExp::Type type,
+                         Handle<String> source,
+                         JSRegExp::Flags flags,
+                         Handle<Object> match_pattern);
 
   // Creates a new FixedArray that holds the data associated with the
   // irregexp regexp and stores it in the regexp.
-  static void SetRegExpIrregexpData(Handle<JSRegExp> regexp,
-                                    JSRegExp::Type type,
-                                    Handle<String> source,
-                                    JSRegExp::Flags flags,
-                                    int capture_count);
+  void SetRegExpIrregexpData(Handle<JSRegExp> regexp,
+                             JSRegExp::Type type,
+                             Handle<String> source,
+                             JSRegExp::Flags flags,
+                             int capture_count);
 
  private:
-  static Handle<JSFunction> NewFunctionHelper(Handle<String> name,
-                                              Handle<Object> prototype);
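+  // NOTE: this cast assumes Factory has no fields of its own and is only
+  // ever reached through its owning Isolate, so the Factory* really points
+  // at the Isolate and casting back recovers it.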
+  Isolate* isolate() { return reinterpret_cast<Isolate*>(this); }
 
-  static Handle<JSFunction> NewFunctionWithoutPrototypeHelper(
+  Handle<JSFunction> NewFunctionHelper(Handle<String> name,
+                                       Handle<Object> prototype);
+
+  Handle<JSFunction> NewFunctionWithoutPrototypeHelper(
       Handle<String> name,
       StrictModeFlag strict_mode);
 
-  static Handle<DescriptorArray> CopyAppendCallbackDescriptors(
+  Handle<DescriptorArray> CopyAppendCallbackDescriptors(
       Handle<DescriptorArray> array,
       Handle<Object> descriptors);
 
   // Create a new map cache.
-  static Handle<MapCache> NewMapCache(int at_least_space_for);
+  Handle<MapCache> NewMapCache(int at_least_space_for);
 
   // Update the map cache in the global context with (keys, map)
-  static Handle<MapCache> AddToMapCache(Handle<Context> context,
-                                        Handle<FixedArray> keys,
-                                        Handle<Map> map);
+  Handle<MapCache> AddToMapCache(Handle<Context> context,
+                                 Handle<FixedArray> keys,
+                                 Handle<Map> map);
 };
 
 
index cdad211cd61287f19d5d16b7e16ac7d9e2b9d479..76a45ba4d8ce29faedf889c8023df3be79f848fe 100644 (file)
@@ -447,6 +447,8 @@ DEFINE_bool(collect_heap_spill_statistics, false,
             "report heap spill statistics along with heap_stats "
             "(requires heap_stats)")
 
+DEFINE_bool(trace_isolates, false, "trace isolate state changes")
+
 // VM state
 DEFINE_bool(log_state_changes, false, "Log state changes.")
 
index ee7be95f1a4631de3bd73f82d3242304c062ec54..f629900733e14a8c733eb7712b209f319e716cec 100644 (file)
 namespace v8 {
 namespace internal {
 
-FrameElement::ZoneObjectList* FrameElement::ConstantList() {
-  static ZoneObjectList list(10);
-  return &list;
-}
-
 
 } }  // namespace v8::internal
index ae5d6a1bf2f1859f0e8aff7d58d5dfc4937dc35b..0c7d0103e02d1fbebe6004b2112f9c41ee75c646 100644 (file)
@@ -106,20 +106,9 @@ class FrameElement BASE_EMBEDDED {
     return result;
   }
 
-  // Static indirection table for handles to constants.  If a frame
-  // element represents a constant, the data contains an index into
-  // this table of handles to the actual constants.
-  typedef ZoneList<Handle<Object> > ZoneObjectList;
-
-  static ZoneObjectList* ConstantList();
-
   static bool ConstantPoolOverflowed() {
-    return !DataField::is_valid(ConstantList()->length());
-  }
-
-  // Clear the constants indirection table.
-  static void ClearConstantList() {
-    ConstantList()->Clear();
+    return !DataField::is_valid(
+        Isolate::Current()->frame_element_constant_list()->length());
   }
 
   bool is_synced() const { return SyncedField::decode(value_); }
@@ -164,7 +153,8 @@ class FrameElement BASE_EMBEDDED {
 
   Handle<Object> handle() const {
     ASSERT(is_constant());
-    return ConstantList()->at(DataField::decode(value_));
+    return Isolate::Current()->frame_element_constant_list()->
+        at(DataField::decode(value_));
   }
 
   int index() const {
@@ -232,12 +222,14 @@ class FrameElement BASE_EMBEDDED {
 
   // Used to construct constant elements.
   FrameElement(Handle<Object> value, SyncFlag is_synced, TypeInfo info) {
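+    // The constants indirection table lives on the isolate; the element
+    // itself only stores an index into that list.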
+    ZoneObjectList* constant_list =
+        Isolate::Current()->frame_element_constant_list();
     value_ = TypeField::encode(CONSTANT)
         | CopiedField::encode(false)
         | SyncedField::encode(is_synced != NOT_SYNCED)
         | TypeInfoField::encode(info.ToInt())
-        | DataField::encode(ConstantList()->length());
-    ConstantList()->Add(value);
+        | DataField::encode(constant_list->length());
+    constant_list->Add(value);
   }
 
   Type type() const { return TypeField::decode(value_); }
index 78bb646c78df386112ee023089c5e0e0128e2a62..f708f857ece943d7db637a2f282e7073ed6f5892 100644 (file)
@@ -30,6 +30,8 @@
 
 #include "frames.h"
 
+#include "isolate.h"
+
 #if V8_TARGET_ARCH_IA32
 #include "ia32/frames-ia32.h"
 #elif V8_TARGET_ARCH_X64
@@ -91,6 +93,11 @@ inline StackHandler* StackFrame::top_handler() const {
 }
 
 
+inline Code* StackFrame::GetContainingCode(Isolate* isolate, Address pc) {
+  return isolate->pc_to_code_cache()->GetCacheEntry(pc)->code;
+}
+
+
 inline Object* StandardFrame::GetExpression(int index) const {
   return Memory::Object_at(GetExpressionAddress(index));
 }
index 7dae462de431cde8f55aa427b35d8b70f4779b53..30d0456634e3c6de55f0b647dd81e5ca874026aa 100644 (file)
 #include "safepoint-table.h"
 #include "scopeinfo.h"
 #include "string-stream.h"
-#include "top.h"
 
 namespace v8 {
 namespace internal {
 
-PcToCodeCache::PcToCodeCacheEntry
-    PcToCodeCache::cache_[PcToCodeCache::kPcToCodeCacheSize];
 
 int SafeStackFrameIterator::active_count_ = 0;
 
@@ -77,7 +74,8 @@ class StackHandlerIterator BASE_EMBEDDED {
 #define INITIALIZE_SINGLETON(type, field) field##_(this),
 StackFrameIterator::StackFrameIterator()
     : STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
-      frame_(NULL), handler_(NULL), thread_(Top::GetCurrentThread()),
+      frame_(NULL), handler_(NULL),
+      thread_(Isolate::Current()->thread_local_top()),
       fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
   Reset();
 }
@@ -87,10 +85,11 @@ StackFrameIterator::StackFrameIterator(ThreadLocalTop* t)
       fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
   Reset();
 }
-StackFrameIterator::StackFrameIterator(bool use_top, Address fp, Address sp)
+StackFrameIterator::StackFrameIterator(Isolate* isolate,
+                                       bool use_top, Address fp, Address sp)
     : STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
       frame_(NULL), handler_(NULL),
-      thread_(use_top ? Top::GetCurrentThread() : NULL),
+      thread_(use_top ? isolate->thread_local_top() : NULL),
       fp_(use_top ? NULL : fp), sp_(sp),
       advance_(use_top ? &StackFrameIterator::AdvanceWithHandler :
                &StackFrameIterator::AdvanceWithoutHandler) {
@@ -138,8 +137,10 @@ void StackFrameIterator::Reset() {
   StackFrame::State state;
   StackFrame::Type type;
   if (thread_ != NULL) {
-    type = ExitFrame::GetStateForFramePointer(Top::c_entry_fp(thread_), &state);
-    handler_ = StackHandler::FromAddress(Top::handler(thread_));
+    type = ExitFrame::GetStateForFramePointer(
+        Isolate::c_entry_fp(thread_), &state);
+    handler_ = StackHandler::FromAddress(
+        Isolate::handler(thread_));
   } else {
     ASSERT(fp_ != NULL);
     state.fp = fp_;
@@ -221,22 +222,25 @@ bool SafeStackFrameIterator::ExitFrameValidator::IsValidFP(Address fp) {
 
 
 SafeStackFrameIterator::SafeStackFrameIterator(
+    Isolate* isolate,
     Address fp, Address sp, Address low_bound, Address high_bound) :
     maintainer_(),
     stack_validator_(low_bound, high_bound),
-    is_valid_top_(IsValidTop(low_bound, high_bound)),
+    is_valid_top_(IsValidTop(isolate, low_bound, high_bound)),
     is_valid_fp_(IsWithinBounds(low_bound, high_bound, fp)),
     is_working_iterator_(is_valid_top_ || is_valid_fp_),
     iteration_done_(!is_working_iterator_),
-    iterator_(is_valid_top_, is_valid_fp_ ? fp : NULL, sp) {
+    iterator_(isolate, is_valid_top_, is_valid_fp_ ? fp : NULL, sp) {
 }
 
 
-bool SafeStackFrameIterator::IsValidTop(Address low_bound, Address high_bound) {
-  Address fp = Top::c_entry_fp(Top::GetCurrentThread());
+bool SafeStackFrameIterator::IsValidTop(Isolate* isolate,
+                                        Address low_bound, Address high_bound) {
+  ThreadLocalTop* top = isolate->thread_local_top();
+  Address fp = Isolate::c_entry_fp(top);
   ExitFrameValidator validator(low_bound, high_bound);
   if (!validator.IsValidFP(fp)) return false;
-  return Top::handler(Top::GetCurrentThread()) != NULL;
+  return Isolate::handler(top) != NULL;
 }
 
 
@@ -312,8 +316,9 @@ void SafeStackFrameIterator::Reset() {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 SafeStackTraceFrameIterator::SafeStackTraceFrameIterator(
+    Isolate* isolate,
     Address fp, Address sp, Address low_bound, Address high_bound) :
-    SafeJavaScriptFrameIterator(fp, sp, low_bound, high_bound) {
+    SafeJavaScriptFrameIterator(isolate, fp, sp, low_bound, high_bound) {
   if (!done() && !frame()->is_java_script()) Advance();
 }
 
@@ -331,7 +336,9 @@ void SafeStackTraceFrameIterator::Advance() {
 Code* StackFrame::GetSafepointData(Address pc,
                                    SafepointEntry* safepoint_entry,
                                    unsigned* stack_slots) {
-  PcToCodeCache::PcToCodeCacheEntry* entry = PcToCodeCache::GetCacheEntry(pc);
+  Isolate* isolate = Isolate::Current();
+  PcToCodeCache::PcToCodeCacheEntry* entry =
+      isolate->pc_to_code_cache()->GetCacheEntry(pc);
   SafepointEntry cached_safepoint_entry = entry->safepoint_entry;
   if (!entry->safepoint_entry.is_valid()) {
     entry->safepoint_entry = entry->code->GetSafepointEntry(pc);
@@ -386,7 +393,8 @@ StackFrame::Type StackFrame::ComputeType(State* state) {
     // into the heap to determine the state. This is safe as long
     // as nobody tries to GC...
     if (SafeStackFrameIterator::is_active()) return JAVA_SCRIPT;
-    Code::Kind kind = GetContainingCode(*(state->pc_address))->kind();
+    Code::Kind kind = GetContainingCode(Isolate::Current(),
+                                        *(state->pc_address))->kind();
     ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
     return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
   }
@@ -402,7 +410,7 @@ StackFrame::Type StackFrame::GetCallerState(State* state) const {
 
 
 Code* EntryFrame::unchecked_code() const {
-  return Heap::raw_unchecked_js_entry_code();
+  return HEAP->raw_unchecked_js_entry_code();
 }
 
 
@@ -425,7 +433,7 @@ StackFrame::Type EntryFrame::GetCallerState(State* state) const {
 
 
 Code* EntryConstructFrame::unchecked_code() const {
-  return Heap::raw_unchecked_js_construct_entry_code();
+  return HEAP->raw_unchecked_js_construct_entry_code();
 }
 
 
@@ -457,7 +465,7 @@ void ExitFrame::SetCallerFp(Address caller_fp) {
 void ExitFrame::Iterate(ObjectVisitor* v) const {
   // The arguments are traversed as part of the expression stack of
   // the calling frame.
-  IteratePc(v, pc_address(), code());
+  IteratePc(v, pc_address(), LookupCode(Isolate::Current()));
   v->VisitPointer(&code_slot());
 }
 
@@ -632,8 +640,8 @@ Code* JavaScriptFrame::unchecked_code() const {
 
 Address JavaScriptFrame::GetCallerStackPointer() const {
   int arguments;
-  if (Heap::gc_state() != Heap::NOT_IN_GC ||
-      SafeStackFrameIterator::is_active()) {
+  if (SafeStackFrameIterator::is_active() ||
+      HEAP->gc_state() != Heap::NOT_IN_GC) {
     // If we are currently iterating the safe stack, the
     // arguments for frames are traversed as if they were
     // expression stack elements of the calling frame. The reason for
@@ -662,7 +670,7 @@ void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
 
 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
   ASSERT(functions->length() == 0);
-  Code* code_pointer = code();
+  Code* code_pointer = LookupCode(Isolate::Current());
   int offset = static_cast<int>(pc() - code_pointer->address());
   FrameSummary summary(receiver(),
                        JSFunction::cast(function()),
@@ -781,7 +789,7 @@ DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
   // back to a slow search in this case to find the original optimized
   // code object.
   if (!code->contains(pc())) {
-    code = PcToCodeCache::GcSafeFindCodeForPc(pc());
+    code = Isolate::Current()->pc_to_code_cache()->GcSafeFindCodeForPc(pc());
   }
   ASSERT(code != NULL);
   ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
@@ -842,7 +850,8 @@ Address InternalFrame::GetCallerStackPointer() const {
 
 
 Code* ArgumentsAdaptorFrame::unchecked_code() const {
-  return Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline);
+  return Isolate::Current()->builtins()->builtin(
+      Builtins::ArgumentsAdaptorTrampoline);
 }
 
 
@@ -1036,14 +1045,14 @@ void EntryFrame::Iterate(ObjectVisitor* v) const {
   ASSERT(!it.done());
   StackHandler* handler = it.handler();
   ASSERT(handler->is_entry());
-  handler->Iterate(v, code());
+  handler->Iterate(v, LookupCode(Isolate::Current()));
 #ifdef DEBUG
   // Make sure that the entry frame does not contain more than one
   // stack handler.
   it.Advance();
   ASSERT(it.done());
 #endif
-  IteratePc(v, pc_address(), code());
+  IteratePc(v, pc_address(), LookupCode(Isolate::Current()));
 }
 
 
@@ -1060,7 +1069,7 @@ void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
     v->VisitPointers(base, reinterpret_cast<Object**>(address));
     base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
     // Traverse the pointers in the handler itself.
-    handler->Iterate(v, code());
+    handler->Iterate(v, LookupCode(Isolate::Current()));
   }
   v->VisitPointers(base, limit);
 }
@@ -1068,7 +1077,7 @@ void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
 
 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
   IterateExpressions(v);
-  IteratePc(v, pc_address(), code());
+  IteratePc(v, pc_address(), LookupCode(Isolate::Current()));
   IterateArguments(v);
 }
 
@@ -1087,7 +1096,7 @@ void InternalFrame::Iterate(ObjectVisitor* v) const {
   // Internal frames only have object pointers on the expression stack
   // as they never have any arguments.
   IterateExpressions(v);
-  IteratePc(v, pc_address(), code());
+  IteratePc(v, pc_address(), LookupCode(Isolate::Current()));
 }
 
 
@@ -1117,14 +1126,15 @@ Code* PcToCodeCache::GcSafeCastToCode(HeapObject* object, Address pc) {
 
 
 Code* PcToCodeCache::GcSafeFindCodeForPc(Address pc) {
+  Heap* heap = isolate_->heap();
   // Check if the pc points into a large object chunk.
-  LargeObjectChunk* chunk = Heap::lo_space()->FindChunkContainingPc(pc);
+  LargeObjectChunk* chunk = heap->lo_space()->FindChunkContainingPc(pc);
   if (chunk != NULL) return GcSafeCastToCode(chunk->GetObject(), pc);
 
   // Iterate through the 8K page until we reach the end or find an
   // object starting after the pc.
   Page* page = Page::FromAddress(pc);
-  HeapObjectIterator iterator(page, Heap::GcSafeSizeOfOldObjectFunction());
+  HeapObjectIterator iterator(page, heap->GcSafeSizeOfOldObjectFunction());
   HeapObject* previous = NULL;
   while (true) {
     HeapObject* next = iterator.next();
@@ -1137,14 +1147,14 @@ Code* PcToCodeCache::GcSafeFindCodeForPc(Address pc) {
 
 
 PcToCodeCache::PcToCodeCacheEntry* PcToCodeCache::GetCacheEntry(Address pc) {
-  Counters::pc_to_code.Increment();
+  COUNTERS->pc_to_code()->Increment();
   ASSERT(IsPowerOf2(kPcToCodeCacheSize));
   uint32_t hash = ComputeIntegerHash(
       static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pc)));
   uint32_t index = hash & (kPcToCodeCacheSize - 1);
   PcToCodeCacheEntry* entry = cache(index);
   if (entry->pc == pc) {
-    Counters::pc_to_code_cached.Increment();
+    COUNTERS->pc_to_code_cached()->Increment();
     ASSERT(entry->code == GcSafeFindCodeForPc(pc));
   } else {
     // Because this code may be interrupted by a profiling signal that
@@ -1171,11 +1181,8 @@ int NumRegs(RegList reglist) {
 }
 
 
-int JSCallerSavedCode(int n) {
-  static int reg_code[kNumJSCallerSaved];
-  static bool initialized = false;
-  if (!initialized) {
-    initialized = true;
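+// Table mapping JS caller-saved register numbers to register codes.  The
+// constructor fills it in once during static initialization, replacing the
+// previous lazily-initialized function-local statics.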
+struct JSCallerSavedCodeData {
+  JSCallerSavedCodeData() {
     int i = 0;
     for (int r = 0; r < kNumRegs; r++)
       if ((kJSCallerSaved & (1 << r)) != 0)
@@ -1183,8 +1190,16 @@ int JSCallerSavedCode(int n) {
 
     ASSERT(i == kNumJSCallerSaved);
   }
+  int reg_code[kNumJSCallerSaved];
+};
+
+
+static const JSCallerSavedCodeData kCallerSavedCodeData;
+
+
+int JSCallerSavedCode(int n) {
   ASSERT(0 <= n && n < kNumJSCallerSaved);
-  return reg_code[n];
+  return kCallerSavedCodeData.reg_code[n];
 }
 
 
index 2ead5d7b56ab61090181a554075377a4eb313078..bee95ccbf1d2582c448a344280429cc6e07f7bdf 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_FRAMES_H_
 #define V8_FRAMES_H_
 
+#include "handles.h"
 #include "safepoint-table.h"
 
 namespace v8 {
@@ -44,11 +45,10 @@ int JSCallerSavedCode(int n);
 
 // Forward declarations.
 class StackFrameIterator;
-class Top;
 class ThreadLocalTop;
+class Isolate;
 
-
-class PcToCodeCache : AllStatic {
+class PcToCodeCache {
  public:
   struct PcToCodeCacheEntry {
     Address pc;
@@ -56,22 +56,28 @@ class PcToCodeCache : AllStatic {
     SafepointEntry safepoint_entry;
   };
 
-  static PcToCodeCacheEntry* cache(int index) {
-    return &cache_[index];
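+  // The cache is per-isolate; a newly constructed cache starts out empty.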
+  explicit PcToCodeCache(Isolate* isolate) : isolate_(isolate) {
+    Flush();
   }
 
-  static Code* GcSafeFindCodeForPc(Address pc);
-  static Code* GcSafeCastToCode(HeapObject* object, Address pc);
+  Code* GcSafeFindCodeForPc(Address pc);
+  Code* GcSafeCastToCode(HeapObject* object, Address pc);
 
-  static void FlushPcToCodeCache() {
+  void Flush() {
     memset(&cache_[0], 0, sizeof(cache_));
   }
 
-  static PcToCodeCacheEntry* GetCacheEntry(Address pc);
+  PcToCodeCacheEntry* GetCacheEntry(Address pc);
 
  private:
+  PcToCodeCacheEntry* cache(int index) { return &cache_[index]; }
+
+  Isolate* isolate_;
+
   static const int kPcToCodeCacheSize = 1024;
-  static PcToCodeCacheEntry cache_[kPcToCodeCacheSize];
+  PcToCodeCacheEntry cache_[kPcToCodeCacheSize];
+
+  DISALLOW_COPY_AND_ASSIGN(PcToCodeCache);
 };
 
 
@@ -199,12 +205,12 @@ class StackFrame BASE_EMBEDDED {
   virtual Code* unchecked_code() const = 0;
 
   // Get the code associated with this frame.
-  Code* code() const { return GetContainingCode(pc()); }
+  Code* LookupCode(Isolate* isolate) const {
+    return GetContainingCode(isolate, pc());
+  }
 
   // Get the code object that contains the given pc.
-  static Code* GetContainingCode(Address pc) {
-    return PcToCodeCache::GetCacheEntry(pc)->code;
-  }
+  static inline Code* GetContainingCode(Isolate* isolate, Address pc);
 
   // Get the code object containing the given pc and fill in the
   // safepoint entry and the number of stack slots. The pc must be at
@@ -612,7 +618,7 @@ class StackFrameIterator BASE_EMBEDDED {
   // An iterator that can start from a given FP address.
   // If use_top, then work as usual, if fp isn't NULL, use it,
   // otherwise, do nothing.
-  StackFrameIterator(bool use_top, Address fp, Address sp);
+  StackFrameIterator(Isolate* isolate, bool use_top, Address fp, Address sp);
 
   StackFrame* frame() const {
     ASSERT(!done());
@@ -675,6 +681,13 @@ class JavaScriptFrameIteratorTemp BASE_EMBEDDED {
     if (!done()) Advance();
   }
 
+  JavaScriptFrameIteratorTemp(Isolate* isolate,
+                              Address fp, Address sp,
+                              Address low_bound, Address high_bound) :
+      iterator_(isolate, fp, sp, low_bound, high_bound) {
+    if (!done()) Advance();
+  }
+
   inline JavaScriptFrame* frame() const;
 
   bool done() const { return iterator_.done(); }
@@ -712,7 +725,8 @@ class StackTraceFrameIterator: public JavaScriptFrameIterator {
 
 class SafeStackFrameIterator BASE_EMBEDDED {
  public:
-  SafeStackFrameIterator(Address fp, Address sp,
+  SafeStackFrameIterator(Isolate* isolate,
+                         Address fp, Address sp,
                          Address low_bound, Address high_bound);
 
   StackFrame* frame() const {
@@ -762,7 +776,8 @@ class SafeStackFrameIterator BASE_EMBEDDED {
   bool CanIterateHandles(StackFrame* frame, StackHandler* handler);
   bool IsValidFrame(StackFrame* frame) const;
   bool IsValidCaller(StackFrame* frame);
-  static bool IsValidTop(Address low_bound, Address high_bound);
+  static bool IsValidTop(Isolate* isolate,
+                         Address low_bound, Address high_bound);
 
   // This is a nasty hack to make sure the active count is incremented
   // before the constructor for the embedded iterator is invoked. This
@@ -776,6 +791,7 @@ class SafeStackFrameIterator BASE_EMBEDDED {
   };
 
   ActiveCountMaintainer maintainer_;
+  // TODO(isolates): this is dangerous.
   static int active_count_;
   StackAddressValidator stack_validator_;
   const bool is_valid_top_;
@@ -793,7 +809,8 @@ typedef JavaScriptFrameIteratorTemp<SafeStackFrameIterator>
 
 class SafeStackTraceFrameIterator: public SafeJavaScriptFrameIterator {
  public:
-  explicit SafeStackTraceFrameIterator(Address fp, Address sp,
+  explicit SafeStackTraceFrameIterator(Isolate* isolate,
+                                       Address fp, Address sp,
                                        Address low_bound, Address high_bound);
   void Advance();
 };
index b3dc95bdff0fd23d785504284a521de70296fd45..9fbd9b372fd76fd6577a2aa251d4ec73610586c6 100644 (file)
@@ -275,10 +275,11 @@ void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
 #define __ ACCESS_MASM(masm())
 
 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
+  Isolate* isolate = Isolate::Current();
   Handle<Script> script = info->script();
   if (!script->IsUndefined() && !script->source()->IsUndefined()) {
     int len = String::cast(script->source())->length();
-    Counters::total_full_codegen_source_size.Increment(len);
+    isolate->counters()->total_full_codegen_source_size()->Increment(len);
   }
   if (FLAG_trace_codegen) {
     PrintF("Full Compiler - ");
@@ -293,7 +294,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
   FullCodeGenerator cgen(&masm);
   cgen.Generate(info);
   if (cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
+    ASSERT(!isolate->has_pending_exception());
     return false;
   }
   unsigned table_offset = cgen.EmitStackCheckTable();
@@ -343,7 +344,8 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
   if (!info_->HasDeoptimizationSupport()) return;
   int length = bailout_entries_.length();
   Handle<DeoptimizationOutputData> data =
-      Factory::NewDeoptimizationOutputData(length, TENURED);
+      isolate()->factory()->
+      NewDeoptimizationOutputData(length, TENURED);
   for (int i = 0; i < length; i++) {
     data->SetAstId(i, Smi::FromInt(bailout_entries_[i].id));
     data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
@@ -545,7 +547,8 @@ void FullCodeGenerator::VisitDeclarations(
   // Compute array of global variable and function declarations.
   // Do nothing in case of no declared global functions or variables.
   if (globals > 0) {
-    Handle<FixedArray> array = Factory::NewFixedArray(2 * globals, TENURED);
+    Handle<FixedArray> array =
+        isolate()->factory()->NewFixedArray(2 * globals, TENURED);
     for (int j = 0, i = 0; i < length; i++) {
       Declaration* decl = declarations->at(i);
       Variable* var = decl->proxy()->var();
@@ -596,7 +599,7 @@ void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
   if (FLAG_debug_info) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
-    if (!Debugger::IsDebuggerActive()) {
+    if (!isolate()->debugger()->IsDebuggerActive()) {
       CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
     } else {
       // Check if the statement will be breakable without adding a debug break
@@ -624,7 +627,7 @@ void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
 void FullCodeGenerator::SetExpressionPosition(Expression* expr, int pos) {
   if (FLAG_debug_info) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
-    if (!Debugger::IsDebuggerActive()) {
+    if (!isolate()->debugger()->IsDebuggerActive()) {
       CodeGenerator::RecordPositions(masm_, pos);
     } else {
       // Check if the expression will be breakable without adding a debug break
@@ -694,7 +697,7 @@ FullCodeGenerator::InlineFunctionGenerator
 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* node) {
   ZoneList<Expression*>* args = node->arguments();
   Handle<String> name = node->name();
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
   ASSERT(function != NULL);
   ASSERT(function->intrinsic_type == Runtime::INLINE);
   InlineFunctionGenerator generator =
index cc0c206f089deacaaddd2cddefb3246842e2a854..e76d679d86861caf047a214106f821bd2b653eab 100644 (file)
@@ -77,7 +77,8 @@ class FullCodeGenerator: public AstVisitor {
   };
 
   explicit FullCodeGenerator(MacroAssembler* masm)
-      : masm_(masm),
+      : isolate_(Isolate::Current()),
+        masm_(masm),
         info_(NULL),
         nesting_stack_(NULL),
         loop_depth_(0),
@@ -493,6 +494,7 @@ class FullCodeGenerator: public AstVisitor {
     loop_depth_--;
   }
 
+  Isolate* isolate() { return isolate_; }
   MacroAssembler* masm() { return masm_; }
 
   class ExpressionContext;
@@ -553,6 +555,8 @@ class FullCodeGenerator: public AstVisitor {
       codegen_->set_new_context(old_);
     }
 
+    Isolate* isolate() const { return codegen_->isolate(); }
+
     // Convert constant control flow (true or false) to the result expected for
     // this expression context.
     virtual void Plug(bool flag) const = 0;
@@ -729,6 +733,7 @@ class FullCodeGenerator: public AstVisitor {
     virtual bool IsEffect() const { return true; }
   };
 
+  Isolate* isolate_;
   MacroAssembler* masm_;
   CompilationInfo* info_;
   Label return_label_;
index f12d026bdbe2253795e2ce11ea170d2fceb51ebc..c094251fa51081248b465df6c9588ecd7b68c858 100644 (file)
@@ -38,21 +38,22 @@ void FuncNameInferrer::PushEnclosingName(Handle<String> name) {
   // Enclosing name is a name of a constructor function. To check
   // that it is really a constructor, we check that it is not empty
   // and starts with a capital letter.
-  if (name->length() > 0 && Runtime::IsUpperCaseChar(name->Get(0))) {
+  if (name->length() > 0 && Runtime::IsUpperCaseChar(
+      Isolate::Current()->runtime_state(), name->Get(0))) {
     names_stack_.Add(name);
   }
 }
 
 
 void FuncNameInferrer::PushLiteralName(Handle<String> name) {
-  if (IsOpen() && !Heap::prototype_symbol()->Equals(*name)) {
+  if (IsOpen() && !HEAP->prototype_symbol()->Equals(*name)) {
     names_stack_.Add(name);
   }
 }
 
 
 void FuncNameInferrer::PushVariableName(Handle<String> name) {
-  if (IsOpen() && !Heap::result_symbol()->Equals(*name)) {
+  if (IsOpen() && !HEAP->result_symbol()->Equals(*name)) {
     names_stack_.Add(name);
   }
 }
@@ -60,7 +61,7 @@ void FuncNameInferrer::PushVariableName(Handle<String> name) {
 
 Handle<String> FuncNameInferrer::MakeNameFromStack() {
   if (names_stack_.is_empty()) {
-    return Factory::empty_string();
+    return FACTORY->empty_string();
   } else {
     return MakeNameFromStackHelper(1, names_stack_.at(0));
   }
@@ -72,8 +73,8 @@ Handle<String> FuncNameInferrer::MakeNameFromStackHelper(int pos,
   if (pos >= names_stack_.length()) {
     return prev;
   } else {
-    Handle<String> curr = Factory::NewConsString(dot_, names_stack_.at(pos));
-    return MakeNameFromStackHelper(pos + 1, Factory::NewConsString(prev, curr));
+    Handle<String> curr = FACTORY->NewConsString(dot_, names_stack_.at(pos));
+    return MakeNameFromStackHelper(pos + 1, FACTORY->NewConsString(prev, curr));
   }
 }
 
index a35034ecb52181017851321eb87aaed826089de5..5aa2b35bb14b10db5480d40cf63f03f5229b051e 100644 (file)
@@ -47,7 +47,7 @@ class FuncNameInferrer : public ZoneObject {
       : entries_stack_(10),
         names_stack_(5),
         funcs_to_infer_(4),
-        dot_(Factory::NewStringFromAscii(CStrVector("."))) {
+        dot_(FACTORY->NewStringFromAscii(CStrVector("."))) {
   }
 
   // Returns whether we have entered name collection state.
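
Note on the HEAP and FACTORY shorthands used above: the former Heap::/Factory:: statics are now instance methods, and call sites that do not have an Isolate* at hand go through these macros instead. A minimal sketch of the equivalence this relies on, assuming (as the surrounding changes suggest) that the shorthands resolve to the current isolate's heap() and factory(); IsPrototypeName is a hypothetical helper, not part of the patch:

  static bool IsPrototypeName(Handle<String> name) {
    // HEAP->prototype_symbol() is assumed to be shorthand for
    // Isolate::Current()->heap()->prototype_symbol().
    return Isolate::Current()->heap()->prototype_symbol()->Equals(*name);
  }
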
index 5136deddbf7e4aa4713cd32207b51b6c254d6c46..c8dbf5d67e6a9b2e1fce48ecaf1e51464fe52de0 100644 (file)
@@ -26,6 +26,7 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #ifdef ENABLE_GDB_JIT_INTERFACE
+#include "v8.h"
 #include "gdb-jit.h"
 
 #include "bootstrapper.h"
index c75b67cdfc2b8ae085bfd08bd1609c77eb70aa0c..4d138597b1282a7b46a7f9ed5444271219ff1ce9 100644 (file)
 namespace v8 {
 namespace internal {
 
+
+ObjectGroup::~ObjectGroup() {
+  if (info_ != NULL) info_->Dispose();
+}
+
+
 class GlobalHandles::Node : public Malloced {
  public:
 
@@ -58,7 +64,7 @@ class GlobalHandles::Node : public Malloced {
   }
 
   ~Node() {
-    if (state_ != DESTROYED) Destroy();
+    if (state_ != DESTROYED) Destroy(Isolate::Current()->global_handles());
 #ifdef DEBUG
     // Zap the values for eager trapping.
     object_ = NULL;
@@ -67,11 +73,11 @@ class GlobalHandles::Node : public Malloced {
 #endif
   }
 
-  void Destroy() {
+  void Destroy(GlobalHandles* global_handles) {
     if (state_ == WEAK || IsNearDeath()) {
-      GlobalHandles::number_of_weak_handles_--;
+      global_handles->number_of_weak_handles_--;
       if (object_->IsJSGlobalObject()) {
-        GlobalHandles::number_of_global_object_weak_handles_--;
+        global_handles->number_of_global_object_weak_handles_--;
       }
     }
     state_ = DESTROYED;
@@ -102,13 +108,15 @@ class GlobalHandles::Node : public Malloced {
   Handle<Object> handle() { return Handle<Object>(&object_); }
 
   // Make this handle weak.
-  void MakeWeak(void* parameter, WeakReferenceCallback callback) {
-    LOG(HandleEvent("GlobalHandle::MakeWeak", handle().location()));
+  void MakeWeak(GlobalHandles* global_handles, void* parameter,
+                WeakReferenceCallback callback) {
+    LOG(global_handles->isolate(),
+        HandleEvent("GlobalHandle::MakeWeak", handle().location()));
     ASSERT(state_ != DESTROYED);
     if (state_ != WEAK && !IsNearDeath()) {
-      GlobalHandles::number_of_weak_handles_++;
+      global_handles->number_of_weak_handles_++;
       if (object_->IsJSGlobalObject()) {
-        GlobalHandles::number_of_global_object_weak_handles_++;
+        global_handles->number_of_global_object_weak_handles_++;
       }
     }
     state_ = WEAK;
@@ -116,13 +124,14 @@ class GlobalHandles::Node : public Malloced {
     callback_ = callback;
   }
 
-  void ClearWeakness() {
-    LOG(HandleEvent("GlobalHandle::ClearWeakness", handle().location()));
+  void ClearWeakness(GlobalHandles* global_handles) {
+    LOG(global_handles->isolate(),
+        HandleEvent("GlobalHandle::ClearWeakness", handle().location()));
     ASSERT(state_ != DESTROYED);
     if (state_ == WEAK || IsNearDeath()) {
-      GlobalHandles::number_of_weak_handles_--;
+      global_handles->number_of_weak_handles_--;
       if (object_->IsJSGlobalObject()) {
-        GlobalHandles::number_of_global_object_weak_handles_--;
+        global_handles->number_of_global_object_weak_handles_--;
       }
     }
     state_ = NORMAL;
@@ -159,12 +168,13 @@ class GlobalHandles::Node : public Malloced {
   // Returns the callback for this weak handle.
   WeakReferenceCallback callback() { return callback_; }
 
-  bool PostGarbageCollectionProcessing() {
+  bool PostGarbageCollectionProcessing(Isolate* isolate,
+                                       GlobalHandles* global_handles) {
     if (state_ != Node::PENDING) return false;
-    LOG(HandleEvent("GlobalHandle::Processing", handle().location()));
+    LOG(isolate, HandleEvent("GlobalHandle::Processing", handle().location()));
     WeakReferenceCallback func = callback();
     if (func == NULL) {
-      Destroy();
+      Destroy(global_handles);
       return false;
     }
     void* par = parameter();
@@ -176,9 +186,9 @@ class GlobalHandles::Node : public Malloced {
       // Forbid reuse of destroyed nodes, as they might already be deallocated.
       // It is fine, though, to reuse nodes destroyed in a weak callback, as
       // those cannot be deallocated until we are back from the callback.
-      set_first_free(NULL);
-      if (first_deallocated()) {
-        first_deallocated()->set_next(head());
+      global_handles->set_first_free(NULL);
+      if (global_handles->first_deallocated()) {
+        global_handles->first_deallocated()->set_next(global_handles->head());
       }
       // Check that we are not passing a finalized external string to
       // the callback.
@@ -187,7 +197,7 @@ class GlobalHandles::Node : public Malloced {
       ASSERT(!object_->IsExternalTwoByteString() ||
              ExternalTwoByteString::cast(object_)->resource() != NULL);
       // Leaving V8.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       func(object, par);
     }
     // Absence of explicit cleanup or revival of weak handle
@@ -230,7 +240,7 @@ class GlobalHandles::Node : public Malloced {
 };
 
 
-class GlobalHandles::Pool BASE_EMBEDDED {
+class GlobalHandles::Pool {
   public:
     Pool() {
       current_ = new Chunk();
@@ -288,11 +298,27 @@ class GlobalHandles::Pool BASE_EMBEDDED {
 };
 
 
-static GlobalHandles::Pool pool_;
+GlobalHandles::GlobalHandles(Isolate* isolate)
+    : isolate_(isolate),
+      number_of_weak_handles_(0),
+      number_of_global_object_weak_handles_(0),
+      head_(NULL),
+      first_free_(NULL),
+      first_deallocated_(NULL),
+      pool_(new Pool()),
+      post_gc_processing_count_(0),
+      object_groups_(4) {
+}
+
+
+GlobalHandles::~GlobalHandles() {
+  delete pool_;
+  pool_ = NULL;
+}
 
 
 Handle<Object> GlobalHandles::Create(Object* value) {
-  Counters::global_handles.Increment();
+  isolate_->counters()->global_handles()->Increment();
   Node* result;
   if (first_free()) {
     // Take the first node in the free list.
@@ -306,7 +332,7 @@ Handle<Object> GlobalHandles::Create(Object* value) {
     set_head(result);
   } else {
     // Allocate a new node.
-    result = pool_.Allocate();
+    result = pool_->Allocate();
     result->set_next(head());
     set_head(result);
   }
@@ -316,10 +342,10 @@ Handle<Object> GlobalHandles::Create(Object* value) {
 
 
 void GlobalHandles::Destroy(Object** location) {
-  Counters::global_handles.Decrement();
+  isolate_->counters()->global_handles()->Decrement();
   if (location == NULL) return;
   Node* node = Node::FromLocation(location);
-  node->Destroy();
+  node->Destroy(this);
   // Link the destroyed.
   node->set_next_free(first_free());
   set_first_free(node);
@@ -329,12 +355,12 @@ void GlobalHandles::Destroy(Object** location) {
 void GlobalHandles::MakeWeak(Object** location, void* parameter,
                              WeakReferenceCallback callback) {
   ASSERT(callback != NULL);
-  Node::FromLocation(location)->MakeWeak(parameter, callback);
+  Node::FromLocation(location)->MakeWeak(this, parameter, callback);
 }
 
 
 void GlobalHandles::ClearWeakness(Object** location) {
-  Node::FromLocation(location)->ClearWeakness();
+  Node::FromLocation(location)->ClearWeakness(this);
 }
 
 
@@ -381,27 +407,26 @@ void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
     if (current->state_ == Node::WEAK) {
       if (f(&current->object_)) {
         current->state_ = Node::PENDING;
-        LOG(HandleEvent("GlobalHandle::Pending", current->handle().location()));
+        LOG(isolate_,
+            HandleEvent("GlobalHandle::Pending", current->handle().location()));
       }
     }
   }
 }
 
 
-int post_gc_processing_count = 0;
-
 bool GlobalHandles::PostGarbageCollectionProcessing() {
   // Process weak global handle callbacks. This must be done after the
   // GC is completely done, because the callbacks may invoke arbitrary
   // API functions.
   // At the same time deallocate all DESTROYED nodes.
-  ASSERT(Heap::gc_state() == Heap::NOT_IN_GC);
-  const int initial_post_gc_processing_count = ++post_gc_processing_count;
+  ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
+  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
   bool next_gc_likely_to_collect_more = false;
   Node** p = &head_;
   while (*p != NULL) {
-    if ((*p)->PostGarbageCollectionProcessing()) {
-      if (initial_post_gc_processing_count != post_gc_processing_count) {
+    if ((*p)->PostGarbageCollectionProcessing(isolate_, this)) {
+      if (initial_post_gc_processing_count != post_gc_processing_count_) {
         // Weak callback triggered another GC and another round of
         // PostGarbageCollection processing.  The current node might
         // have been deleted in that round, so we need to bail out (or
@@ -466,17 +491,10 @@ void GlobalHandles::TearDown() {
   set_head(NULL);
   set_first_free(NULL);
   set_first_deallocated(NULL);
-  pool_.Release();
+  pool_->Release();
 }
 
 
-int GlobalHandles::number_of_weak_handles_ = 0;
-int GlobalHandles::number_of_global_object_weak_handles_ = 0;
-
-GlobalHandles::Node* GlobalHandles::head_ = NULL;
-GlobalHandles::Node* GlobalHandles::first_free_ = NULL;
-GlobalHandles::Node* GlobalHandles::first_deallocated_ = NULL;
-
 void GlobalHandles::RecordStats(HeapStats* stats) {
   *stats->global_handle_count = 0;
   *stats->weak_global_handle_count = 0;
@@ -535,11 +553,6 @@ void GlobalHandles::Print() {
 
 #endif
 
-List<ObjectGroup*>* GlobalHandles::ObjectGroups() {
-  // Lazily initialize the list to avoid startup time static constructors.
-  static List<ObjectGroup*> groups(4);
-  return &groups;
-}
 
 
 void GlobalHandles::AddObjectGroup(Object*** handles,
@@ -549,14 +562,7 @@ void GlobalHandles::AddObjectGroup(Object*** handles,
   for (size_t i = 0; i < length; ++i) {
     new_entry->objects_.Add(handles[i]);
   }
-  ObjectGroups()->Add(new_entry);
-}
-
-
-List<ImplicitRefGroup*>* GlobalHandles::ImplicitRefGroups() {
-  // Lazily initialize the list to avoid startup time static constructors.
-  static List<ImplicitRefGroup*> groups(4);
-  return &groups;
+  object_groups_.Add(new_entry);
 }
 
 
@@ -567,25 +573,23 @@ void GlobalHandles::AddImplicitReferences(HeapObject* parent,
   for (size_t i = 0; i < length; ++i) {
     new_entry->children_.Add(children[i]);
   }
-  ImplicitRefGroups()->Add(new_entry);
+  implicit_ref_groups_.Add(new_entry);
 }
 
 
 void GlobalHandles::RemoveObjectGroups() {
-  List<ObjectGroup*>* object_groups = ObjectGroups();
-  for (int i = 0; i< object_groups->length(); i++) {
-    delete object_groups->at(i);
+  for (int i = 0; i < object_groups_.length(); i++) {
+    delete object_groups_.at(i);
   }
-  object_groups->Clear();
+  object_groups_.Clear();
 }
 
 
 void GlobalHandles::RemoveImplicitRefGroups() {
-  List<ImplicitRefGroup*>* ref_groups = ImplicitRefGroups();
-  for (int i = 0; i< ref_groups->length(); i++) {
-    delete ref_groups->at(i);
+  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
+    delete implicit_ref_groups_.at(i);
   }
-  ref_groups->Clear();
+  implicit_ref_groups_.Clear();
 }
 
 
index 3559c4054b2e875e108a6bc33f2c2ec5e8866e53..a6afb2dcdeec34fa89458710b08aaa83be89ac2e 100644 (file)
@@ -48,7 +48,7 @@ class ObjectGroup : public Malloced {
   ObjectGroup(size_t capacity, v8::RetainedObjectInfo* info)
       : objects_(static_cast<int>(capacity)),
         info_(info) { }
-  ~ObjectGroup() { if (info_ != NULL) info_->Dispose(); }
+  ~ObjectGroup();
 
   List<Object**> objects_;
   v8::RetainedObjectInfo* info_;
@@ -78,13 +78,15 @@ class ImplicitRefGroup : public Malloced {
 
 typedef void (*WeakReferenceGuest)(Object* object, void* parameter);
 
-class GlobalHandles : public AllStatic {
+class GlobalHandles {
  public:
+  ~GlobalHandles();
+
   // Creates a new global handle that is alive until Destroy is called.
-  static Handle<Object> Create(Object* value);
+  Handle<Object> Create(Object* value);
 
   // Destroy a global handle.
-  static void Destroy(Object** location);
+  void Destroy(Object** location);
 
   // Make the global handle weak and set the callback parameter for the
   // handle.  When the garbage collector recognizes that only weak global
@@ -92,25 +94,25 @@ class GlobalHandles : public AllStatic {
   // function is invoked (for each handle) with the handle and corresponding
   // parameter as arguments.  Note: cleared means set to Smi::FromInt(0). The
   // reason is that Smi::FromInt(0) does not change during garbage collection.
-  static void MakeWeak(Object** location,
-                       void* parameter,
-                       WeakReferenceCallback callback);
+  void MakeWeak(Object** location,
+                void* parameter,
+                WeakReferenceCallback callback);
 
   static void SetWrapperClassId(Object** location, uint16_t class_id);
 
   // Returns the current number of weak handles.
-  static int NumberOfWeakHandles() { return number_of_weak_handles_; }
+  int NumberOfWeakHandles() { return number_of_weak_handles_; }
 
-  static void RecordStats(HeapStats* stats);
+  void RecordStats(HeapStats* stats);
 
   // Returns the current number of weak handles to global objects.
   // These handles are also included in NumberOfWeakHandles().
-  static int NumberOfGlobalObjectWeakHandles() {
+  int NumberOfGlobalObjectWeakHandles() {
     return number_of_global_object_weak_handles_;
   }
 
   // Clear the weakness of a global handle.
-  static void ClearWeakness(Object** location);
+  void ClearWeakness(Object** location);
 
   // Tells whether global handle is near death.
   static bool IsNearDeath(Object** location);
@@ -120,81 +122,89 @@ class GlobalHandles : public AllStatic {
 
   // Process pending weak handles.
   // Returns true if next major GC is likely to collect more garbage.
-  static bool PostGarbageCollectionProcessing();
+  bool PostGarbageCollectionProcessing();
 
   // Iterates over all strong handles.
-  static void IterateStrongRoots(ObjectVisitor* v);
+  void IterateStrongRoots(ObjectVisitor* v);
 
   // Iterates over all handles.
-  static void IterateAllRoots(ObjectVisitor* v);
+  void IterateAllRoots(ObjectVisitor* v);
 
   // Iterates over all handles that have embedder-assigned class ID.
-  static void IterateAllRootsWithClassIds(ObjectVisitor* v);
+  void IterateAllRootsWithClassIds(ObjectVisitor* v);
 
   // Iterates over all weak roots in heap.
-  static void IterateWeakRoots(ObjectVisitor* v);
+  void IterateWeakRoots(ObjectVisitor* v);
 
   // Iterates over weak roots that are bound to a given callback.
-  static void IterateWeakRoots(WeakReferenceGuest f,
-                               WeakReferenceCallback callback);
+  void IterateWeakRoots(WeakReferenceGuest f,
+                        WeakReferenceCallback callback);
 
   // Find all weak handles satisfying the callback predicate, mark
   // them as pending.
-  static void IdentifyWeakHandles(WeakSlotCallback f);
+  void IdentifyWeakHandles(WeakSlotCallback f);
 
   // Add an object group.
   // Should only be used in a GC callback function before a collection.
   // All groups are destroyed after a mark-compact collection.
-  static void AddObjectGroup(Object*** handles,
-                             size_t length,
-                             v8::RetainedObjectInfo* info);
+  void AddObjectGroup(Object*** handles,
+                      size_t length,
+                      v8::RetainedObjectInfo* info);
 
   // Add an implicit references' group.
   // Should only be used in a GC callback function before a collection.
   // All groups are destroyed after a mark-compact collection.
-  static void AddImplicitReferences(HeapObject* parent,
-                                    Object*** children,
-                                    size_t length);
+  void AddImplicitReferences(HeapObject* parent,
+                             Object*** children,
+                             size_t length);
 
   // Returns the object groups.
-  static List<ObjectGroup*>* ObjectGroups();
+  List<ObjectGroup*>* object_groups() { return &object_groups_; }
 
   // Returns the implicit references' groups.
-  static List<ImplicitRefGroup*>* ImplicitRefGroups();
+  List<ImplicitRefGroup*>* implicit_ref_groups() {
+    return &implicit_ref_groups_;
+  }
 
   // Remove object and implicit-ref groups; this should only happen after GC.
-  static void RemoveObjectGroups();
-  static void RemoveImplicitRefGroups();
+  void RemoveObjectGroups();
+  void RemoveImplicitRefGroups();
 
   // Tear down the global handle structure.
-  static void TearDown();
+  void TearDown();
+
+  Isolate* isolate() { return isolate_; }
 
 #ifdef DEBUG
-  static void PrintStats();
-  static void Print();
+  void PrintStats();
+  void Print();
 #endif
   class Pool;
  private:
+  explicit GlobalHandles(Isolate* isolate);
+
   // Internal node structure, one for each global handle.
   class Node;
 
+  Isolate* isolate_;
+
   // Field always containing the number of weak and near-death handles.
-  static int number_of_weak_handles_;
+  int number_of_weak_handles_;
 
   // Field always containing the number of weak and near-death handles
   // to global objects.  These objects are also included in
   // number_of_weak_handles_.
-  static int number_of_global_object_weak_handles_;
+  int number_of_global_object_weak_handles_;
 
   // Global handles are kept in a single linked list pointed to by head_.
-  static Node* head_;
-  static Node* head() { return head_; }
-  static void set_head(Node* value) { head_ = value; }
+  Node* head_;
+  Node* head() { return head_; }
+  void set_head(Node* value) { head_ = value; }
 
   // Free list for DESTROYED global handles not yet deallocated.
-  static Node* first_free_;
-  static Node* first_free() { return first_free_; }
-  static void set_first_free(Node* value) { first_free_ = value; }
+  Node* first_free_;
+  Node* first_free() { return first_free_; }
+  void set_first_free(Node* value) { first_free_ = value; }
 
   // List of deallocated nodes.
   // Deallocated nodes form a prefix of all the nodes and
@@ -207,11 +217,20 @@ class GlobalHandles : public AllStatic {
   //    node          node        ...         node       node
   //      .next      -> .next ->                .next ->
   //   <- .next_free <- .next_free           <- .next_free
-  static Node* first_deallocated_;
-  static Node* first_deallocated() { return first_deallocated_; }
-  static void set_first_deallocated(Node* value) {
+  Node* first_deallocated_;
+  Node* first_deallocated() { return first_deallocated_; }
+  void set_first_deallocated(Node* value) {
     first_deallocated_ = value;
   }
+
+  Pool* pool_;
+  int post_gc_processing_count_;
+  List<ObjectGroup*> object_groups_;
+  List<ImplicitRefGroup*> implicit_ref_groups_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(GlobalHandles);
 };
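
GlobalHandles is no longer AllStatic: its constructor is private and each Isolate owns one instance (hence the friend class Isolate above). A minimal sketch of the intended per-isolate usage, mirroring the GetScriptWrapper change later in this patch; CacheWeakly and its callback parameter are illustrative only:

  void CacheWeakly(Isolate* isolate, Handle<JSObject> object,
                   WeakReferenceCallback callback) {
    // Former GlobalHandles:: statics are reached through the owning isolate.
    Handle<Object> cell = isolate->global_handles()->Create(*object);
    // The callback is responsible for calling Destroy on the location.
    isolate->global_handles()->MakeWeak(cell.location(), NULL, callback);
  }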
 
 
index c0f2fda929a23c0ddcdb2bedbe358a0dec4269b4..a5c81cec56f1d0056be0d1066683fc27432a4362 100644 (file)
 #ifndef V8_HANDLES_INL_H_
 #define V8_HANDLES_INL_H_
 
+#include "api.h"
 #include "apiutils.h"
 #include "handles.h"
-#include "api.h"
+#include "isolate.h"
 
 namespace v8 {
 namespace internal {
 
+inline Isolate* GetIsolateForHandle(Object* obj) {
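+  // The static type Object* gives no safe way to reach an owning heap (the
+  // value may be a Smi), so use the isolate of the current thread.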
+  return Isolate::Current();
+}
+
+inline Isolate* GetIsolateForHandle(HeapObject* obj) {
+  return obj->GetIsolate();
+}
+
 template<typename T>
 Handle<T>::Handle(T* obj) {
   ASSERT(!obj->IsFailure());
-  location_ = HandleScope::CreateHandle(obj);
+  location_ = HandleScope::CreateHandle(obj, GetIsolateForHandle(obj));
+}
+
+
+template<typename T>
+Handle<T>::Handle(T* obj, Isolate* isolate) {
+  ASSERT(!obj->IsFailure());
+  location_ = HandleScope::CreateHandle(obj, isolate);
 }
 
 
@@ -51,10 +67,91 @@ inline T* Handle<T>::operator*() const {
 }
 
 
+HandleScope::HandleScope() {
+  Isolate* isolate = Isolate::Current();
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
+  isolate_ = isolate;
+  prev_next_ = current->next;
+  prev_limit_ = current->limit;
+  current->level++;
+}
+
+
+HandleScope::HandleScope(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
+  isolate_ = isolate;
+  prev_next_ = current->next;
+  prev_limit_ = current->limit;
+  current->level++;
+}
+
+
+HandleScope::~HandleScope() {
+  CloseScope();
+}
+
+void HandleScope::CloseScope() {
+  ASSERT(isolate_ == Isolate::Current());
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate_->handle_scope_data();
+  current->next = prev_next_;
+  current->level--;
+  if (current->limit != prev_limit_) {
+    current->limit = prev_limit_;
+    DeleteExtensions(isolate_);
+  }
+#ifdef DEBUG
+  ZapRange(prev_next_, prev_limit_);
+#endif
+}
+
+
+template <typename T>
+Handle<T> HandleScope::CloseAndEscape(Handle<T> handle_value) {
+  T* value = *handle_value;
+  // Throw away all handles in the current scope.
+  CloseScope();
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate_->handle_scope_data();
+  // Allocate one handle in the parent scope.
+  ASSERT(current->level > 0);
+  Handle<T> result(CreateHandle<T>(value, isolate_));
+  // Reinitialize the current scope (so that it's ready
+  // to be used or closed again).
+  prev_next_ = current->next;
+  prev_limit_ = current->limit;
+  current->level++;
+  return result;
+}
+
+
+template <typename T>
+T** HandleScope::CreateHandle(T* value, Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
+
+  internal::Object** cur = current->next;
+  if (cur == current->limit) cur = Extend();
+  // Update the current next field, set the value in the created
+  // handle, and return the result.
+  ASSERT(cur < current->limit);
+  current->next = cur + 1;
+
+  T** result = reinterpret_cast<T**>(cur);
+  *result = value;
+  return result;
+}
+
+
 #ifdef DEBUG
 inline NoHandleAllocation::NoHandleAllocation() {
   v8::ImplementationUtilities::HandleScopeData* current =
-      v8::ImplementationUtilities::CurrentHandleScope();
+      Isolate::Current()->handle_scope_data();
+
   // Shrink the current handle scope to make it impossible to do
   // handle allocations without an explicit handle scope.
   current->limit = current->next;
@@ -67,10 +164,10 @@ inline NoHandleAllocation::NoHandleAllocation() {
 inline NoHandleAllocation::~NoHandleAllocation() {
   // Restore state in current handle scope to re-enable handle
   // allocations.
-  v8::ImplementationUtilities::HandleScopeData* current =
-      v8::ImplementationUtilities::CurrentHandleScope();
-  ASSERT_EQ(0, current->level);
-  current->level = level_;
+  v8::ImplementationUtilities::HandleScopeData* data =
+      Isolate::Current()->handle_scope_data();
+  ASSERT_EQ(0, data->level);
+  data->level = level_;
 }
 #endif
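
HandleScope now records its isolate: the default constructor still resolves it through Isolate::Current(), but callers that already hold an Isolate* can pass it explicitly and create handles against it directly. A minimal sketch of the intended use; MakePair is a hypothetical helper, not part of the patch:

  Handle<FixedArray> MakePair(Isolate* isolate,
                              Handle<Object> first,
                              Handle<Object> second) {
    HandleScope scope(isolate);
    Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
    pair->set(0, *first);
    pair->set(1, *second);
    // Escape the result into the enclosing scope, as CloseAndEscape does above.
    return scope.CloseAndEscape(pair);
  }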
 
index 65cbd1a6965a117a9c844d35c229f3392bc49fa2..90ee5603557c0b10dbaa8dd4c4aa606a6f144a8e 100644 (file)
@@ -45,57 +45,62 @@ namespace v8 {
 namespace internal {
 
 
-v8::ImplementationUtilities::HandleScopeData HandleScope::current_ =
-    { NULL, NULL, 0 };
-
-
 int HandleScope::NumberOfHandles() {
-  int n = HandleScopeImplementer::instance()->blocks()->length();
+  Isolate* isolate = Isolate::Current();
+  HandleScopeImplementer* impl = isolate->handle_scope_implementer();
+  int n = impl->blocks()->length();
   if (n == 0) return 0;
   return ((n - 1) * kHandleBlockSize) + static_cast<int>(
-      (current_.next - HandleScopeImplementer::instance()->blocks()->last()));
+      (isolate->handle_scope_data()->next - impl->blocks()->last()));
 }
 
 
 Object** HandleScope::Extend() {
-  Object** result = current_.next;
+  Isolate* isolate = Isolate::Current();
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
 
-  ASSERT(result == current_.limit);
+  Object** result = current->next;
+
+  ASSERT(result == current->limit);
   // Make sure there's at least one scope on the stack and that the
   // top of the scope stack isn't a barrier.
-  if (current_.level == 0) {
+  if (current->level == 0) {
     Utils::ReportApiFailure("v8::HandleScope::CreateHandle()",
                             "Cannot create a handle without a HandleScope");
     return NULL;
   }
-  HandleScopeImplementer* impl = HandleScopeImplementer::instance();
+  HandleScopeImplementer* impl = isolate->handle_scope_implementer();
   // If there's more room in the last block, we use that. This is used
   // for fast creation of scopes after scope barriers.
   if (!impl->blocks()->is_empty()) {
     Object** limit = &impl->blocks()->last()[kHandleBlockSize];
-    if (current_.limit != limit) {
-      current_.limit = limit;
-      ASSERT(limit - current_.next < kHandleBlockSize);
+    if (current->limit != limit) {
+      current->limit = limit;
+      ASSERT(limit - current->next < kHandleBlockSize);
     }
   }
 
   // If we still haven't found a slot for the handle, we extend the
   // current handle scope by allocating a new handle block.
-  if (result == current_.limit) {
+  if (result == current->limit) {
     // If there's a spare block, use it for growing the current scope.
     result = impl->GetSpareOrNewBlock();
     // Add the extension to the global list of blocks, but count the
     // extension as part of the current scope.
     impl->blocks()->Add(result);
-    current_.limit = &result[kHandleBlockSize];
+    current->limit = &result[kHandleBlockSize];
   }
 
   return result;
 }
 
 
-void HandleScope::DeleteExtensions() {
-  HandleScopeImplementer::instance()->DeleteExtensions(current_.limit);
+void HandleScope::DeleteExtensions(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  v8::ImplementationUtilities::HandleScopeData* current =
+      isolate->handle_scope_data();
+  isolate->handle_scope_implementer()->DeleteExtensions(current->limit);
 }
 
 
@@ -108,37 +113,44 @@ void HandleScope::ZapRange(Object** start, Object** end) {
 
 
 Address HandleScope::current_level_address() {
-  return reinterpret_cast<Address>(&current_.level);
+  return reinterpret_cast<Address>(
+      &Isolate::Current()->handle_scope_data()->level);
 }
 
 
 Address HandleScope::current_next_address() {
-  return reinterpret_cast<Address>(&current_.next);
+  return reinterpret_cast<Address>(
+      &Isolate::Current()->handle_scope_data()->next);
 }
 
 
 Address HandleScope::current_limit_address() {
-  return reinterpret_cast<Address>(&current_.limit);
+  return reinterpret_cast<Address>(
+      &Isolate::Current()->handle_scope_data()->limit);
 }
 
 
 Handle<FixedArray> AddKeysFromJSArray(Handle<FixedArray> content,
                                       Handle<JSArray> array) {
-  CALL_HEAP_FUNCTION(content->AddKeysFromJSArray(*array), FixedArray);
+  CALL_HEAP_FUNCTION(content->GetHeap()->isolate(),
+                     content->AddKeysFromJSArray(*array), FixedArray);
 }
 
 
 Handle<FixedArray> UnionOfKeys(Handle<FixedArray> first,
                                Handle<FixedArray> second) {
-  CALL_HEAP_FUNCTION(first->UnionOfKeys(*second), FixedArray);
+  CALL_HEAP_FUNCTION(first->GetHeap()->isolate(),
+                     first->UnionOfKeys(*second), FixedArray);
 }
 
 
 Handle<JSGlobalProxy> ReinitializeJSGlobalProxy(
     Handle<JSFunction> constructor,
     Handle<JSGlobalProxy> global) {
-  CALL_HEAP_FUNCTION(Heap::ReinitializeJSGlobalProxy(*constructor, *global),
-                     JSGlobalProxy);
+  CALL_HEAP_FUNCTION(
+      constructor->GetHeap()->isolate(),
+      constructor->GetHeap()->ReinitializeJSGlobalProxy(*constructor, *global),
+      JSGlobalProxy);
 }
 
 
@@ -153,7 +165,8 @@ void SetExpectedNofProperties(Handle<JSFunction> func, int nof) {
   func->shared()->set_expected_nof_properties(nof);
   if (func->has_initial_map()) {
     Handle<Map> new_initial_map =
-        Factory::CopyMapDropTransitions(Handle<Map>(func->initial_map()));
+        func->GetIsolate()->factory()->CopyMapDropTransitions(
+            Handle<Map>(func->initial_map()));
     new_initial_map->set_unused_property_fields(nof);
     func->set_initial_map(*new_initial_map);
   }
@@ -161,7 +174,8 @@ void SetExpectedNofProperties(Handle<JSFunction> func, int nof) {
 
 
 void SetPrototypeProperty(Handle<JSFunction> func, Handle<JSObject> value) {
-  CALL_HEAP_FUNCTION_VOID(func->SetPrototype(*value));
+  CALL_HEAP_FUNCTION_VOID(func->GetHeap()->isolate(),
+                          func->SetPrototype(*value));
 }
 
 
@@ -193,20 +207,23 @@ void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
 void NormalizeProperties(Handle<JSObject> object,
                          PropertyNormalizationMode mode,
                          int expected_additional_properties) {
-  CALL_HEAP_FUNCTION_VOID(object->NormalizeProperties(
-      mode,
-      expected_additional_properties));
+  CALL_HEAP_FUNCTION_VOID(object->GetHeap()->isolate(),
+                          object->NormalizeProperties(
+                              mode,
+                              expected_additional_properties));
 }
 
 
 void NormalizeElements(Handle<JSObject> object) {
-  CALL_HEAP_FUNCTION_VOID(object->NormalizeElements());
+  CALL_HEAP_FUNCTION_VOID(object->GetHeap()->isolate(),
+                          object->NormalizeElements());
 }
 
 
 void TransformToFastProperties(Handle<JSObject> object,
                                int unused_property_fields) {
   CALL_HEAP_FUNCTION_VOID(
+      object->GetHeap()->isolate(),
       object->TransformToFastProperties(unused_property_fields));
 }
 
@@ -215,24 +232,26 @@ void NumberDictionarySet(Handle<NumberDictionary> dictionary,
                          uint32_t index,
                          Handle<Object> value,
                          PropertyDetails details) {
-  CALL_HEAP_FUNCTION_VOID(dictionary->Set(index, *value, details));
+  CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
+                          dictionary->Set(index, *value, details));
 }
 
 
 void FlattenString(Handle<String> string) {
-  CALL_HEAP_FUNCTION_VOID(string->TryFlatten());
+  CALL_HEAP_FUNCTION_VOID(string->GetIsolate(), string->TryFlatten());
 }
 
 
 Handle<String> FlattenGetString(Handle<String> string) {
-  CALL_HEAP_FUNCTION(string->TryFlatten(), String);
+  CALL_HEAP_FUNCTION(string->GetIsolate(), string->TryFlatten(), String);
 }
 
 
 Handle<Object> SetPrototype(Handle<JSFunction> function,
                             Handle<Object> prototype) {
   ASSERT(function->should_have_prototype());
-  CALL_HEAP_FUNCTION(Accessors::FunctionSetPrototype(*function,
+  CALL_HEAP_FUNCTION(function->GetHeap()->isolate(),
+                     Accessors::FunctionSetPrototype(*function,
                                                      *prototype,
                                                      NULL),
                      Object);
@@ -244,7 +263,8 @@ Handle<Object> SetProperty(Handle<JSObject> object,
                            Handle<Object> value,
                            PropertyAttributes attributes,
                            StrictModeFlag strict_mode) {
-  CALL_HEAP_FUNCTION(object->SetProperty(*key, *value, attributes, strict_mode),
+  CALL_HEAP_FUNCTION(object->GetHeap()->isolate(),
+                     object->SetProperty(*key, *value, attributes, strict_mode),
                      Object);
 }
 
@@ -254,8 +274,11 @@ Handle<Object> SetProperty(Handle<Object> object,
                            Handle<Object> value,
                            PropertyAttributes attributes,
                            StrictModeFlag strict_mode) {
+  Isolate* isolate = Isolate::Current();
   CALL_HEAP_FUNCTION(
-      Runtime::SetObjectProperty(object, key, value, attributes, strict_mode),
+      isolate,
+      Runtime::SetObjectProperty(
+          isolate, object, key, value, attributes, strict_mode),
       Object);
 }
 
@@ -264,9 +287,11 @@ Handle<Object> ForceSetProperty(Handle<JSObject> object,
                                 Handle<Object> key,
                                 Handle<Object> value,
                                 PropertyAttributes attributes) {
+  Isolate* isolate = object->GetIsolate();
   CALL_HEAP_FUNCTION(
+      isolate,
       Runtime::ForceSetObjectProperty(
-          object, key, value, attributes),
+          isolate, object, key, value, attributes),
       Object);
 }
 
@@ -275,14 +300,18 @@ Handle<Object> SetNormalizedProperty(Handle<JSObject> object,
                                      Handle<String> key,
                                      Handle<Object> value,
                                      PropertyDetails details) {
-  CALL_HEAP_FUNCTION(object->SetNormalizedProperty(*key, *value, details),
+  CALL_HEAP_FUNCTION(object->GetIsolate(),
+                     object->SetNormalizedProperty(*key, *value, details),
                      Object);
 }
 
 
 Handle<Object> ForceDeleteProperty(Handle<JSObject> object,
                                    Handle<Object> key) {
-  CALL_HEAP_FUNCTION(Runtime::ForceDeleteObjectProperty(object, key), Object);
+  Isolate* isolate = object->GetIsolate();
+  CALL_HEAP_FUNCTION(isolate,
+                     Runtime::ForceDeleteObjectProperty(isolate, object, key),
+                     Object);
 }
 
 
@@ -291,8 +320,10 @@ Handle<Object> SetLocalPropertyIgnoreAttributes(
     Handle<String> key,
     Handle<Object> value,
     PropertyAttributes attributes) {
-  CALL_HEAP_FUNCTION(object->
-      SetLocalPropertyIgnoreAttributes(*key, *value, attributes), Object);
+  CALL_HEAP_FUNCTION(
+    object->GetIsolate(),
+    object->SetLocalPropertyIgnoreAttributes(*key, *value, attributes),
+    Object);
 }
 
 
@@ -300,10 +331,10 @@ void SetLocalPropertyNoThrow(Handle<JSObject> object,
                              Handle<String> key,
                              Handle<Object> value,
                              PropertyAttributes attributes) {
-  ASSERT(!Top::has_pending_exception());
+  ASSERT(!object->GetIsolate()->has_pending_exception());
   CHECK(!SetLocalPropertyIgnoreAttributes(
         object, key, value, attributes).is_null());
-  CHECK(!Top::has_pending_exception());
+  CHECK(!object->GetIsolate()->has_pending_exception());
 }
 
 
@@ -312,7 +343,8 @@ Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
                                           Handle<Object> value,
                                           PropertyAttributes attributes,
                                           StrictModeFlag strict_mode) {
-  CALL_HEAP_FUNCTION(object->SetPropertyWithInterceptor(*key,
+  CALL_HEAP_FUNCTION(object->GetIsolate(),
+                     object->SetPropertyWithInterceptor(*key,
                                                         *value,
                                                         attributes,
                                                         strict_mode),
@@ -322,20 +354,24 @@ Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
 
 Handle<Object> GetProperty(Handle<JSObject> obj,
                            const char* name) {
-  Handle<String> str = Factory::LookupAsciiSymbol(name);
-  CALL_HEAP_FUNCTION(obj->GetProperty(*str), Object);
+  Isolate* isolate = obj->GetIsolate();
+  Handle<String> str = isolate->factory()->LookupAsciiSymbol(name);
+  CALL_HEAP_FUNCTION(isolate, obj->GetProperty(*str), Object);
 }
 
 
 Handle<Object> GetProperty(Handle<Object> obj,
                            Handle<Object> key) {
-  CALL_HEAP_FUNCTION(Runtime::GetObjectProperty(obj, key), Object);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate,
+                     Runtime::GetObjectProperty(isolate, obj, key), Object);
 }
 
 
 Handle<Object> GetElement(Handle<Object> obj,
                           uint32_t index) {
-  CALL_HEAP_FUNCTION(Runtime::GetElement(obj, index), Object);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate, Runtime::GetElement(obj, index), Object);
 }
 
 
@@ -343,7 +379,9 @@ Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver,
                                           Handle<JSObject> holder,
                                           Handle<String> name,
                                           PropertyAttributes* attributes) {
-  CALL_HEAP_FUNCTION(holder->GetPropertyWithInterceptor(*receiver,
+  Isolate* isolate = receiver->GetIsolate();
+  CALL_HEAP_FUNCTION(isolate,
+                     holder->GetPropertyWithInterceptor(*receiver,
                                                         *name,
                                                         attributes),
                      Object);
@@ -358,19 +396,21 @@ Handle<Object> GetPrototype(Handle<Object> obj) {
 
 Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value) {
   const bool skip_hidden_prototypes = false;
-  CALL_HEAP_FUNCTION(obj->SetPrototype(*value, skip_hidden_prototypes), Object);
+  CALL_HEAP_FUNCTION(obj->GetIsolate(),
+                     obj->SetPrototype(*value, skip_hidden_prototypes), Object);
 }
 
 
 Handle<Object> PreventExtensions(Handle<JSObject> object) {
-  CALL_HEAP_FUNCTION(object->PreventExtensions(), Object);
+  CALL_HEAP_FUNCTION(object->GetIsolate(), object->PreventExtensions(), Object);
 }
 
 
 Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
                                    bool create_if_needed) {
+  Isolate* isolate = obj->GetIsolate();
   Object* holder = obj->BypassGlobalProxy();
-  if (holder->IsUndefined()) return Factory::undefined_value();
+  if (holder->IsUndefined()) return isolate->factory()->undefined_value();
   obj = Handle<JSObject>(JSObject::cast(holder));
 
   if (obj->HasFastProperties()) {
@@ -380,7 +420,7 @@ Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
     // code zero) it will always occupy the first entry if present.
     DescriptorArray* descriptors = obj->map()->instance_descriptors();
     if ((descriptors->number_of_descriptors() > 0) &&
-        (descriptors->GetKey(0) == Heap::hidden_symbol()) &&
+        (descriptors->GetKey(0) == isolate->heap()->hidden_symbol()) &&
         descriptors->IsProperty(0)) {
       ASSERT(descriptors->GetType(0) == FIELD);
       return Handle<Object>(obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
@@ -394,32 +434,39 @@ Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
     // Hidden properties object not found. Allocate a new hidden properties
     // object if requested. Otherwise return the undefined value.
     if (create_if_needed) {
-      Handle<Object> hidden_obj = Factory::NewJSObject(Top::object_function());
-      CALL_HEAP_FUNCTION(obj->SetHiddenPropertiesObject(*hidden_obj), Object);
+      Handle<Object> hidden_obj =
+          isolate->factory()->NewJSObject(isolate->object_function());
+      CALL_HEAP_FUNCTION(isolate,
+                         obj->SetHiddenPropertiesObject(*hidden_obj), Object);
     } else {
-      return Factory::undefined_value();
+      return isolate->factory()->undefined_value();
     }
   }
-  return Handle<Object>(obj->GetHiddenPropertiesObject());
+  return Handle<Object>(obj->GetHiddenPropertiesObject(), isolate);
 }
 
 
 Handle<Object> DeleteElement(Handle<JSObject> obj,
                              uint32_t index) {
-  CALL_HEAP_FUNCTION(obj->DeleteElement(index, JSObject::NORMAL_DELETION),
+  CALL_HEAP_FUNCTION(obj->GetIsolate(),
+                     obj->DeleteElement(index, JSObject::NORMAL_DELETION),
                      Object);
 }
 
 
 Handle<Object> DeleteProperty(Handle<JSObject> obj,
                               Handle<String> prop) {
-  CALL_HEAP_FUNCTION(obj->DeleteProperty(*prop, JSObject::NORMAL_DELETION),
+  CALL_HEAP_FUNCTION(obj->GetIsolate(),
+                     obj->DeleteProperty(*prop, JSObject::NORMAL_DELETION),
                      Object);
 }
 
 
 Handle<Object> LookupSingleCharacterStringFromCode(uint32_t index) {
-  CALL_HEAP_FUNCTION(Heap::LookupSingleCharacterStringFromCode(index), Object);
+  Isolate* isolate = Isolate::Current();
+  CALL_HEAP_FUNCTION(
+      isolate,
+      isolate->heap()->LookupSingleCharacterStringFromCode(index), Object);
 }
 
 
@@ -427,7 +474,8 @@ Handle<String> SubString(Handle<String> str,
                          int start,
                          int end,
                          PretenureFlag pretenure) {
-  CALL_HEAP_FUNCTION(str->SubString(start, end, pretenure), String);
+  CALL_HEAP_FUNCTION(str->GetIsolate(),
+                     str->SubString(start, end, pretenure), String);
 }
 
 
@@ -443,7 +491,8 @@ Handle<Object> SetElement(Handle<JSObject> object,
       value = number;
     }
   }
-  CALL_HEAP_FUNCTION(object->SetElement(index, *value, strict_mode), Object);
+  CALL_HEAP_FUNCTION(object->GetIsolate(),
+                     object->SetElement(index, *value, strict_mode), Object);
 }
 
 
@@ -452,18 +501,21 @@ Handle<Object> SetOwnElement(Handle<JSObject> object,
                              Handle<Object> value,
                              StrictModeFlag strict_mode) {
   ASSERT(!object->HasExternalArrayElements());
-  CALL_HEAP_FUNCTION(object->SetElement(index, *value, strict_mode, false),
+  CALL_HEAP_FUNCTION(object->GetIsolate(),
+                     object->SetElement(index, *value, strict_mode, false),
                      Object);
 }
 
 
 Handle<JSObject> Copy(Handle<JSObject> obj) {
-  CALL_HEAP_FUNCTION(Heap::CopyJSObject(*obj), JSObject);
+  Isolate* isolate = obj->GetIsolate();
+  CALL_HEAP_FUNCTION(isolate,
+                     isolate->heap()->CopyJSObject(*obj), JSObject);
 }
 
 
 Handle<Object> SetAccessor(Handle<JSObject> obj, Handle<AccessorInfo> info) {
-  CALL_HEAP_FUNCTION(obj->DefineAccessor(*info), Object);
+  CALL_HEAP_FUNCTION(obj->GetIsolate(), obj->DefineAccessor(*info), Object);
 }
 
 
@@ -484,12 +536,13 @@ static void ClearWrapperCache(Persistent<v8::Value> handle, void*) {
   Proxy* proxy = Script::cast(wrapper->value())->wrapper();
   ASSERT(proxy->proxy() == reinterpret_cast<Address>(cache.location()));
   proxy->set_proxy(0);
-  GlobalHandles::Destroy(cache.location());
-  Counters::script_wrappers.Decrement();
+  Isolate::Current()->global_handles()->Destroy(cache.location());
+  COUNTERS->script_wrappers()->Decrement();
 }
 
 
 Handle<JSValue> GetScriptWrapper(Handle<Script> script) {
+  Isolate* isolate = Isolate::Current();
   if (script->wrapper()->proxy() != NULL) {
     // Return the script wrapper directly from the cache.
     return Handle<JSValue>(
@@ -497,17 +550,18 @@ Handle<JSValue> GetScriptWrapper(Handle<Script> script) {
   }
 
   // Construct a new script wrapper.
-  Counters::script_wrappers.Increment();
-  Handle<JSFunction> constructor = Top::script_function();
+  isolate->counters()->script_wrappers()->Increment();
+  Handle<JSFunction> constructor = isolate->script_function();
   Handle<JSValue> result =
-      Handle<JSValue>::cast(Factory::NewJSObject(constructor));
+      Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));
   result->set_value(*script);
 
   // Create a new weak global handle and use it to cache the wrapper
   // for future use. The cache will automatically be cleared by the
   // garbage collector when it is not used anymore.
-  Handle<Object> handle = GlobalHandles::Create(*result);
-  GlobalHandles::MakeWeak(handle.location(), NULL, &ClearWrapperCache);
+  Handle<Object> handle = isolate->global_handles()->Create(*result);
+  isolate->global_handles()->MakeWeak(handle.location(), NULL,
+                                      &ClearWrapperCache);
   script->wrapper()->set_proxy(reinterpret_cast<Address>(handle.location()));
   return result;
 }
@@ -520,7 +574,8 @@ void InitScriptLineEnds(Handle<Script> script) {
 
   if (!script->source()->IsString()) {
     ASSERT(script->source()->IsUndefined());
-    Handle<FixedArray> empty = Factory::NewFixedArray(0);
+    Handle<FixedArray> empty =
+        script->GetIsolate()->factory()->NewFixedArray(0);
     script->set_line_ends(*empty);
     ASSERT(script->line_ends()->IsFixedArray());
     return;
@@ -530,8 +585,8 @@ void InitScriptLineEnds(Handle<Script> script) {
 
   Handle<FixedArray> array = CalculateLineEnds(src, true);
 
-  if (*array != Heap::empty_fixed_array()) {
-    array->set_map(Heap::fixed_cow_array_map());
+  if (*array != HEAP->empty_fixed_array()) {
+    array->set_map(HEAP->fixed_cow_array_map());
   }
 
   script->set_line_ends(*array);
@@ -540,11 +595,12 @@ void InitScriptLineEnds(Handle<Script> script) {
 
 
 template <typename SourceChar>
-static void CalculateLineEnds(List<int>* line_ends,
+static void CalculateLineEnds(Isolate* isolate,
+                              List<int>* line_ends,
                               Vector<const SourceChar> src,
                               bool with_last_line) {
   const int src_len = src.length();
-  StringSearch<char, SourceChar> search(CStrVector("\n"));
+  StringSearch<char, SourceChar> search(isolate, CStrVector("\n"));
 
   // Find and record line ends.
   int position = 0;
@@ -571,15 +627,22 @@ Handle<FixedArray> CalculateLineEnds(Handle<String> src,
   List<int> line_ends(line_count_estimate);
   {
     AssertNoAllocation no_heap_allocation;  // ensure vectors stay valid.
+    Isolate* isolate = src->GetIsolate();
     // Dispatch on type of strings.
     if (src->IsAsciiRepresentation()) {
-      CalculateLineEnds(&line_ends, src->ToAsciiVector(), with_last_line);
+      CalculateLineEnds(isolate,
+                        &line_ends,
+                        src->ToAsciiVector(),
+                        with_last_line);
     } else {
-      CalculateLineEnds(&line_ends, src->ToUC16Vector(), with_last_line);
+      CalculateLineEnds(isolate,
+                        &line_ends,
+                        src->ToUC16Vector(),
+                        with_last_line);
     }
   }
   int line_count = line_ends.length();
-  Handle<FixedArray> array = Factory::NewFixedArray(line_count);
+  Handle<FixedArray> array = FACTORY->NewFixedArray(line_count);
   for (int i = 0; i < line_count; i++) {
     array->set(i, Smi::FromInt(line_ends[i]));
   }
@@ -645,17 +708,18 @@ void CustomArguments::IterateInstance(ObjectVisitor* v) {
 // Compute the property keys from the interceptor.
 v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver,
                                                  Handle<JSObject> object) {
+  Isolate* isolate = receiver->GetIsolate();
   Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
-  CustomArguments args(interceptor->data(), *receiver, *object);
+  CustomArguments args(isolate, interceptor->data(), *receiver, *object);
   v8::AccessorInfo info(args.end());
   v8::Handle<v8::Array> result;
   if (!interceptor->enumerator()->IsUndefined()) {
     v8::NamedPropertyEnumerator enum_fun =
         v8::ToCData<v8::NamedPropertyEnumerator>(interceptor->enumerator());
-    LOG(ApiObjectAccess("interceptor-named-enum", *object));
+    LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object));
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = enum_fun(info);
     }
   }
@@ -666,17 +730,18 @@ v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver,
 // Compute the element keys from the interceptor.
 v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver,
                                                    Handle<JSObject> object) {
+  Isolate* isolate = receiver->GetIsolate();
   Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
-  CustomArguments args(interceptor->data(), *receiver, *object);
+  CustomArguments args(isolate, interceptor->data(), *receiver, *object);
   v8::AccessorInfo info(args.end());
   v8::Handle<v8::Array> result;
   if (!interceptor->enumerator()->IsUndefined()) {
     v8::IndexedPropertyEnumerator enum_fun =
         v8::ToCData<v8::IndexedPropertyEnumerator>(interceptor->enumerator());
-    LOG(ApiObjectAccess("interceptor-indexed-enum", *object));
+    LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = enum_fun(info);
     }
   }
@@ -697,31 +762,33 @@ static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
 Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
                                           KeyCollectionType type) {
   USE(ContainsOnlyValidKeys);
-  Handle<FixedArray> content = Factory::empty_fixed_array();
+  Isolate* isolate = object->GetIsolate();
+  Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
   Handle<JSObject> arguments_boilerplate =
       Handle<JSObject>(
-          Top::context()->global_context()->arguments_boilerplate());
+          isolate->context()->global_context()->arguments_boilerplate());
   Handle<JSFunction> arguments_function =
       Handle<JSFunction>(
           JSFunction::cast(arguments_boilerplate->map()->constructor()));
 
   // Only collect keys if access is permitted.
   for (Handle<Object> p = object;
-       *p != Heap::null_value();
+       *p != isolate->heap()->null_value();
        p = Handle<Object>(p->GetPrototype())) {
     Handle<JSObject> current(JSObject::cast(*p));
 
     // Check access rights if required.
     if (current->IsAccessCheckNeeded() &&
-        !Top::MayNamedAccess(*current, Heap::undefined_value(),
-                             v8::ACCESS_KEYS)) {
-      Top::ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
+        !isolate->MayNamedAccess(*current,
+                                 isolate->heap()->undefined_value(),
+                                 v8::ACCESS_KEYS)) {
+      isolate->ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
       break;
     }
 
     // Compute the element keys.
     Handle<FixedArray> element_keys =
-        Factory::NewFixedArray(current->NumberOfEnumElements());
+        isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
     current->GetEnumElementKeys(*element_keys);
     content = UnionOfKeys(content, element_keys);
     ASSERT(ContainsOnlyValidKeys(content));
@@ -775,26 +842,28 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
 
 
 Handle<JSArray> GetKeysFor(Handle<JSObject> object) {
-  Counters::for_in.Increment();
+  Isolate* isolate = object->GetIsolate();
+  isolate->counters()->for_in()->Increment();
   Handle<FixedArray> elements = GetKeysInFixedArrayFor(object,
                                                        INCLUDE_PROTOS);
-  return Factory::NewJSArrayWithElements(elements);
+  return isolate->factory()->NewJSArrayWithElements(elements);
 }
 
 
 Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                                        bool cache_result) {
   int index = 0;
+  Isolate* isolate = object->GetIsolate();
   if (object->HasFastProperties()) {
     if (object->map()->instance_descriptors()->HasEnumCache()) {
-      Counters::enum_cache_hits.Increment();
+      isolate->counters()->enum_cache_hits()->Increment();
       DescriptorArray* desc = object->map()->instance_descriptors();
       return Handle<FixedArray>(FixedArray::cast(desc->GetEnumCache()));
     }
-    Counters::enum_cache_misses.Increment();
+    isolate->counters()->enum_cache_misses()->Increment();
     int num_enum = object->NumberOfEnumProperties();
-    Handle<FixedArray> storage = Factory::NewFixedArray(num_enum);
-    Handle<FixedArray> sort_array = Factory::NewFixedArray(num_enum);
+    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
+    Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
     Handle<DescriptorArray> descs =
         Handle<DescriptorArray>(object->map()->instance_descriptors());
     for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -808,7 +877,8 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
     (*storage)->SortPairs(*sort_array, sort_array->length());
     if (cache_result) {
       Handle<FixedArray> bridge_storage =
-          Factory::NewFixedArray(DescriptorArray::kEnumCacheBridgeLength);
+          isolate->factory()->NewFixedArray(
+              DescriptorArray::kEnumCacheBridgeLength);
       DescriptorArray* desc = object->map()->instance_descriptors();
       desc->SetEnumCache(*bridge_storage, *storage);
     }
@@ -816,8 +886,8 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
     return storage;
   } else {
     int num_enum = object->NumberOfEnumProperties();
-    Handle<FixedArray> storage = Factory::NewFixedArray(num_enum);
-    Handle<FixedArray> sort_array = Factory::NewFixedArray(num_enum);
+    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
+    Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
     object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);
     return storage;
   }
@@ -834,10 +904,12 @@ static bool CompileLazyHelper(CompilationInfo* info,
                               ClearExceptionFlag flag) {
   // Compile the source information to a code object.
   ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
-  ASSERT(!Top::has_pending_exception());
+  ASSERT(!info->isolate()->has_pending_exception());
   bool result = Compiler::CompileLazy(info);
-  ASSERT(result != Top::has_pending_exception());
-  if (!result && flag == CLEAR_EXCEPTION) Top::clear_pending_exception();
+  ASSERT(result != Isolate::Current()->has_pending_exception());
+  if (!result && flag == CLEAR_EXCEPTION) {
+    info->isolate()->clear_pending_exception();
+  }
   return result;
 }
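
Every CALL_HEAP_FUNCTION / CALL_HEAP_FUNCTION_VOID site above now names the isolate that owns the allocation, usually recovered from one of the handle arguments via GetIsolate() or GetHeap()->isolate() rather than from thread-local state. A sketch of a new helper written in the same style; GetLength is hypothetical, and the macro's definition is not shown here:

  Handle<Object> GetLength(Handle<JSObject> object) {
    // Recover the owning isolate from the receiver, matching the helpers above.
    Isolate* isolate = object->GetIsolate();
    CALL_HEAP_FUNCTION(isolate,
                       object->GetProperty(isolate->heap()->length_symbol()),
                       Object);
  }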
 
index 667d5caa6c426a837db396f8eff05276a8a77816..53a51ce6c0b05c5dfa168817cec8e3654cdaf4d7 100644 (file)
@@ -44,6 +44,7 @@ class Handle {
  public:
   INLINE(explicit Handle(T** location)) { location_ = location; }
   INLINE(explicit Handle(T* obj));
+  INLINE(Handle(T* obj, Isolate* isolate));
 
   INLINE(Handle()) : location_(NULL) {}
 
@@ -82,7 +83,7 @@ class Handle {
   }
 
   static Handle<T> null() { return Handle<T>(); }
-  bool is_null() { return location_ == NULL; }
+  bool is_null() const { return location_ == NULL; }
 
   // Closes the given scope, but lets this handle escape. See
   // implementation in api.h.
@@ -107,34 +108,20 @@ class Handle {
 // for which the handle scope has been deleted is undefined.
 class HandleScope {
  public:
-  HandleScope() : prev_next_(current_.next), prev_limit_(current_.limit) {
-    current_.level++;
-  }
+  inline HandleScope();
+  explicit inline HandleScope(Isolate* isolate);
 
-  ~HandleScope() {
-    CloseScope();
-  }
+  inline ~HandleScope();
 
   // Counts the number of allocated handles.
   static int NumberOfHandles();
 
   // Creates a new handle with the given value.
   template <typename T>
-  static inline T** CreateHandle(T* value) {
-    internal::Object** cur = current_.next;
-    if (cur == current_.limit) cur = Extend();
-    // Update the current next field, set the value in the created
-    // handle, and return the result.
-    ASSERT(cur < current_.limit);
-    current_.next = cur + 1;
-
-    T** result = reinterpret_cast<T**>(cur);
-    *result = value;
-    return result;
-  }
+  static inline T** CreateHandle(T* value, Isolate* isolate);
 
   // Deallocates any extensions used by the current scope.
-  static void DeleteExtensions();
+  static void DeleteExtensions(Isolate* isolate);
 
   static Address current_next_address();
   static Address current_limit_address();
@@ -145,20 +132,9 @@ class HandleScope {
   // a Handle backed by the parent scope holding the
   // value of the argument handle.
   template <typename T>
-  Handle<T> CloseAndEscape(Handle<T> handle_value) {
-    T* value = *handle_value;
-    // Throw away all handles in the current scope.
-    CloseScope();
-    // Allocate one handle in the parent scope.
-    ASSERT(current_.level > 0);
-    Handle<T> result(CreateHandle<T>(value));
-    // Reinitialize the current scope (so that it's ready
-    // to be used or closed again).
-    prev_next_ = current_.next;
-    prev_limit_ = current_.limit;
-    current_.level++;
-    return result;
-  }
+  Handle<T> CloseAndEscape(Handle<T> handle_value);
+
+  Isolate* isolate() { return isolate_; }
 
  private:
   // Prevent heap allocation or illegal handle scopes.
@@ -167,21 +143,9 @@ class HandleScope {
   void* operator new(size_t size);
   void operator delete(void* size_t);
 
-  inline void CloseScope() {
-    current_.next = prev_next_;
-    current_.level--;
-    if (current_.limit != prev_limit_) {
-      current_.limit = prev_limit_;
-      DeleteExtensions();
-    }
-#ifdef DEBUG
-    ZapRange(prev_next_, prev_limit_);
-#endif
-  }
+  inline void CloseScope();
 
-  static v8::ImplementationUtilities::HandleScopeData current_;
-  // Holds values on entry. The prev_next_ value is never NULL
-  // on_entry, but is set to NULL when this scope is closed.
+  Isolate* isolate_;
   Object** prev_next_;
   Object** prev_limit_;
 
diff --git a/src/hashmap.h b/src/hashmap.h
index 27989889c3e665a6806a61557d6f6eb13f4b011e..bb3e3ceb4496fcb85c10f7f8922dfeba76bba920 100644 (file)
@@ -55,9 +55,9 @@ class HashMap {
 
   // initial_capacity is the size of the initial hash map;
   // it must be a power of 2 (and thus must not be 0).
-  HashMap(MatchFun match,
-          Allocator* allocator = &DefaultAllocator,
-          uint32_t initial_capacity = 8);
+  explicit HashMap(MatchFun match,
+                   Allocator* allocator = &DefaultAllocator,
+                   uint32_t initial_capacity = 8);
 
   ~HashMap();
 
diff --git a/src/heap-inl.h b/src/heap-inl.h
index 7b91e8715a18d52294739664c8383825847e7f93..99737ed9b3ee4c3145bb5047b0ad3a6a91cf7502 100644 (file)
 
 #include "heap.h"
 #include "objects.h"
+#include "isolate.h"
 #include "v8-counters.h"
 
 namespace v8 {
 namespace internal {
 
+void PromotionQueue::insert(HeapObject* target, int size) {
+  *(--rear_) = reinterpret_cast<intptr_t>(target);
+  *(--rear_) = size;
+  // Assert no overflow into live objects.
+  ASSERT(reinterpret_cast<Address>(rear_) >= HEAP->new_space()->top());
+}
+
+
 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }
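PromotionQueue::insert() above writes two words per entry (target, then size) and grows rear_ downward from the high end of to-space; the matching remove(), visible in the code removed from heap.cc later in this patch, drains from front_ in the same two-word steps. A condensed sketch of that counterpart:

    void PromotionQueue::remove(HeapObject** target, int* size) {
      *target = reinterpret_cast<HeapObject*>(*(--front_));
      *size = static_cast<int>(*(--front_));
      ASSERT(front_ >= rear_);  // assert no underflow past the rear
    }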
@@ -146,8 +155,8 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes,
       Heap::allocation_timeout_-- <= 0) {
     return Failure::RetryAfterGC(space);
   }
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result;
   if (NEW_SPACE == space) {
@@ -214,8 +223,8 @@ void Heap::FinalizeExternalString(String* string) {
 
 MaybeObject* Heap::AllocateRawMap() {
 #ifdef DEBUG
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
@@ -232,8 +241,8 @@ MaybeObject* Heap::AllocateRawMap() {
 
 MaybeObject* Heap::AllocateRawCell() {
 #ifdef DEBUG
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
@@ -341,7 +350,7 @@ void Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
        remaining--) {
     Memory::Object_at(dst) = Memory::Object_at(src);
 
-    if (Heap::InNewSpace(Memory::Object_at(dst))) {
+    if (InNewSpace(Memory::Object_at(dst))) {
       marks |= page->GetRegionMaskForAddress(dst);
     }
 
@@ -387,8 +396,13 @@ void Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
 }
 
 
+void Heap::ScavengePointer(HeapObject** p) {
+  ScavengeObject(p, *p);
+}
+
+
 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
+  ASSERT(HEAP->InFromSpace(object));
 
   // We use the first word (where the map pointer usually is) of a heap
   // object to record the forwarding pointer.  A forwarding pointer can
@@ -461,10 +475,15 @@ void Heap::SetLastScriptId(Object* last_script_id) {
   roots_[kLastScriptIdRootIndex] = last_script_id;
 }
 
+Isolate* Heap::isolate() {
+  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
+      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
+}
+
 
 #ifdef DEBUG
 #define GC_GREEDY_CHECK() \
-  if (FLAG_gc_greedy) v8::internal::Heap::GarbageCollectionGreedyCheck()
+  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
 #else
 #define GC_GREEDY_CHECK() { }
 #endif
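Heap::isolate() above recovers the owning Isolate from an embedded Heap by subtracting the byte offset of the heap field inside Isolate; the constant 4 is only a dummy base address used to compute that offset without dereferencing anything. A rough equivalent, as a sketch assuming Isolate::heap() returns the address of the embedded member:

    Isolate* Heap::isolate() {
      size_t heap_offset =
          reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) - 4;
      return reinterpret_cast<Isolate*>(
          reinterpret_cast<intptr_t>(this) - heap_offset);
    }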
@@ -477,7 +496,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
 // Warning: Do not use the identifiers __object__, __maybe_object__ or
 // __scope__ in a call to this macro.
 
-#define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)         \
+#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
   do {                                                                    \
     GC_GREEDY_CHECK();                                                    \
     MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
@@ -487,16 +506,16 @@ void Heap::SetLastScriptId(Object* last_script_id) {
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
     }                                                                     \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
-    Heap::CollectGarbage(                                                 \
-        Failure::cast(__maybe_object__)->allocation_space());             \
+    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
+                                    allocation_space());                  \
     __maybe_object__ = FUNCTION_CALL;                                     \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
     if (__maybe_object__->IsOutOfMemory()) {                              \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
     }                                                                     \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
-    Counters::gc_last_resort_from_handles.Increment();                    \
-    Heap::CollectAllAvailableGarbage();                                   \
+    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
+    ISOLATE->heap()->CollectAllAvailableGarbage();                        \
     {                                                                     \
       AlwaysAllocateScope __scope__;                                      \
       __maybe_object__ = FUNCTION_CALL;                                   \
@@ -511,14 +530,17 @@ void Heap::SetLastScriptId(Object* last_script_id) {
   } while (false)
 
 
-#define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE)                \
-  CALL_AND_RETRY(FUNCTION_CALL,                                \
-                 return Handle<TYPE>(TYPE::cast(__object__)),  \
+// TODO(isolates): cache isolate: either accept as a parameter or
+//                 set to some known symbol (__CUR_ISOLATE__?)
+#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)       \
+  CALL_AND_RETRY(ISOLATE,                                      \
+                 FUNCTION_CALL,                                \
+                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),  \
                  return Handle<TYPE>())
 
 
-#define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \
-  CALL_AND_RETRY(FUNCTION_CALL, return, return)
+#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
+  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
 
 
 #ifdef DEBUG
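A sketch of how a handle-returning allocation helper would use the reworked macro; NewMyFixedArray is hypothetical, and Heap::AllocateFixedArray is assumed to be the usual MaybeObject*-returning allocator:

    Handle<FixedArray> NewMyFixedArray(Isolate* isolate, int length) {
      // The macro returns from the enclosing function on every path,
      // either with the allocated handle or with an empty one.
      CALL_HEAP_FUNCTION(isolate,
                         isolate->heap()->AllocateFixedArray(length),
                         FixedArray);
    }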
@@ -534,7 +556,7 @@ inline bool Heap::allow_allocation(bool new_state) {
 
 void ExternalStringTable::AddString(String* string) {
   ASSERT(string->IsExternalString());
-  if (Heap::InNewSpace(string)) {
+  if (heap_->InNewSpace(string)) {
     new_space_strings_.Add(string);
   } else {
     old_space_strings_.Add(string);
@@ -559,12 +581,12 @@ void ExternalStringTable::Iterate(ObjectVisitor* v) {
 void ExternalStringTable::Verify() {
 #ifdef DEBUG
   for (int i = 0; i < new_space_strings_.length(); ++i) {
-    ASSERT(Heap::InNewSpace(new_space_strings_[i]));
-    ASSERT(new_space_strings_[i] != Heap::raw_unchecked_null_value());
+    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
+    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_null_value());
   }
   for (int i = 0; i < old_space_strings_.length(); ++i) {
-    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
-    ASSERT(old_space_strings_[i] != Heap::raw_unchecked_null_value());
+    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
+    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_null_value());
   }
 #endif
 }
@@ -572,7 +594,7 @@ void ExternalStringTable::Verify() {
 
 void ExternalStringTable::AddOldString(String* string) {
   ASSERT(string->IsExternalString());
-  ASSERT(!Heap::InNewSpace(string));
+  ASSERT(!heap_->InNewSpace(string));
   old_space_strings_.Add(string);
 }
 
@@ -582,6 +604,100 @@ void ExternalStringTable::ShrinkNewStrings(int position) {
   Verify();
 }
 
+
+void Heap::ClearInstanceofCache() {
+  set_instanceof_cache_function(the_hole_value());
+}
+
+
+Object* Heap::ToBoolean(bool condition) {
+  return condition ? true_value() : false_value();
+}
+
+
+void Heap::CompletelyClearInstanceofCache() {
+  set_instanceof_cache_map(the_hole_value());
+  set_instanceof_cache_function(the_hole_value());
+}
+
+
+MaybeObject* TranscendentalCache::Get(Type type, double input) {
+  SubCache* cache = caches_[type];
+  if (cache == NULL) {
+    caches_[type] = cache = new SubCache(type);
+  }
+  return cache->Get(input);
+}
+
+
+Address TranscendentalCache::cache_array_address() {
+  return reinterpret_cast<Address>(caches_);
+}
+
+
+double TranscendentalCache::SubCache::Calculate(double input) {
+  switch (type_) {
+    case ACOS:
+      return acos(input);
+    case ASIN:
+      return asin(input);
+    case ATAN:
+      return atan(input);
+    case COS:
+      return cos(input);
+    case EXP:
+      return exp(input);
+    case LOG:
+      return log(input);
+    case SIN:
+      return sin(input);
+    case TAN:
+      return tan(input);
+    default:
+      return 0.0;  // Never happens.
+  }
+}
+
+
+MaybeObject* TranscendentalCache::SubCache::Get(double input) {
+  Converter c;
+  c.dbl = input;
+  int hash = Hash(c);
+  Element e = elements_[hash];
+  if (e.in[0] == c.integers[0] &&
+      e.in[1] == c.integers[1]) {
+    ASSERT(e.output != NULL);
+    isolate_->counters()->transcendental_cache_hit()->Increment();
+    return e.output;
+  }
+  double answer = Calculate(input);
+  isolate_->counters()->transcendental_cache_miss()->Increment();
+  Object* heap_number;
+  { MaybeObject* maybe_heap_number =
+        isolate_->heap()->AllocateHeapNumber(answer);
+    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
+  }
+  elements_[hash].in[0] = c.integers[0];
+  elements_[hash].in[1] = c.integers[1];
+  elements_[hash].output = heap_number;
+  return heap_number;
+}
+
+
+Heap* _inline_get_heap_() {
+  return HEAP;
+}
+
+
+void MarkCompactCollector::SetMark(HeapObject* obj) {
+  tracer_->increment_marked_count();
+#ifdef DEBUG
+  UpdateLiveObjectCount(obj);
+#endif
+  obj->SetMark();
+}
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_HEAP_INL_H_
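The transcendental cache is likewise reached through the isolate instead of static state; a usage sketch, with CachedSin hypothetical and the Get()/SIN names taken from the code above:

    MaybeObject* CachedSin(Isolate* isolate, double x) {
      // Per-type sub-caches are created lazily inside Get().
      return isolate->transcendental_cache()->Get(TranscendentalCache::SIN, x);
    }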
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index 97d10c9e88cad4ae42c6f8e70d3789536f24c93a..c78f474472a5fde56f7bf3206a7d68caba079eba 100644 (file)
@@ -72,14 +72,14 @@ JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
     String* constructor = GetConstructorNameForHeapProfile(
         JSObject::cast(js_obj));
     // Differentiate Object and Array instances.
-    if (fine_grain && (constructor == Heap::Object_symbol() ||
-                       constructor == Heap::Array_symbol())) {
+    if (fine_grain && (constructor == HEAP->Object_symbol() ||
+                       constructor == HEAP->Array_symbol())) {
       return JSObjectsCluster(constructor, obj);
     } else {
       return JSObjectsCluster(constructor);
     }
   } else if (obj->IsString()) {
-    return JSObjectsCluster(Heap::String_symbol());
+    return JSObjectsCluster(HEAP->String_symbol());
   } else if (obj->IsJSGlobalPropertyCell()) {
     return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
   } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
@@ -112,10 +112,10 @@ int Clusterizer::CalculateNetworkSize(JSObject* obj) {
   int size = obj->Size();
   // If 'properties' and 'elements' are non-empty (thus, non-shared),
   // take their size into account.
-  if (obj->properties() != Heap::empty_fixed_array()) {
+  if (obj->properties() != HEAP->empty_fixed_array()) {
     size += obj->properties()->Size();
   }
-  if (obj->elements() != Heap::empty_fixed_array()) {
+  if (obj->elements() != HEAP->empty_fixed_array()) {
     size += obj->elements()->Size();
   }
   // For functions, also account non-empty context and literals sizes.
@@ -174,7 +174,8 @@ class RetainersPrinter : public RetainerHeapProfile::Printer {
     HeapStringAllocator allocator;
     StringStream stream(&allocator);
     cluster.Print(&stream);
-    LOG(HeapSampleJSRetainersEvent(
+    LOG(ISOLATE,
+        HeapSampleJSRetainersEvent(
         *(stream.ToCString()), *(retainers.ToCString())));
   }
 };
@@ -315,8 +316,6 @@ void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
 }
 
 
-HeapProfiler* HeapProfiler::singleton_ = NULL;
-
 HeapProfiler::HeapProfiler()
     : snapshots_(new HeapSnapshotsCollection()),
       next_snapshot_uid_(1) {
@@ -331,8 +330,9 @@ HeapProfiler::~HeapProfiler() {
 
 void HeapProfiler::Setup() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (singleton_ == NULL) {
-    singleton_ = new HeapProfiler();
+  Isolate* isolate = Isolate::Current();
+  if (isolate->heap_profiler() == NULL) {
+    isolate->set_heap_profiler(new HeapProfiler());
   }
 #endif
 }
@@ -340,8 +340,9 @@ void HeapProfiler::Setup() {
 
 void HeapProfiler::TearDown() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  delete singleton_;
-  singleton_ = NULL;
+  Isolate* isolate = Isolate::Current();
+  delete isolate->heap_profiler();
+  isolate->set_heap_profiler(NULL);
 #endif
 }
 
@@ -351,36 +352,38 @@ void HeapProfiler::TearDown() {
 HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
                                          int type,
                                          v8::ActivityControl* control) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->TakeSnapshotImpl(name, type, control);
+  ASSERT(Isolate::Current()->heap_profiler() != NULL);
+  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
+                                                               type,
+                                                               control);
 }
 
 
 HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
                                          int type,
                                          v8::ActivityControl* control) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->TakeSnapshotImpl(name, type, control);
+  ASSERT(Isolate::Current()->heap_profiler() != NULL);
+  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
+                                                               type,
+                                                               control);
 }
 
 
 void HeapProfiler::DefineWrapperClass(
     uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) {
-  ASSERT(singleton_ != NULL);
   ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId);
-  if (singleton_->wrapper_callbacks_.length() <= class_id) {
-    singleton_->wrapper_callbacks_.AddBlock(
-        NULL, class_id - singleton_->wrapper_callbacks_.length() + 1);
+  if (wrapper_callbacks_.length() <= class_id) {
+    wrapper_callbacks_.AddBlock(
+        NULL, class_id - wrapper_callbacks_.length() + 1);
   }
-  singleton_->wrapper_callbacks_[class_id] = callback;
+  wrapper_callbacks_[class_id] = callback;
 }
 
 
 v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback(
     uint16_t class_id, Object** wrapper) {
-  ASSERT(singleton_ != NULL);
-  if (singleton_->wrapper_callbacks_.length() <= class_id) return NULL;
-  return singleton_->wrapper_callbacks_[class_id](
+  if (wrapper_callbacks_.length() <= class_id) return NULL;
+  return wrapper_callbacks_[class_id](
       class_id, Utils::ToLocal(Handle<Object>(wrapper)));
 }
 
@@ -394,13 +397,13 @@ HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
   bool generation_completed = true;
   switch (s_type) {
     case HeapSnapshot::kFull: {
-      Heap::CollectAllGarbage(true);
+      HEAP->CollectAllGarbage(true);
       HeapSnapshotGenerator generator(result, control);
       generation_completed = generator.GenerateSnapshot();
       break;
     }
     case HeapSnapshot::kAggregated: {
-      Heap::CollectAllGarbage(true);
+      HEAP->CollectAllGarbage(true);
       AggregatedHeapSnapshot agg_snapshot;
       AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
       generator.GenerateSnapshot();
@@ -427,26 +430,28 @@ HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
 
 
 int HeapProfiler::GetSnapshotsCount() {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->snapshots()->length();
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->snapshots()->length();
 }
 
 
 HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->snapshots()->at(index);
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->snapshots()->at(index);
 }
 
 
 HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->GetSnapshot(uid);
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->GetSnapshot(uid);
 }
 
 
 void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
-  ASSERT(singleton_ != NULL);
-  singleton_->snapshots_->ObjectMoveEvent(from, to);
+  snapshots_->ObjectMoveEvent(from, to);
 }
 
 
@@ -464,7 +469,8 @@ void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
   HeapStringAllocator allocator;
   StringStream stream(&allocator);
   cluster.Print(&stream);
-  LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
+  LOG(ISOLATE,
+      HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                    number_and_size.number(),
                                    number_and_size.bytes()));
 }
@@ -683,7 +689,7 @@ RetainerHeapProfile::RetainerHeapProfile()
       aggregator_(NULL) {
   JSObjectsCluster roots(JSObjectsCluster::ROOTS);
   ReferencesExtractor extractor(roots, this);
-  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
+  HEAP->IterateRoots(&extractor, VISIT_ONLY_STRONG);
 }
 
 
@@ -753,15 +759,18 @@ static void PrintProducerStackTrace(Object* obj, void* trace) {
   String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj));
   SmartPointer<char> s_name(
       constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
-  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
+  LOG(ISOLATE,
+      HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                 reinterpret_cast<Address*>(trace)));
 }
 
 
 void HeapProfiler::WriteSample() {
-  LOG(HeapSampleBeginEvent("Heap", "allocated"));
-  LOG(HeapSampleStats(
-      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));
+  Isolate* isolate = Isolate::Current();
+  LOG(isolate, HeapSampleBeginEvent("Heap", "allocated"));
+  LOG(isolate,
+      HeapSampleStats(
+          "Heap", "allocated", HEAP->CommittedMemory(), HEAP->SizeOfObjects()));
 
   AggregatedHeapSnapshot snapshot;
   AggregatedHeapSnapshotGenerator generator(&snapshot);
@@ -772,7 +781,8 @@ void HeapProfiler::WriteSample() {
        i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
        ++i) {
     if (info[i].bytes() > 0) {
-      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
+      LOG(isolate,
+          HeapSampleItemEvent(info[i].name(), info[i].number(),
                               info[i].bytes()));
     }
   }
@@ -780,10 +790,10 @@ void HeapProfiler::WriteSample() {
   snapshot.js_cons_profile()->PrintStats();
   snapshot.js_retainer_profile()->PrintStats();
 
-  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
-                                  StackWeakReferenceCallback);
+  isolate->global_handles()->IterateWeakRoots(PrintProducerStackTrace,
+                                              StackWeakReferenceCallback);
 
-  LOG(HeapSampleEndEvent("Heap", "allocated"));
+  LOG(isolate, HeapSampleEndEvent("Heap", "allocated"));
 }
 
 
@@ -1117,8 +1127,6 @@ void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
 }
 
 
-bool ProducerHeapProfile::can_log_ = false;
-
 void ProducerHeapProfile::Setup() {
   can_log_ = true;
 }
@@ -1138,10 +1146,10 @@ void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
     stack[i++] = it.frame()->pc();
   }
   stack[i] = NULL;
-  Handle<Object> handle = GlobalHandles::Create(obj);
-  GlobalHandles::MakeWeak(handle.location(),
-                          static_cast<void*>(stack.start()),
-                          StackWeakReferenceCallback);
+  Handle<Object> handle = isolate_->global_handles()->Create(obj);
+  isolate_->global_handles()->MakeWeak(handle.location(),
+                                       static_cast<void*>(stack.start()),
+                                       StackWeakReferenceCallback);
 }
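With the singleton_ field gone, all profiler state hangs off the isolate; the static entry points above reduce to the same lookup, sketched here with a hypothetical NotifyMove helper:

    static void NotifyMove(Address from, Address to) {
      HeapProfiler* profiler = Isolate::Current()->heap_profiler();
      if (profiler != NULL && profiler->is_profiling()) {
        profiler->ObjectMoveEvent(from, to);  // now an instance call
      }
    }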
 
 
diff --git a/src/heap-profiler.h b/src/heap-profiler.h
index c5a9ab497dd2a7c02e8e3a05acb07e95c8f72873..b425635da01c2fbc84b7cc78add84526f47a3cc2 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_HEAP_PROFILER_H_
 #define V8_HEAP_PROFILER_H_
 
+#include "isolate.h"
 #include "zone-inl.h"
 
 namespace v8 {
@@ -38,14 +39,15 @@ namespace internal {
 class HeapSnapshot;
 class HeapSnapshotsCollection;
 
-#define HEAP_PROFILE(Call)                             \
-  do {                                                 \
-    if (v8::internal::HeapProfiler::is_profiling()) {  \
-      v8::internal::HeapProfiler::Call;                \
-    }                                                  \
+#define HEAP_PROFILE(heap, call)                                             \
+  do {                                                                       \
+    v8::internal::HeapProfiler* profiler = heap->isolate()->heap_profiler(); \
+    if (profiler != NULL && profiler->is_profiling()) {                      \
+      profiler->call;                                                        \
+    }                                                                        \
   } while (false)
 #else
-#define HEAP_PROFILE(Call) ((void) 0)
+#define HEAP_PROFILE(heap, call) ((void) 0)
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 // The HeapProfiler writes data to the log files, which can be postprocessed
@@ -66,15 +68,15 @@ class HeapProfiler {
   static HeapSnapshot* GetSnapshot(int index);
   static HeapSnapshot* FindSnapshot(unsigned uid);
 
-  static void ObjectMoveEvent(Address from, Address to);
+  void ObjectMoveEvent(Address from, Address to);
 
-  static void DefineWrapperClass(
+  void DefineWrapperClass(
       uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback);
-  static v8::RetainedObjectInfo* ExecuteWrapperClassCallback(uint16_t class_id,
-                                                             Object** wrapper);
 
-  static INLINE(bool is_profiling()) {
-    return singleton_ != NULL && singleton_->snapshots_->is_tracking_objects();
+  v8::RetainedObjectInfo* ExecuteWrapperClassCallback(uint16_t class_id,
+                                                      Object** wrapper);
+  INLINE(bool is_profiling()) {
+    return snapshots_->is_tracking_objects();
   }
 
   // Obsolete interface.
@@ -95,7 +97,6 @@ class HeapProfiler {
   unsigned next_snapshot_uid_;
   List<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;
 
-  static HeapProfiler* singleton_;
 #endif  // ENABLE_LOGGING_AND_PROFILING
 };
 
@@ -154,10 +155,10 @@ class JSObjectsCluster BASE_EMBEDDED {
     // We use symbols that are illegal JS identifiers to identify special cases.
     // Their actual value is irrelevant for us.
     switch (special) {
-      case ROOTS: return Heap::result_symbol();
-      case GLOBAL_PROPERTY: return Heap::code_symbol();
-      case CODE: return Heap::arguments_shadow_symbol();
-      case SELF: return Heap::catch_var_symbol();
+      case ROOTS: return HEAP->result_symbol();
+      case GLOBAL_PROPERTY: return HEAP->code_symbol();
+      case CODE: return HEAP->arguments_shadow_symbol();
+      case SELF: return HEAP->catch_var_symbol();
       default:
         UNREACHABLE();
         return NULL;
@@ -347,7 +348,6 @@ class AggregatedHeapSnapshot {
 
 class HeapEntriesMap;
 class HeapEntriesAllocator;
-class HeapSnapshot;
 
 class AggregatedHeapSnapshotGenerator {
  public:
@@ -368,16 +368,23 @@ class AggregatedHeapSnapshotGenerator {
 };
 
 
-class ProducerHeapProfile : public AllStatic {
+class ProducerHeapProfile {
  public:
-  static void Setup();
-  static void RecordJSObjectAllocation(Object* obj) {
+  void Setup();
+  void RecordJSObjectAllocation(Object* obj) {
     if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
   }
 
  private:
-  static void DoRecordJSObjectAllocation(Object* obj);
-  static bool can_log_;
+  ProducerHeapProfile() : can_log_(false) { }
+
+  void DoRecordJSObjectAllocation(Object* obj);
+  Isolate* isolate_;
+  bool can_log_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
 };
 
 #endif  // ENABLE_LOGGING_AND_PROFILING
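The reworked HEAP_PROFILE macro takes the heap whose isolate owns the profiler and only forwards the call when profiling is active; usage as it appears later in this patch, for reference:

    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));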
diff --git a/src/heap.cc b/src/heap.cc
index 61a476e3af02df4fe5008372010f7813dd0bd288..9a5a2b05a72a5699fed20e306307e70ea13dace6 100644 (file)
 #include "arm/regexp-macro-assembler-arm.h"
 #endif
 
-
 namespace v8 {
 namespace internal {
 
 
-String* Heap::hidden_symbol_;
-Object* Heap::roots_[Heap::kRootListLength];
-Object* Heap::global_contexts_list_;
-
-
-NewSpace Heap::new_space_;
-OldSpace* Heap::old_pointer_space_ = NULL;
-OldSpace* Heap::old_data_space_ = NULL;
-OldSpace* Heap::code_space_ = NULL;
-MapSpace* Heap::map_space_ = NULL;
-CellSpace* Heap::cell_space_ = NULL;
-LargeObjectSpace* Heap::lo_space_ = NULL;
-
 static const intptr_t kMinimumPromotionLimit = 2 * MB;
 static const intptr_t kMinimumAllocationLimit = 8 * MB;
 
-intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
-intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
 
-int Heap::old_gen_exhausted_ = false;
+static Mutex* gc_initializer_mutex = OS::CreateMutex();
 
-int Heap::amount_of_external_allocated_memory_ = 0;
-int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
 
+Heap::Heap()
+    : isolate_(NULL),
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
 #if defined(ANDROID)
-static const int default_max_semispace_size_  = 2*MB;
-intptr_t Heap::max_old_generation_size_ = 192*MB;
-int Heap::initial_semispace_size_ = 128*KB;
-intptr_t Heap::code_range_size_ = 0;
-intptr_t Heap::max_executable_size_ = max_old_generation_size_;
+      reserved_semispace_size_(2*MB),
+      max_semispace_size_(2*MB),
+      initial_semispace_size_(128*KB),
+      max_old_generation_size_(192*MB),
+      max_executable_size_(max_old_generation_size_),
+      code_range_size_(0),
 #elif defined(V8_TARGET_ARCH_X64)
-static const int default_max_semispace_size_  = 16*MB;
-intptr_t Heap::max_old_generation_size_ = 1*GB;
-int Heap::initial_semispace_size_ = 1*MB;
-intptr_t Heap::code_range_size_ = 512*MB;
-intptr_t Heap::max_executable_size_ = 256*MB;
-#else
-static const int default_max_semispace_size_  = 8*MB;
-intptr_t Heap::max_old_generation_size_ = 512*MB;
-int Heap::initial_semispace_size_ = 512*KB;
-intptr_t Heap::code_range_size_ = 0;
-intptr_t Heap::max_executable_size_ = 128*MB;
-#endif
-
-// Allow build-time customization of the max semispace size. Building
-// V8 with snapshots and a non-default max semispace size is much
-// easier if you can define it as part of the build environment.
-#if defined(V8_MAX_SEMISPACE_SIZE)
-int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
+      reserved_semispace_size_(16*MB),
+      max_semispace_size_(16*MB),
+      initial_semispace_size_(1*MB),
+      max_old_generation_size_(1*GB),
+      max_executable_size_(256*MB),
+      code_range_size_(512*MB),
 #else
-int Heap::max_semispace_size_ = default_max_semispace_size_;
+      reserved_semispace_size_(8*MB),
+      max_semispace_size_(8*MB),
+      initial_semispace_size_(512*KB),
+      max_old_generation_size_(512*MB),
+      max_executable_size_(128*MB),
+      code_range_size_(0),
 #endif
-
-// The snapshot semispace size will be the default semispace size if
-// snapshotting is used and will be the requested semispace size as
-// set up by ConfigureHeap otherwise.
-int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
-
-List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
-List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
-
-GCCallback Heap::global_gc_prologue_callback_ = NULL;
-GCCallback Heap::global_gc_epilogue_callback_ = NULL;
-HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
-
 // Variables set based on semispace_size_ and old_generation_size_ in
-// ConfigureHeap.
-
+// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_).
 // Will be 4 * reserved_semispace_size_ to ensure that young
 // generation can be aligned to its size.
-int Heap::survived_since_last_expansion_ = 0;
-intptr_t Heap::external_allocation_limit_ = 0;
-
-Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
-
-int Heap::mc_count_ = 0;
-int Heap::ms_count_ = 0;
-unsigned int Heap::gc_count_ = 0;
-
-GCTracer* Heap::tracer_ = NULL;
-
-int Heap::unflattened_strings_length_ = 0;
-
-int Heap::always_allocate_scope_depth_ = 0;
-int Heap::linear_allocation_scope_depth_ = 0;
-int Heap::contexts_disposed_ = 0;
-
-int Heap::young_survivors_after_last_gc_ = 0;
-int Heap::high_survival_rate_period_length_ = 0;
-double Heap::survival_rate_ = 0;
-Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
-Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
-
+      survived_since_last_expansion_(0),
+      always_allocate_scope_depth_(0),
+      linear_allocation_scope_depth_(0),
+      contexts_disposed_(0),
+      new_space_(this),
+      old_pointer_space_(NULL),
+      old_data_space_(NULL),
+      code_space_(NULL),
+      map_space_(NULL),
+      cell_space_(NULL),
+      lo_space_(NULL),
+      gc_state_(NOT_IN_GC),
+      mc_count_(0),
+      ms_count_(0),
+      gc_count_(0),
+      unflattened_strings_length_(0),
 #ifdef DEBUG
-bool Heap::allocation_allowed_ = true;
-
-int Heap::allocation_timeout_ = 0;
-bool Heap::disallow_allocation_failure_ = false;
+      allocation_allowed_(true),
+      allocation_timeout_(0),
+      disallow_allocation_failure_(false),
+      debug_utils_(NULL),
 #endif  // DEBUG
+      old_gen_promotion_limit_(kMinimumPromotionLimit),
+      old_gen_allocation_limit_(kMinimumAllocationLimit),
+      external_allocation_limit_(0),
+      amount_of_external_allocated_memory_(0),
+      amount_of_external_allocated_memory_at_last_global_gc_(0),
+      old_gen_exhausted_(false),
+      hidden_symbol_(NULL),
+      global_gc_prologue_callback_(NULL),
+      global_gc_epilogue_callback_(NULL),
+      gc_safe_size_of_old_object_(NULL),
+      tracer_(NULL),
+      young_survivors_after_last_gc_(0),
+      high_survival_rate_period_length_(0),
+      survival_rate_(0),
+      previous_survival_rate_trend_(Heap::STABLE),
+      survival_rate_trend_(Heap::STABLE),
+      max_gc_pause_(0),
+      max_alive_after_gc_(0),
+      min_in_mutator_(kMaxInt),
+      alive_after_last_gc_(0),
+      last_gc_end_timestamp_(0.0),
+      page_watermark_invalidated_mark_(1 << Page::WATERMARK_INVALIDATED),
+      number_idle_notifications_(0),
+      last_idle_notification_gc_count_(0),
+      last_idle_notification_gc_count_init_(false),
+      configured_(false),
+      is_safe_to_read_maps_(true) {
+  // Allow build-time customization of the max semispace size. Building
+  // V8 with snapshots and a non-default max semispace size is much
+  // easier if you can define it as part of the build environment.
+#if defined(V8_MAX_SEMISPACE_SIZE)
+  max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
+#endif
+
+  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
+  global_contexts_list_ = NULL;
+  mark_compact_collector_.heap_ = this;
+  external_string_table_.heap_ = this;
+}
 
-intptr_t GCTracer::alive_after_last_gc_ = 0;
-double GCTracer::last_gc_end_timestamp_ = 0.0;
-int GCTracer::max_gc_pause_ = 0;
-intptr_t GCTracer::max_alive_after_gc_ = 0;
-int GCTracer::min_in_mutator_ = kMaxInt;
 
 intptr_t Heap::Capacity() {
   if (!HasBeenSetup()) return 0;
@@ -190,7 +181,7 @@ intptr_t Heap::CommittedMemory() {
 intptr_t Heap::CommittedMemoryExecutable() {
   if (!HasBeenSetup()) return 0;
 
-  return MemoryAllocator::SizeExecutable();
+  return isolate()->memory_allocator()->SizeExecutable();
 }
 
 
@@ -217,8 +208,8 @@ bool Heap::HasBeenSetup() {
 
 
 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
-  ASSERT(!Heap::InNewSpace(object));  // Code only works for old objects.
-  ASSERT(!MarkCompactCollector::are_map_pointers_encoded());
+  ASSERT(!HEAP->InNewSpace(object));  // Code only works for old objects.
+  ASSERT(!HEAP->mark_compact_collector()->are_map_pointers_encoded());
   MapWord map_word = object->map_word();
   map_word.ClearMark();
   map_word.ClearOverflow();
@@ -227,8 +218,8 @@ int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
 
 
 int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
-  ASSERT(!Heap::InNewSpace(object));  // Code only works for old objects.
-  ASSERT(MarkCompactCollector::are_map_pointers_encoded());
+  ASSERT(!HEAP->InNewSpace(object));  // Code only works for old objects.
+  ASSERT(HEAP->mark_compact_collector()->are_map_pointers_encoded());
   uint32_t marker = Memory::uint32_at(object->address());
   if (marker == MarkCompactCollector::kSingleFreeEncoding) {
     return kIntSize;
@@ -236,7 +227,7 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
     return Memory::int_at(object->address() + kIntSize);
   } else {
     MapWord map_word = object->map_word();
-    Address map_address = map_word.DecodeMapAddress(Heap::map_space());
+    Address map_address = map_word.DecodeMapAddress(HEAP->map_space());
     Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address));
     return object->SizeFromMap(map);
   }
@@ -246,19 +237,20 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
   // Is global GC requested?
   if (space != NEW_SPACE || FLAG_gc_global) {
-    Counters::gc_compactor_caused_by_request.Increment();
+    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
     return MARK_COMPACTOR;
   }
 
   // Is enough data promoted to justify a global GC?
   if (OldGenerationPromotionLimitReached()) {
-    Counters::gc_compactor_caused_by_promoted_data.Increment();
+    isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
     return MARK_COMPACTOR;
   }
 
   // Have allocation in OLD and LO failed?
   if (old_gen_exhausted_) {
-    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
+    isolate_->counters()->
+        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
     return MARK_COMPACTOR;
   }
 
@@ -271,8 +263,9 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
   // and does not count available bytes already in the old space or code
   // space.  Undercounting is safe---we may get an unrequested full GC when
   // a scavenge would have succeeded.
-  if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
-    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
+  if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
+    isolate_->counters()->
+        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
     return MARK_COMPACTOR;
   }
 
@@ -317,8 +310,8 @@ void Heap::PrintShortHeapStatistics() {
   if (!FLAG_trace_gc_verbose) return;
   PrintF("Memory allocator,   used: %8" V8_PTR_PREFIX "d"
              ", available: %8" V8_PTR_PREFIX "d\n",
-         MemoryAllocator::Size(),
-         MemoryAllocator::Available());
+         isolate_->memory_allocator()->Size(),
+         isolate_->memory_allocator()->Available());
   PrintF("New space,          used: %8" V8_PTR_PREFIX "d"
              ", available: %8" V8_PTR_PREFIX "d\n",
          Heap::new_space_.Size(),
@@ -383,7 +376,7 @@ void Heap::ReportStatisticsAfterGC() {
 
 
 void Heap::GarbageCollectionPrologue() {
-  TranscendentalCache::Clear();
+  isolate_->transcendental_cache()->Clear();
   ClearJSFunctionResultCaches();
   gc_count_++;
   unflattened_strings_length_ = 0;
@@ -424,21 +417,24 @@ void Heap::GarbageCollectionEpilogue() {
     Verify();
   }
 
-  if (FLAG_print_global_handles) GlobalHandles::Print();
+  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
   if (FLAG_print_handles) PrintHandles();
   if (FLAG_gc_verbose) Print();
   if (FLAG_code_stats) ReportCodeStatistics("After GC");
 #endif
 
-  Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
+  isolate_->counters()->alive_after_last_gc()->Set(
+      static_cast<int>(SizeOfObjects()));
 
-  Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
-  Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
+  isolate_->counters()->symbol_table_capacity()->Set(
+      symbol_table()->Capacity());
+  isolate_->counters()->number_of_symbols()->Set(
+      symbol_table()->NumberOfElements());
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   ReportStatisticsAfterGC();
 #endif
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debug::AfterGarbageCollection();
+  isolate_->debug()->AfterGarbageCollection();
 #endif
 }
 
@@ -447,9 +443,9 @@ void Heap::CollectAllGarbage(bool force_compaction) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
-  MarkCompactCollector::SetForceCompaction(force_compaction);
+  mark_compact_collector_.SetForceCompaction(force_compaction);
   CollectGarbage(OLD_POINTER_SPACE);
-  MarkCompactCollector::SetForceCompaction(false);
+  mark_compact_collector_.SetForceCompaction(false);
 }
 
 
@@ -457,7 +453,7 @@ void Heap::CollectAllAvailableGarbage() {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
-  MarkCompactCollector::SetForceCompaction(true);
+  mark_compact_collector()->SetForceCompaction(true);
 
   // Major GC would invoke weak handle callbacks on weakly reachable
   // handles, but won't collect weakly reachable objects until next
@@ -473,13 +469,13 @@ void Heap::CollectAllAvailableGarbage() {
       break;
     }
   }
-  MarkCompactCollector::SetForceCompaction(false);
+  mark_compact_collector()->SetForceCompaction(false);
 }
 
 
 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
   // The VM is in the GC state until exiting this function.
-  VMState state(GC);
+  VMState state(isolate_, GC);
 
 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
@@ -492,7 +488,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
 
   bool next_gc_likely_to_collect_more = false;
 
-  { GCTracer tracer;
+  { GCTracer tracer(this);
     GarbageCollectionPrologue();
     // The GC count was incremented in the prologue.  Tell the tracer about
     // it.
@@ -502,8 +498,8 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
     tracer.set_collector(collector);
 
     HistogramTimer* rate = (collector == SCAVENGER)
-        ? &Counters::gc_scavenger
-        : &Counters::gc_compactor;
+        ? isolate_->counters()->gc_scavenger()
+        : isolate_->counters()->gc_compactor();
     rate->Start();
     next_gc_likely_to_collect_more =
         PerformGarbageCollection(collector, &tracer);
@@ -522,7 +518,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
 
 
 void Heap::PerformScavenge() {
-  GCTracer tracer;
+  GCTracer tracer(this);
   PerformGarbageCollection(SCAVENGER, &tracer);
 }
 
@@ -531,7 +527,6 @@ void Heap::PerformScavenge() {
 // Helper class for verifying the symbol table.
 class SymbolTableVerifier : public ObjectVisitor {
  public:
-  SymbolTableVerifier() { }
   void VisitPointers(Object** start, Object** end) {
     // Visit all HeapObject pointers in [start, end).
     for (Object** p = start; p < end; p++) {
@@ -548,7 +543,7 @@ class SymbolTableVerifier : public ObjectVisitor {
 static void VerifySymbolTable() {
 #ifdef DEBUG
   SymbolTableVerifier verifier;
-  Heap::symbol_table()->IterateElements(&verifier);
+  HEAP->symbol_table()->IterateElements(&verifier);
 #endif  // DEBUG
 }
 
@@ -633,7 +628,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
 
 
 void Heap::ClearJSFunctionResultCaches() {
-  if (Bootstrapper::IsActive()) return;
+  if (isolate_->bootstrapper()->IsActive()) return;
 
   Object* context = global_contexts_list_;
   while (!context->IsUndefined()) {
@@ -651,8 +646,9 @@ void Heap::ClearJSFunctionResultCaches() {
 }
 
 
+
 void Heap::ClearNormalizedMapCaches() {
-  if (Bootstrapper::IsActive()) return;
+  if (isolate_->bootstrapper()->IsActive()) return;
 
   Object* context = global_contexts_list_;
   while (!context->IsUndefined()) {
@@ -709,7 +705,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
   bool next_gc_likely_to_collect_more = false;
 
   if (collector != SCAVENGER) {
-    PROFILE(CodeMovingGCEvent());
+    PROFILE(isolate_, CodeMovingGCEvent());
   }
 
   VerifySymbolTable();
@@ -768,13 +764,13 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
     UpdateSurvivalRateTrend(start_new_space_size);
   }
 
-  Counters::objs_since_last_young.Set(0);
+  isolate_->counters()->objs_since_last_young()->Set(0);
 
   if (collector == MARK_COMPACTOR) {
     DisableAssertNoAllocation allow_allocation;
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     next_gc_likely_to_collect_more =
-        GlobalHandles::PostGarbageCollectionProcessing();
+        isolate_->global_handles()->PostGarbageCollectionProcessing();
   }
 
   // Update relocatables.
@@ -808,11 +804,11 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
 
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
-  LOG(ResourceEvent("markcompact", "begin"));
+  LOG(isolate_, ResourceEvent("markcompact", "begin"));
 
-  MarkCompactCollector::Prepare(tracer);
+  mark_compact_collector_.Prepare(tracer);
 
-  bool is_compacting = MarkCompactCollector::IsCompacting();
+  bool is_compacting = mark_compact_collector_.IsCompacting();
 
   if (is_compacting) {
     mc_count_++;
@@ -823,15 +819,17 @@ void Heap::MarkCompact(GCTracer* tracer) {
 
   MarkCompactPrologue(is_compacting);
 
-  MarkCompactCollector::CollectGarbage();
+  is_safe_to_read_maps_ = false;
+  mark_compact_collector_.CollectGarbage();
+  is_safe_to_read_maps_ = true;
 
-  LOG(ResourceEvent("markcompact", "end"));
+  LOG(isolate_, ResourceEvent("markcompact", "end"));
 
   gc_state_ = NOT_IN_GC;
 
   Shrink();
 
-  Counters::objs_since_last_full.Set(0);
+  isolate_->counters()->objs_since_last_full()->Set(0);
 
   contexts_disposed_ = 0;
 }
@@ -840,11 +838,11 @@ void Heap::MarkCompact(GCTracer* tracer) {
 void Heap::MarkCompactPrologue(bool is_compacting) {
   // At any old GC clear the keyed lookup cache to enable collection of unused
   // maps.
-  KeyedLookupCache::Clear();
-  ContextSlotCache::Clear();
-  DescriptorLookupCache::Clear();
+  isolate_->keyed_lookup_cache()->Clear();
+  isolate_->context_slot_cache()->Clear();
+  isolate_->descriptor_lookup_cache()->Clear();
 
-  CompilationCache::MarkCompactPrologue();
+  isolate_->compilation_cache()->MarkCompactPrologue();
 
   CompletelyClearInstanceofCache();
 
@@ -868,6 +866,7 @@ Object* Heap::FindCodeObject(Address a) {
 // Helper class for copying HeapObjects
 class ScavengeVisitor: public ObjectVisitor {
  public:
+  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
 
   void VisitPointer(Object** p) { ScavengePointer(p); }
 
@@ -879,48 +878,15 @@ class ScavengeVisitor: public ObjectVisitor {
  private:
   void ScavengePointer(Object** p) {
     Object* object = *p;
-    if (!Heap::InNewSpace(object)) return;
+    if (!heap_->InNewSpace(object)) return;
     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
-};
-
 
-// A queue of objects promoted during scavenge. Each object is accompanied
-// by it's size to avoid dereferencing a map pointer for scanning.
-class PromotionQueue {
- public:
-  void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
-  }
-
-  bool is_empty() { return front_ <= rear_; }
-
-  void insert(HeapObject* target, int size) {
-    *(--rear_) = reinterpret_cast<intptr_t>(target);
-    *(--rear_) = size;
-    // Assert no overflow into live objects.
-    ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
-  }
-
-  void remove(HeapObject** target, int* size) {
-    *target = reinterpret_cast<HeapObject*>(*(--front_));
-    *size = static_cast<int>(*(--front_));
-    // Assert no underflow.
-    ASSERT(front_ >= rear_);
-  }
-
- private:
-  // The front of the queue is higher in memory than the rear.
-  intptr_t* front_;
-  intptr_t* rear_;
+  Heap* heap_;
 };
 
 
-// Shared state read by the scavenge collector and set by ScavengeObject.
-static PromotionQueue promotion_queue;
-
-
 #ifdef DEBUG
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
@@ -929,7 +895,7 @@ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
   void VisitPointers(Object** start, Object**end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
-        ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
+        ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
       }
     }
   }
@@ -940,12 +906,12 @@ static void VerifyNonPointerSpacePointers() {
   // Verify that there are no pointers to new space in spaces where we
   // do not expect them.
   VerifyNonPointerSpacePointersVisitor v;
-  HeapObjectIterator code_it(Heap::code_space());
+  HeapObjectIterator code_it(HEAP->code_space());
   for (HeapObject* object = code_it.next();
        object != NULL; object = code_it.next())
     object->Iterate(&v);
 
-  HeapObjectIterator data_it(Heap::old_data_space());
+  HeapObjectIterator data_it(HEAP->old_data_space());
   for (HeapObject* object = data_it.next();
        object != NULL; object = data_it.next())
     object->Iterate(&v);
@@ -971,7 +937,7 @@ void Heap::Scavenge() {
 
   gc_state_ = SCAVENGE;
 
-  Page::FlipMeaningOfInvalidatedWatermarkFlag();
+  Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
 #ifdef DEBUG
   VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
   VerifyPageWatermarkValidity(map_space_, ALL_VALID);
@@ -986,10 +952,10 @@ void Heap::Scavenge() {
   map_space_->FlushTopPageWatermark();
 
   // Implements Cheney's copying algorithm
-  LOG(ResourceEvent("scavenge", "begin"));
+  LOG(isolate_, ResourceEvent("scavenge", "begin"));
 
   // Clear descriptor cache.
-  DescriptorLookupCache::Clear();
+  isolate_->descriptor_lookup_cache()->Clear();
 
   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSize();
@@ -1019,16 +985,17 @@ void Heap::Scavenge() {
   // frees up its size in bytes from the top of the new space, and
   // objects are at least one pointer in size.
   Address new_space_front = new_space_.ToSpaceLow();
-  promotion_queue.Initialize(new_space_.ToSpaceHigh());
+  promotion_queue_.Initialize(new_space_.ToSpaceHigh());
 
-  ScavengeVisitor scavenge_visitor;
+  is_safe_to_read_maps_ = false;
+  ScavengeVisitor scavenge_visitor(this);
   // Copy roots.
   IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
 
   // Copy objects reachable from the old generation.  By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateDirtyRegions(old_pointer_space_,
-                      &IteratePointersInDirtyRegion,
+                      &Heap::IteratePointersInDirtyRegion,
                       &ScavengePointer,
                       WATERMARK_CAN_BE_INVALID);
 
@@ -1060,10 +1027,12 @@ void Heap::Scavenge() {
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
 
   LiveObjectList::UpdateReferencesForScavengeGC();
-  RuntimeProfiler::UpdateSamplesAfterScavenge();
+  isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
 
   ASSERT(new_space_front == new_space_.top());
 
+  is_safe_to_read_maps_ = true;
+
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
@@ -1071,18 +1040,19 @@ void Heap::Scavenge() {
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
 
-  LOG(ResourceEvent("scavenge", "end"));
+  LOG(isolate_, ResourceEvent("scavenge", "end"));
 
   gc_state_ = NOT_IN_GC;
 }
 
 
-String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
+String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+                                                                Object** p) {
   MapWord first_word = HeapObject::cast(*p)->map_word();
 
   if (!first_word.IsForwardingAddress()) {
     // Unreachable external string can be finalized.
-    FinalizeExternalString(String::cast(*p));
+    heap->FinalizeExternalString(String::cast(*p));
     return NULL;
   }
 
@@ -1093,48 +1063,49 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
 
 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
-  ExternalStringTable::Verify();
+  external_string_table_.Verify();
 
-  if (ExternalStringTable::new_space_strings_.is_empty()) return;
+  if (external_string_table_.new_space_strings_.is_empty()) return;
 
-  Object** start = &ExternalStringTable::new_space_strings_[0];
-  Object** end = start + ExternalStringTable::new_space_strings_.length();
+  Object** start = &external_string_table_.new_space_strings_[0];
+  Object** end = start + external_string_table_.new_space_strings_.length();
   Object** last = start;
 
   for (Object** p = start; p < end; ++p) {
-    ASSERT(Heap::InFromSpace(*p));
-    String* target = updater_func(p);
+    ASSERT(InFromSpace(*p));
+    String* target = updater_func(this, p);
 
     if (target == NULL) continue;
 
     ASSERT(target->IsExternalString());
 
-    if (Heap::InNewSpace(target)) {
+    if (InNewSpace(target)) {
       // String is still in new space.  Update the table entry.
       *last = target;
       ++last;
     } else {
       // String got promoted.  Move it to the old string list.
-      ExternalStringTable::AddOldString(target);
+      external_string_table_.AddOldString(target);
     }
   }
 
   ASSERT(last <= end);
-  ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
+  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
 }
 
 
-static Object* ProcessFunctionWeakReferences(Object* function,
+static Object* ProcessFunctionWeakReferences(Heap* heap,
+                                             Object* function,
                                              WeakObjectRetainer* retainer) {
-  Object* head = Heap::undefined_value();
+  Object* head = heap->undefined_value();
   JSFunction* tail = NULL;
   Object* candidate = function;
-  while (!candidate->IsUndefined()) {
+  while (candidate != heap->undefined_value()) {
     // Check whether to keep the candidate in the list.
     JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
-      if (head->IsUndefined()) {
+      if (head == heap->undefined_value()) {
         // First element in the list.
         head = candidate_function;
       } else {
@@ -1151,7 +1122,7 @@ static Object* ProcessFunctionWeakReferences(Object* function,
 
   // Terminate the list if there is one or more elements.
   if (tail != NULL) {
-    tail->set_next_function_link(Heap::undefined_value());
+    tail->set_next_function_link(heap->undefined_value());
   }
 
   return head;
@@ -1162,18 +1133,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* head = undefined_value();
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
-  while (!candidate->IsUndefined()) {
+  while (candidate != undefined_value()) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
-      if (head->IsUndefined()) {
+      if (head == undefined_value()) {
         // First element in the list.
         head = candidate_context;
       } else {
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
-        tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+        tail->set_unchecked(this,
+                            Context::NEXT_CONTEXT_LINK,
                             candidate_context,
                             UPDATE_WRITE_BARRIER);
       }
@@ -1183,9 +1155,11 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
       // Process the weak list of optimized functions for the context.
       Object* function_list_head =
           ProcessFunctionWeakReferences(
+              this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
               retainer);
-      candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST,
+      candidate_context->set_unchecked(this,
+                                       Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
     }
@@ -1195,21 +1169,22 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
 
   // Terminate the list if there is one or more elements.
   if (tail != NULL) {
-    tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+    tail->set_unchecked(this,
+                        Context::NEXT_CONTEXT_LINK,
                         Heap::undefined_value(),
                         UPDATE_WRITE_BARRIER);
   }
 
   // Update the head of the list of contexts.
-  Heap::global_contexts_list_ = head;
+  global_contexts_list_ = head;
 }
 
 
 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
  public:
-  static inline void VisitPointer(Object** p) {
+  static inline void VisitPointer(Heap* heap, Object** p) {
     Object* object = *p;
-    if (!Heap::InNewSpace(object)) return;
+    if (!heap->InNewSpace(object)) return;
     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                          reinterpret_cast<HeapObject*>(object));
   }
@@ -1230,10 +1205,10 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     }
 
     // Promote and process all the to-be-promoted objects.
-    while (!promotion_queue.is_empty()) {
+    while (!promotion_queue_.is_empty()) {
       HeapObject* target;
       int size;
-      promotion_queue.remove(&target, &size);
+      promotion_queue_.remove(&target, &size);
 
       // Promoted object might be already partially visited
       // during dirty regions iteration. Thus we search specificly
@@ -1303,7 +1278,7 @@ class ScavengingVisitor : public StaticVisitorBase {
   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-  static void RecordCopiedObject(HeapObject* obj) {
+  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
     bool should_record = false;
 #ifdef DEBUG
     should_record = FLAG_heap_stats;
@@ -1312,10 +1287,10 @@ class ScavengingVisitor : public StaticVisitorBase {
     should_record = should_record || FLAG_log_gc;
 #endif
     if (should_record) {
-      if (Heap::new_space()->Contains(obj)) {
-        Heap::new_space()->RecordAllocation(obj);
+      if (heap->new_space()->Contains(obj)) {
+        heap->new_space()->RecordAllocation(obj);
       } else {
-        Heap::new_space()->RecordPromotion(obj);
+        heap->new_space()->RecordPromotion(obj);
       }
     }
   }
@@ -1324,24 +1299,27 @@ class ScavengingVisitor : public StaticVisitorBase {
   // Helper function used by CopyObject to copy a source object to an
   // allocated target object and update the forwarding pointer in the source
   // object.  Returns the target object.
-  INLINE(static HeapObject* MigrateObject(HeapObject* source,
+  INLINE(static HeapObject* MigrateObject(Heap* heap,
+                                          HeapObject* source,
                                           HeapObject* target,
                                           int size)) {
     // Copy the content of source to target.
-    Heap::CopyBlock(target->address(), source->address(), size);
+    heap->CopyBlock(target->address(), source->address(), size);
 
     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
     // Update NewSpace stats if necessary.
-    RecordCopiedObject(target);
+    RecordCopiedObject(heap, target);
 #endif
-    HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
 #if defined(ENABLE_LOGGING_AND_PROFILING)
-    if (Logger::is_logging() || CpuProfiler::is_profiling()) {
+    Isolate* isolate = heap->isolate();
+    if (isolate->logger()->is_logging() ||
+        isolate->cpu_profiler()->is_profiling()) {
       if (target->IsSharedFunctionInfo()) {
-        PROFILE(SharedFunctionInfoMoveEvent(
+        PROFILE(isolate, SharedFunctionInfoMoveEvent(
             source->address(), target->address()));
       }
     }
@@ -1359,36 +1337,37 @@ class ScavengingVisitor : public StaticVisitorBase {
            (object_size <= Page::kMaxHeapObjectSize));
     ASSERT(object->Size() == object_size);
 
-    if (Heap::ShouldBePromoted(object->address(), object_size)) {
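+    // The owning heap is reached through the object's map rather than
+    // through the former static Heap:: interface.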
+    Heap* heap = map->heap();
+    if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;
 
       if ((size_restriction != SMALL) &&
           (object_size > Page::kMaxHeapObjectSize)) {
-        maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+        maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
       } else {
         if (object_contents == DATA_OBJECT) {
-          maybe_result = Heap::old_data_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
         } else {
-          maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
         }
       }
 
       Object* result = NULL;  // Initialization to please compiler.
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
-        *slot = MigrateObject(object, target, object_size);
+        *slot = MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          promotion_queue.insert(target, object_size);
+          heap->promotion_queue()->insert(target, object_size);
         }
 
-        Heap::tracer()->increment_promoted_objects_size(object_size);
+        heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
     Object* result =
-        Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
-    *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+        heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
+    *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
     return;
   }
 
@@ -1439,13 +1418,14 @@ class ScavengingVisitor : public StaticVisitorBase {
                                                HeapObject* object) {
     ASSERT(IsShortcutCandidate(map->instance_type()));
 
-    if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+    if (ConsString::cast(object)->unchecked_second() ==
+        map->heap()->empty_string()) {
       HeapObject* first =
           HeapObject::cast(ConsString::cast(object)->unchecked_first());
 
       *slot = first;
 
-      if (!Heap::InNewSpace(first)) {
+      if (!map->heap()->InNewSpace(first)) {
         object->set_map_word(MapWord::FromForwardingAddress(first));
         return;
       }
@@ -1496,7 +1476,7 @@ VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
+  ASSERT(HEAP->InFromSpace(object));
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
@@ -1504,11 +1484,6 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
 }
 
 
-void Heap::ScavengePointer(HeapObject** p) {
-  ScavengeObject(p, *p);
-}
-
-
 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
                                       int instance_size) {
   Object* result;
@@ -1520,9 +1495,8 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
-  reinterpret_cast<Map*>(result)->
-      set_visitor_id(
-          StaticVisitorBase::GetVisitorId(instance_type, instance_size));
+  reinterpret_cast<Map*>(result)->set_visitor_id(
+        StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1631,6 +1605,7 @@ bool Heap::CreateInitialMaps() {
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_null_value(obj);
+  Oddball::cast(obj)->set_kind(Oddball::kNull);
 
   // Allocate the empty descriptor array.
   { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
@@ -1822,7 +1797,7 @@ bool Heap::CreateInitialMaps() {
   }
   set_message_object_map(Map::cast(obj));
 
-  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
+  ASSERT(!InNewSpace(empty_fixed_array()));
   return true;
 }
 
@@ -1875,12 +1850,13 @@ MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
 
 
 MaybeObject* Heap::CreateOddball(const char* to_string,
-                                 Object* to_number) {
+                                 Object* to_number,
+                                 byte kind) {
   Object* result;
   { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  return Oddball::cast(result)->Initialize(to_string, to_number);
+  return Oddball::cast(result)->Initialize(to_string, to_number, kind);
 }
 
 
@@ -1892,7 +1868,7 @@ bool Heap::CreateApiObjects() {
   }
   set_neander_map(Map::cast(obj));
 
-  { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map());
+  { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   Object* elements;
@@ -1957,6 +1933,7 @@ bool Heap::CreateInitialObjects() {
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_undefined_value(obj);
+  Oddball::cast(obj)->set_kind(Oddball::kUndefined);
   ASSERT(!InNewSpace(undefined_value()));
 
   // Allocate initial symbol table.
@@ -1976,39 +1953,50 @@ bool Heap::CreateInitialObjects() {
 
   // Allocate the null_value
   { MaybeObject* maybe_obj =
-        Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
+        Oddball::cast(null_value())->Initialize("null",
+                                                Smi::FromInt(0),
+                                                Oddball::kNull);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
 
-  { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1));
+  { MaybeObject* maybe_obj = CreateOddball("true",
+                                           Smi::FromInt(1),
+                                           Oddball::kTrue);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_true_value(obj);
 
-  { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0));
+  { MaybeObject* maybe_obj = CreateOddball("false",
+                                           Smi::FromInt(0),
+                                           Oddball::kFalse);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_false_value(obj);
 
-  { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1));
+  { MaybeObject* maybe_obj = CreateOddball("hole",
+                                           Smi::FromInt(-1),
+                                           Oddball::kTheHole);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_the_hole_value(obj);
 
   { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
-                                           Smi::FromInt(-4));
+                                           Smi::FromInt(-4),
+                                           Oddball::kArgumentMarker);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_arguments_marker(obj);
 
-  { MaybeObject* maybe_obj =
-        CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
+  { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
+                                           Smi::FromInt(-2),
+                                           Oddball::kOther);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_no_interceptor_result_sentinel(obj);
 
-  { MaybeObject* maybe_obj =
-        CreateOddball("termination_exception", Smi::FromInt(-3));
+  { MaybeObject* maybe_obj = CreateOddball("termination_exception",
+                                           Smi::FromInt(-3),
+                                           Oddball::kOther);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_termination_exception(obj);
@@ -2070,7 +2058,8 @@ bool Heap::CreateInitialObjects() {
   { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
-  { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj);
+  { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
+                                                                       obj);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_intrinsic_function_names(StringDictionary::cast(obj));
@@ -2090,20 +2079,20 @@ bool Heap::CreateInitialObjects() {
   }
   set_natives_source_cache(FixedArray::cast(obj));
 
-  // Handling of script id generation is in Factory::NewScript.
+  // Handling of script id generation is in FACTORY->NewScript.
   set_last_script_id(undefined_value());
 
   // Initialize keyed lookup cache.
-  KeyedLookupCache::Clear();
+  isolate_->keyed_lookup_cache()->Clear();
 
   // Initialize context slot cache.
-  ContextSlotCache::Clear();
+  isolate_->context_slot_cache()->Clear();
 
   // Initialize descriptor cache.
-  DescriptorLookupCache::Clear();
+  isolate_->descriptor_lookup_cache()->Clear();
 
   // Initialize compilation cache.
-  CompilationCache::Clear();
+  isolate_->compilation_cache()->Clear();
 
   return true;
 }
@@ -2127,7 +2116,7 @@ void Heap::FlushNumberStringCache() {
   // Flush the number to string cache.
   int len = number_string_cache()->length();
   for (int i = 0; i < len; i++) {
-    number_string_cache()->set_undefined(i);
+    number_string_cache()->set_undefined(this, i);
   }
 }
 
@@ -2179,7 +2168,7 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
 
 MaybeObject* Heap::NumberToString(Object* number,
                                   bool check_number_string_cache) {
-  Counters::number_to_string_runtime.Increment();
+  isolate_->counters()->number_to_string_runtime()->Increment();
   if (check_number_string_cache) {
     Object* cached = GetNumberStringCache(number);
     if (cached != undefined_value()) {
@@ -2282,10 +2271,11 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
 
   SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
   share->set_name(name);
-  Code* illegal = Builtins::builtin(Builtins::Illegal);
+  Code* illegal = isolate_->builtins()->builtin(Builtins::Illegal);
   share->set_code(illegal);
   share->set_scope_info(SerializedScopeInfo::Empty());
-  Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* construct_stub = isolate_->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   share->set_construct_stub(construct_stub);
   share->set_expected_nof_properties(0);
   share->set_length(0);
@@ -2343,20 +2333,21 @@ static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
 
 
 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
+    Heap* heap,
     uint32_t c1,
     uint32_t c2) {
   String* symbol;
   // Numeric strings have a different hash algorithm not known by
   // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
   if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
-      Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
+      heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
     return symbol;
   // Now we know the length is 2, we might as well make use of that fact
   // when building the new string.
   } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) {  // We can do this
     ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1));  // because of this.
     Object* result;
-    { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2);
+    { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     char* dest = SeqAsciiString::cast(result)->GetChars();
@@ -2365,7 +2356,7 @@ MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
     return result;
   } else {
     Object* result;
-    { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2);
+    { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     uc16* dest = SeqTwoByteString::cast(result)->GetChars();
@@ -2395,7 +2386,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
   if (length == 2) {
     unsigned c1 = first->Get(0);
     unsigned c2 = second->Get(0);
-    return MakeOrFindTwoCharacterString(c1, c2);
+    return MakeOrFindTwoCharacterString(this, c1, c2);
   }
 
   bool first_is_ascii = first->IsAsciiRepresentation();
@@ -2405,7 +2396,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
   // Make sure that an out of memory exception is thrown if the length
   // of the new cons string is too large.
   if (length > String::kMaxLength || length < 0) {
-    Top::context()->mark_out_of_memory();
+    isolate()->context()->mark_out_of_memory();
     return Failure::OutOfMemoryException();
   }
 
@@ -2417,7 +2408,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
     is_ascii_data_in_two_byte_string =
         first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
     if (is_ascii_data_in_two_byte_string) {
-      Counters::string_add_runtime_ext_to_ascii.Increment();
+      isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
     }
   }
 
@@ -2458,6 +2449,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
         char* dest = SeqAsciiString::cast(result)->GetChars();
         String::WriteToFlat(first, dest, 0, first_length);
         String::WriteToFlat(second, dest + first_length, 0, second_length);
+        isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
         return result;
       }
 
@@ -2499,15 +2491,14 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
   int length = end - start;
 
   if (length == 1) {
-    return Heap::LookupSingleCharacterStringFromCode(
-        buffer->Get(start));
+    return LookupSingleCharacterStringFromCode(buffer->Get(start));
   } else if (length == 2) {
     // Optimization for 2-byte strings often used as keys in a decompression
     // dictionary.  Check whether we already have the string in the symbol
     // table to prevent creation of many unnecessary strings.
     unsigned c1 = buffer->Get(start);
     unsigned c2 = buffer->Get(start + 1);
-    return MakeOrFindTwoCharacterString(c1, c2);
+    return MakeOrFindTwoCharacterString(this, c1, c2);
   }
 
   // Make an attempt to flatten the buffer to reduce access time.
@@ -2539,7 +2530,7 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
     ExternalAsciiString::Resource* resource) {
   size_t length = resource->length();
   if (length > static_cast<size_t>(String::kMaxLength)) {
-    Top::context()->mark_out_of_memory();
+    isolate()->context()->mark_out_of_memory();
     return Failure::OutOfMemoryException();
   }
 
@@ -2562,7 +2553,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
     ExternalTwoByteString::Resource* resource) {
   size_t length = resource->length();
   if (length > static_cast<size_t>(String::kMaxLength)) {
-    Top::context()->mark_out_of_memory();
+    isolate()->context()->mark_out_of_memory();
     return Failure::OutOfMemoryException();
   }
 
@@ -2572,7 +2563,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
   bool is_ascii = length <= kAsciiCheckLengthLimit &&
       String::IsAscii(resource->data(), static_cast<int>(length));
   Map* map = is_ascii ?
-      Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
+      external_string_with_ascii_data_map() : external_string_map();
   Object* result;
   { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
     if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -2589,8 +2580,8 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
 
 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
   if (code <= String::kMaxAsciiCharCode) {
-    Object* value = Heap::single_character_string_cache()->get(code);
-    if (value != Heap::undefined_value()) return value;
+    Object* value = single_character_string_cache()->get(code);
+    if (value != undefined_value()) return value;
 
     char buffer[1];
     buffer[0] = static_cast<char>(code);
@@ -2598,12 +2589,12 @@ MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
     MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
 
     if (!maybe_result->ToObject(&result)) return maybe_result;
-    Heap::single_character_string_cache()->set(code, result);
+    single_character_string_cache()->set(code, result);
     return result;
   }
 
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1);
+  { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   String* answer = String::cast(result);
@@ -2717,7 +2708,8 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
   // Initialize the object
   HeapObject::cast(result)->set_map(code_map());
   Code* code = Code::cast(result);
-  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+  ASSERT(!isolate_->code_range()->exists() ||
+      isolate_->code_range()->contains(code->address()));
   code->set_instruction_size(desc.instr_size);
   code->set_relocation_info(ByteArray::cast(reloc_info));
   code->set_flags(flags);
@@ -2763,7 +2755,8 @@ MaybeObject* Heap::CopyCode(Code* code) {
   CopyBlock(new_addr, old_addr, obj_size);
   // Relocate the copy.
   Code* new_code = Code::cast(result);
-  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+  ASSERT(!isolate_->code_range()->exists() ||
+      isolate_->code_range()->contains(code->address()));
   new_code->Relocate(new_addr - old_addr);
   return new_code;
 }
@@ -2812,7 +2805,8 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
   memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
 
   // Relocate the copy.
-  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+  ASSERT(!isolate_->code_range()->exists() ||
+      isolate_->code_range()->contains(code->address()));
   new_code->Relocate(new_addr - old_addr);
 
 #ifdef DEBUG
@@ -2836,7 +2830,7 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
   }
   HeapObject::cast(result)->set_map(map);
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  ProducerHeapProfile::RecordJSObjectAllocation(result);
+  isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
 #endif
   return result;
 }
@@ -2904,10 +2898,12 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
                             JSFunction::cast(callee)->shared()->strict_mode();
   if (strict_mode_callee) {
     boilerplate =
-        Top::context()->global_context()->strict_mode_arguments_boilerplate();
+        isolate()->context()->global_context()->
+            strict_mode_arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSizeStrict;
   } else {
-    boilerplate = Top::context()->global_context()->arguments_boilerplate();
+    boilerplate =
+        isolate()->context()->global_context()->arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSize;
   }
 
@@ -2974,8 +2970,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
   int instance_size = fun->shared()->CalculateInstanceSize();
   int in_object_properties = fun->shared()->CalculateInObjectProperties();
   Object* map_obj;
-  { MaybeObject* maybe_map_obj =
-        Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
+  { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
     if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
   }
 
@@ -3171,7 +3166,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
     PropertyDetails d =
         PropertyDetails(details.attributes(), CALLBACKS, details.index());
     Object* value = descs->GetCallbacksObject(i);
-    { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value);
+    { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
       if (!maybe_value->ToObject(&value)) return maybe_value;
     }
 
@@ -3197,7 +3192,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
 
   // Setup the global object as a normalized object.
   global->set_map(new_map);
-  global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
+  global->map()->set_instance_descriptors(empty_descriptor_array());
   global->set_properties(dictionary);
 
   // Make sure result is a global object with properties in dictionary.
@@ -3236,7 +3231,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
     { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
-    ASSERT(Heap::InNewSpace(clone));
+    ASSERT(InNewSpace(clone));
     // Since we know the clone is allocated in new space, we can copy
     // the contents without worrying about updating the write barrier.
     CopyBlock(HeapObject::cast(clone)->address(),
@@ -3266,7 +3261,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
   }
   // Return the new clone.
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  ProducerHeapProfile::RecordJSObjectAllocation(clone);
+  isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
 #endif
   return clone;
 }
@@ -3322,7 +3317,7 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
   // Count the number of characters in the UTF-8 string and check if
   // it is an ASCII string.
   Access<ScannerConstants::Utf8Decoder>
-      decoder(ScannerConstants::utf8_decoder());
+      decoder(isolate_->scanner_constants()->utf8_decoder());
   decoder->Reset(string.start(), string.length());
   int chars = 0;
   while (decoder->has_more()) {
@@ -3375,12 +3370,24 @@ Map* Heap::SymbolMapForString(String* string) {
 
   // Find the corresponding symbol map for strings.
   Map* map = string->map();
-  if (map == ascii_string_map()) return ascii_symbol_map();
-  if (map == string_map()) return symbol_map();
-  if (map == cons_string_map()) return cons_symbol_map();
-  if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
-  if (map == external_string_map()) return external_symbol_map();
-  if (map == external_ascii_string_map()) return external_ascii_symbol_map();
+  if (map == ascii_string_map()) {
+    return ascii_symbol_map();
+  }
+  if (map == string_map()) {
+    return symbol_map();
+  }
+  if (map == cons_string_map()) {
+    return cons_symbol_map();
+  }
+  if (map == cons_ascii_string_map()) {
+    return cons_ascii_symbol_map();
+  }
+  if (map == external_string_map()) {
+    return external_symbol_map();
+  }
+  if (map == external_ascii_string_map()) {
+    return external_ascii_symbol_map();
+  }
   if (map == external_string_with_ascii_data_map()) {
     return external_symbol_with_ascii_data_map();
   }
@@ -3554,7 +3561,7 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
   { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
-  if (Heap::InNewSpace(obj)) {
+  if (InNewSpace(obj)) {
     HeapObject* dst = HeapObject::cast(obj);
     dst->set_map(map);
     CopyBlock(dst->address() + kPointerSize,
@@ -3586,7 +3593,7 @@ MaybeObject* Heap::AllocateFixedArray(int length) {
   array->set_map(fixed_array_map());
   array->set_length(length);
   // Initialize body.
-  ASSERT(!Heap::InNewSpace(undefined_value()));
+  ASSERT(!InNewSpace(undefined_value()));
   MemsetPointer(array->data_start(), undefined_value(), length);
   return result;
 }
@@ -3617,20 +3624,21 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
 
 
 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
+    Heap* heap,
     int length,
     PretenureFlag pretenure,
     Object* filler) {
   ASSERT(length >= 0);
-  ASSERT(Heap::empty_fixed_array()->IsFixedArray());
-  if (length == 0) return Heap::empty_fixed_array();
+  ASSERT(heap->empty_fixed_array()->IsFixedArray());
+  if (length == 0) return heap->empty_fixed_array();
 
-  ASSERT(!Heap::InNewSpace(filler));
+  ASSERT(!heap->InNewSpace(filler));
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure);
+  { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
-  HeapObject::cast(result)->set_map(Heap::fixed_array_map());
+  HeapObject::cast(result)->set_map(heap->fixed_array_map());
   FixedArray* array = FixedArray::cast(result);
   array->set_length(length);
   MemsetPointer(array->data_start(), filler, length);
@@ -3639,13 +3647,19 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
 
 
 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
-  return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
+  return AllocateFixedArrayWithFiller(this,
+                                      length,
+                                      pretenure,
+                                      undefined_value());
 }
 
 
 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
                                                PretenureFlag pretenure) {
-  return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
+  return AllocateFixedArrayWithFiller(this,
+                                      length,
+                                      pretenure,
+                                      the_hole_value());
 }
 
 
@@ -3665,7 +3679,7 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
 
 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure);
+  { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
@@ -3677,7 +3691,7 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
 MaybeObject* Heap::AllocateGlobalContext() {
   Object* result;
   { MaybeObject* maybe_result =
-        Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+        AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
@@ -3691,7 +3705,7 @@ MaybeObject* Heap::AllocateGlobalContext() {
 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateFixedArray(length);
+  { MaybeObject* maybe_result = AllocateFixedArray(length);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
@@ -3712,12 +3726,12 @@ MaybeObject* Heap::AllocateWithContext(Context* previous,
                                        JSObject* extension,
                                        bool is_catch_context) {
   Object* result;
-  { MaybeObject* maybe_result =
-        Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
+  { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
-  context->set_map(is_catch_context ? catch_context_map() : context_map());
+  context->set_map(is_catch_context ? catch_context_map() :
+      context_map());
   context->set_closure(previous->closure());
   context->set_fcontext(previous->fcontext());
   context->set_previous(previous);
@@ -3733,7 +3747,8 @@ MaybeObject* Heap::AllocateWithContext(Context* previous,
 MaybeObject* Heap::AllocateStruct(InstanceType type) {
   Map* map;
   switch (type) {
-#define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
+#define MAKE_CASE(NAME, Name, name) \
+    case NAME##_TYPE: map = name##_map(); break;
 STRUCT_LIST(MAKE_CASE)
 #undef MAKE_CASE
     default:
@@ -3744,7 +3759,7 @@ STRUCT_LIST(MAKE_CASE)
   AllocationSpace space =
       (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
   Object* result;
-  { MaybeObject* maybe_result = Heap::Allocate(map, space);
+  { MaybeObject* maybe_result = Allocate(map, space);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Struct::cast(result)->InitializeBody(size);
@@ -3758,8 +3773,11 @@ bool Heap::IdleNotification() {
   static const int kIdlesBeforeMarkCompact = 8;
   static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
   static const unsigned int kGCsBetweenCleanup = 4;
-  static int number_idle_notifications = 0;
-  static unsigned int last_gc_count = gc_count_;
+
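+  // The idle notification counters are per-heap fields now; lazily seed the
+  // last GC count the first time this heap receives an idle notification.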
+  if (!last_idle_notification_gc_count_init_) {
+    last_idle_notification_gc_count_ = gc_count_;
+    last_idle_notification_gc_count_init_ = true;
+  }
 
   bool uncommit = true;
   bool finished = false;
@@ -3768,56 +3786,56 @@ bool Heap::IdleNotification() {
   // GCs have taken place. This allows another round of cleanup based
   // on idle notifications if enough work has been carried out to
   // provoke a number of garbage collections.
-  if (gc_count_ - last_gc_count < kGCsBetweenCleanup) {
-    number_idle_notifications =
-        Min(number_idle_notifications + 1, kMaxIdleCount);
+  if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
+    number_idle_notifications_ =
+        Min(number_idle_notifications_ + 1, kMaxIdleCount);
   } else {
-    number_idle_notifications = 0;
-    last_gc_count = gc_count_;
+    number_idle_notifications_ = 0;
+    last_idle_notification_gc_count_ = gc_count_;
   }
 
-  if (number_idle_notifications == kIdlesBeforeScavenge) {
+  if (number_idle_notifications_ == kIdlesBeforeScavenge) {
     if (contexts_disposed_ > 0) {
-      HistogramTimerScope scope(&Counters::gc_context);
+      HistogramTimerScope scope(isolate_->counters()->gc_context());
       CollectAllGarbage(false);
     } else {
       CollectGarbage(NEW_SPACE);
     }
     new_space_.Shrink();
-    last_gc_count = gc_count_;
-  } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
+    last_idle_notification_gc_count_ = gc_count_;
+  } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
     // Before doing the mark-sweep collections we clear the
     // compilation cache to avoid hanging on to source code and
     // generated code for cached functions.
-    CompilationCache::Clear();
+    isolate_->compilation_cache()->Clear();
 
     CollectAllGarbage(false);
     new_space_.Shrink();
-    last_gc_count = gc_count_;
+    last_idle_notification_gc_count_ = gc_count_;
 
-  } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
+  } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
     CollectAllGarbage(true);
     new_space_.Shrink();
-    last_gc_count = gc_count_;
+    last_idle_notification_gc_count_ = gc_count_;
+    number_idle_notifications_ = 0;
     finished = true;
-
   } else if (contexts_disposed_ > 0) {
     if (FLAG_expose_gc) {
       contexts_disposed_ = 0;
     } else {
-      HistogramTimerScope scope(&Counters::gc_context);
+      HistogramTimerScope scope(isolate_->counters()->gc_context());
       CollectAllGarbage(false);
-      last_gc_count = gc_count_;
+      last_idle_notification_gc_count_ = gc_count_;
     }
     // If this is the first idle notification, we reset the
     // notification count to avoid letting idle notifications for
     // context disposal garbage collections start a potentially too
     // aggressive idle GC cycle.
-    if (number_idle_notifications <= 1) {
-      number_idle_notifications = 0;
+    if (number_idle_notifications_ <= 1) {
+      number_idle_notifications_ = 0;
       uncommit = false;
     }
-  } else if (number_idle_notifications > kIdlesBeforeMarkCompact) {
+  } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
     // If we have received more than kIdlesBeforeMarkCompact idle
     // notifications we do not perform any cleanup because we don't
     // expect to gain much by doing so.
@@ -3827,7 +3845,7 @@ bool Heap::IdleNotification() {
   // Make sure that we have no pending context disposals and
   // conditionally uncommit from space.
   ASSERT(contexts_disposed_ == 0);
-  if (uncommit) Heap::UncommitFromSpace();
+  if (uncommit) UncommitFromSpace();
   return finished;
 }
 
@@ -3836,7 +3854,7 @@ bool Heap::IdleNotification() {
 
 void Heap::Print() {
   if (!HasBeenSetup()) return;
-  Top::PrintStack();
+  isolate()->PrintStack();
   AllSpaces spaces;
   for (Space* space = spaces.next(); space != NULL; space = spaces.next())
     space->Print();
@@ -3869,11 +3887,11 @@ void Heap::ReportHeapStatistics(const char* title) {
 
   PrintF("\n");
   PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
-  GlobalHandles::PrintStats();
+  isolate_->global_handles()->PrintStats();
   PrintF("\n");
 
   PrintF("Heap statistics : ");
-  MemoryAllocator::ReportStatistics();
+  isolate_->memory_allocator()->ReportStatistics();
   PrintF("To space : ");
   new_space_.ReportStatistics();
   PrintF("Old pointer space : ");
@@ -3956,7 +3974,7 @@ static void VerifyPointersUnderWatermark(
     Address start = page->ObjectAreaStart();
     Address end = page->AllocationWatermark();
 
-    Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
+    HEAP->IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
                               start,
                               end,
                               visit_dirty_region,
@@ -3977,7 +3995,7 @@ static void VerifyPointersUnderWatermark(LargeObjectSpace* space) {
         // When we are not in GC the Heap::InNewSpace() predicate
         // checks that pointers which satisfy predicate point into
         // the active semispace.
-        Heap::InNewSpace(*slot);
+        HEAP->InNewSpace(*slot);
         slot_address += kPointerSize;
       }
     }
@@ -4098,7 +4116,8 @@ void Heap::ZapFromSpace() {
 #endif  // DEBUG
 
 
-bool Heap::IteratePointersInDirtyRegion(Address start,
+bool Heap::IteratePointersInDirtyRegion(Heap* heap,
+                                        Address start,
                                         Address end,
                                         ObjectSlotCallback copy_object_func) {
   Address slot_address = start;
@@ -4106,10 +4125,10 @@ bool Heap::IteratePointersInDirtyRegion(Address start,
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InNewSpace(*slot)) {
+    if (heap->InNewSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       copy_object_func(reinterpret_cast<HeapObject**>(slot));
-      if (Heap::InNewSpace(*slot)) {
+      if (heap->InNewSpace(*slot)) {
         ASSERT((*slot)->IsHeapObject());
         pointers_to_new_space_found = true;
       }
@@ -4143,14 +4162,16 @@ static bool IteratePointersInDirtyMaps(Address start,
   Address map_address = start;
   bool pointers_to_new_space_found = false;
 
+  Heap* heap = HEAP;
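+  // HEAP resolves to the current isolate's heap; this file-static helper
+  // has no Heap* parameter of its own.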
   while (map_address < end) {
-    ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address)));
+    ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
     ASSERT(Memory::Object_at(map_address)->IsMap());
 
     Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
     Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
 
-    if (Heap::IteratePointersInDirtyRegion(pointer_fields_start,
+    if (Heap::IteratePointersInDirtyRegion(heap,
+                                           pointer_fields_start,
                                            pointer_fields_end,
                                            copy_object_func)) {
       pointers_to_new_space_found = true;
@@ -4164,6 +4185,7 @@ static bool IteratePointersInDirtyMaps(Address start,
 
 
 bool Heap::IteratePointersInDirtyMapsRegion(
+    Heap* heap,
     Address start,
     Address end,
     ObjectSlotCallback copy_object_func) {
@@ -4183,7 +4205,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
         Min(prev_map + Map::kPointerFieldsEndOffset, end);
 
     contains_pointers_to_new_space =
-      IteratePointersInDirtyRegion(pointer_fields_start,
+      IteratePointersInDirtyRegion(heap,
+                                   pointer_fields_start,
                                    pointer_fields_end,
                                    copy_object_func)
         || contains_pointers_to_new_space;
@@ -4205,7 +4228,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
         Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
 
     contains_pointers_to_new_space =
-      IteratePointersInDirtyRegion(pointer_fields_start,
+      IteratePointersInDirtyRegion(heap,
+                                   pointer_fields_start,
                                    pointer_fields_end,
                                    copy_object_func)
         || contains_pointers_to_new_space;
@@ -4225,10 +4249,10 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InFromSpace(*slot)) {
+    if (InFromSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
-      if (Heap::InNewSpace(*slot)) {
+      if (InNewSpace(*slot)) {
         ASSERT((*slot)->IsHeapObject());
         marks |= page->GetRegionMaskForAddress(slot_address);
       }
@@ -4267,7 +4291,7 @@ uint32_t Heap::IterateDirtyRegions(
   Address region_end = Min(second_region, area_end);
 
   if (marks & mask) {
-    if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+    if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
       newmarks |= mask;
     }
   }
@@ -4279,7 +4303,10 @@ uint32_t Heap::IterateDirtyRegions(
 
   while (region_end <= area_end) {
     if (marks & mask) {
-      if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+      if (visit_dirty_region(this,
+                             region_start,
+                             region_end,
+                             copy_object_func)) {
         newmarks |= mask;
       }
     }
@@ -4295,7 +4322,7 @@ uint32_t Heap::IterateDirtyRegions(
     // with region end. Check whether region covering last part of area is
     // dirty.
     if (marks & mask) {
-      if (visit_dirty_region(region_start, area_end, copy_object_func)) {
+      if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
         newmarks |= mask;
       }
     }
@@ -4361,7 +4388,7 @@ void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
   v->Synchronize("symbol_table");
   if (mode != VISIT_ALL_IN_SCAVENGE) {
     // Scavenge collections have special processing for this.
-    ExternalStringTable::Iterate(v);
+    external_string_table_.Iterate(v);
   }
   v->Synchronize("external_string_table");
 }
@@ -4374,42 +4401,42 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
   v->Synchronize("symbol");
 
-  Bootstrapper::Iterate(v);
+  isolate_->bootstrapper()->Iterate(v);
   v->Synchronize("bootstrapper");
-  Top::Iterate(v);
+  isolate_->Iterate(v);
   v->Synchronize("top");
   Relocatable::Iterate(v);
   v->Synchronize("relocatable");
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debug::Iterate(v);
+  isolate_->debug()->Iterate(v);
 #endif
   v->Synchronize("debug");
-  CompilationCache::Iterate(v);
+  isolate_->compilation_cache()->Iterate(v);
   v->Synchronize("compilationcache");
 
   // Iterate over local handles in handle scopes.
-  HandleScopeImplementer::Iterate(v);
+  isolate_->handle_scope_implementer()->Iterate(v);
   v->Synchronize("handlescope");
 
   // Iterate over the builtin code objects and code stubs in the
   // heap. Note that it is not necessary to iterate over code objects
   // on scavenge collections.
   if (mode != VISIT_ALL_IN_SCAVENGE) {
-    Builtins::IterateBuiltins(v);
+    isolate_->builtins()->IterateBuiltins(v);
   }
   v->Synchronize("builtins");
 
   // Iterate over global handles.
   if (mode == VISIT_ONLY_STRONG) {
-    GlobalHandles::IterateStrongRoots(v);
+    isolate_->global_handles()->IterateStrongRoots(v);
   } else {
-    GlobalHandles::IterateAllRoots(v);
+    isolate_->global_handles()->IterateAllRoots(v);
   }
   v->Synchronize("globalhandles");
 
   // Iterate over pointers being held by inactive threads.
-  ThreadManager::Iterate(v);
+  isolate_->thread_manager()->Iterate(v);
   v->Synchronize("threadmanager");
 
   // Iterate over the pointers the Serialization/Deserialization code is
@@ -4428,10 +4455,6 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
 }
 
 
-// Flag is set when the heap has been configured.  The heap can be repeatedly
-// configured through the API until it is setup.
-static bool heap_configured = false;
-
 // TODO(1236194): Since the heap size is configurable on the command line
 // and through the API, we should gracefully handle the case that the heap
 // size is not big enough to fit all the initial objects.
@@ -4478,7 +4501,7 @@ bool Heap::ConfigureHeap(int max_semispace_size,
   // The old generation is paged.
   max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
 
-  heap_configured = true;
+  configured_ = true;
   return true;
 }
 
@@ -4506,11 +4529,13 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->cell_space_size = cell_space_->Size();
   *stats->cell_space_capacity = cell_space_->Capacity();
   *stats->lo_space_size = lo_space_->Size();
-  GlobalHandles::RecordStats(stats);
-  *stats->memory_allocator_size = MemoryAllocator::Size();
+  isolate_->global_handles()->RecordStats(stats);
+  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
   *stats->memory_allocator_capacity =
-      MemoryAllocator::Size() + MemoryAllocator::Available();
+      isolate()->memory_allocator()->Size() +
+      isolate()->memory_allocator()->Available();
   *stats->os_error = OS::GetLastError();
   if (take_snapshot) {
     HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
     for (HeapObject* obj = iterator.next();
@@ -4542,8 +4567,177 @@ int Heap::PromotedExternalMemorySize() {
       - amount_of_external_allocated_memory_at_last_global_gc_;
 }
 
+#ifdef DEBUG
+
+// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
+static const int kMarkTag = 2;
+
+
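+// Debug-only helper that can mark a path from the roots to a target object
+// (or to any global object) and print the chain of objects leading to it.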
+class HeapDebugUtils {
+ public:
+  explicit HeapDebugUtils(Heap* heap)
+    : search_for_any_global_(false),
+      search_target_(NULL),
+      found_target_(false),
+      object_stack_(20),
+      heap_(heap) {
+  }
+
+  class MarkObjectVisitor : public ObjectVisitor {
+   public:
+    explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+    void VisitPointers(Object** start, Object** end) {
+      // Mark all HeapObject pointers in [start, end)
+      for (Object** p = start; p < end; p++) {
+        if ((*p)->IsHeapObject())
+          utils_->MarkObjectRecursively(p);
+      }
+    }
+
+    HeapDebugUtils* utils_;
+  };
+
+  void MarkObjectRecursively(Object** p) {
+    if (!(*p)->IsHeapObject()) return;
+
+    HeapObject* obj = HeapObject::cast(*p);
+
+    Object* map = obj->map();
+
+    if (!map->IsHeapObject()) return;  // visited before
+
+    if (found_target_) return;  // stop if target found
+    object_stack_.Add(obj);
+    if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
+        (!search_for_any_global_ && (obj == search_target_))) {
+      found_target_ = true;
+      return;
+    }
+
+    // not visited yet
+    Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
+
+    Address map_addr = map_p->address();
+
+    obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
+
+    MarkObjectRecursively(&map);
+
+    MarkObjectVisitor mark_visitor(this);
+
+    obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
+                     &mark_visitor);
+
+    if (!found_target_)  // don't pop if found the target
+      object_stack_.RemoveLast();
+  }
+
+
+  class UnmarkObjectVisitor : public ObjectVisitor {
+   public:
+    explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+    void VisitPointers(Object** start, Object** end) {
+      // Unmark all HeapObject pointers in [start, end)
+      for (Object** p = start; p < end; p++) {
+        if ((*p)->IsHeapObject())
+          utils_->UnmarkObjectRecursively(p);
+      }
+    }
+
+    HeapDebugUtils* utils_;
+  };
+
+
+  void UnmarkObjectRecursively(Object** p) {
+    if (!(*p)->IsHeapObject()) return;
+
+    HeapObject* obj = HeapObject::cast(*p);
+
+    Object* map = obj->map();
+
+    if (map->IsHeapObject()) return;  // unmarked already
+
+    Address map_addr = reinterpret_cast<Address>(map);
+
+    map_addr -= kMarkTag;
+
+    ASSERT_TAG_ALIGNED(map_addr);
+
+    HeapObject* map_p = HeapObject::FromAddress(map_addr);
+
+    obj->set_map(reinterpret_cast<Map*>(map_p));
+
+    UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
+
+    UnmarkObjectVisitor unmark_visitor(this);
+
+    obj->IterateBody(Map::cast(map_p)->instance_type(),
+                     obj->SizeFromMap(Map::cast(map_p)),
+                     &unmark_visitor);
+  }
+
+
+  void MarkRootObjectRecursively(Object** root) {
+    if (search_for_any_global_) {
+      ASSERT(search_target_ == NULL);
+    } else {
+      ASSERT(search_target_->IsHeapObject());
+    }
+    found_target_ = false;
+    object_stack_.Clear();
+
+    MarkObjectRecursively(root);
+    UnmarkObjectRecursively(root);
+
+    if (found_target_) {
+      PrintF("=====================================\n");
+      PrintF("====        Path to object       ====\n");
+      PrintF("=====================================\n\n");
+
+      ASSERT(!object_stack_.is_empty());
+      for (int i = 0; i < object_stack_.length(); i++) {
+        if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
+        Object* obj = object_stack_[i];
+        obj->Print();
+      }
+      PrintF("=====================================\n");
+    }
+  }
+
+  // Helper class for visiting HeapObjects recursively.
+  class MarkRootVisitor: public ObjectVisitor {
+   public:
+    explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+    void VisitPointers(Object** start, Object** end) {
+      // Visit all HeapObject pointers in [start, end)
+      for (Object** p = start; p < end; p++) {
+        if ((*p)->IsHeapObject())
+          utils_->MarkRootObjectRecursively(p);
+      }
+    }
+
+    HeapDebugUtils* utils_;
+  };
+
+  bool search_for_any_global_;
+  Object* search_target_;
+  bool found_target_;
+  List<Object*> object_stack_;
+  Heap* heap_;
+
+  friend class Heap;
+};
+
+#endif
 
 bool Heap::Setup(bool create_heap_objects) {
+#ifdef DEBUG
+  debug_utils_ = new HeapDebugUtils(this);
+#endif
+
   // Initialize heap spaces and initial maps and objects. Whenever something
   // goes wrong, just return false. The caller should check the results and
   // call Heap::TearDown() to release allocated memory.
@@ -4552,13 +4746,19 @@ bool Heap::Setup(bool create_heap_objects) {
   // Configuration is based on the flags new-space-size (really the semispace
   // size) and old-space-size if set or the initial values of semispace_size_
   // and old_generation_size_ otherwise.
-  if (!heap_configured) {
+  if (!configured_) {
     if (!ConfigureHeapDefault()) return false;
   }
 
-  ScavengingVisitor::Initialize();
-  NewSpaceScavenger::Initialize();
-  MarkCompactCollector::Initialize();
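+  // The scavenging and mark-compact visitor tables are process-wide statics
+  // shared by all isolates, so initialize them exactly once under a mutex.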
+  gc_initializer_mutex->Lock();
+  static bool initialized_gc = false;
+  if (!initialized_gc) {
+    initialized_gc = true;
+    ScavengingVisitor::Initialize();
+    NewSpaceScavenger::Initialize();
+    MarkCompactCollector::Initialize();
+  }
+  gc_initializer_mutex->Unlock();
 
   MarkMapPointersAsEncoded(false);
 
@@ -4566,9 +4766,11 @@ bool Heap::Setup(bool create_heap_objects) {
   // space.  The chunk is double the size of the requested reserved
   // new space size to ensure that we can find a pair of semispaces that
   // are contiguous and aligned to their size.
-  if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false;
+  if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
+    return false;
   void* chunk =
-      MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
+      isolate_->memory_allocator()->ReserveInitialChunk(
+          4 * reserved_semispace_size_);
   if (chunk == NULL) return false;
 
   // Align the pair of semispaces to their size, which must be a power
@@ -4581,13 +4783,19 @@ bool Heap::Setup(bool create_heap_objects) {
 
   // Initialize old pointer space.
   old_pointer_space_ =
-      new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
+      new OldSpace(this,
+                   max_old_generation_size_,
+                   OLD_POINTER_SPACE,
+                   NOT_EXECUTABLE);
   if (old_pointer_space_ == NULL) return false;
   if (!old_pointer_space_->Setup(NULL, 0)) return false;
 
   // Initialize old data space.
   old_data_space_ =
-      new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
+      new OldSpace(this,
+                   max_old_generation_size_,
+                   OLD_DATA_SPACE,
+                   NOT_EXECUTABLE);
   if (old_data_space_ == NULL) return false;
   if (!old_data_space_->Setup(NULL, 0)) return false;
 
@@ -4596,18 +4804,18 @@ bool Heap::Setup(bool create_heap_objects) {
   // On 64-bit platform(s), we put all code objects in a 2 GB range of
   // virtual address space, so that they can call each other with near calls.
   if (code_range_size_ > 0) {
-    if (!CodeRange::Setup(code_range_size_)) {
+    if (!isolate_->code_range()->Setup(code_range_size_)) {
       return false;
     }
   }
 
   code_space_ =
-      new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
+      new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
   if (code_space_ == NULL) return false;
   if (!code_space_->Setup(NULL, 0)) return false;
 
   // Initialize map space.
-  map_space_ = new MapSpace(FLAG_use_big_map_space
+  map_space_ = new MapSpace(this, FLAG_use_big_map_space
       ? max_old_generation_size_
       : MapSpace::kMaxMapPageIndex * Page::kPageSize,
       FLAG_max_map_space_pages,
@@ -4616,14 +4824,14 @@ bool Heap::Setup(bool create_heap_objects) {
   if (!map_space_->Setup(NULL, 0)) return false;
 
   // Initialize global property cell space.
-  cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
+  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
   if (cell_space_ == NULL) return false;
   if (!cell_space_->Setup(NULL, 0)) return false;
 
   // The large object code space may contain code or data.  We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
-  lo_space_ = new LargeObjectSpace(LO_SPACE);
+  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
   if (lo_space_ == NULL) return false;
   if (!lo_space_->Setup()) return false;
 
@@ -4638,12 +4846,12 @@ bool Heap::Setup(bool create_heap_objects) {
     global_contexts_list_ = undefined_value();
   }
 
-  LOG(IntPtrTEvent("heap-capacity", Capacity()));
-  LOG(IntPtrTEvent("heap-available", Available()));
+  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
+  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   // This should be called only after initial objects have been created.
-  ProducerHeapProfile::Setup();
+  isolate_->producer_heap_profile()->Setup();
 #endif
 
   return true;
@@ -4651,6 +4859,8 @@ bool Heap::Setup(bool create_heap_objects) {
 
 
 void Heap::SetStackLimits() {
+  ASSERT(isolate_ != NULL);
+  ASSERT(isolate_ == isolate());
   // On 64 bit machines, pointers are generally out of range of Smis.  We write
   // something that looks like an out of range Smi to the GC.
 
@@ -4658,10 +4868,10 @@ void Heap::SetStackLimits() {
   // These are actually addresses, but the tag makes the GC ignore it.
   roots_[kStackLimitRootIndex] =
       reinterpret_cast<Object*>(
-          (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
+          (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
   roots_[kRealStackLimitRootIndex] =
       reinterpret_cast<Object*>(
-          (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
+          (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
 }
 
 
@@ -4671,16 +4881,16 @@ void Heap::TearDown() {
     PrintF("gc_count=%d ", gc_count_);
     PrintF("mark_sweep_count=%d ", ms_count_);
     PrintF("mark_compact_count=%d ", mc_count_);
-    PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
-    PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
+    PrintF("max_gc_pause=%d ", get_max_gc_pause());
+    PrintF("min_in_mutator=%d ", get_min_in_mutator());
     PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
-           GCTracer::get_max_alive_after_gc());
+           get_max_alive_after_gc());
     PrintF("\n\n");
   }
 
-  GlobalHandles::TearDown();
+  isolate_->global_handles()->TearDown();
 
-  ExternalStringTable::TearDown();
+  external_string_table_.TearDown();
 
   new_space_.TearDown();
 
@@ -4720,7 +4930,12 @@ void Heap::TearDown() {
     lo_space_ = NULL;
   }
 
-  MemoryAllocator::TearDown();
+  isolate_->memory_allocator()->TearDown();
+
+#ifdef DEBUG
+  delete debug_utils_;
+  debug_utils_ = NULL;
+#endif
 }
 
 
@@ -4809,7 +5024,7 @@ class PrintHandleVisitor: public ObjectVisitor {
 void Heap::PrintHandles() {
   PrintF("Handles:\n");
   PrintHandleVisitor v;
-  HandleScopeImplementer::Iterate(&v);
+  isolate_->handle_scope_implementer()->Iterate(&v);
 }
 
 #endif
@@ -4818,19 +5033,19 @@ void Heap::PrintHandles() {
 Space* AllSpaces::next() {
   switch (counter_++) {
     case NEW_SPACE:
-      return Heap::new_space();
+      return HEAP->new_space();
     case OLD_POINTER_SPACE:
-      return Heap::old_pointer_space();
+      return HEAP->old_pointer_space();
     case OLD_DATA_SPACE:
-      return Heap::old_data_space();
+      return HEAP->old_data_space();
     case CODE_SPACE:
-      return Heap::code_space();
+      return HEAP->code_space();
     case MAP_SPACE:
-      return Heap::map_space();
+      return HEAP->map_space();
     case CELL_SPACE:
-      return Heap::cell_space();
+      return HEAP->cell_space();
     case LO_SPACE:
-      return Heap::lo_space();
+      return HEAP->lo_space();
     default:
       return NULL;
   }
@@ -4840,15 +5055,15 @@ Space* AllSpaces::next() {
 PagedSpace* PagedSpaces::next() {
   switch (counter_++) {
     case OLD_POINTER_SPACE:
-      return Heap::old_pointer_space();
+      return HEAP->old_pointer_space();
     case OLD_DATA_SPACE:
-      return Heap::old_data_space();
+      return HEAP->old_data_space();
     case CODE_SPACE:
-      return Heap::code_space();
+      return HEAP->code_space();
     case MAP_SPACE:
-      return Heap::map_space();
+      return HEAP->map_space();
     case CELL_SPACE:
-      return Heap::cell_space();
+      return HEAP->cell_space();
     default:
       return NULL;
   }
@@ -4859,11 +5074,11 @@ PagedSpace* PagedSpaces::next() {
 OldSpace* OldSpaces::next() {
   switch (counter_++) {
     case OLD_POINTER_SPACE:
-      return Heap::old_pointer_space();
+      return HEAP->old_pointer_space();
     case OLD_DATA_SPACE:
-      return Heap::old_data_space();
+      return HEAP->old_data_space();
     case CODE_SPACE:
-      return Heap::code_space();
+      return HEAP->code_space();
     default:
       return NULL;
   }
@@ -4918,25 +5133,25 @@ ObjectIterator* SpaceIterator::CreateIterator() {
 
   switch (current_space_) {
     case NEW_SPACE:
-      iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
+      iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
       break;
     case OLD_POINTER_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
+      iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
       break;
     case OLD_DATA_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
+      iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
       break;
     case CODE_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
+      iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
       break;
     case MAP_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
+      iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
       break;
     case CELL_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
+      iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
       break;
     case LO_SPACE:
-      iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
+      iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
       break;
   }
 
@@ -4970,16 +5185,17 @@ class FreeListNodesFilter : public HeapObjectsFilter {
 
  private:
   void MarkFreeListNodes() {
-    Heap::old_pointer_space()->MarkFreeListNodes();
-    Heap::old_data_space()->MarkFreeListNodes();
-    MarkCodeSpaceFreeListNodes();
-    Heap::map_space()->MarkFreeListNodes();
-    Heap::cell_space()->MarkFreeListNodes();
+    Heap* heap = HEAP;
+    heap->old_pointer_space()->MarkFreeListNodes();
+    heap->old_data_space()->MarkFreeListNodes();
+    MarkCodeSpaceFreeListNodes(heap);
+    heap->map_space()->MarkFreeListNodes();
+    heap->cell_space()->MarkFreeListNodes();
   }
 
-  void MarkCodeSpaceFreeListNodes() {
+  void MarkCodeSpaceFreeListNodes(Heap* heap) {
     // For code space, using FreeListNode::IsFreeListNode is OK.
-    HeapObjectIterator iter(Heap::code_space());
+    HeapObjectIterator iter(heap->code_space());
     for (HeapObject* obj = iter.next_object();
          obj != NULL;
          obj = iter.next_object()) {
@@ -5041,7 +5257,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
       obj->SetMark();
     }
     UnmarkingVisitor visitor;
-    Heap::IterateRoots(&visitor, VISIT_ALL);
+    HEAP->IterateRoots(&visitor, VISIT_ALL);
     while (visitor.can_process())
       visitor.ProcessNext();
   }
@@ -5344,7 +5560,7 @@ static intptr_t CountTotalHolesSize() {
 }
 
 
-GCTracer::GCTracer()
+GCTracer::GCTracer(Heap* heap)
     : start_time_(0.0),
       start_size_(0),
       gc_count_(0),
@@ -5353,14 +5569,16 @@ GCTracer::GCTracer()
       marked_count_(0),
       allocated_since_last_gc_(0),
       spent_in_mutator_(0),
-      promoted_objects_size_(0) {
+      promoted_objects_size_(0),
+      heap_(heap) {
   // These two fields reflect the state of the previous full collection.
   // Set them before they are changed by the collector.
-  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
-  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
+  previous_has_compacted_ = heap_->mark_compact_collector_.HasCompacted();
+  previous_marked_count_ =
+      heap_->mark_compact_collector_.previous_marked_count();
   if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
   start_time_ = OS::TimeCurrentMillis();
-  start_size_ = Heap::SizeOfObjects();
+  start_size_ = heap_->SizeOfObjects();
 
   for (int i = 0; i < Scope::kNumberOfScopes; i++) {
     scopes_[i] = 0;
@@ -5368,10 +5586,11 @@ GCTracer::GCTracer()
 
   in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
 
-  allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
+  allocated_since_last_gc_ =
+      heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
 
-  if (last_gc_end_timestamp_ > 0) {
-    spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
+  if (heap_->last_gc_end_timestamp_ > 0) {
+    spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
   }
 }
 
@@ -5380,20 +5599,21 @@ GCTracer::~GCTracer() {
   // Printf ONE line iff flag is set.
   if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
 
-  bool first_gc = (last_gc_end_timestamp_ == 0);
+  bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
 
-  alive_after_last_gc_ = Heap::SizeOfObjects();
-  last_gc_end_timestamp_ = OS::TimeCurrentMillis();
+  heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
+  heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
 
-  int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
+  int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
 
   // Update cumulative GC statistics if required.
   if (FLAG_print_cumulative_gc_stat) {
-    max_gc_pause_ = Max(max_gc_pause_, time);
-    max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
+    heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
+    heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
+                                     heap_->alive_after_last_gc_);
     if (!first_gc) {
-      min_in_mutator_ = Min(min_in_mutator_,
-                            static_cast<int>(spent_in_mutator_));
+      heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
+                                   static_cast<int>(spent_in_mutator_));
     }
   }
 
@@ -5418,7 +5638,8 @@ GCTracer::~GCTracer() {
         PrintF("s");
         break;
       case MARK_COMPACTOR:
-        PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
+        PrintF("%s",
+               heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
         break;
       default:
         UNREACHABLE();
@@ -5432,7 +5653,7 @@ GCTracer::~GCTracer() {
     PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
 
     PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
-    PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
+    PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
     PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
            in_free_list_or_wasted_before_gc_);
     PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
@@ -5444,7 +5665,7 @@ GCTracer::~GCTracer() {
   }
 
 #if defined(ENABLE_LOGGING_AND_PROFILING)
-  Heap::PrintShortHeapStatistics();
+  heap_->PrintShortHeapStatistics();
 #endif
 }
 
@@ -5454,8 +5675,8 @@ const char* GCTracer::CollectorString() {
     case SCAVENGER:
       return "Scavenge";
     case MARK_COMPACTOR:
-      return MarkCompactCollector::HasCompacted() ? "Mark-compact"
-                                                  : "Mark-sweep";
+      return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
+                                                           : "Mark-sweep";
   }
   return "Unknown GC";
 }
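Threading Heap* through GCTracer, as the hunks above show, moves the cumulative statistics (max_gc_pause_, alive_after_last_gc_, min_in_mutator_, max_alive_after_gc_) onto the heap instance and lets the tracer update them from its constructor and destructor. The intended use is a stack-allocated tracer scoped around one collection; the method name below is hypothetical, only the GCTracer(Heap*) constructor comes from the patch:

    void Heap::RunOneCollection() {   // hypothetical entry point
      GCTracer tracer(this);          // snapshots start time/size for this heap
      // ... perform the scavenge or mark-compact work here ...
    }                                 // ~GCTracer() folds the pause time and the
                                      // post-GC size back into this heap's counters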
@@ -5475,13 +5696,13 @@ int KeyedLookupCache::Lookup(Map* map, String* name) {
   if ((key.map == map) && key.name->Equals(name)) {
     return field_offsets_[index];
   }
-  return -1;
+  return kNotFound;
 }
 
 
 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
   String* symbol;
-  if (Heap::LookupSymbolIfExists(name, &symbol)) {
+  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
     int index = Hash(map, symbol);
     Key& key = keys_[index];
     key.map = map;
@@ -5496,35 +5717,24 @@ void KeyedLookupCache::Clear() {
 }
 
 
-KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
-
-
-int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
-
-
 void DescriptorLookupCache::Clear() {
   for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
 }
 
 
-DescriptorLookupCache::Key
-DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
-
-int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
-
-
 #ifdef DEBUG
 void Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
-  if (Bootstrapper::IsActive()) return;
+  if (isolate_->bootstrapper()->IsActive()) return;
   if (disallow_allocation_failure()) return;
   CollectGarbage(NEW_SPACE);
 }
 #endif
 
 
-TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
-  : type_(t) {
+TranscendentalCache::SubCache::SubCache(Type t)
+  : type_(t),
+    isolate_(Isolate::Current()) {
   uint32_t in0 = 0xffffffffu;  // Bit-pattern for a NaN that isn't
   uint32_t in1 = 0xffffffffu;  // generated by the FPU.
   for (int i = 0; i < kCacheSize; i++) {
@@ -5535,9 +5745,6 @@ TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
 }
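The constructor above seeds every slot of the sub-cache with the word pair 0xffffffff / 0xffffffff, a NaN bit pattern that, as the comment notes, the FPU never generates; a lookup therefore cannot spuriously match an uninitialized entry. A standalone illustration of why the patterns differ (the hardware quiet-NaN payload is typical IEEE-754 behaviour and is an assumption, not something stated in the patch):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      uint64_t sentinel_bits = 0xffffffffffffffffULL;  // the empty-slot key
      double sentinel;
      std::memcpy(&sentinel, &sentinel_bits, sizeof sentinel);

      double zero = 0.0;
      double hw_nan = zero / zero;                     // NaN produced by the FPU
      uint64_t hw_bits;
      std::memcpy(&hw_bits, &hw_nan, sizeof hw_bits);

      // Both values are NaNs, but their raw bits differ; the cache compares
      // raw 32-bit words, so an empty entry can never equal a real input.
      std::printf("sentinel: %016llx  hardware NaN: %016llx\n",
                  (unsigned long long)sentinel_bits,
                  (unsigned long long)hw_bits);
      return 0;
    }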
 
 
-TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
-
-
 void TranscendentalCache::Clear() {
   for (int i = 0; i < kNumberOfCaches; i++) {
     if (caches_[i] != NULL) {
@@ -5551,8 +5758,8 @@ void TranscendentalCache::Clear() {
 void ExternalStringTable::CleanUp() {
   int last = 0;
   for (int i = 0; i < new_space_strings_.length(); ++i) {
-    if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
-    if (Heap::InNewSpace(new_space_strings_[i])) {
+    if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+    if (heap_->InNewSpace(new_space_strings_[i])) {
       new_space_strings_[last++] = new_space_strings_[i];
     } else {
       old_space_strings_.Add(new_space_strings_[i]);
@@ -5561,8 +5768,8 @@ void ExternalStringTable::CleanUp() {
   new_space_strings_.Rewind(last);
   last = 0;
   for (int i = 0; i < old_space_strings_.length(); ++i) {
-    if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
-    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
+    if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
     old_space_strings_[last++] = old_space_strings_[i];
   }
   old_space_strings_.Rewind(last);
@@ -5576,7 +5783,4 @@ void ExternalStringTable::TearDown() {
 }
 
 
-List<Object*> ExternalStringTable::new_space_strings_;
-List<Object*> ExternalStringTable::old_space_strings_;
-
 } }  // namespace v8::internal
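To recap the pattern applied throughout heap.cc: calls into formerly all-static subsystems become calls on instances reached through the owning isolate, with the HEAP macro (introduced in heap.h below) as a stopgap where no heap or isolate pointer is in scope yet. Schematically, using calls taken from the hunks above:

    // Before this patch: everything is a class-level static.
    GlobalHandles::TearDown();
    Heap::old_pointer_space()->MarkFreeListNodes();

    // After: instances hang off the isolate, or off the HEAP bridge macro.
    isolate_->global_handles()->TearDown();
    HEAP->old_pointer_space()->MarkFreeListNodes();
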
index a2b6fe80aa1c314aa5a459b2cb3c4321e6c344ed..88074d73e640c94965489487f5b9555649c389d9 100644 (file)
@@ -32,6 +32,7 @@
 
 #include "globals.h"
 #include "list.h"
+#include "mark-compact.h"
 #include "spaces.h"
 #include "splay-tree-inl.h"
 #include "v8-counters.h"
 namespace v8 {
 namespace internal {
 
+// TODO(isolates): remove HEAP here
+#define HEAP (_inline_get_heap_())
+class Heap;
+inline Heap* _inline_get_heap_();
+
 
 // Defines all the roots in Heap.
 #define STRONG_ROOT_LIST(V)                                      \
@@ -226,12 +232,15 @@ namespace internal {
 // Forward declarations.
 class GCTracer;
 class HeapStats;
+class Isolate;
 class WeakObjectRetainer;
 
 
-typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
+typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
+                                                      Object** pointer);
 
-typedef bool (*DirtyRegionCallback)(Address start,
+typedef bool (*DirtyRegionCallback)(Heap* heap,
+                                    Address start,
                                     Address end,
                                     ObjectSlotCallback copy_object_func);
 
@@ -239,103 +248,178 @@ typedef bool (*DirtyRegionCallback)(Address start,
 // The all static Heap captures the interface to the global object heap.
 // All JavaScript contexts by this process share the same object heap.
 
-class Heap : public AllStatic {
+#ifdef DEBUG
+class HeapDebugUtils;
+#endif
+
+
+// A queue of objects promoted during scavenge. Each object is accompanied
+// by its size to avoid dereferencing a map pointer for scanning.
+class PromotionQueue {
+ public:
+  PromotionQueue() : front_(NULL), rear_(NULL) { }
+
+  void Initialize(Address start_address) {
+    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
+  }
+
+  bool is_empty() { return front_ <= rear_; }
+
+  inline void insert(HeapObject* target, int size);
+
+  void remove(HeapObject** target, int* size) {
+    *target = reinterpret_cast<HeapObject*>(*(--front_));
+    *size = static_cast<int>(*(--front_));
+    // Assert no underflow.
+    ASSERT(front_ >= rear_);
+  }
+
+ private:
+  // The front of the queue is higher in memory than the rear.
+  intptr_t* front_;
+  intptr_t* rear_;
+
+  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
+};
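The queue keeps each promoted object paired with its size so the scavenger can re-scan it without reloading the map. A usage sketch built only from the members declared above; the buffer address, the promoted object and its size are placeholders, and insert() is defined elsewhere in the patch:

    PromotionQueue queue;
    queue.Initialize(queue_start_address);        // some address handed in by new space

    queue.insert(promoted_object, object_size);   // records the (object, size) pair

    while (!queue.is_empty()) {
      HeapObject* target;
      int size;
      queue.remove(&target, &size);               // pops the pair back out
      // ... rescan `target` for pointers into from-space, using `size`
      //     directly instead of dereferencing the object's map ...
    }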
+
+
+// External strings table is a place where all external strings are
+// registered.  We need to keep track of such strings to properly
+// finalize them.
+class ExternalStringTable {
+ public:
+  // Registers an external string.
+  inline void AddString(String* string);
+
+  inline void Iterate(ObjectVisitor* v);
+
+  // Restores internal invariant and gets rid of collected strings.
+  // Must be called after each Iterate() that modified the strings.
+  void CleanUp();
+
+  // Destroys all allocated memory.
+  void TearDown();
+
+ private:
+  ExternalStringTable() { }
+
+  friend class Heap;
+
+  inline void Verify();
+
+  inline void AddOldString(String* string);
+
+  // Notifies the table that only a prefix of the new list is valid.
+  inline void ShrinkNewStrings(int position);
+
+  // To speed up scavenge collections, new space strings are kept
+  // separate from old space strings.
+  List<Object*> new_space_strings_;
+  List<Object*> old_space_strings_;
+
+  Heap* heap_;
+
+  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
+};
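The table is now a member of each Heap rather than a set of statics (note the heap_ field and the friend declaration, and the static list definitions removed at the end of heap.cc above). From the Heap's point of view the lifecycle looks roughly as follows; the wrapper methods here are hypothetical, only the TearDown() call site is visible in heap.cc above:

    void Heap::RegisterExternalString(String* s) {      // hypothetical hook
      external_string_table_.AddString(s);               // track for finalization
    }

    void Heap::UpdateExternalStrings(ObjectVisitor* v) { // hypothetical hook
      external_string_table_.Iterate(v);                 // visit every registered string
      external_string_table_.CleanUp();                  // drop collected entries and move
                                                         // survivors off the new-space list
    }

    // Heap::TearDown() (shown in heap.cc above) finishes with
    // external_string_table_.TearDown(), which destroys both backing lists.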
+
+
+class Heap {
  public:
   // Configure heap size before setup. Return false if the heap has been
   // setup already.
-  static bool ConfigureHeap(int max_semispace_size,
-                            int max_old_gen_size,
-                            int max_executable_size);
-  static bool ConfigureHeapDefault();
+  bool ConfigureHeap(int max_semispace_size,
+                     int max_old_gen_size,
+                     int max_executable_size);
+  bool ConfigureHeapDefault();
 
   // Initializes the global object heap. If create_heap_objects is true,
   // also creates the basic non-mutable objects.
   // Returns whether it succeeded.
-  static bool Setup(bool create_heap_objects);
+  bool Setup(bool create_heap_objects);
 
   // Destroys all memory allocated by the heap.
-  static void TearDown();
+  void TearDown();
 
   // Set the stack limit in the roots_ array.  Some architectures generate
   // code that looks here, because it is faster than loading from the static
   // jslimit_/real_jslimit_ variable in the StackGuard.
-  static void SetStackLimits();
+  void SetStackLimits();
 
   // Returns whether Setup has been called.
-  static bool HasBeenSetup();
+  bool HasBeenSetup();
 
   // Returns the maximum amount of memory reserved for the heap.  For
   // the young generation, we reserve 4 times the amount needed for a
   // semi space.  The young generation consists of two semi spaces and
   // we reserve twice the amount needed for those in order to ensure
   // that new space can be aligned to its size.
-  static intptr_t MaxReserved() {
+  intptr_t MaxReserved() {
     return 4 * reserved_semispace_size_ + max_old_generation_size_;
   }
-  static int MaxSemiSpaceSize() { return max_semispace_size_; }
-  static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
-  static int InitialSemiSpaceSize() { return initial_semispace_size_; }
-  static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
-  static intptr_t MaxExecutableSize() { return max_executable_size_; }
+  int MaxSemiSpaceSize() { return max_semispace_size_; }
+  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
+  int InitialSemiSpaceSize() { return initial_semispace_size_; }
+  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
+  intptr_t MaxExecutableSize() { return max_executable_size_; }
 
   // Returns the capacity of the heap in bytes w/o growing. Heap grows when
   // more spaces are needed until it reaches the limit.
-  static intptr_t Capacity();
+  intptr_t Capacity();
 
   // Returns the amount of memory currently committed for the heap.
-  static intptr_t CommittedMemory();
+  intptr_t CommittedMemory();
 
   // Returns the amount of executable memory currently committed for the heap.
-  static intptr_t CommittedMemoryExecutable();
+  intptr_t CommittedMemoryExecutable();
 
   // Returns the available bytes in space w/o growing.
   // Heap doesn't guarantee that it can allocate an object that requires
   // all available bytes. Check MaxHeapObjectSize() instead.
-  static intptr_t Available();
+  intptr_t Available();
 
   // Returns the maximum object size in paged space.
-  static inline int MaxObjectSizeInPagedSpace();
+  inline int MaxObjectSizeInPagedSpace();
 
   // Returns the size of all objects residing in the heap.
-  static intptr_t SizeOfObjects();
+  intptr_t SizeOfObjects();
 
   // Return the starting address and a mask for the new space.  And-masking an
   // address with the mask will result in the start address of the new space
   // for all addresses in either semispace.
-  static Address NewSpaceStart() { return new_space_.start(); }
-  static uintptr_t NewSpaceMask() { return new_space_.mask(); }
-  static Address NewSpaceTop() { return new_space_.top(); }
-
-  static NewSpace* new_space() { return &new_space_; }
-  static OldSpace* old_pointer_space() { return old_pointer_space_; }
-  static OldSpace* old_data_space() { return old_data_space_; }
-  static OldSpace* code_space() { return code_space_; }
-  static MapSpace* map_space() { return map_space_; }
-  static CellSpace* cell_space() { return cell_space_; }
-  static LargeObjectSpace* lo_space() { return lo_space_; }
-
-  static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
-  static Address always_allocate_scope_depth_address() {
+  Address NewSpaceStart() { return new_space_.start(); }
+  uintptr_t NewSpaceMask() { return new_space_.mask(); }
+  Address NewSpaceTop() { return new_space_.top(); }
+
+  NewSpace* new_space() { return &new_space_; }
+  OldSpace* old_pointer_space() { return old_pointer_space_; }
+  OldSpace* old_data_space() { return old_data_space_; }
+  OldSpace* code_space() { return code_space_; }
+  MapSpace* map_space() { return map_space_; }
+  CellSpace* cell_space() { return cell_space_; }
+  LargeObjectSpace* lo_space() { return lo_space_; }
+
+  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
+  Address always_allocate_scope_depth_address() {
     return reinterpret_cast<Address>(&always_allocate_scope_depth_);
   }
-  static bool linear_allocation() {
+  bool linear_allocation() {
     return linear_allocation_scope_depth_ != 0;
   }
 
-  static Address* NewSpaceAllocationTopAddress() {
+  Address* NewSpaceAllocationTopAddress() {
     return new_space_.allocation_top_address();
   }
-  static Address* NewSpaceAllocationLimitAddress() {
+  Address* NewSpaceAllocationLimitAddress() {
     return new_space_.allocation_limit_address();
   }
 
   // Uncommit unused semi space.
-  static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
+  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
 
 #ifdef ENABLE_HEAP_PROTECTION
   // Protect/unprotect the heap by marking all spaces read-only/writable.
-  static void Protect();
-  static void Unprotect();
+  void Protect();
+  void Unprotect();
 #endif
 
   // Allocates and initializes a new JavaScript object based on a
@@ -343,71 +427,65 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateJSObject(
+  MUST_USE_RESULT MaybeObject* AllocateJSObject(
       JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocates and initializes a new global object based on a constructor.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateGlobalObject(
-      JSFunction* constructor);
+  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
 
   // Returns a deep copy of the JavaScript object.
   // Properties and elements are copied too.
   // Returns failure if allocation failed.
-  MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source);
+  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
 
   // Allocates the function prototype.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype(
-      JSFunction* function);
+  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
 
   // Reinitialize a JSGlobalProxy based on a constructor.  The object
   // must have the same size as objects allocated using the
   // constructor.  The object is reinitialized and behaves as an
   // object that has been freshly allocated using the constructor.
-  MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy(
-      JSFunction* constructor,
-      JSGlobalProxy* global);
+  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
+      JSFunction* constructor, JSGlobalProxy* global);
 
   // Allocates and initializes a new JavaScript object based on a map.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap(
+  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
       Map* map, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocates a heap object based on the map.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* Allocate(Map* map, AllocationSpace space);
+  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
 
   // Allocates a JS Map in the heap.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type,
-                                             int instance_size);
+  MUST_USE_RESULT MaybeObject* AllocateMap(InstanceType instance_type,
+                                           int instance_size);
 
   // Allocates a partial map for bootstrapping.
-  MUST_USE_RESULT static MaybeObject* AllocatePartialMap(
-      InstanceType instance_type,
-      int instance_size);
+  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
+                                                  int instance_size);
 
   // Allocate a map for the specified function
-  MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun);
+  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
 
   // Allocates an empty code cache.
-  MUST_USE_RESULT static MaybeObject* AllocateCodeCache();
+  MUST_USE_RESULT MaybeObject* AllocateCodeCache();
 
   // Clear the Instanceof cache (used when a prototype changes).
-  static void ClearInstanceofCache() {
-    set_instanceof_cache_function(the_hole_value());
-  }
+  inline void ClearInstanceofCache();
 
   // Allocates and fully initializes a String.  There are two String
   // encodings: ASCII and two byte. One should choose between the three string
@@ -427,16 +505,16 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii(
+  MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
       Vector<const char> str,
       PretenureFlag pretenure = NOT_TENURED);
-  MUST_USE_RESULT static inline MaybeObject* AllocateStringFromUtf8(
+  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
       Vector<const char> str,
       PretenureFlag pretenure = NOT_TENURED);
-  MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8Slow(
+  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
       Vector<const char> str,
       PretenureFlag pretenure = NOT_TENURED);
-  MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte(
+  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
       Vector<const uc16> str,
       PretenureFlag pretenure = NOT_TENURED);
 
@@ -444,27 +522,25 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* AllocateSymbol(
-      Vector<const char> str,
-      int chars,
-      uint32_t hash_field);
+  MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
+                                                     int chars,
+                                                     uint32_t hash_field);
 
-  MUST_USE_RESULT static inline MaybeObject* AllocateAsciiSymbol(
+  MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
         Vector<const char> str,
         uint32_t hash_field);
 
-  MUST_USE_RESULT static inline MaybeObject* AllocateTwoByteSymbol(
+  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
         Vector<const uc16> str,
         uint32_t hash_field);
 
-  MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol(
+  MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
       unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
 
-  MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol(
+  MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
       Vector<const char> str,
       int chars);
 
-
   // Allocates and partially initializes a String.  There are two String
   // encodings: ASCII and two byte.  These functions allocate a string of the
   // given length and set its map and length fields.  The characters of the
@@ -472,10 +548,10 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString(
+  MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
       int length,
       PretenureFlag pretenure = NOT_TENURED);
-  MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString(
+  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
       int length,
       PretenureFlag pretenure = NOT_TENURED);
 
@@ -483,27 +559,27 @@ class Heap : public AllStatic {
   // A cache is used for ascii codes.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed. Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode(
+  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
       uint16_t code);
 
   // Allocate a byte array of the specified length
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length,
-                                                   PretenureFlag pretenure);
+  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
+                                                 PretenureFlag pretenure);
 
   // Allocate a non-tenured byte array of the specified length
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length);
+  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
 
   // Allocates an external array of the specified length and type.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateExternalArray(
+  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
       int length,
       ExternalArrayType array_type,
       void* external_pointer,
@@ -513,71 +589,65 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell(
-      Object* value);
+  MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);
 
   // Allocates a fixed array initialized with undefined values
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(
-      int length,
-      PretenureFlag pretenure);
+  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
+                                                  PretenureFlag pretenure);
   // Allocates a fixed array initialized with undefined values
-  MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length);
+  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
 
   // Allocates an uninitialized fixed array. It must be filled by the caller.
   //
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray(
-      int length);
+  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
 
   // Make a copy of src and return it. Returns
   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
-  MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src);
+  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
 
   // Make a copy of src, set the map, and return the copy. Returns
   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
-  MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src,
-                                                            Map* map);
+  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
 
   // Allocates a fixed array initialized with the hole values.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles(
+  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
       int length,
       PretenureFlag pretenure = NOT_TENURED);
 
   // AllocateHashTable is identical to AllocateFixedArray except
   // that the resulting object has hash_table_map as map.
-  MUST_USE_RESULT static MaybeObject* AllocateHashTable(
+  MUST_USE_RESULT MaybeObject* AllocateHashTable(
       int length, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocate a global (but otherwise uninitialized) context.
-  MUST_USE_RESULT static MaybeObject* AllocateGlobalContext();
+  MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
 
   // Allocate a function context.
-  MUST_USE_RESULT static MaybeObject* AllocateFunctionContext(
-      int length,
-      JSFunction* closure);
+  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
+                                                       JSFunction* closure);
 
   // Allocate a 'with' context.
-  MUST_USE_RESULT static MaybeObject* AllocateWithContext(
-      Context* previous,
-      JSObject* extension,
-      bool is_catch_context);
+  MUST_USE_RESULT MaybeObject* AllocateWithContext(Context* previous,
+                                                   JSObject* extension,
+                                                   bool is_catch_context);
 
   // Allocates a new utility object in the old generation.
-  MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type);
+  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
 
   // Allocates a function initialized with a shared part.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateFunction(
+  MUST_USE_RESULT MaybeObject* AllocateFunction(
       Map* function_map,
       SharedFunctionInfo* shared,
       Object* prototype,
@@ -598,52 +668,51 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee,
-                                                              int length);
+  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
+      Object* callee, int length);
 
   // Same as NewNumberFromDouble, but may return a preallocated/immutable
   // number object (e.g., minus_zero_value_, nan_value_)
-  MUST_USE_RESULT static MaybeObject* NumberFromDouble(
+  MUST_USE_RESULT MaybeObject* NumberFromDouble(
       double value, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocates a HeapNumber from value.
-  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(
+  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
       double value,
       PretenureFlag pretenure);
-  // pretenure = NOT_TENURED.
-  MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value);
+  // pretenure = NOT_TENURED
+  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
 
   // Converts an int into either a Smi or a HeapNumber object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value);
+  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);
 
   // Converts an int into either a Smi or a HeapNumber object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value);
+  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);
 
   // Allocates a new proxy object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateProxy(
-      Address proxy,
-      PretenureFlag pretenure = NOT_TENURED);
+  MUST_USE_RESULT MaybeObject* AllocateProxy(
+      Address proxy, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocates a new SharedFunctionInfo object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name);
+  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);
 
   // Allocates a new JSMessageObject object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note that this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateJSMessageObject(
+  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
       String* type,
       JSArray* arguments,
       int start_position,
@@ -656,8 +725,8 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first,
-                                                         String* second);
+  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
+                                                  String* second);
 
   // Allocates a new sub string object which is a substring of an underlying
   // string buffer stretching from the index start (inclusive) to the index
@@ -665,7 +734,7 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateSubString(
+  MUST_USE_RESULT MaybeObject* AllocateSubString(
       String* buffer,
       int start,
       int end,
@@ -676,28 +745,27 @@ class Heap : public AllStatic {
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii(
+  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
       ExternalAsciiString::Resource* resource);
-  MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte(
+  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
       ExternalTwoByteString::Resource* resource);
 
   // Finalizes an external string by deleting the associated external
   // data and clearing the resource pointer.
-  static inline void FinalizeExternalString(String* string);
+  inline void FinalizeExternalString(String* string);
 
   // Allocates an uninitialized object.  The memory is non-executable if the
   // hardware and OS allow.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* AllocateRaw(
-      int size_in_bytes,
-      AllocationSpace space,
-      AllocationSpace retry_space);
+  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
+                                                  AllocationSpace space,
+                                                  AllocationSpace retry_space);
 
   // Initialize a filler object to keep the ability to iterate over the heap
   // when shortening objects.
-  static void CreateFillerObjectAt(Address addr, int size);
+  void CreateFillerObjectAt(Address addr, int size);
 
   // Makes a new native code object
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -705,37 +773,36 @@ class Heap : public AllStatic {
   // self_reference. This allows generated code to reference its own Code
   // object by containing this pointer.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc,
-                                                 Code::Flags flags,
-                                                 Handle<Object> self_reference,
-                                                 bool immovable = false);
+  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
+                                          Code::Flags flags,
+                                          Handle<Object> self_reference,
+                                          bool immovable = false);
 
-  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code);
+  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
 
   // Copy the code and scope info part of the code object, but insert
   // the provided data as the relocation information.
-  MUST_USE_RESULT static MaybeObject* CopyCode(Code* code,
-                                               Vector<byte> reloc_info);
+  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
 
   // Finds the symbol for string in the symbol table.
   // If not found, a new symbol is added to the table and returned.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str);
-  MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(Vector<const char> str);
-  MUST_USE_RESULT static MaybeObject* LookupTwoByteSymbol(
+  MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
+  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
+  MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
       Vector<const uc16> str);
-  MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) {
+  MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
     return LookupSymbol(CStrVector(str));
   }
-  MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str);
-  static bool LookupSymbolIfExists(String* str, String** symbol);
-  static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
+  MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
+  bool LookupSymbolIfExists(String* str, String** symbol);
+  bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
 
   // Compute the matching symbol map for a string if possible.
   // NULL is returned if string is in new space or not flattened.
-  static Map* SymbolMapForString(String* str);
+  Map* SymbolMapForString(String* str);
 
   // Tries to flatten a string before compare operation.
   //
@@ -744,60 +811,60 @@ class Heap : public AllStatic {
   // string might stay non-flat even when not a failure is returned.
   //
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str);
+  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
 
   // Converts the given boolean condition to JavaScript boolean value.
-  static Object* ToBoolean(bool condition) {
-    return condition ? true_value() : false_value();
-  }
+  inline Object* ToBoolean(bool condition);
 
   // Code that should be run before and after each GC.  Includes some
   // reporting/verification activities when compiled with DEBUG set.
-  static void GarbageCollectionPrologue();
-  static void GarbageCollectionEpilogue();
+  void GarbageCollectionPrologue();
+  void GarbageCollectionEpilogue();
 
   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
-  static bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
+  bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
 
   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
-  inline static bool CollectGarbage(AllocationSpace space);
+  inline bool CollectGarbage(AllocationSpace space);
 
   // Performs a full garbage collection. Force compaction if the
   // parameter is true.
-  static void CollectAllGarbage(bool force_compaction);
+  void CollectAllGarbage(bool force_compaction);
 
   // Last hope GC, should try to squeeze as much as possible.
-  static void CollectAllAvailableGarbage();
+  void CollectAllAvailableGarbage();
 
   // Notify the heap that a context has been disposed.
-  static int NotifyContextDisposed() { return ++contexts_disposed_; }
+  int NotifyContextDisposed() { return ++contexts_disposed_; }
 
   // Utility to invoke the scavenger. This is needed in test code to
   // ensure correct callback for weak global handles.
-  static void PerformScavenge();
+  void PerformScavenge();
+
+  PromotionQueue* promotion_queue() { return &promotion_queue_; }
 
 #ifdef DEBUG
   // Utility used with flag gc-greedy.
-  static void GarbageCollectionGreedyCheck();
+  void GarbageCollectionGreedyCheck();
 #endif
 
-  static void AddGCPrologueCallback(
+  void AddGCPrologueCallback(
       GCEpilogueCallback callback, GCType gc_type_filter);
-  static void RemoveGCPrologueCallback(GCEpilogueCallback callback);
+  void RemoveGCPrologueCallback(GCEpilogueCallback callback);
 
-  static void AddGCEpilogueCallback(
+  void AddGCEpilogueCallback(
       GCEpilogueCallback callback, GCType gc_type_filter);
-  static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
+  void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
 
-  static void SetGlobalGCPrologueCallback(GCCallback callback) {
+  void SetGlobalGCPrologueCallback(GCCallback callback) {
     ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
     global_gc_prologue_callback_ = callback;
   }
-  static void SetGlobalGCEpilogueCallback(GCCallback callback) {
+  void SetGlobalGCEpilogueCallback(GCCallback callback) {
     ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
     global_gc_epilogue_callback_ = callback;
   }
@@ -805,10 +872,10 @@ class Heap : public AllStatic {
   // Heap root getters.  We have versions with and without type::cast() here.
   // You can't use type::cast during GC because the assert fails.
 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
-  static inline type* name() {                                                 \
+  type* name() {                                                               \
     return type::cast(roots_[k##camel_name##RootIndex]);                       \
   }                                                                            \
-  static inline type* raw_unchecked_##name() {                                 \
+  type* raw_unchecked_##name() {                                               \
     return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
   }
   ROOT_LIST(ROOT_ACCESSOR)
@@ -816,13 +883,13 @@ class Heap : public AllStatic {
 
 // Utility type maps
 #define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
-    static inline Map* name##_map() {                                          \
+    Map* name##_map() {                                                        \
       return Map::cast(roots_[k##Name##MapRootIndex]);                         \
     }
   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
 #undef STRUCT_MAP_ACCESSOR
 
-#define SYMBOL_ACCESSOR(name, str) static inline String* name() {              \
+#define SYMBOL_ACCESSOR(name, str) String* name() {                            \
     return String::cast(roots_[k##name##RootIndex]);                           \
   }
   SYMBOL_LIST(SYMBOL_ACCESSOR)
@@ -830,19 +897,19 @@ class Heap : public AllStatic {
 
   // The hidden_symbol is special because it is the empty string, but does
   // not match the empty string.
-  static String* hidden_symbol() { return hidden_symbol_; }
+  String* hidden_symbol() { return hidden_symbol_; }
 
-  static void set_global_contexts_list(Object* object) {
+  void set_global_contexts_list(Object* object) {
     global_contexts_list_ = object;
   }
-  static Object* global_contexts_list() { return global_contexts_list_; }
+  Object* global_contexts_list() { return global_contexts_list_; }
 
   // Iterates over all roots in the heap.
-  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
+  void IterateRoots(ObjectVisitor* v, VisitMode mode);
   // Iterates over all strong roots in the heap.
-  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
+  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
   // Iterates over all the other roots in the heap.
-  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
+  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
 
   enum ExpectedPageWatermarkState {
     WATERMARK_SHOULD_BE_VALID,
@@ -856,7 +923,7 @@ class Heap : public AllStatic {
   // can_preallocate_during_iteration should be set to true.
   // All pages will be marked as having invalid watermark upon
   // iteration completion.
-  static void IterateDirtyRegions(
+  void IterateDirtyRegions(
       PagedSpace* space,
       DirtyRegionCallback visit_dirty_region,
       ObjectSlotCallback callback,
@@ -866,22 +933,23 @@ class Heap : public AllStatic {
   // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
   // memory interval from start to top. For each dirty region call a
   // visit_dirty_region callback. Return updated bitvector of dirty marks.
-  static uint32_t IterateDirtyRegions(uint32_t marks,
-                                      Address start,
-                                      Address end,
-                                      DirtyRegionCallback visit_dirty_region,
-                                      ObjectSlotCallback callback);
+  uint32_t IterateDirtyRegions(uint32_t marks,
+                               Address start,
+                               Address end,
+                               DirtyRegionCallback visit_dirty_region,
+                               ObjectSlotCallback callback);
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToFromSpace(Address start,
-                                                Address end,
-                                                ObjectSlotCallback callback);
+  void IterateAndMarkPointersToFromSpace(Address start,
+                                         Address end,
+                                         ObjectSlotCallback callback);
 
   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space were found.
-  static bool IteratePointersInDirtyRegion(Address start,
+  static bool IteratePointersInDirtyRegion(Heap* heap,
+                                           Address start,
                                            Address end,
                                            ObjectSlotCallback callback);
 
@@ -889,127 +957,127 @@ class Heap : public AllStatic {
   // Iterate pointers to new space found in memory interval from start to end.
   // This interval is considered to belong to the map space.
   // Return true if pointers to new space were found.
-  static bool IteratePointersInDirtyMapsRegion(Address start,
+  static bool IteratePointersInDirtyMapsRegion(Heap* heap,
+                                               Address start,
                                                Address end,
                                                ObjectSlotCallback callback);
 
 
   // Returns whether the object resides in new space.
-  static inline bool InNewSpace(Object* object);
-  static inline bool InFromSpace(Object* object);
-  static inline bool InToSpace(Object* object);
+  inline bool InNewSpace(Object* object);
+  inline bool InFromSpace(Object* object);
+  inline bool InToSpace(Object* object);
 
   // Checks whether an address/object is in the heap (including auxiliary
   // area and unused area).
-  static bool Contains(Address addr);
-  static bool Contains(HeapObject* value);
+  bool Contains(Address addr);
+  bool Contains(HeapObject* value);
 
   // Checks whether an address/object is in a space.
   // Currently used by tests, serialization and heap verification only.
-  static bool InSpace(Address addr, AllocationSpace space);
-  static bool InSpace(HeapObject* value, AllocationSpace space);
+  bool InSpace(Address addr, AllocationSpace space);
+  bool InSpace(HeapObject* value, AllocationSpace space);
 
   // Finds out which space an object should get promoted to based on its type.
-  static inline OldSpace* TargetSpace(HeapObject* object);
-  static inline AllocationSpace TargetSpaceId(InstanceType type);
+  inline OldSpace* TargetSpace(HeapObject* object);
+  inline AllocationSpace TargetSpaceId(InstanceType type);
 
   // Sets the stub_cache_ (only used when expanding the dictionary).
-  static void public_set_code_stubs(NumberDictionary* value) {
+  void public_set_code_stubs(NumberDictionary* value) {
     roots_[kCodeStubsRootIndex] = value;
   }
 
   // Support for computing object sizes for old objects during GCs. Returns
   // a function that is guaranteed to be safe for computing object sizes in
   // the current GC phase.
-  static HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
+  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
     return gc_safe_size_of_old_object_;
   }
 
   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
-  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
+  void public_set_non_monomorphic_cache(NumberDictionary* value) {
     roots_[kNonMonomorphicCacheRootIndex] = value;
   }
 
-  static void public_set_empty_script(Script* script) {
+  void public_set_empty_script(Script* script) {
     roots_[kEmptyScriptRootIndex] = script;
   }
 
   // Update the next script id.
-  static inline void SetLastScriptId(Object* last_script_id);
+  inline void SetLastScriptId(Object* last_script_id);
 
   // Generated code can embed this address to get access to the roots.
-  static Object** roots_address() { return roots_; }
+  Object** roots_address() { return roots_; }
 
   // Get address of global contexts list for serialization support.
-  static Object** global_contexts_list_address() {
+  Object** global_contexts_list_address() {
     return &global_contexts_list_;
   }
 
 #ifdef DEBUG
-  static void Print();
-  static void PrintHandles();
+  void Print();
+  void PrintHandles();
 
   // Verify the heap is in its normal state before or after a GC.
-  static void Verify();
+  void Verify();
 
   // Report heap statistics.
-  static void ReportHeapStatistics(const char* title);
-  static void ReportCodeStatistics(const char* title);
+  void ReportHeapStatistics(const char* title);
+  void ReportCodeStatistics(const char* title);
 
   // Fill in bogus values in from space
-  static void ZapFromSpace();
+  void ZapFromSpace();
 #endif
 
 #if defined(ENABLE_LOGGING_AND_PROFILING)
   // Print short heap statistics.
-  static void PrintShortHeapStatistics();
+  void PrintShortHeapStatistics();
 #endif
 
   // Makes a new symbol object
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str,
-                                                   int length,
-                                                   int hash);
-  MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str);
+  MUST_USE_RESULT MaybeObject* CreateSymbol(
+      const char* str, int length, int hash);
+  MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
 
   // Write barrier support for address[offset] = o.
-  static inline void RecordWrite(Address address, int offset);
+  inline void RecordWrite(Address address, int offset);
 
   // Write barrier support for address[start : start + len[ = o.
-  static inline void RecordWrites(Address address, int start, int len);
+  inline void RecordWrites(Address address, int start, int len);
 
   // Given an address occupied by a live code object, return that object.
-  static Object* FindCodeObject(Address a);
+  Object* FindCodeObject(Address a);
 
   // Invoke Shrink on shrinkable spaces.
-  static void Shrink();
+  void Shrink();
 
   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
-  static inline HeapState gc_state() { return gc_state_; }
+  inline HeapState gc_state() { return gc_state_; }
 
 #ifdef DEBUG
-  static bool IsAllocationAllowed() { return allocation_allowed_; }
-  static inline bool allow_allocation(bool enable);
+  bool IsAllocationAllowed() { return allocation_allowed_; }
+  inline bool allow_allocation(bool enable);
 
-  static bool disallow_allocation_failure() {
+  bool disallow_allocation_failure() {
     return disallow_allocation_failure_;
   }
 
-  static void TracePathToObject(Object* target);
-  static void TracePathToGlobal();
+  void TracePathToObject(Object* target);
+  void TracePathToGlobal();
 #endif
 
   // Callback function passed to Heap::Iterate etc.  Copies an object if
   // necessary; the object might be promoted to an old space.  The caller must
   // ensure the precondition that the object is (a) a heap object and (b) in
   // the heap's from space.
-  static void ScavengePointer(HeapObject** p);
+  static inline void ScavengePointer(HeapObject** p);
   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
 
   // Commits from space if it is uncommitted.
-  static void EnsureFromSpaceIsCommitted();
+  void EnsureFromSpaceIsCommitted();
 
   // Support for partial snapshots.  After calling this we can allocate a
   // certain number of bytes using only linear allocation (with a
@@ -1017,7 +1085,7 @@ class Heap : public AllStatic {
   // or causing a GC.  It returns true if space was reserved or false if a GC is
   // needed.  For paged spaces the space requested must include the space wasted
   // at the end of each page when allocating linearly.
-  static void ReserveSpace(
+  void ReserveSpace(
     int new_space_size,
     int pointer_space_size,
     int data_space_size,
@@ -1030,45 +1098,44 @@ class Heap : public AllStatic {
   // Support for the API.
   //
 
-  static bool CreateApiObjects();
+  bool CreateApiObjects();
 
   // Attempt to find the number in a small cache.  If we find it, return
   // the string representation of the number.  Otherwise return undefined.
-  static Object* GetNumberStringCache(Object* number);
+  Object* GetNumberStringCache(Object* number);
 
   // Update the cache with a new number-string pair.
-  static void SetNumberStringCache(Object* number, String* str);
+  void SetNumberStringCache(Object* number, String* str);
 
   // Adjusts the amount of registered external memory.
   // Returns the adjusted value.
-  static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
+  inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
 
   // Allocate uninitialized fixed array.
-  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length);
-  MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(
-      int length,
-      PretenureFlag pretenure);
+  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
+  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
+                                                     PretenureFlag pretenure);
 
   // True if we have reached the allocation limit in the old generation that
   // should force the next GC (caused normally) to be a full one.
-  static bool OldGenerationPromotionLimitReached() {
+  bool OldGenerationPromotionLimitReached() {
     return (PromotedSpaceSize() + PromotedExternalMemorySize())
            > old_gen_promotion_limit_;
   }
 
-  static intptr_t OldGenerationSpaceAvailable() {
+  intptr_t OldGenerationSpaceAvailable() {
     return old_gen_allocation_limit_ -
            (PromotedSpaceSize() + PromotedExternalMemorySize());
   }
 
   // True if we have reached the allocation limit in the old generation that
   // should artificially cause a GC right now.
-  static bool OldGenerationAllocationLimitReached() {
+  bool OldGenerationAllocationLimitReached() {
     return OldGenerationSpaceAvailable() < 0;
   }
 
   // Can be called when the embedding application is idle.
-  static bool IdleNotification();
+  bool IdleNotification();
 
   // Declare all the root indices.
   enum RootListIndex {
@@ -1090,84 +1157,109 @@ class Heap : public AllStatic {
     kRootListLength
   };
 
-  MUST_USE_RESULT static MaybeObject* NumberToString(
-      Object* number,
-      bool check_number_string_cache = true);
+  MUST_USE_RESULT MaybeObject* NumberToString(
+      Object* number, bool check_number_string_cache = true);
 
-  static Map* MapForExternalArrayType(ExternalArrayType array_type);
-  static RootListIndex RootIndexForExternalArrayType(
+  Map* MapForExternalArrayType(ExternalArrayType array_type);
+  RootListIndex RootIndexForExternalArrayType(
       ExternalArrayType array_type);
 
-  static void RecordStats(HeapStats* stats, bool take_snapshot = false);
+  void RecordStats(HeapStats* stats, bool take_snapshot = false);
 
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
 
-  static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                             Address src,
-                                                             int byte_size);
+  inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
+                                                      Address src,
+                                                      int byte_size);
 
   // Optimized version of memmove for blocks with pointer size aligned sizes and
   // pointer size aligned addresses.
   static inline void MoveBlock(Address dst, Address src, int byte_size);
 
-  static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                             Address src,
-                                                             int byte_size);
+  inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
+                                                      Address src,
+                                                      int byte_size);
 
   // Check new space expansion criteria and expand semispaces if they were hit.
-  static void CheckNewSpaceExpansionCriteria();
+  void CheckNewSpaceExpansionCriteria();
 
-  static inline void IncrementYoungSurvivorsCounter(int survived) {
+  inline void IncrementYoungSurvivorsCounter(int survived) {
     young_survivors_after_last_gc_ = survived;
     survived_since_last_expansion_ += survived;
   }
 
-  static void UpdateNewSpaceReferencesInExternalStringTable(
+  void UpdateNewSpaceReferencesInExternalStringTable(
       ExternalStringTableUpdaterCallback updater_func);
 
-  static void ProcessWeakReferences(WeakObjectRetainer* retainer);
+  void ProcessWeakReferences(WeakObjectRetainer* retainer);
 
   // Helper function that governs the promotion policy from new space to
   // old.  If the object's old address lies below the new space's age
   // mark or if we've already filled the bottom 1/16th of the to space,
   // we try to promote this object.
-  static inline bool ShouldBePromoted(Address old_address, int object_size);
+  inline bool ShouldBePromoted(Address old_address, int object_size);
+
+  int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
+
+  void ClearJSFunctionResultCaches();
+
+  void ClearNormalizedMapCaches();
+
+  GCTracer* tracer() { return tracer_; }
+
+  // Returns maximum GC pause.
+  int get_max_gc_pause() { return max_gc_pause_; }
+
+  // Returns maximum size of objects alive after GC.
+  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
 
-  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
+  // Returns minimal interval between two subsequent collections.
+  int get_min_in_mutator() { return min_in_mutator_; }
 
-  static void ClearJSFunctionResultCaches();
+  MarkCompactCollector* mark_compact_collector() {
+    return &mark_compact_collector_;
+  }
 
-  static void ClearNormalizedMapCaches();
+  ExternalStringTable* external_string_table() {
+    return &external_string_table_;
+  }
 
-  static GCTracer* tracer() { return tracer_; }
+  inline Isolate* isolate();
+  bool is_safe_to_read_maps() { return is_safe_to_read_maps_; }
 
-  static void CallGlobalGCPrologueCallback() {
+  void CallGlobalGCPrologueCallback() {
     if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
   }
 
-  static void CallGlobalGCEpilogueCallback() {
+  void CallGlobalGCEpilogueCallback() {
     if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
   }
 
  private:
-  static int reserved_semispace_size_;
-  static int max_semispace_size_;
-  static int initial_semispace_size_;
-  static intptr_t max_old_generation_size_;
-  static intptr_t max_executable_size_;
-  static intptr_t code_range_size_;
+  Heap();
+
+  // This can be calculated directly from a pointer to the heap; however, it is
+  // more expedient to get at the isolate directly from within Heap methods.
+  Isolate* isolate_;
+
+  int reserved_semispace_size_;
+  int max_semispace_size_;
+  int initial_semispace_size_;
+  intptr_t max_old_generation_size_;
+  intptr_t max_executable_size_;
+  intptr_t code_range_size_;
 
   // For keeping track of how much data has survived
   // scavenge since last new space expansion.
-  static int survived_since_last_expansion_;
+  int survived_since_last_expansion_;
 
-  static int always_allocate_scope_depth_;
-  static int linear_allocation_scope_depth_;
+  int always_allocate_scope_depth_;
+  int linear_allocation_scope_depth_;
 
   // For keeping track of context disposals.
-  static int contexts_disposed_;
+  int contexts_disposed_;
 
 #if defined(V8_TARGET_ARCH_X64)
   static const int kMaxObjectSizeInNewSpace = 1024*KB;
@@ -1175,76 +1267,78 @@ class Heap : public AllStatic {
   static const int kMaxObjectSizeInNewSpace = 512*KB;
 #endif
 
-  static NewSpace new_space_;
-  static OldSpace* old_pointer_space_;
-  static OldSpace* old_data_space_;
-  static OldSpace* code_space_;
-  static MapSpace* map_space_;
-  static CellSpace* cell_space_;
-  static LargeObjectSpace* lo_space_;
-  static HeapState gc_state_;
+  NewSpace new_space_;
+  OldSpace* old_pointer_space_;
+  OldSpace* old_data_space_;
+  OldSpace* code_space_;
+  MapSpace* map_space_;
+  CellSpace* cell_space_;
+  LargeObjectSpace* lo_space_;
+  HeapState gc_state_;
 
   // Returns the size of object residing in non new spaces.
-  static intptr_t PromotedSpaceSize();
+  intptr_t PromotedSpaceSize();
 
   // Returns the amount of external memory registered since last global gc.
-  static int PromotedExternalMemorySize();
+  int PromotedExternalMemorySize();
 
-  static int mc_count_;  // how many mark-compact collections happened
-  static int ms_count_;  // how many mark-sweep collections happened
-  static unsigned int gc_count_;  // how many gc happened
+  int mc_count_;  // how many mark-compact collections happened
+  int ms_count_;  // how many mark-sweep collections happened
+  unsigned int gc_count_;  // how many GCs happened
 
   // Total length of the strings we failed to flatten since the last GC.
-  static int unflattened_strings_length_;
+  int unflattened_strings_length_;
 
 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
-  static inline void set_##name(type* value) {                                 \
+  inline void set_##name(type* value) {                                 \
     roots_[k##camel_name##RootIndex] = value;                                  \
   }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
 #ifdef DEBUG
-  static bool allocation_allowed_;
+  bool allocation_allowed_;
 
   // If the --gc-interval flag is set to a positive value, this
   // variable holds the value indicating the number of allocations
   // remaining until the next failure and garbage collection.
-  static int allocation_timeout_;
+  int allocation_timeout_;
 
   // Do we expect to be able to handle allocation failure at this
   // time?
-  static bool disallow_allocation_failure_;
+  bool disallow_allocation_failure_;
+
+  HeapDebugUtils* debug_utils_;
 #endif  // DEBUG
 
   // Limit that triggers a global GC on the next (normally caused) GC.  This
   // is checked when we have already decided to do a GC to help determine
   // which collector to invoke.
-  static intptr_t old_gen_promotion_limit_;
+  intptr_t old_gen_promotion_limit_;
 
   // Limit that triggers a global GC as soon as is reasonable.  This is
   // checked before expanding a paged space in the old generation and on
   // every allocation in large object space.
-  static intptr_t old_gen_allocation_limit_;
+  intptr_t old_gen_allocation_limit_;
 
   // Limit on the amount of externally allocated memory allowed
   // between global GCs. If reached a global GC is forced.
-  static intptr_t external_allocation_limit_;
+  intptr_t external_allocation_limit_;
 
   // The amount of external memory registered through the API kept alive
   // by global handles
-  static int amount_of_external_allocated_memory_;
+  int amount_of_external_allocated_memory_;
 
   // Caches the amount of external memory registered at the last global gc.
-  static int amount_of_external_allocated_memory_at_last_global_gc_;
+  int amount_of_external_allocated_memory_at_last_global_gc_;
 
   // Indicates that an allocation has failed in the old generation since the
   // last GC.
-  static int old_gen_exhausted_;
+  int old_gen_exhausted_;
 
-  static Object* roots_[kRootListLength];
+  Object* roots_[kRootListLength];
 
-  static Object* global_contexts_list_;
+  Object* global_contexts_list_;
 
   struct StringTypeTable {
     InstanceType type;
@@ -1269,7 +1363,7 @@ class Heap : public AllStatic {
 
   // The special hidden symbol which is an empty string, but does not match
   // any string when looked up in properties.
-  static String* hidden_symbol_;
+  String* hidden_symbol_;
 
   // GC callback function, called before and after mark-compact GC.
   // Allocations in the callback function are disallowed.
@@ -1283,7 +1377,7 @@ class Heap : public AllStatic {
     GCPrologueCallback callback;
     GCType gc_type;
   };
-  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;
+  List<GCPrologueCallbackPair> gc_prologue_callbacks_;
 
   struct GCEpilogueCallbackPair {
     GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
@@ -1295,88 +1389,91 @@ class Heap : public AllStatic {
     GCEpilogueCallback callback;
     GCType gc_type;
   };
-  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
+  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
 
-  static GCCallback global_gc_prologue_callback_;
-  static GCCallback global_gc_epilogue_callback_;
+  GCCallback global_gc_prologue_callback_;
+  GCCallback global_gc_epilogue_callback_;
 
   // Support for computing object sizes during GC.
-  static HeapObjectCallback gc_safe_size_of_old_object_;
+  HeapObjectCallback gc_safe_size_of_old_object_;
   static int GcSafeSizeOfOldObject(HeapObject* object);
   static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);
 
   // Update the GC state. Called from the mark-compact collector.
-  static void MarkMapPointersAsEncoded(bool encoded) {
+  void MarkMapPointersAsEncoded(bool encoded) {
     gc_safe_size_of_old_object_ = encoded
         ? &GcSafeSizeOfOldObjectWithEncodedMap
         : &GcSafeSizeOfOldObject;
   }
 
   // Checks whether a global GC is necessary
-  static GarbageCollector SelectGarbageCollector(AllocationSpace space);
+  GarbageCollector SelectGarbageCollector(AllocationSpace space);
 
   // Performs garbage collection
   // Returns whether there is a chance another major GC could
   // collect more garbage.
-  static bool PerformGarbageCollection(GarbageCollector collector,
-                                       GCTracer* tracer);
+  bool PerformGarbageCollection(GarbageCollector collector,
+                                GCTracer* tracer);
+
+  static const intptr_t kMinimumPromotionLimit = 2 * MB;
+  static const intptr_t kMinimumAllocationLimit = 8 * MB;
+
+  inline void UpdateOldSpaceLimits();
 
   // Allocate an uninitialized object in map space.  The behavior is identical
   // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
   // have to test the allocation space argument and (b) can reduce code size
   // (since both AllocateRaw and AllocateRawMap are inlined).
-  MUST_USE_RESULT static inline MaybeObject* AllocateRawMap();
+  MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
 
   // Allocate an uninitialized object in the global property cell space.
-  MUST_USE_RESULT static inline MaybeObject* AllocateRawCell();
+  MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
 
   // Initializes a JSObject based on its map.
-  static void InitializeJSObjectFromMap(JSObject* obj,
-                                        FixedArray* properties,
-                                        Map* map);
+  void InitializeJSObjectFromMap(JSObject* obj,
+                                 FixedArray* properties,
+                                 Map* map);
 
-  static bool CreateInitialMaps();
-  static bool CreateInitialObjects();
+  bool CreateInitialMaps();
+  bool CreateInitialObjects();
 
-  // These two Create*EntryStub functions are here and forced to not be inlined
+  // These two Create*EntryStub functions are here and forced to not be inlined
   // because of a gcc-4.4 bug that assigns wrong vtable entries.
-  NO_INLINE(static void CreateJSEntryStub());
-  NO_INLINE(static void CreateJSConstructEntryStub());
+  NO_INLINE(void CreateJSEntryStub());
+  NO_INLINE(void CreateJSConstructEntryStub());
 
-  static void CreateFixedStubs();
+  void CreateFixedStubs();
 
-  MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string,
-                                                    Object* to_number);
+  MaybeObject* CreateOddball(const char* to_string,
+                             Object* to_number,
+                             byte kind);
 
   // Allocate empty fixed array.
-  MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray();
+  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
 
   // Performs a minor collection in new generation.
-  static void Scavenge();
+  void Scavenge();
 
   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
+      Heap* heap,
       Object** pointer);
 
-  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
-                            Address new_space_front);
+  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
 
   // Performs a major collection in the whole heap.
-  static void MarkCompact(GCTracer* tracer);
+  void MarkCompact(GCTracer* tracer);
 
   // Code to be run before and after mark-compact.
-  static void MarkCompactPrologue(bool is_compacting);
+  void MarkCompactPrologue(bool is_compacting);
 
   // Completely clear the Instanceof cache (to stop it keeping objects alive
   // around a GC).
-  static void CompletelyClearInstanceofCache() {
-    set_instanceof_cache_map(the_hole_value());
-    set_instanceof_cache_function(the_hole_value());
-  }
+  inline void CompletelyClearInstanceofCache();
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   // Record statistics before and after garbage collection.
-  static void ReportStatisticsBeforeGC();
-  static void ReportStatisticsAfterGC();
+  void ReportStatisticsBeforeGC();
+  void ReportStatisticsAfterGC();
 #endif
 
   // Slow part of scavenge object.
@@ -1388,39 +1485,39 @@ class Heap : public AllStatic {
   // other parts of the VM could use it. Specifically, a function that creates
   // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
   // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT static inline MaybeObject* InitializeFunction(
+  MUST_USE_RESULT inline MaybeObject* InitializeFunction(
       JSFunction* function,
       SharedFunctionInfo* shared,
       Object* prototype);
 
-  static GCTracer* tracer_;
+  GCTracer* tracer_;
 
 
   // Initializes the number to string cache based on the max semispace size.
-  MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache();
+  MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
   // Flush the number to string cache.
-  static void FlushNumberStringCache();
+  void FlushNumberStringCache();
 
-  static void UpdateSurvivalRateTrend(int start_new_space_size);
+  void UpdateSurvivalRateTrend(int start_new_space_size);
 
   enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
 
   static const int kYoungSurvivalRateThreshold = 90;
   static const int kYoungSurvivalRateAllowedDeviation = 15;
 
-  static int young_survivors_after_last_gc_;
-  static int high_survival_rate_period_length_;
-  static double survival_rate_;
-  static SurvivalRateTrend previous_survival_rate_trend_;
-  static SurvivalRateTrend survival_rate_trend_;
+  int young_survivors_after_last_gc_;
+  int high_survival_rate_period_length_;
+  double survival_rate_;
+  SurvivalRateTrend previous_survival_rate_trend_;
+  SurvivalRateTrend survival_rate_trend_;
 
-  static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
+  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
     ASSERT(survival_rate_trend != FLUCTUATING);
     previous_survival_rate_trend_ = survival_rate_trend_;
     survival_rate_trend_ = survival_rate_trend;
   }
 
-  static SurvivalRateTrend survival_rate_trend() {
+  SurvivalRateTrend survival_rate_trend() {
     if (survival_rate_trend_ == STABLE) {
       return STABLE;
     } else if (previous_survival_rate_trend_ == STABLE) {
@@ -1432,7 +1529,7 @@ class Heap : public AllStatic {
     }
   }
 
-  static bool IsStableOrIncreasingSurvivalTrend() {
+  bool IsStableOrIncreasingSurvivalTrend() {
     switch (survival_rate_trend()) {
       case STABLE:
       case INCREASING:
@@ -1442,22 +1539,64 @@ class Heap : public AllStatic {
     }
   }
 
-  static bool IsIncreasingSurvivalTrend() {
+  bool IsIncreasingSurvivalTrend() {
     return survival_rate_trend() == INCREASING;
   }
 
-  static bool IsHighSurvivalRate() {
+  bool IsHighSurvivalRate() {
     return high_survival_rate_period_length_ > 0;
   }
 
   static const int kInitialSymbolTableSize = 2048;
   static const int kInitialEvalCacheSize = 64;
 
+  // Maximum GC pause.
+  int max_gc_pause_;
+
+  // Maximum size of objects alive after GC.
+  intptr_t max_alive_after_gc_;
+
+  // Minimal interval between two subsequent collections.
+  int min_in_mutator_;
+
+  // Size of objects alive after last GC.
+  intptr_t alive_after_last_gc_;
+
+  double last_gc_end_timestamp_;
+
+  MarkCompactCollector mark_compact_collector_;
+
+  // This field contains the meaning of the WATERMARK_INVALIDATED flag.
+  // Instead of clearing this flag from all pages we just flip
+  // its meaning at the beginning of a scavenge.
+  intptr_t page_watermark_invalidated_mark_;
+
+  int number_idle_notifications_;
+  unsigned int last_idle_notification_gc_count_;
+  bool last_idle_notification_gc_count_init_;
+
+  // Shared state read by the scavenge collector and set by ScavengeObject.
+  PromotionQueue promotion_queue_;
+
+  // Flag is set when the heap has been configured.  The heap can be repeatedly
+  // configured through the API until it is set up.
+  bool configured_;
+
+  ExternalStringTable external_string_table_;
+
+  bool is_safe_to_read_maps_;
+
   friend class Factory;
+  friend class GCTracer;
   friend class DisallowAllocationFailure;
   friend class AlwaysAllocateScope;
   friend class LinearAllocationScope;
+  friend class Page;
+  friend class Isolate;
   friend class MarkCompactCollector;
+  friend class MapCompact;
+
+  DISALLOW_COPY_AND_ASSIGN(Heap);
 };
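
Taken together, the Heap declarations above drop their static qualifiers, so every former Heap::Foo() entry point becomes a method reached through the heap instance owned by an isolate (the HEAP shorthand used elsewhere in this patch). A minimal standalone sketch of that call-shape change, with hypothetical names rather than the real V8 classes:

#include <cstdio>

class Heap {
 public:
  bool Contains(const void* addr) const { return addr != nullptr; }  // placeholder check
 private:
  Heap() {}
  friend class Isolate;  // only the owning isolate constructs its heap
};

class Isolate {
 public:
  static Isolate* Current() { static Isolate isolate; return &isolate; }
  Heap* heap() { return &heap_; }
 private:
  Isolate() {}
  Heap heap_;  // per-isolate heap replaces the old process-wide static state
};

int main() {
  int probe = 0;
  // Old call shape:  Heap::Contains(&probe);
  // New call shape:
  std::printf("%d\n", Isolate::Current()->heap()->Contains(&probe));
  return 0;
}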
 
 
@@ -1501,13 +1640,13 @@ class AlwaysAllocateScope {
     // non-handle code to call handle code. The code still works but
     // performance will degrade, so we want to catch this situation
     // in debug mode.
-    ASSERT(Heap::always_allocate_scope_depth_ == 0);
-    Heap::always_allocate_scope_depth_++;
+    ASSERT(HEAP->always_allocate_scope_depth_ == 0);
+    HEAP->always_allocate_scope_depth_++;
   }
 
   ~AlwaysAllocateScope() {
-    Heap::always_allocate_scope_depth_--;
-    ASSERT(Heap::always_allocate_scope_depth_ == 0);
+    HEAP->always_allocate_scope_depth_--;
+    ASSERT(HEAP->always_allocate_scope_depth_ == 0);
   }
 };
 
@@ -1515,12 +1654,12 @@ class AlwaysAllocateScope {
 class LinearAllocationScope {
  public:
   LinearAllocationScope() {
-    Heap::linear_allocation_scope_depth_++;
+    HEAP->linear_allocation_scope_depth_++;
   }
 
   ~LinearAllocationScope() {
-    Heap::linear_allocation_scope_depth_--;
-    ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
+    HEAP->linear_allocation_scope_depth_--;
+    ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
   }
 };
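
Both scopes above follow the same RAII pattern: bump a depth counter on the current heap in the constructor and restore it in the destructor, now through HEAP instead of a static Heap member. A standalone sketch of that pattern with a toy Heap struct (not V8 code):

#include <cassert>

struct Heap {
  int linear_allocation_scope_depth;
  Heap() : linear_allocation_scope_depth(0) {}
};

static Heap g_heap;            // stand-in for the per-isolate HEAP lookup
#define HEAP (&g_heap)

class LinearAllocationScope {
 public:
  LinearAllocationScope() { HEAP->linear_allocation_scope_depth++; }
  ~LinearAllocationScope() {
    HEAP->linear_allocation_scope_depth--;
    assert(HEAP->linear_allocation_scope_depth >= 0);
  }
};

void AllocateLinearly() {
  LinearAllocationScope scope;  // depth stays raised for the lifetime of 'scope'
  // ... linear allocations happen here ...
}                               // destructor restores the previous depth

int main() {
  AllocateLinearly();
  assert(HEAP->linear_allocation_scope_depth == 0);
  return 0;
}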
 
@@ -1537,7 +1676,7 @@ class VerifyPointersVisitor: public ObjectVisitor {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         HeapObject* object = HeapObject::cast(*current);
-        ASSERT(Heap::Contains(object));
+        ASSERT(HEAP->Contains(object));
         ASSERT(object->map()->IsMap());
       }
     }
@@ -1555,10 +1694,10 @@ class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         HeapObject* object = HeapObject::cast(*current);
-        ASSERT(Heap::Contains(object));
+        ASSERT(HEAP->Contains(object));
         ASSERT(object->map()->IsMap());
-        if (Heap::InNewSpace(object)) {
-          ASSERT(Heap::InToSpace(object));
+        if (HEAP->InNewSpace(object)) {
+          ASSERT(HEAP->InToSpace(object));
           Address addr = reinterpret_cast<Address>(current);
           ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
         }
@@ -1672,28 +1811,37 @@ class HeapIterator BASE_EMBEDDED {
 class KeyedLookupCache {
  public:
   // Lookup field offset for (map, name). If absent, -1 is returned.
-  static int Lookup(Map* map, String* name);
+  int Lookup(Map* map, String* name);
 
   // Update an element in the cache.
-  static void Update(Map* map, String* name, int field_offset);
+  void Update(Map* map, String* name, int field_offset);
 
   // Clear the cache.
-  static void Clear();
+  void Clear();
 
   static const int kLength = 64;
   static const int kCapacityMask = kLength - 1;
   static const int kMapHashShift = 2;
+  static const int kNotFound = -1;
 
  private:
+  KeyedLookupCache() {
+    for (int i = 0; i < kLength; ++i) {
+      keys_[i].map = NULL;
+      keys_[i].name = NULL;
+      field_offsets_[i] = kNotFound;
+    }
+  }
+
   static inline int Hash(Map* map, String* name);
 
   // Get the address of the keys and field_offsets arrays.  Used in
   // generated code to perform cache lookups.
-  static Address keys_address() {
+  Address keys_address() {
     return reinterpret_cast<Address>(&keys_);
   }
 
-  static Address field_offsets_address() {
+  Address field_offsets_address() {
     return reinterpret_cast<Address>(&field_offsets_);
   }
 
@@ -1701,10 +1849,13 @@ class KeyedLookupCache {
     Map* map;
     String* name;
   };
-  static Key keys_[kLength];
-  static int field_offsets_[kLength];
+
+  Key keys_[kLength];
+  int field_offsets_[kLength];
 
   friend class ExternalReference;
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
 };
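
The private constructor, the Isolate friend declaration and DISALLOW_COPY_AND_ASSIGN added above make the lookup cache constructible only by its owning isolate. A simplified, hypothetical sketch of that ownership pattern (names invented for illustration):

class Isolate;

class KeyedCache {
 public:
  void Clear() { entries_ = 0; }
 private:
  KeyedCache() : entries_(0) {}  // private: no free-standing instances
  int entries_;
  friend class Isolate;          // only the isolate creates and owns the cache
};

class Isolate {
 public:
  KeyedCache* keyed_cache() { return &keyed_cache_; }
 private:
  KeyedCache keyed_cache_;       // one cache per isolate instead of per process
};

void ClearThroughIsolate(Isolate* isolate) {
  // Callers always reach the cache through its isolate, never via statics.
  isolate->keyed_cache()->Clear();
}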
 
 
@@ -1716,7 +1867,7 @@ class DescriptorLookupCache {
  public:
   // Lookup descriptor index for (map, name).
   // If absent, kAbsent is returned.
-  static int Lookup(DescriptorArray* array, String* name) {
+  int Lookup(DescriptorArray* array, String* name) {
     if (!StringShape(name).IsSymbol()) return kAbsent;
     int index = Hash(array, name);
     Key& key = keys_[index];
@@ -1725,7 +1876,7 @@ class DescriptorLookupCache {
   }
 
   // Update an element in the cache.
-  static void Update(DescriptorArray* array, String* name, int result) {
+  void Update(DescriptorArray* array, String* name, int result) {
     ASSERT(result != kAbsent);
     if (StringShape(name).IsSymbol()) {
       int index = Hash(array, name);
@@ -1737,10 +1888,18 @@ class DescriptorLookupCache {
   }
 
   // Clear the cache.
-  static void Clear();
+  void Clear();
 
   static const int kAbsent = -2;
  private:
+  DescriptorLookupCache() {
+    for (int i = 0; i < kLength; ++i) {
+      keys_[i].array = NULL;
+      keys_[i].name = NULL;
+      results_[i] = kAbsent;
+    }
+  }
+
   static int Hash(DescriptorArray* array, String* name) {
     // Uses only lower 32 bits if pointers are larger.
     uint32_t array_hash =
@@ -1756,55 +1915,11 @@ class DescriptorLookupCache {
     String* name;
   };
 
-  static Key keys_[kLength];
-  static int results_[kLength];
-};
-
-
-// ----------------------------------------------------------------------------
-// Marking stack for tracing live objects.
-
-class MarkingStack {
- public:
-  void Initialize(Address low, Address high) {
-    top_ = low_ = reinterpret_cast<HeapObject**>(low);
-    high_ = reinterpret_cast<HeapObject**>(high);
-    overflowed_ = false;
-  }
-
-  bool is_full() { return top_ >= high_; }
-
-  bool is_empty() { return top_ <= low_; }
+  Key keys_[kLength];
+  int results_[kLength];
 
-  bool overflowed() { return overflowed_; }
-
-  void clear_overflowed() { overflowed_ = false; }
-
-  // Push the (marked) object on the marking stack if there is room,
-  // otherwise mark the object as overflowed and wait for a rescan of the
-  // heap.
-  void Push(HeapObject* object) {
-    CHECK(object->IsHeapObject());
-    if (is_full()) {
-      object->SetOverflow();
-      overflowed_ = true;
-    } else {
-      *(top_++) = object;
-    }
-  }
-
-  HeapObject* Pop() {
-    ASSERT(!is_empty());
-    HeapObject* object = *(--top_);
-    CHECK(object->IsHeapObject());
-    return object;
-  }
-
- private:
-  HeapObject** low_;
-  HeapObject** top_;
-  HeapObject** high_;
-  bool overflowed_;
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
 };
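
DescriptorLookupCache keeps the same direct-mapped design; only its keys_ and results_ arrays become instance fields initialized by the new private constructor. A self-contained toy version of the Lookup/Update/Hash shape, with invented key types, hash, and sizes:

#include <cstdint>
#include <cstring>

class SmallLookupCache {
 public:
  static const int kAbsent = -2;

  SmallLookupCache() {
    std::memset(keys_, 0, sizeof(keys_));
    for (int i = 0; i < kLength; ++i) results_[i] = kAbsent;
  }

  int Lookup(const void* array, const void* name) const {
    int index = Hash(array, name);
    const Key& key = keys_[index];
    return (key.array == array && key.name == name) ? results_[index] : kAbsent;
  }

  void Update(const void* array, const void* name, int result) {
    int index = Hash(array, name);       // a newer pair simply evicts the slot
    keys_[index].array = array;
    keys_[index].name = name;
    results_[index] = result;
  }

 private:
  static const int kLength = 64;

  static int Hash(const void* array, const void* name) {
    std::uint32_t h =
        static_cast<std::uint32_t>(reinterpret_cast<std::uintptr_t>(array)) ^
        static_cast<std::uint32_t>(reinterpret_cast<std::uintptr_t>(name));
    return static_cast<int>(h & (kLength - 1));
  }

  struct Key { const void* array; const void* name; };
  Key keys_[kLength];
  int results_[kLength];
};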
 
 
@@ -1821,11 +1936,11 @@ class MarkingStack {
 class DisallowAllocationFailure {
  public:
   DisallowAllocationFailure() {
-    old_state_ = Heap::disallow_allocation_failure_;
-    Heap::disallow_allocation_failure_ = true;
+    old_state_ = HEAP->disallow_allocation_failure_;
+    HEAP->disallow_allocation_failure_ = true;
   }
   ~DisallowAllocationFailure() {
-    Heap::disallow_allocation_failure_ = old_state_;
+    HEAP->disallow_allocation_failure_ = old_state_;
   }
  private:
   bool old_state_;
@@ -1834,11 +1949,11 @@ class DisallowAllocationFailure {
 class AssertNoAllocation {
  public:
   AssertNoAllocation() {
-    old_state_ = Heap::allow_allocation(false);
+    old_state_ = HEAP->allow_allocation(false);
   }
 
   ~AssertNoAllocation() {
-    Heap::allow_allocation(old_state_);
+    HEAP->allow_allocation(old_state_);
   }
 
  private:
@@ -1848,11 +1963,11 @@ class AssertNoAllocation {
 class DisableAssertNoAllocation {
  public:
   DisableAssertNoAllocation() {
-    old_state_ = Heap::allow_allocation(true);
+    old_state_ = HEAP->allow_allocation(true);
   }
 
   ~DisableAssertNoAllocation() {
-    Heap::allow_allocation(old_state_);
+    HEAP->allow_allocation(old_state_);
   }
 
  private:
@@ -1909,7 +2024,7 @@ class GCTracer BASE_EMBEDDED {
     double start_time_;
   };
 
-  GCTracer();
+  explicit GCTracer(Heap* heap);
   ~GCTracer();
 
   // Sets the collector.
@@ -1935,22 +2050,13 @@ class GCTracer BASE_EMBEDDED {
     promoted_objects_size_ += object_size;
   }
 
-  // Returns maximum GC pause.
-  static int get_max_gc_pause() { return max_gc_pause_; }
-
-  // Returns maximum size of objects alive after GC.
-  static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
-
-  // Returns minimal interval between two subsequent collections.
-  static int get_min_in_mutator() { return min_in_mutator_; }
-
  private:
   // Returns a string matching the collector.
   const char* CollectorString();
 
   // Returns size of object in heap (in MB).
   double SizeOfHeapObjects() {
-    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
+    return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
   }
 
   double start_time_;  // Timestamp set in the constructor.
@@ -1999,19 +2105,7 @@ class GCTracer BASE_EMBEDDED {
   // Size of objects promoted during the current collection.
   intptr_t promoted_objects_size_;
 
-  // Maximum GC pause.
-  static int max_gc_pause_;
-
-  // Maximum size of objects alive after GC.
-  static intptr_t max_alive_after_gc_;
-
-  // Minimal interval between two subsequent collections.
-  static int min_in_mutator_;
-
-  // Size of objects alive after last GC.
-  static intptr_t alive_after_last_gc_;
-
-  static double last_gc_end_timestamp_;
+  Heap* heap_;
 };
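
GCTracer now takes the heap it traces as a constructor argument, and the aggregate numbers (maximum pause, maximum alive size, mutator interval) move onto Heap itself. A standalone sketch, assuming a toy millisecond clock, of how a per-collection tracer can fold its measurement into per-heap statistics:

#include <algorithm>

struct Heap {
  int max_gc_pause_ms;
  Heap() : max_gc_pause_ms(0) {}
  void RecordPause(int pause_ms) {
    max_gc_pause_ms = std::max(max_gc_pause_ms, pause_ms);
  }
};

class GCTracer {
 public:
  explicit GCTracer(Heap* heap) : heap_(heap), start_ms_(NowMs()) {}
  ~GCTracer() { heap_->RecordPause(NowMs() - start_ms_); }
 private:
  static int NowMs() { return 0; }  // stand-in clock
  Heap* heap_;                      // explicit back-pointer replaces statics
  int start_ms_;
};

void CollectGarbage(Heap* heap) {
  GCTracer tracer(heap);  // measurement is scoped and attributed to this heap
  // ... perform the collection ...
}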
 
 
@@ -2021,131 +2115,71 @@ class TranscendentalCache {
   static const int kTranscendentalTypeBits = 3;
   STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);
 
-  explicit TranscendentalCache(Type t);
-
   // Returns a heap number with f(input), where f is a math function specified
   // by the 'type' argument.
-  MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) {
-    TranscendentalCache* cache = caches_[type];
-    if (cache == NULL) {
-      caches_[type] = cache = new TranscendentalCache(type);
-    }
-    return cache->Get(input);
-  }
+  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
 
   // The cache contains raw Object pointers.  This method disposes of
   // them before a garbage collection.
-  static void Clear();
+  void Clear();
 
  private:
-  MUST_USE_RESULT inline MaybeObject* Get(double input) {
-    Converter c;
-    c.dbl = input;
-    int hash = Hash(c);
-    Element e = elements_[hash];
-    if (e.in[0] == c.integers[0] &&
-        e.in[1] == c.integers[1]) {
-      ASSERT(e.output != NULL);
-      Counters::transcendental_cache_hit.Increment();
-      return e.output;
-    }
-    double answer = Calculate(input);
-    Counters::transcendental_cache_miss.Increment();
-    Object* heap_number;
-    { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer);
-      if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
-    }
-    elements_[hash].in[0] = c.integers[0];
-    elements_[hash].in[1] = c.integers[1];
-    elements_[hash].output = heap_number;
-    return heap_number;
-  }
+  class SubCache {
+    static const int kCacheSize = 512;
 
-  inline double Calculate(double input) {
-    switch (type_) {
-      case ACOS:
-        return acos(input);
-      case ASIN:
-        return asin(input);
-      case ATAN:
-        return atan(input);
-      case COS:
-        return cos(input);
-      case EXP:
-        return exp(input);
-      case LOG:
-        return log(input);
-      case SIN:
-        return sin(input);
-      case TAN:
-        return tan(input);
-      default:
-        return 0.0;  // Never happens.
-    }
-  }
-  static const int kCacheSize = 512;
-  struct Element {
-    uint32_t in[2];
-    Object* output;
-  };
-  union Converter {
-    double dbl;
-    uint32_t integers[2];
-  };
-  inline static int Hash(const Converter& c) {
-    uint32_t hash = (c.integers[0] ^ c.integers[1]);
-    hash ^= static_cast<int32_t>(hash) >> 16;
-    hash ^= static_cast<int32_t>(hash) >> 8;
-    return (hash & (kCacheSize - 1));
-  }
-
-  static Address cache_array_address() {
-    // Used to create an external reference.
-    return reinterpret_cast<Address>(caches_);
-  }
+    explicit SubCache(Type t);
 
-  // Allow access to the caches_ array as an ExternalReference.
-  friend class ExternalReference;
-  // Inline implementation of the cache.
-  friend class TranscendentalCacheStub;
+    MUST_USE_RESULT inline MaybeObject* Get(double input);
 
-  static TranscendentalCache* caches_[kNumberOfCaches];
-  Element elements_[kCacheSize];
-  Type type_;
-};
+    inline double Calculate(double input);
 
+    struct Element {
+      uint32_t in[2];
+      Object* output;
+    };
 
-// External strings table is a place where all external strings are
-// registered.  We need to keep track of such strings to properly
-// finalize them.
-class ExternalStringTable : public AllStatic {
- public:
-  // Registers an external string.
-  inline static void AddString(String* string);
+    union Converter {
+      double dbl;
+      uint32_t integers[2];
+    };
 
-  inline static void Iterate(ObjectVisitor* v);
+    inline static int Hash(const Converter& c) {
+      uint32_t hash = (c.integers[0] ^ c.integers[1]);
+      hash ^= static_cast<int32_t>(hash) >> 16;
+      hash ^= static_cast<int32_t>(hash) >> 8;
+      return (hash & (kCacheSize - 1));
+    }
 
-  // Restores internal invariant and gets rid of collected strings.
-  // Must be called after each Iterate() that modified the strings.
-  static void CleanUp();
+    Element elements_[kCacheSize];
+    Type type_;
+    Isolate* isolate_;
 
-  // Destroys all allocated memory.
-  static void TearDown();
+    // Allow access to the caches_ array as an ExternalReference.
+    friend class ExternalReference;
+    // Inline implementation of the cache.
+    friend class TranscendentalCacheStub;
+    // For evaluating value.
+    friend class TranscendentalCache;
 
- private:
-  friend class Heap;
+    DISALLOW_COPY_AND_ASSIGN(SubCache);
+  };
 
-  inline static void Verify();
+  TranscendentalCache() {
+    for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
+  }
 
-  inline static void AddOldString(String* string);
+  // Used to create an external reference.
+  inline Address cache_array_address();
 
-  // Notifies the table that only a prefix of the new list is valid.
-  inline static void ShrinkNewStrings(int position);
+  // Instantiation
+  friend class Isolate;
+  // Inline implementation of the caching.
+  friend class TranscendentalCacheStub;
+  // Allow access to the caches_ array as an ExternalReference.
+  friend class ExternalReference;
 
-  // To speed up scavenge collections new space string are kept
-  // separate from old space strings.
-  static List<Object*> new_space_strings_;
-  static List<Object*> old_space_strings_;
+  SubCache* caches_[kNumberOfCaches];
+  DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
 };
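
TranscendentalCache is split into a per-isolate front object and lazily created per-function SubCaches, replacing the old static caches_ array. A simplified, self-contained sketch of that lazy-subcache dispatch (trig-only, no memoization, not the real V8 types):

#include <cmath>
#include <cstddef>

class TrigCache {
 public:
  enum Type { SIN, COS, kNumberOfCaches };

  TrigCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
  }
  ~TrigCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) delete caches_[i];
  }

  double Get(Type type, double input) {
    if (caches_[type] == NULL) caches_[type] = new SubCache(type);  // lazy creation
    return caches_[type]->Get(input);
  }

 private:
  class SubCache {
   public:
    explicit SubCache(Type t) : type_(t) {}
    double Get(double input) {  // a real subcache would memoize recent inputs here
      return type_ == SIN ? std::sin(input) : std::cos(input);
    }
   private:
    Type type_;
  };

  SubCache* caches_[kNumberOfCaches];
};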
 
 
@@ -2222,4 +2256,6 @@ class PathTracer : public ObjectVisitor {
 
 } }  // namespace v8::internal
 
+#undef HEAP
+
 #endif  // V8_HEAP_H_
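
The new #undef HEAP keeps the short accessor name from leaking out of this header; the matching definition is not visible in this diff, so the sketch below only illustrates the define/undef pairing, with an assumed expansion:

#ifndef SKETCH_HEAP_H_
#define SKETCH_HEAP_H_

#define HEAP (Isolate::Current()->heap())  // assumed expansion, for illustration

// ... declarations that use HEAP ...

#undef HEAP                                // keep the short name header-local
#endif  // SKETCH_HEAP_H_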
index fa2deb8aa91d2c0bf0c21c990d5a90660fd6a0d2..e253795a5ea98c1d83d4146154c250b62e4411f2 100644 (file)
@@ -1043,7 +1043,7 @@ HConstant* HConstant::CopyToRepresentation(Representation r) const {
 HConstant* HConstant::CopyToTruncatedInt32() const {
   if (!has_double_value_) return NULL;
   int32_t truncated = NumberToInt32(*handle_);
-  return new HConstant(Factory::NewNumberFromInt(truncated),
+  return new HConstant(FACTORY->NewNumberFromInt(truncated),
                        Representation::Integer32());
 }
 
@@ -1054,7 +1054,7 @@ void HConstant::PrintDataTo(StringStream* stream) {
 
 
 bool HArrayLiteral::IsCopyOnWrite() const {
-  return constant_elements()->map() == Heap::fixed_cow_array_map();
+  return constant_elements()->map() == HEAP->fixed_cow_array_map();
 }
 
 
index 795103cbc4178b6313eec3b14f23dd576989b8b2..2397c5d87888b16aa34762c7e8b8f0ae256ca905 100644 (file)
@@ -1244,7 +1244,8 @@ class HCallConstantFunction: public HCall<0> {
   Handle<JSFunction> function() const { return function_; }
 
   bool IsApplyFunction() const {
-    return function_->code() == Builtins::builtin(Builtins::FunctionApply);
+    return function_->code() ==
+        Isolate::Current()->builtins()->builtin(Builtins::FunctionApply);
   }
 
   virtual void PrintDataTo(StringStream* stream);
@@ -1377,12 +1378,12 @@ class HCallNew: public HBinaryCall {
 class HCallRuntime: public HCall<0> {
  public:
   HCallRuntime(Handle<String> name,
-               Runtime::Function* c_function,
+               const Runtime::Function* c_function,
                int argument_count)
       : HCall<0>(argument_count), c_function_(c_function), name_(name) { }
   virtual void PrintDataTo(StringStream* stream);
 
-  Runtime::Function* function() const { return c_function_; }
+  const Runtime::Function* function() const { return c_function_; }
   Handle<String> name() const { return name_; }
 
   virtual Representation RequiredInputRepresentation(int index) const {
@@ -1392,7 +1393,7 @@ class HCallRuntime: public HCall<0> {
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call_runtime")
 
  private:
-  Runtime::Function* c_function_;
+  const Runtime::Function* c_function_;
   Handle<String> name_;
 };
 
@@ -1770,7 +1771,7 @@ class HCheckPrototypeMaps: public HTemplateInstruction<0> {
   }
 
   virtual intptr_t Hashcode() {
-    ASSERT(!Heap::IsAllocationAllowed());
+    ASSERT(!HEAP->IsAllocationAllowed());
     intptr_t hash = reinterpret_cast<intptr_t>(*prototype());
     hash = 17 * hash + reinterpret_cast<intptr_t>(*holder());
     return hash;
@@ -1934,7 +1935,7 @@ class HConstant: public HTemplateInstruction<0> {
 
   Handle<Object> handle() const { return handle_; }
 
-  bool InOldSpace() const { return !Heap::InNewSpace(*handle_); }
+  bool InOldSpace() const { return !HEAP->InNewSpace(*handle_); }
 
   virtual Representation RequiredInputRepresentation(int index) const {
     return Representation::None();
@@ -1959,7 +1960,7 @@ class HConstant: public HTemplateInstruction<0> {
   bool HasStringValue() const { return handle_->IsString(); }
 
   virtual intptr_t Hashcode() {
-    ASSERT(!Heap::allow_allocation(false));
+    ASSERT(!HEAP->allow_allocation(false));
     return reinterpret_cast<intptr_t>(*handle());
   }
 
@@ -2818,7 +2819,7 @@ class HLoadGlobal: public HTemplateInstruction<0> {
   virtual void PrintDataTo(StringStream* stream);
 
   virtual intptr_t Hashcode() {
-    ASSERT(!Heap::allow_allocation(false));
+    ASSERT(!HEAP->allow_allocation(false));
     return reinterpret_cast<intptr_t>(*cell_);
   }
 
index b1c1cb148764ef6b5ef5c4750cc4710b64f3d8c2..3fffd84c0e814438707ae0401ac2894083e93345 100644 (file)
@@ -25,6 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
 #include "hydrogen.h"
 
 #include "codegen.h"
@@ -511,12 +512,12 @@ HConstant* HGraph::GetConstantMinus1() {
 
 
 HConstant* HGraph::GetConstantTrue() {
-  return GetConstant(&constant_true_, Heap::true_value());
+  return GetConstant(&constant_true_, HEAP->true_value());
 }
 
 
 HConstant* HGraph::GetConstantFalse() {
-  return GetConstant(&constant_false_, Heap::false_value());
+  return GetConstant(&constant_false_, HEAP->false_value());
 }
 
 
@@ -1026,8 +1027,8 @@ HValueMap::HValueMap(const HValueMap* other)
       lists_size_(other->lists_size_),
       count_(other->count_),
       present_flags_(other->present_flags_),
-      array_(Zone::NewArray<HValueMapListElement>(other->array_size_)),
-      lists_(Zone::NewArray<HValueMapListElement>(other->lists_size_)),
+      array_(ZONE->NewArray<HValueMapListElement>(other->array_size_)),
+      lists_(ZONE->NewArray<HValueMapListElement>(other->lists_size_)),
       free_list_head_(other->free_list_head_) {
   memcpy(array_, other->array_, array_size_ * sizeof(HValueMapListElement));
   memcpy(lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
@@ -1106,7 +1107,7 @@ void HValueMap::Resize(int new_size) {
   }
 
   HValueMapListElement* new_array =
-      Zone::NewArray<HValueMapListElement>(new_size);
+      ZONE->NewArray<HValueMapListElement>(new_size);
   memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
 
   HValueMapListElement* old_array = array_;
@@ -1144,7 +1145,7 @@ void HValueMap::ResizeLists(int new_size) {
   ASSERT(new_size > lists_size_);
 
   HValueMapListElement* new_lists =
-      Zone::NewArray<HValueMapListElement>(new_size);
+      ZONE->NewArray<HValueMapListElement>(new_size);
   memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
 
   HValueMapListElement* old_lists = lists_;
@@ -1247,12 +1248,12 @@ class HGlobalValueNumberer BASE_EMBEDDED {
         info_(info),
         block_side_effects_(graph_->blocks()->length()),
         loop_side_effects_(graph_->blocks()->length()) {
-    ASSERT(Heap::allow_allocation(false));
+    ASSERT(HEAP->allow_allocation(false));
     block_side_effects_.AddBlock(0, graph_->blocks()->length());
     loop_side_effects_.AddBlock(0, graph_->blocks()->length());
   }
   ~HGlobalValueNumberer() {
-    ASSERT(!Heap::allow_allocation(true));
+    ASSERT(!HEAP->allow_allocation(true));
   }
 
   void Analyze();
@@ -2278,7 +2279,7 @@ void HGraphBuilder::SetupScope(Scope* scope) {
   if (scope->function() != NULL) BAILOUT("named function expression");
 
   HConstant* undefined_constant =
-      new HConstant(Factory::undefined_value(), Representation::Tagged());
+      new HConstant(FACTORY->undefined_value(), Representation::Tagged());
   AddInstruction(undefined_constant);
   graph_->set_undefined_constant(undefined_constant);
 
@@ -3624,7 +3625,7 @@ HInstruction* HGraphBuilder::BuildStoreKeyedFastElement(HValue* object,
   ASSERT(map->has_fast_elements());
   AddInstruction(new HCheckMap(object, map));
   HInstruction* elements = AddInstruction(new HLoadElements(object));
-  AddInstruction(new HCheckMap(elements, Factory::fixed_array_map()));
+  AddInstruction(new HCheckMap(elements, FACTORY->fixed_array_map()));
   bool is_array = (map->instance_type() == JS_ARRAY_TYPE);
   HInstruction* length = NULL;
   if (is_array) {
@@ -3935,7 +3936,7 @@ bool HGraphBuilder::TryInline(Call* expr) {
   CompilationInfo target_info(target);
   if (!ParserApi::Parse(&target_info) ||
       !Scope::Analyze(&target_info)) {
-    if (Top::has_pending_exception()) {
+    if (target_info.isolate()->has_pending_exception()) {
       // Parse or scope error, never optimize this function.
       SetStackOverflow();
       target->shared()->set_optimization_disabled(true);
@@ -4435,7 +4436,7 @@ void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
     BAILOUT("call to a JavaScript runtime function");
   }
 
-  Runtime::Function* function = expr->function();
+  const Runtime::Function* function = expr->function();
   ASSERT(function != NULL);
   if (function->intrinsic_type == Runtime::INLINE) {
     ASSERT(expr->name()->length() > 0);
@@ -4974,7 +4975,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
         Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
         // If the function is in new space we assume it's more likely to
         // change and thus prefer the general IC code.
-        if (!Heap::InNewSpace(*candidate)) {
+        if (!Isolate::Current()->heap()->InNewSpace(*candidate)) {
           target = candidate;
         }
       }
index d5fd7b87bbc88e734940087da3e76e6a78daf1bc..f7be363e06c217ff823be7e67c41e6c39181a84d 100644 (file)
@@ -204,11 +204,12 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
     visitor->VisitExternalReference(target_reference_address());
     CPU::FlushICache(pc_, sizeof(Address));
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
-             ((RelocInfo::IsJSReturn(mode) &&
+  // TODO(isolates): Get a cached isolate below.
+  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
-              IsPatchedDebugBreakSlotSequence()))) {
+              IsPatchedDebugBreakSlotSequence())) &&
+             Isolate::Current()->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (mode == RelocInfo::RUNTIME_ENTRY) {
@@ -218,10 +219,10 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
 
 
 template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
-    StaticVisitor::VisitPointer(target_object_address());
+    StaticVisitor::VisitPointer(heap, target_object_address());
     CPU::FlushICache(pc_, sizeof(Address));
   } else if (RelocInfo::IsCodeTarget(mode)) {
     StaticVisitor::VisitCodeTarget(this);
@@ -231,7 +232,7 @@ void RelocInfo::Visit() {
     StaticVisitor::VisitExternalReference(target_reference_address());
     CPU::FlushICache(pc_, sizeof(Address));
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
+  } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
@@ -266,7 +267,7 @@ Immediate::Immediate(Label* internal_offset) {
 Immediate::Immediate(Handle<Object> handle) {
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
-  ASSERT(!Heap::InNewSpace(obj));
+  ASSERT(!HEAP->InNewSpace(obj));
   if (obj->IsHeapObject()) {
     x_ = reinterpret_cast<intptr_t>(handle.location());
     rmode_ = RelocInfo::EMBEDDED_OBJECT;
@@ -299,7 +300,7 @@ void Assembler::emit(uint32_t x) {
 void Assembler::emit(Handle<Object> handle) {
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
-  ASSERT(!Heap::InNewSpace(obj));
+  ASSERT(!HEAP->InNewSpace(obj));
   if (obj->IsHeapObject()) {
     emit(reinterpret_cast<intptr_t>(handle.location()),
          RelocInfo::EMBEDDED_OBJECT);
index 10364eb1b5ff5f65f98c178430d672c30e8a3376..b323f0997be723ea90b652377d17bb916637f7a0 100644 (file)
@@ -48,16 +48,17 @@ namespace internal {
 // -----------------------------------------------------------------------------
 // Implementation of CpuFeatures
 
-// Safe default is no features.
-uint64_t CpuFeatures::supported_ = 0;
-uint64_t CpuFeatures::enabled_ = 0;
-uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
+CpuFeatures::CpuFeatures()
+    : supported_(0),
+      enabled_(0),
+      found_by_runtime_probing_(0) {
+}
 
 
 // The Probe method needs executable memory, so it uses Heap::CreateCode.
 // Allocation failure is silent and leads to safe default.
 void CpuFeatures::Probe(bool portable) {
-  ASSERT(Heap::HasBeenSetup());
+  ASSERT(HEAP->HasBeenSetup());
   ASSERT(supported_ == 0);
   if (portable && Serializer::enabled()) {
     supported_ |= OS::CpuFeaturesImpliedByPlatform();
@@ -120,16 +121,16 @@ void CpuFeatures::Probe(bool portable) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-
   Object* code;
-  { MaybeObject* maybe_code = Heap::CreateCode(desc,
+  { MaybeObject* maybe_code = HEAP->CreateCode(desc,
                                                Code::ComputeFlags(Code::STUB),
                                                Handle<Code>::null());
     if (!maybe_code->ToObject(&code)) return;
   }
   if (!code->IsCode()) return;
 
-  PROFILE(CodeCreateEvent(Logger::BUILTIN_TAG,
+  PROFILE(ISOLATE,
+          CodeCreateEvent(Logger::BUILTIN_TAG,
                           Code::cast(code), "CpuFeatures::Probe"));
   typedef uint64_t (*F0)();
   F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry());
@@ -295,20 +296,18 @@ bool Operand::is_reg(Register reg) const {
 static void InitCoverageLog();
 #endif
 
-// Spare buffer.
-byte* Assembler::spare_buffer_ = NULL;
-
 Assembler::Assembler(void* buffer, int buffer_size)
     : positions_recorder_(this),
       emit_debug_code_(FLAG_debug_code) {
+  Isolate* isolate = Isolate::Current();
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
       buffer_size = kMinimalBufferSize;
 
-      if (spare_buffer_ != NULL) {
-        buffer = spare_buffer_;
-        spare_buffer_ = NULL;
+      if (isolate->assembler_spare_buffer() != NULL) {
+        buffer = isolate->assembler_spare_buffer();
+        isolate->set_assembler_spare_buffer(NULL);
       }
     }
     if (buffer == NULL) {
@@ -348,9 +347,11 @@ Assembler::Assembler(void* buffer, int buffer_size)
 
 
 Assembler::~Assembler() {
+  Isolate* isolate = Isolate::Current();
   if (own_buffer_) {
-    if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
-      spare_buffer_ = buffer_;
+    if (isolate->assembler_spare_buffer() == NULL &&
+        buffer_size_ == kMinimalBufferSize) {
+      isolate->set_assembler_spare_buffer(buffer_);
     } else {
       DeleteArray(buffer_);
     }
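
The spare assembler buffer likewise moves from a static Assembler member to isolate state, so each isolate recycles at most one minimal-size buffer. A standalone, simplified sketch of that handoff (it omits the buffer_size_ check and uses invented accessor names):

#include <cstddef>

class Isolate {
 public:
  Isolate() : spare_(NULL) {}
  char* TakeSpareBuffer() { char* b = spare_; spare_ = NULL; return b; }
  bool OfferSpareBuffer(char* b) {
    if (spare_ != NULL) return false;  // already holding one; caller frees
    spare_ = b;
    return true;
  }
 private:
  char* spare_;
};

class Assembler {
 public:
  static const size_t kMinimalBufferSize = 4 * 1024;
  explicit Assembler(Isolate* isolate) : isolate_(isolate) {
    buffer_ = isolate_->TakeSpareBuffer();             // reuse the cached buffer
    if (buffer_ == NULL) buffer_ = new char[kMinimalBufferSize];
  }
  ~Assembler() {
    if (!isolate_->OfferSpareBuffer(buffer_)) delete[] buffer_;  // keep at most one spare
  }
 private:
  Isolate* isolate_;
  char* buffer_;
};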
@@ -369,7 +370,7 @@ void Assembler::GetCode(CodeDesc* desc) {
   desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
   desc->origin = this;
 
-  Counters::reloc_info_size.Increment(desc->reloc_size);
+  COUNTERS->reloc_info_size()->Increment(desc->reloc_size);
 }
 
 
@@ -387,7 +388,7 @@ void Assembler::CodeTargetAlign() {
 
 
 void Assembler::cpuid() {
-  ASSERT(CpuFeatures::IsEnabled(CPUID));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x0F);
@@ -748,7 +749,7 @@ void Assembler::movzx_w(Register dst, const Operand& src) {
 
 
 void Assembler::cmov(Condition cc, Register dst, int32_t imm32) {
-  ASSERT(CpuFeatures::IsEnabled(CMOV));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   UNIMPLEMENTED();
@@ -759,7 +760,7 @@ void Assembler::cmov(Condition cc, Register dst, int32_t imm32) {
 
 
 void Assembler::cmov(Condition cc, Register dst, Handle<Object> handle) {
-  ASSERT(CpuFeatures::IsEnabled(CMOV));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   UNIMPLEMENTED();
@@ -770,7 +771,7 @@ void Assembler::cmov(Condition cc, Register dst, Handle<Object> handle) {
 
 
 void Assembler::cmov(Condition cc, Register dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(CMOV));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   // Opcode: 0f 40 + cc /r.
@@ -1451,7 +1452,7 @@ void Assembler::nop() {
 
 
 void Assembler::rdtsc() {
-  ASSERT(CpuFeatures::IsEnabled(RDTSC));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(RDTSC));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x0F);
@@ -1857,7 +1858,7 @@ void Assembler::fistp_s(const Operand& adr) {
 
 
 void Assembler::fisttp_s(const Operand& adr) {
-  ASSERT(CpuFeatures::IsEnabled(SSE3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xDB);
@@ -1866,7 +1867,7 @@ void Assembler::fisttp_s(const Operand& adr) {
 
 
 void Assembler::fisttp_d(const Operand& adr) {
-  ASSERT(CpuFeatures::IsEnabled(SSE3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xDD);
@@ -2135,7 +2136,7 @@ void Assembler::setcc(Condition cc, Register reg) {
 
 
 void Assembler::cvttss2si(Register dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF3);
@@ -2146,7 +2147,7 @@ void Assembler::cvttss2si(Register dst, const Operand& src) {
 
 
 void Assembler::cvttsd2si(Register dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2157,7 +2158,7 @@ void Assembler::cvttsd2si(Register dst, const Operand& src) {
 
 
 void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2168,7 +2169,7 @@ void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF3);
@@ -2179,7 +2180,7 @@ void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2190,7 +2191,7 @@ void Assembler::addsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2201,7 +2202,7 @@ void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2212,7 +2213,7 @@ void Assembler::subsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2223,7 +2224,7 @@ void Assembler::divsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2254,7 +2255,7 @@ void Assembler::andpd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2265,7 +2266,7 @@ void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::movmskpd(Register dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2276,7 +2277,7 @@ void Assembler::movmskpd(Register dst, XMMRegister src) {
 
 
 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2288,7 +2289,7 @@ void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x0F);
@@ -2298,7 +2299,7 @@ void Assembler::movaps(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2309,7 +2310,7 @@ void Assembler::movdqa(const Operand& dst, XMMRegister src) {
 
 
 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2320,7 +2321,7 @@ void Assembler::movdqa(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::movdqu(const Operand& dst, XMMRegister src ) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF3);
@@ -2331,7 +2332,7 @@ void Assembler::movdqu(const Operand& dst, XMMRegister src ) {
 
 
 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF3);
@@ -2342,7 +2343,7 @@ void Assembler::movdqu(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::movntdqa(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2354,7 +2355,7 @@ void Assembler::movntdqa(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::movntdq(const Operand& dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2390,7 +2391,7 @@ void Assembler::movdbl(const Operand& dst, XMMRegister src) {
 
 
 void Assembler::movsd(const Operand& dst, XMMRegister src ) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);  // double
@@ -2401,7 +2402,7 @@ void Assembler::movsd(const Operand& dst, XMMRegister src ) {
 
 
 void Assembler::movsd(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);  // double
@@ -2412,7 +2413,7 @@ void Assembler::movsd(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0xF2);
@@ -2423,7 +2424,7 @@ void Assembler::movsd(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::movd(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2434,7 +2435,7 @@ void Assembler::movd(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::movd(const Operand& dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2445,7 +2446,7 @@ void Assembler::movd(const Operand& dst, XMMRegister src) {
 
 
 void Assembler::pand(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2456,7 +2457,7 @@ void Assembler::pand(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::pxor(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2467,7 +2468,7 @@ void Assembler::pxor(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::por(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2478,7 +2479,7 @@ void Assembler::por(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::ptest(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2490,7 +2491,7 @@ void Assembler::ptest(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::psllq(XMMRegister reg, int8_t shift) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2502,7 +2503,7 @@ void Assembler::psllq(XMMRegister reg, int8_t shift) {
 
 
 void Assembler::psllq(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2513,7 +2514,7 @@ void Assembler::psllq(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::psrlq(XMMRegister reg, int8_t shift) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2525,7 +2526,7 @@ void Assembler::psrlq(XMMRegister reg, int8_t shift) {
 
 
 void Assembler::psrlq(XMMRegister dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2536,7 +2537,7 @@ void Assembler::psrlq(XMMRegister dst, XMMRegister src) {
 
 
 void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2548,7 +2549,7 @@ void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) {
 
 
 void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
-  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2561,7 +2562,7 @@ void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
 
 
 void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) {
-  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   EMIT(0x66);
@@ -2617,6 +2618,7 @@ void Assembler::RecordComment(const char* msg, bool force) {
 
 
 void Assembler::GrowBuffer() {
+  Isolate* isolate = Isolate::Current();
   ASSERT(overflow());
   if (!own_buffer_) FATAL("external code buffer is too small");
 
@@ -2630,7 +2632,7 @@ void Assembler::GrowBuffer() {
   // Some internal data structures overflow for very large buffers,
   // they must ensure that kMaximalBufferSize is not too large.
   if ((desc.buffer_size > kMaximalBufferSize) ||
-      (desc.buffer_size > Heap::MaxOldGenerationSize())) {
+      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
     V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
   }
 
@@ -2653,8 +2655,9 @@ void Assembler::GrowBuffer() {
           reloc_info_writer.pos(), desc.reloc_size);
 
   // Switch buffers.
-  if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
-    spare_buffer_ = buffer_;
+  if (isolate->assembler_spare_buffer() == NULL &&
+      buffer_size_ == kMinimalBufferSize) {
+    isolate->set_assembler_spare_buffer(buffer_);
   } else {
     DeleteArray(buffer_);
   }
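
The assembler-ia32.cc hunks above move the spare code buffer from a file-static member onto the current isolate. The following is a minimal standalone sketch of that hand-off, assuming simplified stand-in types rather than the real v8::internal classes (kMinimalBufferSize is a placeholder value):

typedef unsigned char byte;

// Stand-in for the per-isolate slot that replaces the old static
// Assembler::spare_buffer_; the accessor names match the diff above.
class Isolate {
 public:
  static Isolate* Current() { static Isolate isolate; return &isolate; }  // real V8 uses TLS
  byte* assembler_spare_buffer() const { return assembler_spare_buffer_; }
  void set_assembler_spare_buffer(byte* buffer) { assembler_spare_buffer_ = buffer; }
 private:
  byte* assembler_spare_buffer_ = nullptr;
};

class Assembler {
 public:
  static const int kMinimalBufferSize = 4 * 1024;  // placeholder value

  explicit Assembler(int buffer_size) : buffer_(nullptr), buffer_size_(buffer_size) {
    Isolate* isolate = Isolate::Current();
    if (buffer_size_ <= kMinimalBufferSize) {
      buffer_size_ = kMinimalBufferSize;
      // Reuse the isolate's spare buffer instead of a process-wide static one.
      if (isolate->assembler_spare_buffer() != nullptr) {
        buffer_ = isolate->assembler_spare_buffer();
        isolate->set_assembler_spare_buffer(nullptr);
      }
    }
    if (buffer_ == nullptr) buffer_ = new byte[buffer_size_];
  }

  ~Assembler() {
    Isolate* isolate = Isolate::Current();
    // Park a minimal-sized buffer on the isolate so the next Assembler can reuse it.
    if (isolate->assembler_spare_buffer() == nullptr &&
        buffer_size_ == kMinimalBufferSize) {
      isolate->set_assembler_spare_buffer(buffer_);
    } else {
      delete[] buffer_;
    }
  }

 private:
  byte* buffer_;
  int buffer_size_;
};
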
index 7d9e3749137a78c94c3de6a21dfd4a8005494a90..4bcc1ec764559a9c480661cd5bd3e2d9dbc9d85a 100644
@@ -37,6 +37,7 @@
 #ifndef V8_IA32_ASSEMBLER_IA32_H_
 #define V8_IA32_ASSEMBLER_IA32_H_
 
+#include "isolate.h"
 #include "serialize.h"
 
 namespace v8 {
@@ -445,16 +446,16 @@ class Displacement BASE_EMBEDDED {
 //   } else {
 //     // Generate standard x87 floating point code.
 //   }
-class CpuFeatures : public AllStatic {
+class CpuFeatures {
  public:
   // Detect features of the target CPU. If the portable flag is set,
   // the method sets safe defaults if the serializer is enabled
   // (snapshots must be portable).
-  static void Probe(bool portable);
-  static void Clear() { supported_ = 0; }
+  void Probe(bool portable);
+  void Clear() { supported_ = 0; }
 
   // Check whether a feature is supported by the target CPU.
-  static bool IsSupported(CpuFeature f) {
+  bool IsSupported(CpuFeature f) const {
     if (f == SSE2 && !FLAG_enable_sse2) return false;
     if (f == SSE3 && !FLAG_enable_sse3) return false;
     if (f == SSE4_1 && !FLAG_enable_sse4_1) return false;
@@ -463,32 +464,47 @@ class CpuFeatures : public AllStatic {
     return (supported_ & (static_cast<uint64_t>(1) << f)) != 0;
   }
   // Check whether a feature is currently enabled.
-  static bool IsEnabled(CpuFeature f) {
+  bool IsEnabled(CpuFeature f) const {
     return (enabled_ & (static_cast<uint64_t>(1) << f)) != 0;
   }
   // Enable a specified feature within a scope.
   class Scope BASE_EMBEDDED {
 #ifdef DEBUG
    public:
-    explicit Scope(CpuFeature f) {
+    explicit Scope(CpuFeature f)
+        : cpu_features_(Isolate::Current()->cpu_features()),
+          isolate_(Isolate::Current()) {
       uint64_t mask = static_cast<uint64_t>(1) << f;
-      ASSERT(CpuFeatures::IsSupported(f));
-      ASSERT(!Serializer::enabled() || (found_by_runtime_probing_ & mask) == 0);
-      old_enabled_ = CpuFeatures::enabled_;
-      CpuFeatures::enabled_ |= mask;
+      ASSERT(cpu_features_->IsSupported(f));
+      ASSERT(!Serializer::enabled() ||
+          (cpu_features_->found_by_runtime_probing_ & mask) == 0);
+      old_enabled_ = cpu_features_->enabled_;
+      cpu_features_->enabled_ |= mask;
+    }
+    ~Scope() {
+      ASSERT_EQ(Isolate::Current(), isolate_);
+      cpu_features_->enabled_ = old_enabled_;
     }
-    ~Scope() { CpuFeatures::enabled_ = old_enabled_; }
    private:
     uint64_t old_enabled_;
+    CpuFeatures* cpu_features_;
+    Isolate* isolate_;
 #else
    public:
     explicit Scope(CpuFeature f) {}
 #endif
   };
+
  private:
-  static uint64_t supported_;
-  static uint64_t enabled_;
-  static uint64_t found_by_runtime_probing_;
+  CpuFeatures();
+
+  uint64_t supported_;
+  uint64_t enabled_;
+  uint64_t found_by_runtime_probing_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(CpuFeatures);
 };
 
 
@@ -1050,8 +1066,6 @@ class Assembler : public Malloced {
   int buffer_size_;
   // True if the assembler owns the buffer, false if buffer is external.
   bool own_buffer_;
-  // A previously allocated buffer of kMinimalBufferSize bytes, or NULL.
-  static byte* spare_buffer_;
 
   // code generation
   byte* pc_;  // the program counter; moves forward
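
The header hunks above turn CpuFeatures from an AllStatic class into an instance owned by each isolate, with the debug-only Scope saving and restoring the per-isolate enabled_ mask. Below is a self-contained sketch of that behaviour under simplified declarations; the Scope here takes the CpuFeatures pointer explicitly to stay standalone, whereas the patched Scope fetches it from Isolate::Current():

#include <cassert>
#include <cstdint>

// Feature names mirror the diff; the enumerator values are irrelevant for the sketch.
enum CpuFeature { SSE2, SSE3, SSE4_1, CMOV, RDTSC };

class CpuFeatures {
 public:
  CpuFeatures() : supported_(0), enabled_(0) {}

  // The real Probe() detects CPU features at runtime; here we simply claim SSE2.
  void Probe(bool /* portable */) { supported_ |= uint64_t{1} << SSE2; }
  bool IsSupported(CpuFeature f) const { return ((supported_ >> f) & 1) != 0; }
  bool IsEnabled(CpuFeature f) const { return ((enabled_ >> f) & 1) != 0; }

  // Temporarily enables a feature and restores the previous mask on destruction,
  // like the DEBUG-only Scope in the header diff.
  class Scope {
   public:
    Scope(CpuFeatures* features, CpuFeature f)
        : features_(features), old_enabled_(features->enabled_) {
      assert(features_->IsSupported(f));
      features_->enabled_ |= uint64_t{1} << f;
    }
    ~Scope() { features_->enabled_ = old_enabled_; }
   private:
    CpuFeatures* features_;
    uint64_t old_enabled_;
  };

 private:
  uint64_t supported_;
  uint64_t enabled_;
};

int main() {
  CpuFeatures features;            // in the patch, this instance hangs off the Isolate
  features.Probe(false);
  if (features.IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(&features, SSE2);
    assert(features.IsEnabled(SSE2));   // SSE2 code paths would be emitted here
  }
  assert(!features.IsEnabled(SSE2));    // mask restored when the scope ends
  return 0;
}
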
index c7e552705d4eac5760cbceff73f669a2516b2919..09cbe76f4d78b54d238452b08c9b8b2c27f46257 100644
@@ -100,8 +100,8 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
   // Set expected number of arguments to zero (not changing eax).
   __ Set(ebx, Immediate(0));
   __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
-  __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-         RelocInfo::CODE_TARGET);
+  __ jmp(Handle<Code>(Isolate::Current()->builtins()->builtin(
+      ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
 }
 
 
@@ -184,7 +184,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // ebx: JSObject
     // edi: start of next object
     __ mov(Operand(ebx, JSObject::kMapOffset), eax);
-    __ mov(ecx, Factory::empty_fixed_array());
+    __ mov(ecx, FACTORY->empty_fixed_array());
     __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
     __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
     // Set extra fields in the newly allocated object.
@@ -194,9 +194,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     { Label loop, entry;
       // To allow for truncation.
       if (count_constructions) {
-        __ mov(edx, Factory::one_pointer_filler_map());
+        __ mov(edx, FACTORY->one_pointer_filler_map());
       } else {
-        __ mov(edx, Factory::undefined_value());
+        __ mov(edx, FACTORY->undefined_value());
       }
       __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
       __ jmp(&entry);
@@ -252,7 +252,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // edi: FixedArray
     // edx: number of elements
     // ecx: start of next object
-    __ mov(eax, Factory::fixed_array_map());
+    __ mov(eax, FACTORY->fixed_array_map());
     __ mov(Operand(edi, FixedArray::kMapOffset), eax);  // setup the map
     __ SmiTag(edx);
     __ mov(Operand(edi, FixedArray::kLengthOffset), edx);  // and length
@@ -262,7 +262,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // edi: FixedArray
     // ecx: start of next object
     { Label loop, entry;
-      __ mov(edx, Factory::undefined_value());
+      __ mov(edx, FACTORY->undefined_value());
       __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
       __ jmp(&entry);
       __ bind(&loop);
@@ -335,7 +335,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   if (is_api_function) {
     __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
     Handle<Code> code = Handle<Code>(
-        Builtins::builtin(Builtins::HandleApiCallConstruct));
+        Isolate::Current()->builtins()->builtin(
+            Builtins::HandleApiCallConstruct));
     ParameterCount expected(0);
     __ InvokeCode(code, expected, expected,
                   RelocInfo::CODE_TARGET, CALL_FUNCTION);
@@ -376,7 +377,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   __ pop(ecx);
   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
   __ push(ecx);
-  __ IncrementCounter(&Counters::constructed_objects, 1);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1);
   __ ret(0);
 }
 
@@ -436,8 +437,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
 
   // Invoke the code.
   if (is_construct) {
-    __ call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-            RelocInfo::CODE_TARGET);
+    __ call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(eax);
     __ InvokeFunction(edi, actual, CALL_FUNCTION);
@@ -566,7 +567,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ test(eax, Operand(eax));
     __ j(not_zero, &done, taken);
     __ pop(ebx);
-    __ push(Immediate(Factory::undefined_value()));
+    __ push(Immediate(FACTORY->undefined_value()));
     __ push(ebx);
     __ inc(eax);
     __ bind(&done);
@@ -600,9 +601,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ test(ebx, Immediate(kSmiTagMask));
     __ j(zero, &convert_to_object);
 
-    __ cmp(ebx, Factory::null_value());
+    __ cmp(ebx, FACTORY->null_value());
     __ j(equal, &use_global_receiver);
-    __ cmp(ebx, Factory::undefined_value());
+    __ cmp(ebx, FACTORY->undefined_value());
     __ j(equal, &use_global_receiver);
 
     // We don't use IsObjectJSObjectType here because we jump on success.
@@ -674,8 +675,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ j(not_zero, &function, taken);
     __ Set(ebx, Immediate(0));
     __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
-    __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-           RelocInfo::CODE_TARGET);
+    __ jmp(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
     __ bind(&function);
   }
 
@@ -688,7 +689,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
   __ SmiUntag(ebx);
   __ cmp(eax, Operand(ebx));
-  __ j(not_equal, Handle<Code>(builtin(ArgumentsAdaptorTrampoline)));
+  __ j(not_equal, Handle<Code>(Isolate::Current()->builtins()->builtin(
+      ArgumentsAdaptorTrampoline)));
 
   ParameterCount expected(0);
   __ InvokeCode(Operand(edx), expected, expected, JUMP_FUNCTION);
@@ -753,9 +755,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   // Compute the receiver in non-strict mode.
   __ test(ebx, Immediate(kSmiTagMask));
   __ j(zero, &call_to_object);
-  __ cmp(ebx, Factory::null_value());
+  __ cmp(ebx, FACTORY->null_value());
   __ j(equal, &use_global_receiver);
-  __ cmp(ebx, Factory::undefined_value());
+  __ cmp(ebx, FACTORY->undefined_value());
   __ j(equal, &use_global_receiver);
 
   // If given receiver is already a JavaScript object then there's no
@@ -795,7 +797,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ mov(edx, Operand(ebp, 2 * kPointerSize));  // load arguments
 
   // Use inline caching to speed up access to arguments.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   __ call(ic, RelocInfo::CODE_TARGET);
   // It is important that we do not have a test instruction after the
   // call.  A test instruction after the call is used to indicate that
@@ -867,7 +870,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch2: start of next object
   __ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
   __ mov(FieldOperand(result, JSArray::kPropertiesOffset),
-         Factory::empty_fixed_array());
+         FACTORY->empty_fixed_array());
   // Field JSArray::kElementsOffset is initialized later.
   __ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));
 
@@ -875,7 +878,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // fixed array.
   if (initial_capacity == 0) {
     __ mov(FieldOperand(result, JSArray::kElementsOffset),
-           Factory::empty_fixed_array());
+           FACTORY->empty_fixed_array());
     return;
   }
 
@@ -892,7 +895,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch1: elements array
   // scratch2: start of next object
   __ mov(FieldOperand(scratch1, FixedArray::kMapOffset),
-         Factory::fixed_array_map());
+         FACTORY->fixed_array_map());
   __ mov(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Immediate(Smi::FromInt(initial_capacity)));
 
@@ -903,7 +906,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
-    __ mov(scratch3, Factory::the_hole_value());
+    __ mov(scratch3, FACTORY->the_hole_value());
     for (int i = 0; i < initial_capacity; i++) {
       __ mov(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
@@ -913,7 +916,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
     Label loop, entry;
     __ jmp(&entry);
     __ bind(&loop);
-    __ mov(Operand(scratch1, 0), Factory::the_hole_value());
+    __ mov(Operand(scratch1, 0), FACTORY->the_hole_value());
     __ add(Operand(scratch1), Immediate(kPointerSize));
     __ bind(&entry);
     __ cmp(scratch1, Operand(scratch2));
@@ -968,7 +971,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // elements_array_end: start of next object
   // array_size: size of array (smi)
   __ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
-  __ mov(elements_array, Factory::empty_fixed_array());
+  __ mov(elements_array, FACTORY->empty_fixed_array());
   __ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
   // Field JSArray::kElementsOffset is initialized later.
   __ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
@@ -987,7 +990,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // elements_array_end: start of next object
   // array_size: size of array (smi)
   __ mov(FieldOperand(elements_array, FixedArray::kMapOffset),
-         Factory::fixed_array_map());
+         FACTORY->fixed_array_map());
   // For non-empty JSArrays the length of the FixedArray and the JSArray is the
   // same.
   __ mov(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
@@ -999,7 +1002,7 @@ static void AllocateJSArray(MacroAssembler* masm,
     __ SmiUntag(array_size);
     __ lea(edi, Operand(elements_array,
                         FixedArray::kHeaderSize - kHeapObjectTag));
-    __ mov(eax, Factory::the_hole_value());
+    __ mov(eax, FACTORY->the_hole_value());
     __ cld();
     // Do not use rep stos when filling less than kRepStosThreshold
     // words.
@@ -1063,7 +1066,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                        edi,
                        kPreallocatedArrayElements,
                        &prepare_generic_code_call);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
   __ pop(ebx);
   if (construct_call) {
     __ pop(edi);
@@ -1119,7 +1122,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   edi,
                   true,
                   &prepare_generic_code_call);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
   __ mov(eax, ebx);
   __ pop(ebx);
   if (construct_call) {
@@ -1146,7 +1149,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   edi,
                   false,
                   &prepare_generic_code_call);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
   __ mov(eax, ebx);
   __ pop(ebx);
   if (construct_call) {
@@ -1232,7 +1235,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // Jump to the generic array code in case the specialized code cannot handle
   // the construction.
   __ bind(&generic_array_code);
-  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::ArrayCodeGeneric);
   Handle<Code> array_code(code);
   __ jmp(array_code, RelocInfo::CODE_TARGET);
 }
@@ -1266,7 +1270,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // Jump to the generic construct code in case the specialized code cannot
   // handle the construction.
   __ bind(&generic_constructor);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
 }
@@ -1280,7 +1285,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
-  __ IncrementCounter(&Counters::string_ctor_calls, 1);
+  __ IncrementCounter(COUNTERS->string_ctor_calls(), 1);
 
   if (FLAG_debug_code) {
     __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
@@ -1309,7 +1314,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
       edx,  // Scratch 2.
       false,  // Input is known to be smi?
       &not_cached);
-  __ IncrementCounter(&Counters::string_ctor_cached_number, 1);
+  __ IncrementCounter(COUNTERS->string_ctor_cached_number(), 1);
   __ bind(&argument_is_string);
   // ----------- S t a t e -------------
   //  -- ebx    : argument converted to string
@@ -1338,7 +1343,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   __ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);
 
   // Set properties and elements.
-  __ Set(ecx, Immediate(Factory::empty_fixed_array()));
+  __ Set(ecx, Immediate(FACTORY->empty_fixed_array()));
   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), ecx);
 
@@ -1361,12 +1366,12 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
   __ j(NegateCondition(is_string), &convert_argument);
   __ mov(ebx, eax);
-  __ IncrementCounter(&Counters::string_ctor_string_value, 1);
+  __ IncrementCounter(COUNTERS->string_ctor_string_value(), 1);
   __ jmp(&argument_is_string);
 
   // Invoke the conversion builtin and put the result into ebx.
   __ bind(&convert_argument);
-  __ IncrementCounter(&Counters::string_ctor_conversions, 1);
+  __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1);
   __ EnterInternalFrame();
   __ push(edi);  // Preserve the function.
   __ push(eax);
@@ -1379,7 +1384,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   // Load the empty string into ebx, remove the receiver from the
   // stack, and jump back to the case where the argument is a string.
   __ bind(&no_arguments);
-  __ Set(ebx, Immediate(Factory::empty_string()));
+  __ Set(ebx, Immediate(FACTORY->empty_string()));
   __ pop(ecx);
   __ lea(esp, Operand(esp, kPointerSize));
   __ push(ecx);
@@ -1388,7 +1393,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   // At this point the argument is already a string. Call runtime to
   // create a string wrapper.
   __ bind(&gc_required);
-  __ IncrementCounter(&Counters::string_ctor_gc_required, 1);
+  __ IncrementCounter(COUNTERS->string_ctor_gc_required(), 1);
   __ EnterInternalFrame();
   __ push(ebx);
   __ CallRuntime(Runtime::kNewStringWrapper, 1);
@@ -1439,7 +1444,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // -----------------------------------
 
   Label invoke, dont_adapt_arguments;
-  __ IncrementCounter(&Counters::arguments_adaptors, 1);
+  __ IncrementCounter(COUNTERS->arguments_adaptors(), 1);
 
   Label enough, too_few;
   __ cmp(eax, Operand(ebx));
@@ -1487,7 +1492,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     Label fill;
     __ bind(&fill);
     __ inc(ecx);
-    __ push(Immediate(Factory::undefined_value()));
+    __ push(Immediate(FACTORY->undefined_value()));
     __ cmp(ecx, Operand(ebx));
     __ j(less, &fill);
 
@@ -1515,8 +1520,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // We shouldn't be performing on-stack replacement in the first
   // place if the CPU features we need for the optimized Crankshaft
   // code aren't supported.
-  CpuFeatures::Probe(false);
-  if (!CpuFeatures::IsSupported(SSE2)) {
+  CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+  cpu_features->Probe(false);
+  if (!cpu_features->IsSupported(SSE2)) {
     __ Abort("Unreachable code: Cannot optimize without SSE2 support.");
     return;
   }
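
The builtins-ia32.cc hunks above follow one mechanical pattern: process-wide statics such as Factory::undefined_value(), Counters::constructed_objects and Builtins::builtin(...) become lookups on the current isolate, with FACTORY and COUNTERS as shorthand. A hedged standalone sketch of that accessor shape follows; the struct bodies and the macro definitions are assumptions for illustration only, the real declarations live in src/isolate.h:

struct Factory {};    // placeholder; the real one hands out root handles such as empty_fixed_array()

struct StatsCounter {
  int value = 0;
  void Increment(int by) { value += by; }
};

struct Counters {
  StatsCounter constructed_objects_;
  StatsCounter* constructed_objects() { return &constructed_objects_; }
};

struct Builtins {};   // placeholder; the real one exposes builtin(...) code objects

class Isolate {
 public:
  static Isolate* Current() { static Isolate isolate; return &isolate; }  // real V8: thread-local
  Factory* factory() { return &factory_; }
  Counters* counters() { return &counters_; }
  Builtins* builtins() { return &builtins_; }
 private:
  Factory factory_;
  Counters counters_;
  Builtins builtins_;
};

// Shorthand used by the generated-code files in this patch; these macro bodies
// are an assumption for the sketch.
#define FACTORY (Isolate::Current()->factory())
#define COUNTERS (Isolate::Current()->counters())

int main() {
  // Replaces the former static Counters::constructed_objects counter.
  COUNTERS->constructed_objects()->Increment(1);
  return 0;
}
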
index 2f310ca9488de5ab9401957780464a2f1bd152e0..c8a1b371e4078b5a063180d18b325f18cace22bf 100644
@@ -32,6 +32,7 @@
 #include "code-stubs.h"
 #include "bootstrapper.h"
 #include "jsregexp.h"
+#include "isolate.h"
 #include "regexp-macro-assembler.h"
 
 namespace v8 {
@@ -48,7 +49,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
 
   __ bind(&check_heap_number);
   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
+  __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map()));
   __ j(not_equal, &call_builtin);
   __ ret(0);
 
@@ -82,16 +83,16 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
 
   // Initialize the rest of the function. We don't have to update the
   // write barrier because the allocated object is in new space.
-  __ mov(ebx, Immediate(Factory::empty_fixed_array()));
+  __ mov(ebx, Immediate(FACTORY->empty_fixed_array()));
   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
   __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
-         Immediate(Factory::the_hole_value()));
+         Immediate(FACTORY->the_hole_value()));
   __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
   __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
   __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
   __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
-         Immediate(Factory::undefined_value()));
+         Immediate(FACTORY->undefined_value()));
 
   // Initialize the code pointer in the function to be the one
   // found in the shared function info object.
@@ -108,7 +109,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   __ pop(edx);
   __ push(esi);
   __ push(edx);
-  __ push(Immediate(Factory::false_value()));
+  __ push(Immediate(FACTORY->false_value()));
   __ push(ecx);  // Restore return address.
   __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
 }
@@ -125,7 +126,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
   __ mov(ecx, Operand(esp, 1 * kPointerSize));
 
   // Setup the object header.
-  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
+  __ mov(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->context_map());
   __ mov(FieldOperand(eax, Context::kLengthOffset),
          Immediate(Smi::FromInt(length)));
 
@@ -144,7 +145,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
   __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
 
   // Initialize the rest of the slots to undefined.
-  __ mov(ebx, Factory::undefined_value());
+  __ mov(ebx, FACTORY->undefined_value());
   for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
     __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
   }
@@ -180,7 +181,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                            FixedArray::kHeaderSize));
-  __ cmp(ecx, Factory::undefined_value());
+  __ cmp(ecx, FACTORY->undefined_value());
   __ j(equal, &slow_case);
 
   if (FLAG_debug_code) {
@@ -188,11 +189,11 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     Handle<Map> expected_map;
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
-      expected_map = Factory::fixed_array_map();
+      expected_map = FACTORY->fixed_array_map();
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
-      expected_map = Factory::fixed_cow_array_map();
+      expected_map = FACTORY->fixed_cow_array_map();
     }
     __ push(ecx);
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
@@ -241,7 +242,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
   __ mov(eax, Operand(esp, 1 * kPointerSize));
 
   // 'null' => false.
-  __ cmp(eax, Factory::null_value());
+  __ cmp(eax, FACTORY->null_value());
   __ j(equal, &false_result);
 
   // Get the map and type of the heap object.
@@ -267,7 +268,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
 
   __ bind(&not_string);
   // HeapNumber => false iff +0, -0, or NaN.
-  __ cmp(edx, Factory::heap_number_map());
+  __ cmp(edx, FACTORY->heap_number_map());
   __ j(not_equal, &true_result);
   __ fldz();
   __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
@@ -288,7 +289,8 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
 const char* GenericBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -362,7 +364,7 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -398,7 +400,7 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -433,7 +435,7 @@ void GenericBinaryOpStub::GenerateCall(
     }
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -761,7 +763,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
         // number in eax.
         __ AllocateHeapNumber(eax, ecx, ebx, slow);
         // Store the result in the HeapNumber and return.
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           __ cvtsi2sd(xmm0, Operand(left));
           __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -811,7 +813,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
       }
       if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
         __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           FloatingPointHelper::LoadSSE2Smis(masm, ebx);
           switch (op_) {
@@ -876,7 +878,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
 void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
   Label call_runtime;
 
-  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
+  __ IncrementCounter(COUNTERS->generic_binary_stub_calls(), 1);
 
   if (runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI) {
     Label slow;
@@ -915,7 +917,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
         }
 
         Label not_floats;
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           if (static_operands_type_.IsNumber()) {
             if (FLAG_debug_code) {
@@ -1049,7 +1051,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
             default: UNREACHABLE();
           }
           // Store the result in the HeapNumber and return.
-          if (CpuFeatures::IsSupported(SSE2)) {
+          if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
             CpuFeatures::Scope use_sse2(SSE2);
             __ cvtsi2sd(xmm0, Operand(ebx));
             __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -1361,7 +1363,8 @@ void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
 const char* TypeRecordingBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -1643,7 +1646,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
         // number in eax.
         __ AllocateHeapNumber(eax, ecx, ebx, slow);
         // Store the result in the HeapNumber and return.
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           __ cvtsi2sd(xmm0, Operand(left));
           __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -1688,7 +1691,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
             break;
         }
         __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           FloatingPointHelper::LoadSSE2Smis(masm, ebx);
           switch (op_) {
@@ -1820,7 +1823,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
     case Token::DIV: {
       Label not_floats;
       Label not_int32;
-      if (CpuFeatures::IsSupported(SSE2)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
         CpuFeatures::Scope use_sse2(SSE2);
         FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
         FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
@@ -1941,7 +1944,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
           default: UNREACHABLE();
         }
         // Store the result in the HeapNumber and return.
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           __ cvtsi2sd(xmm0, Operand(ebx));
           __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -2021,7 +2024,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
     case Token::MUL:
     case Token::DIV: {
       Label not_floats;
-      if (CpuFeatures::IsSupported(SSE2)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
         CpuFeatures::Scope use_sse2(SSE2);
         FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
 
@@ -2124,7 +2127,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
           default: UNREACHABLE();
         }
         // Store the result in the HeapNumber and return.
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           __ cvtsi2sd(xmm0, Operand(ebx));
           __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -2195,7 +2198,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
   Label call_runtime;
 
-  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
+  __ IncrementCounter(COUNTERS->generic_binary_stub_calls(), 1);
 
   switch (op_) {
     case Token::ADD:
@@ -2225,7 +2228,7 @@ void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
     case Token::MUL:
     case Token::DIV: {
       Label not_floats;
-      if (CpuFeatures::IsSupported(SSE2)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
         CpuFeatures::Scope use_sse2(SSE2);
         FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
 
@@ -2323,7 +2326,7 @@ void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
           default: UNREACHABLE();
         }
         // Store the result in the HeapNumber and return.
-        if (CpuFeatures::IsSupported(SSE2)) {
+        if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           CpuFeatures::Scope use_sse2(SSE2);
           __ cvtsi2sd(xmm0, Operand(ebx));
           __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -2511,7 +2514,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
     __ bind(&input_not_smi);
     // Check if input is a HeapNumber.
     __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
-    __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
+    __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map()));
     __ j(not_equal, &runtime_call);
     // Input is a HeapNumber. Push it on the FPU stack and load its
     // low and high words into ebx, edx.
@@ -2521,7 +2524,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
 
     __ bind(&loaded);
   } else {  // UNTAGGED.
-    if (CpuFeatures::IsSupported(SSE4_1)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) {
       CpuFeatures::Scope sse4_scope(SSE4_1);
       __ pextrd(Operand(edx), xmm1, 0x1);  // copy xmm1[63..32] to edx.
     } else {
@@ -2544,8 +2547,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ mov(eax, ecx);
   __ sar(eax, 8);
   __ xor_(ecx, Operand(eax));
-  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
-  __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
+  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
+  __ and_(Operand(ecx),
+          Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
 
   // ST[0] or xmm1 == double value.
   // ebx = low 32 bits of double value.
@@ -2554,14 +2558,15 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ mov(eax,
          Immediate(ExternalReference::transcendental_cache_array_address()));
   // Eax points to cache array.
-  __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
+  __ mov(eax, Operand(eax, type_ * sizeof(
+      Isolate::Current()->transcendental_cache()->caches_[0])));
   // Eax points to the cache for the type type_.
   // If NULL, the cache hasn't been initialized yet, so go through runtime.
   __ test(eax, Operand(eax));
   __ j(zero, &runtime_call_clear_stack);
 #ifdef DEBUG
   // Check that the layout of cache elements match expectations.
-  { TranscendentalCache::Element test_elem[2];
+  { TranscendentalCache::SubCache::Element test_elem[2];
     char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
     char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
     char* elem_in0  = reinterpret_cast<char*>(&(test_elem[0].in[0]));
@@ -2770,7 +2775,8 @@ void IntegerConvert(MacroAssembler* masm,
   Label done, right_exponent, normal_exponent;
   Register scratch = ebx;
   Register scratch2 = edi;
-  if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
+  if (type_info.IsInteger32() &&
+      Isolate::Current()->cpu_features()->IsEnabled(SSE2)) {
     CpuFeatures::Scope scope(SSE2);
     __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
     return;
@@ -2973,14 +2979,14 @@ void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
 
   // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
   __ bind(&check_undefined_arg1);
-  __ cmp(edx, Factory::undefined_value());
+  __ cmp(edx, FACTORY->undefined_value());
   __ j(not_equal, conversion_failure);
   __ mov(edx, Immediate(0));
   __ jmp(&load_arg2);
 
   __ bind(&arg1_is_object);
   __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
-  __ cmp(ebx, Factory::heap_number_map());
+  __ cmp(ebx, FACTORY->heap_number_map());
   __ j(not_equal, &check_undefined_arg1);
 
   // Get the untagged integer version of the edx heap number in ecx.
@@ -3004,14 +3010,14 @@ void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
 
   // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
   __ bind(&check_undefined_arg2);
-  __ cmp(eax, Factory::undefined_value());
+  __ cmp(eax, FACTORY->undefined_value());
   __ j(not_equal, conversion_failure);
   __ mov(ecx, Immediate(0));
   __ jmp(&done);
 
   __ bind(&arg2_is_object);
   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(ebx, Factory::heap_number_map());
+  __ cmp(ebx, FACTORY->heap_number_map());
   __ j(not_equal, &check_undefined_arg2);
 
   // Get the untagged integer version of the eax heap number in ecx.
@@ -3098,14 +3104,14 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
   // Load operand in edx into xmm0, or branch to not_numbers.
   __ test(edx, Immediate(kSmiTagMask));
   __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
-  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
+  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), FACTORY->heap_number_map());
   __ j(not_equal, not_numbers);  // Argument in edx is not a number.
   __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
   __ bind(&load_eax);
   // Load operand in eax into xmm1, or branch to not_numbers.
   __ test(eax, Immediate(kSmiTagMask));
   __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
-  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
+  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->heap_number_map());
   __ j(equal, &load_float_eax);
   __ jmp(not_numbers);  // Argument in eax is not a number.
   __ bind(&load_smi_edx);
@@ -3223,14 +3229,14 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
   __ test(edx, Immediate(kSmiTagMask));
   __ j(zero, &test_other, not_taken);  // argument in edx is OK
   __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
-  __ cmp(scratch, Factory::heap_number_map());
+  __ cmp(scratch, FACTORY->heap_number_map());
   __ j(not_equal, non_float);  // argument in edx is not a number -> NaN
 
   __ bind(&test_other);
   __ test(eax, Immediate(kSmiTagMask));
   __ j(zero, &done);  // argument in eax is OK
   __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(scratch, Factory::heap_number_map());
+  __ cmp(scratch, FACTORY->heap_number_map());
   __ j(not_equal, non_float);  // argument in eax is not a number -> NaN
 
   // Fall-through: Both operands are numbers.
@@ -3276,7 +3282,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
     }
 
     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-    __ cmp(edx, Factory::heap_number_map());
+    __ cmp(edx, FACTORY->heap_number_map());
     __ j(not_equal, &slow);
     if (overwrite_ == UNARY_OVERWRITE) {
       __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
@@ -3308,14 +3314,14 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
 
     // Check if the operand is a heap number.
     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-    __ cmp(edx, Factory::heap_number_map());
+    __ cmp(edx, FACTORY->heap_number_map());
     __ j(not_equal, &slow, not_taken);
 
     // Convert the heap number in eax to an untagged integer in ecx.
     IntegerConvert(masm,
                    eax,
                    TypeInfo::Unknown(),
-                   CpuFeatures::IsSupported(SSE3),
+                   Isolate::Current()->cpu_features()->IsSupported(SSE3),
                    &slow);
 
     // Do the bitwise operation and check if the result fits in a smi.
@@ -3338,7 +3344,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
       __ AllocateHeapNumber(ebx, edx, edi, &slow);
       __ mov(eax, Operand(ebx));
     }
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope use_sse2(SSE2);
       __ cvtsi2sd(xmm0, Operand(ecx));
       __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -3411,7 +3417,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   // exponent is smi and base is a heapnumber.
   __ bind(&base_nonsmi);
   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         FACTORY->heap_number_map());
   __ j(not_equal, &call_runtime);
 
   __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
@@ -3463,7 +3469,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   // on doubles.
   __ bind(&exponent_nonsmi);
   __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         FACTORY->heap_number_map());
   __ j(not_equal, &call_runtime);
   __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
   // Test if exponent is nan.
@@ -3480,7 +3486,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
 
   __ bind(&base_not_smi);
   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         FACTORY->heap_number_map());
   __ j(not_equal, &call_runtime);
   __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
   __ and_(ecx, HeapNumber::kExponentMask);
@@ -3678,7 +3684,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
   __ lea(edi, Operand(eax, GetArgumentsObjectSize()));
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(FACTORY->fixed_array_map()));
 
   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
   // Untag the length for the loop below.
@@ -3806,7 +3812,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Check that the JSArray is in fast case.
   __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
   __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
-  __ cmp(eax, Factory::fixed_array_map());
+  __ cmp(eax, FACTORY->fixed_array_map());
   __ j(not_equal, &runtime);
   // Check that the last match info has space for the capture registers and the
   // additional information.
@@ -3844,7 +3850,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ j(not_zero, &runtime);
   // String is a cons string.
   __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
-  __ cmp(Operand(edx), Factory::empty_string());
+  __ cmp(Operand(edx), FACTORY->empty_string());
   __ j(not_equal, &runtime);
   __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
@@ -3894,11 +3900,16 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // edx: code
   // edi: encoding of subject string (1 if ascii 0 if two_byte);
   // All checks done. Now push arguments for native regexp code.
-  __ IncrementCounter(&Counters::regexp_entry_native, 1);
+  __ IncrementCounter(COUNTERS->regexp_entry_native(), 1);
 
-  static const int kRegExpExecuteArguments = 7;
+  // Isolates: note we add an additional parameter here (isolate pointer).
+  static const int kRegExpExecuteArguments = 8;
   __ EnterApiExitFrame(kRegExpExecuteArguments);
 
+  // Argument 8: Pass current isolate address.
+  __ mov(Operand(esp, 7 * kPointerSize),
+      Immediate(ExternalReference::isolate_address()));
+
   // Argument 7: Indicate that this is a direct call from JavaScript.
   __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
 
@@ -3961,7 +3972,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
-  ExternalReference pending_exception(Top::k_pending_exception_address);
+  ExternalReference pending_exception(Isolate::k_pending_exception_address);
   __ mov(edx,
          Operand::StaticVariable(ExternalReference::the_hole_value_location()));
   __ mov(eax, Operand::StaticVariable(pending_exception));
@@ -3974,7 +3985,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
-  __ cmp(eax, Factory::termination_exception());
+  __ cmp(eax, FACTORY->termination_exception());
   Label throw_termination_exception;
   __ j(equal, &throw_termination_exception);
 
@@ -3986,7 +3997,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   __ bind(&failure);
   // For failure to match, return null.
-  __ mov(Operand(eax), Factory::null_value());
+  __ mov(Operand(eax), FACTORY->null_value());
   __ ret(4 * kPointerSize);
 
   // Load RegExp data.
@@ -4087,7 +4098,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   // Set elements to point to FixedArray allocated right after the JSArray.
   // Interleave operations for better latency.
   __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
-  __ mov(ecx, Immediate(Factory::empty_fixed_array()));
+  __ mov(ecx, Immediate(FACTORY->empty_fixed_array()));
   __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
   __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
@@ -4110,12 +4121,12 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
 
   // Set map.
   __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(FACTORY->fixed_array_map()));
   // Set length.
   __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
   // Fill contents of fixed-array with the-hole.
   __ SmiUntag(ecx);
-  __ mov(edx, Immediate(Factory::the_hole_value()));
+  __ mov(edx, Immediate(FACTORY->the_hole_value()));
   __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
   // Fill fixed array elements with hole.
   // eax: JSArray.
@@ -4180,7 +4191,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
     __ jmp(&smi_hash_calculated);
     __ bind(&not_smi);
     __ cmp(FieldOperand(object, HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           FACTORY->heap_number_map());
     __ j(not_equal, not_found);
     STATIC_ASSERT(8 == kDoubleSize);
     __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
@@ -4196,7 +4207,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                         FixedArray::kHeaderSize));
     __ test(probe, Immediate(kSmiTagMask));
     __ j(zero, not_found);
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope fscope(SSE2);
       __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
       __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
@@ -4230,7 +4241,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                       index,
                       times_twice_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize));
-  __ IncrementCounter(&Counters::number_to_string_native, 1);
+  __ IncrementCounter(COUNTERS->number_to_string_native(), 1);
 }
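Counter increments switch from taking the address of a static member to calling a per-isolate accessor. The sketch below shows the assumed shape of that accessor, with simplified, illustrative names:

    // Illustrative only: each isolate carries its own StatsCounter instances,
    // so concurrent isolates do not race on shared static counters.
    struct StatsCounter { int count; };

    class Counters {
     public:
      StatsCounter* number_to_string_native() { return &number_to_string_native_; }
     private:
      StatsCounter number_to_string_native_;
    };

    // COUNTERS is assumed to expand to the current isolate's Counters object, so
    //   __ IncrementCounter(COUNTERS->number_to_string_native(), 1);
    // bumps a per-isolate counter rather than a static member's address.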
 
 
@@ -4296,14 +4307,14 @@ void CompareStub::Generate(MacroAssembler* masm) {
       // Check for undefined.  undefined OP undefined is false even though
       // undefined == undefined.
       NearLabel check_for_nan;
-      __ cmp(edx, Factory::undefined_value());
+      __ cmp(edx, FACTORY->undefined_value());
       __ j(not_equal, &check_for_nan);
       __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
       __ ret(0);
       __ bind(&check_for_nan);
     }
 
-    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
+    // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
     // so we do the second best thing - test it ourselves.
     // Note: if cc_ != equal, never_nan_nan_ is not used.
     if (never_nan_nan_ && (cc_ == equal)) {
@@ -4312,7 +4323,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
     } else {
       NearLabel heap_number;
       __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-             Immediate(Factory::heap_number_map()));
+             Immediate(FACTORY->heap_number_map()));
       __ j(equal, &heap_number);
       if (cc_ != equal) {
         // Call runtime on identical JSObjects.  Otherwise return equal.
@@ -4389,7 +4400,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
 
     // Check if the non-smi operand is a heap number.
     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-           Immediate(Factory::heap_number_map()));
+           Immediate(FACTORY->heap_number_map()));
     // If heap number, handle it in the slow case.
     __ j(equal, &slow);
     // Return non-equal (ebx is not zero)
@@ -4434,7 +4445,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
   if (include_number_compare_) {
     Label non_number_comparison;
     Label unordered;
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope use_sse2(SSE2);
       CpuFeatures::Scope use_cmov(CMOV);
 
@@ -4653,7 +4664,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ Set(eax, Immediate(argc_));
   __ Set(ebx, Immediate(0));
   __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
-  Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+  Handle<Code> adaptor(Isolate::Current()->builtins()->builtin(
+      Builtins::ArgumentsAdaptorTrampoline));
   __ jmp(adaptor, RelocInfo::CODE_TARGET);
 }
 
@@ -4707,6 +4719,8 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   // Call C function.
   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
+  __ mov(Operand(esp, 2 * kPointerSize),
+         Immediate(ExternalReference::isolate_address()));
   __ call(Operand(ebx));
   // Result is in eax or edx:eax - do not destroy these registers!
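CEntryStub now materializes a third outgoing argument before calling into C, so runtime functions receive the isolate explicitly instead of consulting a global. A hedged sketch of the receiving side (the actual runtime signature is not part of this hunk):

    // Illustrative only: a hypothetical runtime entry; only the trailing
    // Isolate* parameter mirrors what this hunk arranges on the stack.
    class Object;
    class Isolate;

    typedef Object* (*RuntimeEntry)(int argc, Object** argv, Isolate* isolate);

    // The stub now writes, in order:
    //   esp + 0 * kPointerSize : argc  (edi)
    //   esp + 1 * kPointerSize : argv  (esi)
    //   esp + 2 * kPointerSize : ExternalReference::isolate_address()
    // before the indirect call through ebx.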
 
@@ -4718,7 +4732,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   // call as this may lead to crashes in the IC code later.
   if (FLAG_debug_code) {
     NearLabel okay;
-    __ cmp(eax, Factory::the_hole_value());
+    __ cmp(eax, FACTORY->the_hole_value());
     __ j(not_equal, &okay);
     __ int3();
     __ bind(&okay);
@@ -4732,7 +4746,8 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   __ test(ecx, Immediate(kFailureTagMask));
   __ j(zero, &failure_returned, not_taken);
 
-  ExternalReference pending_exception_address(Top::k_pending_exception_address);
+  ExternalReference pending_exception_address(
+      Isolate::k_pending_exception_address);
 
   // Check that there is no pending exception, otherwise we
   // should have returned some failure value.
@@ -4774,7 +4789,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
-  __ cmp(eax, Factory::termination_exception());
+  __ cmp(eax, FACTORY->termination_exception());
   __ j(equal, throw_termination_exception);
 
   // Handle normal exception.
@@ -4874,12 +4889,12 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ push(ebx);
 
   // Save copies of the top frame descriptor on the stack.
-  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
+  ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address);
   __ push(Operand::StaticVariable(c_entry_fp));
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   // If this is the outermost JS call, set js_entry_sp value.
-  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
+  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
   __ j(not_equal, &not_outermost_js);
   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
@@ -4891,7 +4906,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Caught exception: Store result (exception) in the pending
   // exception field in the JSEnv and return a failure sentinel.
-  ExternalReference pending_exception(Top::k_pending_exception_address);
+  ExternalReference pending_exception(Isolate::k_pending_exception_address);
   __ mov(Operand::StaticVariable(pending_exception), eax);
   __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
   __ jmp(&exit);
@@ -4924,7 +4939,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ call(Operand(edx));
 
   // Unlink this frame from the handler chain.
-  __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
+  __ pop(Operand::StaticVariable(ExternalReference(
+      Isolate::k_handler_address)));
   // Pop next_sp.
   __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
 
@@ -4939,7 +4955,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Restore the top frame descriptor from the stack.
   __ bind(&exit);
-  __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
+  __ pop(Operand::StaticVariable(ExternalReference(
+      Isolate::k_c_entry_fp_address)));
 
   // Restore callee-saved registers (C calling conventions).
   __ pop(ebx);
@@ -5064,7 +5081,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ bind(&loop);
   __ cmp(scratch, Operand(prototype));
   __ j(equal, &is_instance);
-  __ cmp(Operand(scratch), Immediate(Factory::null_value()));
+  __ cmp(Operand(scratch), Immediate(FACTORY->null_value()));
   __ j(equal, &is_not_instance);
   __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
@@ -5078,7 +5095,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
                                 times_pointer_size, roots_address), eax);
   } else {
     // Get return address and delta to inlined map check.
-    __ mov(eax, Factory::true_value());
+    __ mov(eax, FACTORY->true_value());
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
@@ -5100,7 +5117,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
         scratch, times_pointer_size, roots_address), eax);
   } else {
     // Get return address and delta to inlined map check.
-    __ mov(eax, Factory::false_value());
+    __ mov(eax, FACTORY->false_value());
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
@@ -5124,7 +5141,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow, not_taken);
 
   // Null is not an instance of anything.
-  __ cmp(object, Factory::null_value());
+  __ cmp(object, FACTORY->null_value());
   __ j(not_equal, &object_not_null);
   __ Set(eax, Immediate(Smi::FromInt(1)));
   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
@@ -5165,10 +5182,10 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     NearLabel true_value, done;
     __ test(eax, Operand(eax));
     __ j(zero, &true_value);
-    __ mov(eax, Factory::false_value());
+    __ mov(eax, FACTORY->false_value());
     __ jmp(&done);
     __ bind(&true_value);
-    __ mov(eax, Factory::true_value());
+    __ mov(eax, FACTORY->true_value());
     __ bind(&done);
     __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
   }
@@ -5203,7 +5220,8 @@ const char* CompareStub::GetName() {
 
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
 
   const char* cc_name;
@@ -5296,7 +5314,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // the case we would rather go to the runtime system now to flatten
   // the string.
   __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
-         Immediate(Factory::empty_string()));
+         Immediate(FACTORY->empty_string()));
   __ j(not_equal, &call_runtime_);
   // Get the first of the two strings and load its instance type.
   __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
@@ -5341,7 +5359,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
-  __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
+  __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
   call_helper.BeforeCall(masm);
   __ push(object_);
   __ push(index_);
@@ -5402,7 +5420,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
                     ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
   __ j(not_zero, &slow_case_, not_taken);
 
-  __ Set(result_, Immediate(Factory::single_character_string_cache()));
+  __ Set(result_, Immediate(FACTORY->single_character_string_cache()));
   STATIC_ASSERT(kSmiTag == 0);
   STATIC_ASSERT(kSmiTagSize == 1);
   STATIC_ASSERT(kSmiShiftSize == 0);
@@ -5410,7 +5428,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   __ mov(result_, FieldOperand(result_,
                                code_, times_half_pointer_size,
                                FixedArray::kHeaderSize));
-  __ cmp(result_, Factory::undefined_value());
+  __ cmp(result_, FACTORY->undefined_value());
   __ j(equal, &slow_case_, not_taken);
   __ bind(&exit_);
 }
@@ -5496,7 +5514,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ test(ecx, Operand(ecx));
   __ j(not_zero, &second_not_zero_length);
   // Second string is empty, result is first string which is already in eax.
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
   __ bind(&second_not_zero_length);
   __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
@@ -5505,7 +5523,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ j(not_zero, &both_not_zero_length);
   // First string is empty, result is second string which is in edx.
   __ mov(eax, edx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Both strings are non-empty.
@@ -5539,7 +5557,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   StringHelper::GenerateTwoCharacterSymbolTableProbe(
       masm, ebx, ecx, eax, edx, edi,
       &make_two_character_string_no_reload, &make_two_character_string);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Allocate a two character string.
@@ -5551,7 +5569,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
   __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
   __ bind(&make_two_character_string_no_reload);
-  __ IncrementCounter(&Counters::string_add_make_two_char, 1);
+  __ IncrementCounter(COUNTERS->string_add_make_two_char(), 1);
   __ AllocateAsciiString(eax,  // Result.
                          2,    // Length.
                          edi,  // Scratch 1.
@@ -5562,7 +5580,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ or_(ebx, Operand(ecx));
   // Set the characters in the new string.
   __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   __ bind(&longer_than_two);
@@ -5593,7 +5611,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
   __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
   __ mov(eax, ecx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
   __ bind(&non_ascii);
   // At least one of the strings is two-byte. Check whether it happens
@@ -5670,7 +5688,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   // edx: first char of second argument
   // edi: length of second argument
   StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Handle creating a flat two byte result.
@@ -5711,7 +5729,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   // edx: first char of second argument
   // edi: length of second argument
   StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Just jump to runtime to add the two strings.
@@ -5936,9 +5954,9 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                         SymbolTable::kElementsStartOffset));
 
     // If entry is undefined no string with this hash can be found.
-    __ cmp(candidate, Factory::undefined_value());
+    __ cmp(candidate, FACTORY->undefined_value());
     __ j(equal, not_found);
-    __ cmp(candidate, Factory::null_value());
+    __ cmp(candidate, FACTORY->null_value());
     __ j(equal, &next_probe[i]);
 
     // If length is not 2 the string is not a candidate.
@@ -6135,7 +6153,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   // esi: character of sub string start
   StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
   __ mov(esi, edx);  // Restore esi.
-  __ IncrementCounter(&Counters::sub_string_native, 1);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1);
   __ ret(3 * kPointerSize);
 
   __ bind(&non_ascii_flat);
@@ -6176,7 +6194,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ mov(esi, edx);  // Restore esi.
 
   __ bind(&return_eax);
-  __ IncrementCounter(&Counters::sub_string_native, 1);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1);
   __ ret(3 * kPointerSize);
 
   // Just jump to runtime to create the sub string.
@@ -6195,7 +6213,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
   Label result_greater;
   Label compare_lengths;
 
-  __ IncrementCounter(&Counters::string_compare_native, 1);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1);
 
   // Find minimum length.
   NearLabel left_shorter;
@@ -6286,7 +6304,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
-  __ IncrementCounter(&Counters::string_compare_native, 1);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1);
   __ ret(2 * kPointerSize);
 
   __ bind(&not_same);
@@ -6353,7 +6371,8 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
 
   // Inlining the double comparison and falling back to the general compare
   // stub if NaN is involved or SSE2 or CMOV is unsupported.
-  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
+  CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+  if (cpu_features->IsSupported(SSE2) && cpu_features->IsSupported(CMOV)) {
     CpuFeatures::Scope scope1(SSE2);
     CpuFeatures::Scope scope2(CMOV);
 
index e01d7059cb6331b7eb107953f37f2375ae3ab2ca..9e5f0a5de83afb2665f3b0d51b02a91c1cb8c0ef 100644 (file)
@@ -96,7 +96,7 @@ class GenericBinaryOpStub: public CodeStub {
     if (static_operands_type_.IsSmi()) {
       mode_ = NO_OVERWRITE;
     }
-    use_sse3_ = CpuFeatures::IsSupported(SSE3);
+    use_sse3_ = Isolate::Current()->cpu_features()->IsSupported(SSE3);
     ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
   }
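CPU feature probing also becomes per-isolate: the stub constructors record use_sse3_ from Isolate::Current()->cpu_features() rather than from a static CpuFeatures class, and where several features are tested the diff caches the pointer once (see the ICCompareStub hunk above). A minimal sketch of that pattern, with illustrative names:

    // Illustrative only: feature bits are queried through the isolate; when a
    // function tests several features it caches the pointer once.
    enum CpuFeature { SSE2, SSE3, CMOV };

    class CpuFeatures {
     public:
      bool IsSupported(CpuFeature f) const { return ((probed_ >> f) & 1) != 0; }
     private:
      unsigned probed_ = 0;   // filled in by a per-isolate Probe(), not shown
    };

    class Isolate {
     public:
      static Isolate* Current();
      CpuFeatures* cpu_features();
    };

    inline bool SupportsInlinedDoubleCompare() {
      CpuFeatures* features = Isolate::Current()->cpu_features();
      return features->IsSupported(SSE2) && features->IsSupported(CMOV);
    }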
 
@@ -235,7 +235,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
         operands_type_(TRBinaryOpIC::UNINITIALIZED),
         result_type_(TRBinaryOpIC::UNINITIALIZED),
         name_(NULL) {
-    use_sse3_ = CpuFeatures::IsSupported(SSE3);
+    use_sse3_ = Isolate::Current()->cpu_features()->IsSupported(SSE3);
     ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
   }
 
index 2b6a307d74f61cfc407fdd4d5921bc3c78ebec75..ee6f788c53ae9084dc6b048166fa110fb9ce5150 100644 (file)
@@ -154,7 +154,8 @@ CodeGenerator::CodeGenerator(MacroAssembler* masm)
       safe_int32_mode_enabled_(true),
       function_return_is_shadowed_(false),
       in_spilled_code_(false),
-      jit_cookie_((FLAG_mask_constants_with_cookie) ? V8::RandomPrivate() : 0) {
+      jit_cookie_((FLAG_mask_constants_with_cookie) ?
+                  V8::RandomPrivate(Isolate::Current()) : 0) {
 }
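The JIT cookie now draws from V8::RandomPrivate(Isolate::Current()), so the private random stream used for constant masking is kept per isolate. A small sketch of the idea (names and constants are placeholders, not v8's actual generator):

    // Illustrative only: per-isolate private random state.
    class Isolate {
     public:
      unsigned* private_random_seed() { return &seed_; }   // hypothetical state
     private:
      unsigned seed_ = 1;
    };

    unsigned RandomPrivate(Isolate* isolate) {
      unsigned* seed = isolate->private_random_seed();
      *seed = *seed * 1664525u + 1013904223u;   // placeholder LCG
      return *seed;
    }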
 
 
@@ -182,7 +183,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
   ASSERT_EQ(0, loop_nesting_);
   loop_nesting_ = info->is_in_loop() ? 1 : 0;
 
-  JumpTarget::set_compiling_deferred_code(false);
+  Isolate::Current()->set_jump_target_compiling_deferred_code(false);
 
   {
     CodeGenState state(this);
@@ -284,7 +285,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
 
     // Initialize ThisFunction reference if present.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      frame_->Push(Factory::the_hole_value());
+      frame_->Push(FACTORY->the_hole_value());
       StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
     }
 
@@ -320,7 +321,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
     if (!scope()->HasIllegalRedeclaration()) {
       Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-      bool is_builtin = Bootstrapper::IsActive();
+      bool is_builtin = info->isolate()->bootstrapper()->IsActive();
       bool should_trace =
           is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
       if (should_trace) {
@@ -337,7 +338,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
         ASSERT(!function_return_is_shadowed_);
         CodeForReturnPosition(info->function());
         frame_->PrepareForReturn();
-        Result undefined(Factory::undefined_value());
+        Result undefined(FACTORY->undefined_value());
         if (function_return_.is_bound()) {
           function_return_.Jump(&undefined);
         } else {
@@ -369,9 +370,9 @@ void CodeGenerator::Generate(CompilationInfo* info) {
 
   // Process any deferred code using the register allocator.
   if (!HasStackOverflow()) {
-    JumpTarget::set_compiling_deferred_code(true);
+    info->isolate()->set_jump_target_compiling_deferred_code(true);
     ProcessDeferred();
-    JumpTarget::set_compiling_deferred_code(false);
+    info->isolate()->set_jump_target_compiling_deferred_code(false);
   }
 
   // There is no need to delete the register allocator, it is a
@@ -555,7 +556,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
     __ sar(val, 1);
     // If there was an overflow, bits 30 and 31 of the original number disagree.
     __ xor_(val, 0x80000000u);
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope fscope(SSE2);
       __ cvtsi2sd(xmm0, Operand(val));
     } else {
@@ -573,7 +574,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
                           no_reg, &allocation_failed);
     VirtualFrame* clone = new VirtualFrame(frame_);
     scratch.Unuse();
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope fscope(SSE2);
       __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
     } else {
@@ -586,7 +587,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
     RegisterFile empty_regs;
     SetFrame(clone, &empty_regs);
     __ bind(&allocation_failed);
-    if (!CpuFeatures::IsSupported(SSE2)) {
+    if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       // Pop the value from the floating point stack.
       __ fstp(0);
     }
@@ -613,7 +614,7 @@ void CodeGenerator::Load(Expression* expr) {
       safe_int32_mode_enabled() &&
       expr->side_effect_free() &&
       expr->num_bit_ops() > 2 &&
-      CpuFeatures::IsSupported(SSE2)) {
+      Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     BreakTarget unsafe_bailout;
     JumpTarget done;
     unsafe_bailout.set_expected_height(frame_->height());
@@ -634,12 +635,12 @@ void CodeGenerator::Load(Expression* expr) {
     if (dest.false_was_fall_through()) {
       // The false target was just bound.
       JumpTarget loaded;
-      frame_->Push(Factory::false_value());
+      frame_->Push(FACTORY->false_value());
       // There may be dangling jumps to the true target.
       if (true_target.is_linked()) {
         loaded.Jump();
         true_target.Bind();
-        frame_->Push(Factory::true_value());
+        frame_->Push(FACTORY->true_value());
         loaded.Bind();
       }
 
@@ -647,11 +648,11 @@ void CodeGenerator::Load(Expression* expr) {
       // There is true, and possibly false, control flow (with true as
       // the fall through).
       JumpTarget loaded;
-      frame_->Push(Factory::true_value());
+      frame_->Push(FACTORY->true_value());
       if (false_target.is_linked()) {
         loaded.Jump();
         false_target.Bind();
-        frame_->Push(Factory::false_value());
+        frame_->Push(FACTORY->false_value());
         loaded.Bind();
       }
 
@@ -666,14 +667,14 @@ void CodeGenerator::Load(Expression* expr) {
         loaded.Jump();  // Don't lose the current TOS.
         if (true_target.is_linked()) {
           true_target.Bind();
-          frame_->Push(Factory::true_value());
+          frame_->Push(FACTORY->true_value());
           if (false_target.is_linked()) {
             loaded.Jump();
           }
         }
         if (false_target.is_linked()) {
           false_target.Bind();
-          frame_->Push(Factory::false_value());
+          frame_->Push(FACTORY->false_value());
         }
         loaded.Bind();
       }
@@ -751,7 +752,7 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
     // When using lazy arguments allocation, we store the arguments marker value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    frame_->Push(Factory::arguments_marker());
+    frame_->Push(FACTORY->arguments_marker());
   } else {
     ArgumentsAccessStub stub(is_strict_mode()
         ? ArgumentsAccessStub::NEW_STRICT
@@ -783,7 +784,7 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
       // been assigned a proper value.
       skip_arguments = !probe.handle()->IsArgumentsMarker();
     } else {
-      __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker()));
+      __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
       probe.Unuse();
       done.Branch(not_equal);
     }
@@ -914,15 +915,15 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
   } else {
     // Fast case checks.
     // 'false' => false.
-    __ cmp(value.reg(), Factory::false_value());
+    __ cmp(value.reg(), FACTORY->false_value());
     dest->false_target()->Branch(equal);
 
     // 'true' => true.
-    __ cmp(value.reg(), Factory::true_value());
+    __ cmp(value.reg(), FACTORY->true_value());
     dest->true_target()->Branch(equal);
 
     // 'undefined' => false.
-    __ cmp(value.reg(), Factory::undefined_value());
+    __ cmp(value.reg(), FACTORY->undefined_value());
     dest->false_target()->Branch(equal);
 
     // Smi => false iff zero.
@@ -993,7 +994,8 @@ class DeferredInlineBinaryOperation: public DeferredCode {
 
 
 Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
-  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
+  if (Token::IsBitOp(op_) &&
+      Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     return &non_smi_input_;
   } else {
     return entry_label();
@@ -1016,7 +1018,7 @@ void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
 void DeferredInlineBinaryOperation::Generate() {
   // Registers are not saved implicitly for this stub, so we should not
   // tread on the registers that were not passed to us.
-  if (CpuFeatures::IsSupported(SSE2) &&
+  if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
       ((op_ == Token::ADD) ||
        (op_ == Token::SUB) ||
        (op_ == Token::MUL) ||
@@ -1029,7 +1031,7 @@ void DeferredInlineBinaryOperation::Generate() {
       __ j(zero, &left_smi);
       if (!left_info_.IsNumber()) {
         __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
-               Factory::heap_number_map());
+               FACTORY->heap_number_map());
         __ j(not_equal, &call_runtime);
       }
       __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
@@ -1058,7 +1060,7 @@ void DeferredInlineBinaryOperation::Generate() {
       __ j(zero, &right_smi);
       if (!right_info_.IsNumber()) {
         __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
-               Factory::heap_number_map());
+               FACTORY->heap_number_map());
         __ j(not_equal, &call_runtime);
       }
       __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
@@ -1152,7 +1154,7 @@ void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
     // The left_ and right_ registers have not been initialized yet.
     __ mov(right_, Immediate(smi_value_));
     __ mov(left_, Operand(dst_));
-    if (!CpuFeatures::IsSupported(SSE2)) {
+    if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       __ jmp(entry_label());
       return;
     } else {
@@ -1265,7 +1267,8 @@ void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
   // This trashes right_.
   __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
   __ bind(&allocation_ok);
-  if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
+  if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
+      op_ != Token::SHR) {
     CpuFeatures::Scope use_sse2(SSE2);
     ASSERT(Token::IsBitOp(op_));
     // Signed conversion.
@@ -1507,7 +1510,7 @@ Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
 
 
 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
-  Object* answer_object = Heap::undefined_value();
+  Object* answer_object = HEAP->undefined_value();
   switch (op) {
     case Token::ADD:
       if (Smi::IsValid(left + right)) {
@@ -1579,7 +1582,7 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
       UNREACHABLE();
       break;
   }
-  if (answer_object == Heap::undefined_value()) {
+  if (answer_object->IsUndefined()) {
     return false;
   }
   frame_->Push(Handle<Object>(answer_object));
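FoldConstantSmis now seeds answer_object from HEAP->undefined_value() and tests it with the IsUndefined() predicate instead of comparing pointers against a static Heap singleton; the predicate inspects the object itself, so it is correct no matter which isolate's heap the sentinel came from. A minimal illustration of the distinction:

    // Illustrative only: an identity compare needs the right heap's singleton,
    // while a type predicate inspects the object itself.
    class Object {
     public:
      bool IsUndefined() const;          // true only for the undefined oddball
    };

    class Heap {
     public:
      Object* undefined_value();         // a per-isolate singleton in this patch
    };

    // Old:  if (answer_object == Heap::undefined_value()) ...   // static heap
    // New:  if (answer_object->IsUndefined()) ...               // heap-agnostic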
@@ -3028,13 +3031,14 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
       // Jump or fall through to here if we are comparing a non-smi to a
       // constant smi.  If the non-smi is a heap number and this is not
       // a loop condition, inline the floating point code.
-      if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
+      if (!is_loop_condition &&
+          Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
         // Right side is a constant smi and left side has been checked
         // not to be a smi.
         CpuFeatures::Scope use_sse2(SSE2);
         JumpTarget not_number;
         __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
-               Immediate(Factory::heap_number_map()));
+               Immediate(FACTORY->heap_number_map()));
         not_number.Branch(not_equal, left_side);
         __ movdbl(xmm1,
                   FieldOperand(left_reg, HeapNumber::kValueOffset));
@@ -3100,7 +3104,7 @@ static void CheckComparisonOperand(MacroAssembler* masm_,
     __ test(operand->reg(), Immediate(kSmiTagMask));
     __ j(zero, &done);
     __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
-           Immediate(Factory::heap_number_map()));
+           Immediate(FACTORY->heap_number_map()));
     not_numbers->Branch(not_equal, left_side, right_side, not_taken);
     __ bind(&done);
   }
@@ -3167,7 +3171,7 @@ static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
     __ j(zero, &smi);
     if (!operand->type_info().IsNumber()) {
       __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
-             Immediate(Factory::heap_number_map()));
+             Immediate(FACTORY->heap_number_map()));
       not_numbers->Branch(not_equal, left_side, right_side, taken);
     }
     __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
@@ -3192,7 +3196,7 @@ void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
   ASSERT(right_side->is_register());
 
   JumpTarget not_numbers;
-  if (CpuFeatures::IsSupported(SSE2)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     CpuFeatures::Scope use_sse2(SSE2);
 
     // Load left and right operand into registers xmm0 and xmm1 and compare.
@@ -3274,7 +3278,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   // give us a megamorphic load site. Not super, but it works.
   Load(applicand);
   frame()->Dup();
-  Handle<String> name = Factory::LookupAsciiSymbol("apply");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
   frame()->Push(name);
   Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
   __ nop();
@@ -3306,7 +3310,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
     if (probe.is_constant()) {
       try_lazy = probe.handle()->IsArgumentsMarker();
     } else {
-      __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker()));
+      __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
       probe.Unuse();
       __ j(not_equal, &slow);
     }
@@ -3342,7 +3346,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
       __ j(not_equal, &build_args);
       __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
       __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
-      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+      Handle<Code> apply_code(Isolate::Current()->builtins()->builtin(
+          Builtins::FunctionApply));
       __ cmp(Operand(ecx), Immediate(apply_code));
       __ j(not_equal, &build_args);
 
@@ -3569,7 +3574,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
     // 'undefined') because we may have a (legal) redeclaration and we
     // must not destroy the current value.
     if (node->mode() == Variable::CONST) {
-      frame_->EmitPush(Immediate(Factory::the_hole_value()));
+      frame_->EmitPush(Immediate(FACTORY->the_hole_value()));
     } else if (node->fun() != NULL) {
       Load(node->fun());
     } else {
@@ -3585,7 +3590,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
   // If we have a function or a constant, we need to initialize the variable.
   Expression* val = NULL;
   if (node->mode() == Variable::CONST) {
-    val = new Literal(Factory::the_hole_value());
+    val = new Literal(FACTORY->the_hole_value());
   } else {
     val = node->fun();  // NULL if we don't have a function
   }
@@ -4366,9 +4371,9 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
   frame_->EmitPop(eax);
 
   // eax: value to be iterated over
-  __ cmp(eax, Factory::undefined_value());
+  __ cmp(eax, FACTORY->undefined_value());
   exit.Branch(equal);
-  __ cmp(eax, Factory::null_value());
+  __ cmp(eax, FACTORY->null_value());
   exit.Branch(equal);
 
   // Stack layout in body:
@@ -4407,14 +4412,14 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
   loop.Bind();
   // Check that there are no elements.
   __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
-  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
+  __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
   call_runtime.Branch(not_equal);
   // Check that instance descriptors are not empty so that we can
   // check for an enum cache.  Leave the map in ebx for the subsequent
   // prototype load.
   __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
   __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
-  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
+  __ cmp(Operand(edx), Immediate(FACTORY->empty_descriptor_array()));
   call_runtime.Branch(equal);
   // Check that there is an enum cache in the non-empty instance
   // descriptors.  This is the case if the next enumeration index
@@ -4426,12 +4431,12 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
   __ cmp(ecx, Operand(eax));
   check_prototype.Branch(equal);
   __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
-  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
+  __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
   call_runtime.Branch(not_equal);
   check_prototype.Bind();
   // Load the prototype from the map and loop if non-null.
   __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
-  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
+  __ cmp(Operand(ecx), Immediate(FACTORY->null_value()));
   loop.Branch(not_equal);
   // The enum cache is valid.  Load the map of the object being
   // iterated over and use the cache for the iteration.
@@ -4450,7 +4455,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
   // Runtime::kGetPropertyNamesFast)
   __ mov(edx, Operand(eax));
   __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
-  __ cmp(ecx, Factory::meta_map());
+  __ cmp(ecx, FACTORY->meta_map());
   fixed_array.Branch(not_equal);
 
   use_cache.Bind();
@@ -4642,7 +4647,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // Make sure that there's nothing left on the stack above the
   // handler structure.
@@ -4768,7 +4773,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from the try
   // chain and set the state on the frame to FALLING.
@@ -4780,7 +4785,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
 
     // Fake a top of stack value (unneeded when FALLING) and set the
     // state in ecx, then jump around the unlink blocks if any.
-    frame_->EmitPush(Immediate(Factory::undefined_value()));
+    frame_->EmitPush(Immediate(FACTORY->undefined_value()));
     __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
     if (nof_unlinks > 0) {
       finally_block.Jump();
@@ -4823,7 +4828,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
         frame_->EmitPush(eax);
       } else {
         // Fake TOS for targets that shadowed breaks and continues.
-        frame_->EmitPush(Immediate(Factory::undefined_value()));
+        frame_->EmitPush(Immediate(FACTORY->undefined_value()));
       }
       __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
       if (--nof_unlinks > 0) {
@@ -4931,8 +4936,8 @@ Result CodeGenerator::InstantiateFunction(
     frame()->EmitPush(esi);
     frame()->EmitPush(Immediate(function_info));
     frame()->EmitPush(Immediate(pretenure
-                                ? Factory::true_value()
-                                : Factory::false_value()));
+                                ? FACTORY->true_value()
+                                : FACTORY->false_value()));
     return frame()->CallRuntime(Runtime::kNewClosure, 3);
   }
 }
@@ -5040,9 +5045,9 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
     Comment cmnt(masm_, "[ Load const");
     Label exit;
     __ mov(ecx, SlotOperand(slot, ecx));
-    __ cmp(ecx, Factory::the_hole_value());
+    __ cmp(ecx, FACTORY->the_hole_value());
     __ j(not_equal, &exit);
-    __ mov(ecx, Factory::undefined_value());
+    __ mov(ecx, FACTORY->undefined_value());
     __ bind(&exit);
     frame()->EmitPush(ecx);
 
@@ -5092,7 +5097,7 @@ void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
   // indicates that we haven't loaded the arguments object yet, we
   // need to do it now.
   JumpTarget exit;
-  __ cmp(Operand(result.reg()), Immediate(Factory::arguments_marker()));
+  __ cmp(Operand(result.reg()), Immediate(FACTORY->arguments_marker()));
   frame()->Push(&result);
   exit.Branch(not_equal);
 
@@ -5146,7 +5151,7 @@ Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
     __ bind(&next);
     // Terminate at global context.
     __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
-           Immediate(Factory::global_context_map()));
+           Immediate(FACTORY->global_context_map()));
     __ j(equal, &fast);
     // Check that extension is NULL.
     __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
@@ -5206,9 +5211,9 @@ void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
       __ mov(result->reg(),
              ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
       if (potential_slot->var()->mode() == Variable::CONST) {
-        __ cmp(result->reg(), Factory::the_hole_value());
+        __ cmp(result->reg(), FACTORY->the_hole_value());
         done->Branch(not_equal, result);
-        __ mov(result->reg(), Factory::undefined_value());
+        __ mov(result->reg(), FACTORY->undefined_value());
       }
       done->Jump(result);
     } else if (rewrite != NULL) {
@@ -5295,7 +5300,7 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
       VirtualFrame::SpilledScope spilled_scope;
       Comment cmnt(masm_, "[ Init const");
       __ mov(ecx, SlotOperand(slot, ecx));
-      __ cmp(ecx, Factory::the_hole_value());
+      __ cmp(ecx, FACTORY->the_hole_value());
       exit.Branch(not_equal);
     }
 
@@ -5466,7 +5471,7 @@ class DeferredAllocateInNewSpace: public DeferredCode {
                              Register target,
                              int registers_to_save = 0)
     : size_(size), target_(target), registers_to_save_(registers_to_save) {
-    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+    ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
     ASSERT_EQ(0, registers_to_save & target.bit());
     set_comment("[ DeferredAllocateInNewSpace");
   }
@@ -5527,7 +5532,7 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
   // jump to the deferred code passing the literals array.
   DeferredRegExpLiteral* deferred =
       new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
-  __ cmp(boilerplate.reg(), Factory::undefined_value());
+  __ cmp(boilerplate.reg(), FACTORY->undefined_value());
   deferred->Branch(equal);
   deferred->BindExit();
 
@@ -5685,11 +5690,11 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
   frame_->Push(node->constant_elements());
   int length = node->values()->length();
   Result clone;
-  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     clone = frame_->CallStub(&stub, 3);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+    __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
   } else if (node->depth() > 1) {
     clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -6092,7 +6097,7 @@ void CodeGenerator::VisitCall(Call* node) {
     Load(function);
 
     // Allocate a frame slot for the receiver.
-    frame_->Push(Factory::undefined_value());
+    frame_->Push(FACTORY->undefined_value());
 
     // Load the arguments.
     int arg_count = args->length();
@@ -6124,7 +6129,7 @@ void CodeGenerator::VisitCall(Call* node) {
       if (arg_count > 0) {
         frame_->PushElementAt(arg_count);
       } else {
-        frame_->Push(Factory::undefined_value());
+        frame_->Push(FACTORY->undefined_value());
       }
       frame_->PushParameterAt(-1);
 
@@ -6146,7 +6151,7 @@ void CodeGenerator::VisitCall(Call* node) {
     if (arg_count > 0) {
       frame_->PushElementAt(arg_count);
     } else {
-      frame_->Push(Factory::undefined_value());
+      frame_->Push(FACTORY->undefined_value());
     }
     frame_->PushParameterAt(-1);
 
@@ -6440,7 +6445,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
   }
 #endif
   // Finally, we're expected to leave a value on the top of the stack.
-  frame_->Push(Factory::undefined_value());
+  frame_->Push(FACTORY->undefined_value());
 }
 
 
@@ -6483,13 +6488,13 @@ class DeferredStringCharCodeAt : public DeferredCode {
     __ bind(&need_conversion_);
     // Move the undefined value into the result register, which will
     // trigger conversion.
-    __ Set(result_, Immediate(Factory::undefined_value()));
+    __ Set(result_, Immediate(FACTORY->undefined_value()));
     __ jmp(exit_label());
 
     __ bind(&index_out_of_range_);
     // When the index is out of range, the spec requires us to return
     // NaN.
-    __ Set(result_, Immediate(Factory::nan_value()));
+    __ Set(result_, Immediate(FACTORY->nan_value()));
     __ jmp(exit_label());
   }
 
@@ -6612,7 +6617,7 @@ class DeferredStringCharAt : public DeferredCode {
     __ bind(&index_out_of_range_);
     // When the index is out of range, the spec requires us to return
     // the empty string.
-    __ Set(result_, Immediate(Factory::empty_string()));
+    __ Set(result_, Immediate(FACTORY->empty_string()));
     __ jmp(exit_label());
   }
 
@@ -6730,7 +6735,7 @@ void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
   __ sar(array_length, 1);
   __ j(not_zero, &non_trivial_array);
-  __ mov(result_operand, Factory::empty_string());
+  __ mov(result_operand, FACTORY->empty_string());
   __ jmp(&done);
 
   // Save the array length.
@@ -6941,7 +6946,7 @@ void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
 
 
   __ bind(&bailout);
-  __ mov(result_operand, Factory::undefined_value());
+  __ mov(result_operand, FACTORY->undefined_value());
   __ bind(&done);
   __ mov(eax, result_operand);
   // Drop temp values from the stack, and restore context register.
@@ -6982,7 +6987,7 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
 
   __ test(obj.reg(), Immediate(kSmiTagMask));
   destination()->false_target()->Branch(zero);
-  __ cmp(obj.reg(), Factory::null_value());
+  __ cmp(obj.reg(), FACTORY->null_value());
   destination()->true_target()->Branch(equal);
 
   Result map = allocator()->Allocate();
@@ -7053,7 +7058,7 @@ class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
     // Check for fast case object. Generate false result for slow case object.
     __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
     __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
-    __ cmp(scratch1_, Factory::hash_table_map());
+    __ cmp(scratch1_, FACTORY->hash_table_map());
     __ j(equal, &false_result);
 
     // Look for valueOf symbol in the descriptor array, and indicate false if
@@ -7080,7 +7085,7 @@ class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
     __ jmp(&entry);
     __ bind(&loop);
     __ mov(scratch2_, FieldOperand(map_result_, 0));
-    __ cmp(scratch2_, Factory::value_of_symbol());
+    __ cmp(scratch2_, FACTORY->value_of_symbol());
     __ j(equal, &false_result);
     __ add(Operand(map_result_), Immediate(kPointerSize));
     __ bind(&entry);
@@ -7295,17 +7300,17 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
 
   // Functions have class 'Function'.
   function.Bind();
-  frame_->Push(Factory::function_class_symbol());
+  frame_->Push(FACTORY->function_class_symbol());
   leave.Jump();
 
   // Objects with a non-function constructor have class 'Object'.
   non_function_constructor.Bind();
-  frame_->Push(Factory::Object_symbol());
+  frame_->Push(FACTORY->Object_symbol());
   leave.Jump();
 
   // Non-JS objects have class null.
   null.Bind();
-  frame_->Push(Factory::null_value());
+  frame_->Push(FACTORY->null_value());
 
   // All done.
   leave.Bind();
@@ -7447,7 +7452,7 @@ void CodeGenerator::GenerateRandomHeapNumber(
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   // This is implemented on both SSE2 and FPU.
-  if (CpuFeatures::IsSupported(SSE2)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     CpuFeatures::Scope fscope(SSE2);
     __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
     __ movd(xmm1, Operand(ebx));
@@ -7664,10 +7669,10 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      Isolate::Current()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
-    frame_->Push(Factory::undefined_value());
+    frame_->Push(FACTORY->undefined_value());
     return;
   }
 
@@ -7784,7 +7789,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
   // Check the object's elements are in fast case and writable.
   __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
   __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(FACTORY->fixed_array_map()));
   deferred->Branch(not_equal);
 
   // Smi-tagging is equivalent to multiplying by 2.
@@ -7825,7 +7830,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
   __ bind(&done);
 
   deferred->BindExit();
-  frame_->Push(Factory::undefined_value());
+  frame_->Push(FACTORY->undefined_value());
 }
 
 
@@ -7853,7 +7858,7 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 2);
   Load(args->at(0));
   Load(args->at(1));
-  if (!CpuFeatures::IsSupported(SSE2)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
     frame_->Push(&res);
   } else {
@@ -7894,7 +7899,7 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
     // exponent is smi and base is a heapnumber.
     __ bind(&base_nonsmi);
     __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           FACTORY->heap_number_map());
     call_runtime.Branch(not_equal);
 
     __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
@@ -7945,7 +7950,7 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
     // on doubles.
     __ bind(&exponent_nonsmi);
     __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           FACTORY->heap_number_map());
     call_runtime.Branch(not_equal);
     __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
     // Test if exponent is nan.
@@ -7961,7 +7966,7 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
     __ jmp(&handle_special_cases);
     __ bind(&base_not_smi);
     __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           FACTORY->heap_number_map());
     call_runtime.Branch(not_equal);
     __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
     __ and_(answer.reg(), HeapNumber::kExponentMask);
@@ -8070,7 +8075,7 @@ void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
 
-  if (!CpuFeatures::IsSupported(SSE2)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
     frame()->Push(&result);
   } else {
@@ -8092,7 +8097,7 @@ void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
     __ jmp(&load_done);
     __ bind(&non_smi);
     __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           FACTORY->heap_number_map());
     __ j(not_equal, &runtime);
     __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
 
@@ -8198,7 +8203,7 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
 
   ZoneList<Expression*>* args = node->arguments();
   Comment cmnt(masm_, "[ CallRuntime");
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
 
   if (function == NULL) {
     // Push the builtins object found in the current global object.
@@ -8281,12 +8286,12 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
       } else {
         // Default: Result of deleting non-global, not dynamically
         // introduced variables is false.
-        frame_->Push(Factory::false_value());
+        frame_->Push(FACTORY->false_value());
       }
     } else {
       // Default: Result of deleting expressions is true.
       Load(node->expression());  // may have side-effects
-      frame_->SetElementAt(0, Factory::true_value());
+      frame_->SetElementAt(0, FACTORY->true_value());
     }
 
   } else if (op == Token::TYPEOF) {
@@ -8307,10 +8312,10 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
         expression->AsLiteral()->IsNull())) {
       // Omit evaluating the value of the primitive literal.
       // It will be discarded anyway, and can have no side effect.
-      frame_->Push(Factory::undefined_value());
+      frame_->Push(FACTORY->undefined_value());
     } else {
       Load(node->expression());
-      frame_->SetElementAt(0, Factory::undefined_value());
+      frame_->SetElementAt(0, FACTORY->undefined_value());
     }
 
   } else {
@@ -9112,16 +9117,16 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
     Result answer = frame_->Pop();
     answer.ToRegister();
 
-    if (check->Equals(Heap::number_symbol())) {
+    if (check->Equals(HEAP->number_symbol())) {
       __ test(answer.reg(), Immediate(kSmiTagMask));
       destination()->true_target()->Branch(zero);
       frame_->Spill(answer.reg());
       __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
-      __ cmp(answer.reg(), Factory::heap_number_map());
+      __ cmp(answer.reg(), FACTORY->heap_number_map());
       answer.Unuse();
       destination()->Split(equal);
 
-    } else if (check->Equals(Heap::string_symbol())) {
+    } else if (check->Equals(HEAP->string_symbol())) {
       __ test(answer.reg(), Immediate(kSmiTagMask));
       destination()->false_target()->Branch(zero);
 
@@ -9137,15 +9142,15 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(below);
 
-    } else if (check->Equals(Heap::boolean_symbol())) {
-      __ cmp(answer.reg(), Factory::true_value());
+    } else if (check->Equals(HEAP->boolean_symbol())) {
+      __ cmp(answer.reg(), FACTORY->true_value());
       destination()->true_target()->Branch(equal);
-      __ cmp(answer.reg(), Factory::false_value());
+      __ cmp(answer.reg(), FACTORY->false_value());
       answer.Unuse();
       destination()->Split(equal);
 
-    } else if (check->Equals(Heap::undefined_symbol())) {
-      __ cmp(answer.reg(), Factory::undefined_value());
+    } else if (check->Equals(HEAP->undefined_symbol())) {
+      __ cmp(answer.reg(), FACTORY->undefined_value());
       destination()->true_target()->Branch(equal);
 
       __ test(answer.reg(), Immediate(kSmiTagMask));
@@ -9159,7 +9164,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(not_zero);
 
-    } else if (check->Equals(Heap::function_symbol())) {
+    } else if (check->Equals(HEAP->function_symbol())) {
       __ test(answer.reg(), Immediate(kSmiTagMask));
       destination()->false_target()->Branch(zero);
       frame_->Spill(answer.reg());
@@ -9169,10 +9174,10 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
       answer.Unuse();
       destination()->Split(equal);
-    } else if (check->Equals(Heap::object_symbol())) {
+    } else if (check->Equals(HEAP->object_symbol())) {
       __ test(answer.reg(), Immediate(kSmiTagMask));
       destination()->false_target()->Branch(zero);
-      __ cmp(answer.reg(), Factory::null_value());
+      __ cmp(answer.reg(), FACTORY->null_value());
       destination()->true_target()->Branch(equal);
 
       Result map = allocator()->Allocate();
@@ -9215,7 +9220,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       Result scratch = allocator()->Allocate();
       ASSERT(scratch.is_valid());
       __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
-      __ cmp(scratch.reg(), Factory::heap_number_map());
+      __ cmp(scratch.reg(), FACTORY->heap_number_map());
       JumpTarget not_a_number;
       not_a_number.Branch(not_equal, &lhs);
       __ mov(scratch.reg(),
@@ -9302,7 +9307,7 @@ void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
   Load(node->expression());
   Result operand = frame_->Pop();
   operand.ToRegister();
-  __ cmp(operand.reg(), Factory::null_value());
+  __ cmp(operand.reg(), FACTORY->null_value());
   if (node->is_strict()) {
     operand.Unuse();
     destination()->Split(equal);
@@ -9310,7 +9315,7 @@ void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
     // The 'null' value is only equal to 'undefined' if using non-strict
     // comparisons.
     destination()->true_target()->Branch(equal);
-    __ cmp(operand.reg(), Factory::undefined_value());
+    __ cmp(operand.reg(), FACTORY->undefined_value());
     destination()->true_target()->Branch(equal);
     __ test(operand.reg(), Immediate(kSmiTagMask));
     destination()->false_target()->Branch(equal);
@@ -9383,7 +9388,8 @@ void DeferredReferenceGetNamedValue::Generate() {
     __ mov(eax, receiver_);
   }
   __ Set(ecx, Immediate(name_));
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   RelocInfo::Mode mode = is_contextual_
       ? RelocInfo::CODE_TARGET_CONTEXT
       : RelocInfo::CODE_TARGET;
@@ -9402,13 +9408,13 @@ void DeferredReferenceGetNamedValue::Generate() {
   // instruction that gets patched and coverage code gets in the way.
   if (is_contextual_) {
     masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
-    __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
+    __ IncrementCounter(COUNTERS->named_load_global_inline_miss(), 1);
     if (is_dont_delete_) {
-      __ IncrementCounter(&Counters::dont_delete_hint_miss, 1);
+      __ IncrementCounter(COUNTERS->dont_delete_hint_miss(), 1);
     }
   } else {
     masm_->test(eax, Immediate(-delta_to_patch_site));
-    __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+    __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);
   }
 
   if (!dst_.is(eax)) __ mov(dst_, eax);
@@ -9462,7 +9468,8 @@ void DeferredReferenceGetKeyedValue::Generate() {
   // it in the IC initialization code and patch the cmp instruction.
   // This means that we cannot allow test instructions after calls to
   // KeyedLoadIC stubs in other places.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   __ call(ic, RelocInfo::CODE_TARGET);
   // The delta from the start of the map-compare instruction to the
   // test instruction.  We use masm_-> directly here instead of the __
@@ -9473,7 +9480,7 @@ void DeferredReferenceGetKeyedValue::Generate() {
   // Here we use masm_-> instead of the __ macro because this is the
   // instruction that gets patched and coverage code gets in the way.
   masm_->test(eax, Immediate(-delta_to_patch_site));
-  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);
 
   if (!dst_.is(eax)) __ mov(dst_, eax);
 }
@@ -9509,7 +9516,7 @@ class DeferredReferenceSetKeyedValue: public DeferredCode {
 
 
 void DeferredReferenceSetKeyedValue::Generate() {
-  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
   // Move value_ to eax, key_ to ecx, and receiver_ to edx.
   Register old_value = value_;
 
@@ -9563,7 +9570,7 @@ void DeferredReferenceSetKeyedValue::Generate() {
   }
 
   // Call the IC stub.
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                     : Builtins::KeyedStoreIC_Initialize));
   __ call(ic, RelocInfo::CODE_TARGET);
@@ -9588,7 +9595,7 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 
   bool contextual_load_in_builtin =
       is_contextual &&
-      (Bootstrapper::IsActive() ||
+      (Isolate::Current()->bootstrapper()->IsActive() ||
        (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
 
   Result result;
@@ -9634,7 +9641,7 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     // use the double underscore macro that may insert instructions).
     // Initially use an invalid map to force a failure.
     masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
-                Immediate(Factory::null_value()));
+                Immediate(FACTORY->null_value()));
     // This branch is always a forwards branch so it's always a fixed size
     // which allows the assert below to succeed and patching to work.
     deferred->Branch(not_equal);
@@ -9646,14 +9653,16 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
 
     if (is_contextual) {
       // Load the (initially invalid) cell and get its value.
-      masm()->mov(result.reg(), Factory::null_value());
+      masm()->mov(result.reg(), FACTORY->null_value());
       if (FLAG_debug_code) {
         __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
-               Factory::global_property_cell_map());
+               FACTORY->global_property_cell_map());
         __ Assert(equal, "Uninitialized inlined contextual load");
       }
       __ mov(result.reg(),
              FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
+      __ cmp(result.reg(), FACTORY->the_hole_value());
+      deferred->Branch(equal);
       bool is_dont_delete = false;
       if (!info_->closure().is_null()) {
         // When doing lazy compilation we can check if the global cell
@@ -9672,15 +9681,15 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
       }
       deferred->set_is_dont_delete(is_dont_delete);
       if (!is_dont_delete) {
-        __ cmp(result.reg(), Factory::the_hole_value());
+        __ cmp(result.reg(), FACTORY->the_hole_value());
         deferred->Branch(equal);
       } else if (FLAG_debug_code) {
-        __ cmp(result.reg(), Factory::the_hole_value());
+        __ cmp(result.reg(), FACTORY->the_hole_value());
         __ Check(not_equal, "DontDelete cells can't contain the hole");
       }
-      __ IncrementCounter(&Counters::named_load_global_inline, 1);
+      __ IncrementCounter(COUNTERS->named_load_global_inline(), 1);
       if (is_dont_delete) {
-        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1);
+        __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1);
       }
     } else {
       // The initial (invalid) offset has to be large enough to force a 32-bit
@@ -9688,7 +9697,7 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
       // kMaxInt (minus kHeapObjectTag).
       int offset = kMaxInt;
       masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
-      __ IncrementCounter(&Counters::named_load_inline, 1);
+      __ IncrementCounter(COUNTERS->named_load_inline(), 1);
     }
 
     deferred->BindExit();
@@ -9734,7 +9743,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
     // Initially use an invalid map to force a failure.
     __ bind(&patch_site);
     masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
-                Immediate(Factory::null_value()));
+                Immediate(FACTORY->null_value()));
     // This branch is always a forwards branch so it's always a fixed size
     // which allows the assert below to succeed and patching to work.
     slow.Branch(not_equal, &value, &receiver);
@@ -9844,7 +9853,7 @@ Result CodeGenerator::EmitKeyedLoad() {
     // Use masm-> here instead of the double underscore macro since extra
     // coverage code can interfere with the patching.
     masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
-               Immediate(Factory::null_value()));
+               Immediate(FACTORY->null_value()));
     deferred->Branch(not_equal);
 
     // Check that the key is a smi.
@@ -9874,9 +9883,9 @@ Result CodeGenerator::EmitKeyedLoad() {
                         times_2,
                         FixedArray::kHeaderSize));
     result = elements;
-    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
+    __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value()));
     deferred->Branch(equal);
-    __ IncrementCounter(&Counters::keyed_load_inline, 1);
+    __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);
 
     deferred->BindExit();
   } else {
@@ -9968,7 +9977,7 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
     // which will allow the debugger to break for fast case stores.
     __ bind(deferred->patch_site());
     __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
-           Immediate(Factory::fixed_array_map()));
+           Immediate(FACTORY->fixed_array_map()));
     deferred->Branch(not_equal);
 
     // Check that the key is within bounds.  Both the key and the length of
@@ -9981,7 +9990,7 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
 
     // Store the value.
     __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
-    __ IncrementCounter(&Counters::keyed_store_inline, 1);
+    __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);
 
     deferred->BindExit();
   } else {
@@ -10184,7 +10193,7 @@ MemCopyFunction CreateMemCopyFunction() {
     __ int3();
     __ bind(&ok);
   }
-  if (CpuFeatures::IsSupported(SSE2)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
     CpuFeatures::Scope enable(SSE2);
     __ push(edi);
     __ push(esi);
@@ -10212,7 +10221,7 @@ MemCopyFunction CreateMemCopyFunction() {
     __ test(Operand(src), Immediate(0x0F));
     __ j(not_zero, &unaligned_source);
     {
-      __ IncrementCounter(&Counters::memcopy_aligned, 1);
+      __ IncrementCounter(COUNTERS->memcopy_aligned(), 1);
       // Copy loop for aligned source and destination.
       __ mov(edx, count);
       Register loop_count = ecx;
@@ -10260,7 +10269,7 @@ MemCopyFunction CreateMemCopyFunction() {
       // Copy loop for unaligned source and aligned destination.
       // If source is not aligned, we can't read it as efficiently.
       __ bind(&unaligned_source);
-      __ IncrementCounter(&Counters::memcopy_unaligned, 1);
+      __ IncrementCounter(COUNTERS->memcopy_unaligned(), 1);
       __ mov(edx, ecx);
       Register loop_count = ecx;
       Register count = edx;
@@ -10304,7 +10313,7 @@ MemCopyFunction CreateMemCopyFunction() {
     }
 
   } else {
-    __ IncrementCounter(&Counters::memcopy_noxmm, 1);
+    __ IncrementCounter(COUNTERS->memcopy_noxmm(), 1);
     // SSE2 not supported. Unlikely to happen in practice.
     __ push(edi);
     __ push(esi);
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index da967c0962e2dbb7bb1024503b8f938445842ab0..acd651b4abf52629501397923f378dbf14313aa0 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -780,6 +780,7 @@ class CodeGenerator: public AstVisitor {
   int jit_cookie_;
 
   friend class VirtualFrame;
+  friend class Isolate;
   friend class JumpTarget;
   friend class Reference;
   friend class Result;
@@ -789,6 +790,7 @@ class CodeGenerator: public AstVisitor {
   friend class LCodeGen;
 
   friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc
+  friend class InlineRuntimeFunctionsTable;
 
   DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
 };
diff --git a/src/ia32/cpu-ia32.cc b/src/ia32/cpu-ia32.cc
index d64257f39e9c34d559939a8698f356c4f96daf0d..286ed7b80137255cab9c495614733bcc42413fba 100644
--- a/src/ia32/cpu-ia32.cc
+++ b/src/ia32/cpu-ia32.cc
@@ -42,9 +42,10 @@ namespace v8 {
 namespace internal {
 
 void CPU::Setup() {
-  CpuFeatures::Clear();
-  CpuFeatures::Probe(true);
-  if (!CpuFeatures::IsSupported(SSE2) || Serializer::enabled()) {
+  CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+  cpu_features->Clear();
+  cpu_features->Probe(true);
+  if (!cpu_features->IsSupported(SSE2) || Serializer::enabled()) {
     V8::DisableCrankshaft();
   }
 }
diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc
index 678cc931157bcb1f1149205790374ef75d5958b7..72edaa797e419eb05b4cd9ab4a689af642cd4284 100644
--- a/src/ia32/debug-ia32.cc
+++ b/src/ia32/debug-ia32.cc
@@ -49,7 +49,8 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
 void BreakLocationIterator::SetDebugBreakAtReturn() {
   ASSERT(Assembler::kJSReturnSequenceLength >=
          Assembler::kCallInstructionLength);
-  rinfo()->PatchCodeWithCall(Debug::debug_break_return()->entry(),
+  Isolate* isolate = Isolate::Current();
+  rinfo()->PatchCodeWithCall(isolate->debug()->debug_break_return()->entry(),
       Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
 }
 
@@ -78,8 +79,9 @@ bool BreakLocationIterator::IsDebugBreakAtSlot() {
 
 void BreakLocationIterator::SetDebugBreakAtSlot() {
   ASSERT(IsDebugBreakSlot());
+  Isolate* isolate = Isolate::Current();
   rinfo()->PatchCodeWithCall(
-      Debug::debug_break_slot()->entry(),
+      isolate->debug()->debug_break_slot()->entry(),
       Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
 }
 
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 49308bf593a7d7af3ef1497d6860fbca7b7ac21e..82d3f1e76ccf6f88542db935646f58270ebe8239 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -119,12 +119,13 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
   // a non-live object in the extra space at the end of the former reloc info.
   Address junk_address = reloc_info->address() + reloc_info->Size();
   ASSERT(junk_address <= reloc_end_address);
-  Heap::CreateFillerObjectAt(junk_address, reloc_end_address - junk_address);
+  HEAP->CreateFillerObjectAt(junk_address, reloc_end_address - junk_address);
 
   // Add the deoptimizing code to the list.
   DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
-  node->set_next(deoptimizing_code_list_);
-  deoptimizing_code_list_ = node;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  node->set_next(data->deoptimizing_code_list_);
+  data->deoptimizing_code_list_ = node;
 
   // Set the code for the function to non-optimized version.
   function->ReplaceCode(function->shared()->code());
@@ -323,7 +324,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
         optimized_code_->entry() + pc_offset);
     output_[0]->SetPc(pc);
   }
-  Code* continuation = Builtins::builtin(Builtins::NotifyOSR);
+  Code* continuation =
+      Isolate::Current()->builtins()->builtin(Builtins::NotifyOSR);
   output_[0]->SetContinuation(
       reinterpret_cast<uint32_t>(continuation->entry()));
 
@@ -490,9 +492,10 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
 
   // Set the continuation for the topmost frame.
   if (is_topmost) {
+    Builtins* builtins = isolate_->builtins();
     Code* continuation = (bailout_type_ == EAGER)
-        ? Builtins::builtin(Builtins::NotifyDeoptimized)
-        : Builtins::builtin(Builtins::NotifyLazyDeoptimized);
+        ? builtins->builtin(Builtins::NotifyDeoptimized)
+        : builtins->builtin(Builtins::NotifyLazyDeoptimized);
     output_frame->SetContinuation(
         reinterpret_cast<uint32_t>(continuation->entry()));
   }
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index a7d38ce3bf7be080fa29c25e538b0d1c9848819a..cc2f8495f7810810b58baa10a50100dce4f25c8d 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -1518,9 +1518,8 @@ static const char* xmm_regs[8] = {
 
 
 const char* NameConverter::NameOfAddress(byte* addr) const {
-  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
-  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
-  return tmp_buffer.start();
+  v8::internal::OS::SNPrintF(tmp_buffer_, "%p", addr);
+  return tmp_buffer_.start();
 }
 
 
diff --git a/src/ia32/frames-ia32.h b/src/ia32/frames-ia32.h
index 80846949a0d67960c07394fcfc6c8cb2b493806c..69f9a007212f27df0e953322d0fab4b9d29c50cf 100644
--- a/src/ia32/frames-ia32.h
+++ b/src/ia32/frames-ia32.h
@@ -28,6 +28,8 @@
 #ifndef V8_IA32_FRAMES_IA32_H_
 #define V8_IA32_FRAMES_IA32_H_
 
+#include "memory.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index f40254cfdd7a1ff35434a3824376a416cc619397..0516581f859f32b2219bffaed292276f6cc364af 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -129,9 +129,9 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = scope()->num_stack_slots();
     if (locals_count == 1) {
-      __ push(Immediate(Factory::undefined_value()));
+      __ push(Immediate(isolate()->factory()->undefined_value()));
     } else if (locals_count > 1) {
-      __ mov(eax, Immediate(Factory::undefined_value()));
+      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
       for (int i = 0; i < locals_count; i++) {
         __ push(eax);
       }
@@ -252,7 +252,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   // Always emit a 'return undefined' in case control fell off the end of
   // the body.
   { Comment cmnt(masm_, "[ return <undefined>;");
-    __ mov(eax, Factory::undefined_value());
+    __ mov(eax, isolate()->factory()->undefined_value());
     EmitReturnSequence();
   }
 }
@@ -467,10 +467,10 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
     Label* materialize_false) const {
   NearLabel done;
   __ bind(materialize_true);
-  __ mov(result_register(), Factory::true_value());
+  __ mov(result_register(), isolate()->factory()->true_value());
   __ jmp(&done);
   __ bind(materialize_false);
-  __ mov(result_register(), Factory::false_value());
+  __ mov(result_register(), isolate()->factory()->false_value());
   __ bind(&done);
 }
 
@@ -480,10 +480,10 @@ void FullCodeGenerator::StackValueContext::Plug(
     Label* materialize_false) const {
   NearLabel done;
   __ bind(materialize_true);
-  __ push(Immediate(Factory::true_value()));
+  __ push(Immediate(isolate()->factory()->true_value()));
   __ jmp(&done);
   __ bind(materialize_false);
-  __ push(Immediate(Factory::false_value()));
+  __ push(Immediate(isolate()->factory()->false_value()));
   __ bind(&done);
 }
 
@@ -500,15 +500,17 @@ void FullCodeGenerator::EffectContext::Plug(bool flag) const {
 
 
 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
-  Handle<Object> value =
-      flag ? Factory::true_value() : Factory::false_value();
+  Handle<Object> value = flag
+      ? isolate()->factory()->true_value()
+      : isolate()->factory()->false_value();
   __ mov(result_register(), value);
 }
 
 
 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
-  Handle<Object> value =
-      flag ? Factory::true_value() : Factory::false_value();
+  Handle<Object> value = flag
+      ? isolate()->factory()->true_value()
+      : isolate()->factory()->false_value();
   __ push(Immediate(value));
 }
 
@@ -530,11 +532,11 @@ void FullCodeGenerator::DoTest(Label* if_true,
                                Label* if_false,
                                Label* fall_through) {
   // Emit the inlined tests assumed by the stub.
-  __ cmp(result_register(), Factory::undefined_value());
+  __ cmp(result_register(), isolate()->factory()->undefined_value());
   __ j(equal, if_false);
-  __ cmp(result_register(), Factory::true_value());
+  __ cmp(result_register(), isolate()->factory()->true_value());
   __ j(equal, if_true);
-  __ cmp(result_register(), Factory::false_value());
+  __ cmp(result_register(), isolate()->factory()->false_value());
   __ j(equal, if_false);
   STATIC_ASSERT(kSmiTag == 0);
   __ test(result_register(), Operand(result_register()));
@@ -629,7 +631,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
   }
 
   if (should_normalize) {
-    __ cmp(eax, Factory::true_value());
+    __ cmp(eax, isolate()->factory()->true_value());
     Split(equal, if_true, if_false, NULL);
     __ bind(&skip);
   }
@@ -650,7 +652,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
       case Slot::LOCAL:
         if (mode == Variable::CONST) {
           __ mov(Operand(ebp, SlotOffset(slot)),
-                 Immediate(Factory::the_hole_value()));
+                 Immediate(isolate()->factory()->the_hole_value()));
         } else if (function != NULL) {
           VisitForAccumulatorValue(function);
           __ mov(Operand(ebp, SlotOffset(slot)), result_register());
@@ -672,7 +674,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         }
         if (mode == Variable::CONST) {
           __ mov(ContextOperand(esi, slot->index()),
-                 Immediate(Factory::the_hole_value()));
+                 Immediate(isolate()->factory()->the_hole_value()));
           // No write barrier since the hole value is in old space.
         } else if (function != NULL) {
           VisitForAccumulatorValue(function);
@@ -695,7 +697,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         // 'undefined') because we may have a (legal) redeclaration and we
         // must not destroy the current value.
         if (mode == Variable::CONST) {
-          __ push(Immediate(Factory::the_hole_value()));
+          __ push(Immediate(isolate()->factory()->the_hole_value()));
         } else if (function != NULL) {
           VisitForStackValue(function);
         } else {
@@ -723,15 +725,15 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         __ pop(edx);
       } else {
         __ mov(edx, eax);
-        __ mov(eax, Factory::the_hole_value());
+        __ mov(eax, isolate()->factory()->the_hole_value());
       }
       ASSERT(prop->key()->AsLiteral() != NULL &&
              prop->key()->AsLiteral()->handle()->IsSmi());
       __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
 
-      Handle<Code> ic(Builtins::builtin(
-          is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
-                           : Builtins::KeyedStoreIC_Initialize));
+      Handle<Code> ic(isolate()->builtins()->builtin(is_strict_mode()
+          ? Builtins::KeyedStoreIC_Initialize_Strict
+          : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
     }
   }
@@ -848,9 +850,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // ignore null and undefined in contrast to the specification; see
   // ECMA-262 section 12.6.4.
   VisitForAccumulatorValue(stmt->enumerable());
-  __ cmp(eax, Factory::undefined_value());
+  __ cmp(eax, isolate()->factory()->undefined_value());
   __ j(equal, &exit);
-  __ cmp(eax, Factory::null_value());
+  __ cmp(eax, isolate()->factory()->null_value());
   __ j(equal, &exit);
 
   // Convert the object to a JS object.
@@ -876,7 +878,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // Check that there are no elements.  Register ecx contains the
   // current JS object we've reached through the prototype chain.
   __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
-         Factory::empty_fixed_array());
+         isolate()->factory()->empty_fixed_array());
   __ j(not_equal, &call_runtime);
 
   // Check that instance descriptors are not empty so that we can
@@ -884,7 +886,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // prototype load.
   __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
   __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
-  __ cmp(edx, Factory::empty_descriptor_array());
+  __ cmp(edx, isolate()->factory()->empty_descriptor_array());
   __ j(equal, &call_runtime);
 
   // Check that there is an enum cache in the non-empty instance
@@ -899,13 +901,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ cmp(ecx, Operand(eax));
   __ j(equal, &check_prototype);
   __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
-  __ cmp(edx, Factory::empty_fixed_array());
+  __ cmp(edx, isolate()->factory()->empty_fixed_array());
   __ j(not_equal, &call_runtime);
 
   // Load the prototype from the map and loop if non-null.
   __ bind(&check_prototype);
   __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
-  __ cmp(ecx, Factory::null_value());
+  __ cmp(ecx, isolate()->factory()->null_value());
   __ j(not_equal, &next);
 
   // The enum cache is valid.  Load the map of the object being
@@ -923,7 +925,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // modification check. Otherwise, we got a fixed array, and we have
   // to do a slow check.
   NearLabel fixed_array;
-  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::meta_map());
+  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
+         isolate()->factory()->meta_map());
   __ j(not_equal, &fixed_array);
 
   // We got a map in register eax. Get the enumeration cache from it.
@@ -1029,8 +1032,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
     __ push(esi);
     __ push(Immediate(info));
     __ push(Immediate(pretenure
-                      ? Factory::true_value()
-                      : Factory::false_value()));
+                      ? isolate()->factory()->true_value()
+                      : isolate()->factory()->false_value()));
     __ CallRuntime(Runtime::kNewClosure, 3);
   }
   context()->Plug(eax);
@@ -1082,7 +1085,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
     __ bind(&next);
     // Terminate at global context.
     __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
-           Immediate(Factory::global_context_map()));
+           Immediate(isolate()->factory()->global_context_map()));
     __ j(equal, &fast);
     // Check that extension is NULL.
     __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
@@ -1098,7 +1101,8 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
   // load IC call.
   __ mov(eax, GlobalObjectOperand());
   __ mov(ecx, slot->var()->name());
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
@@ -1159,9 +1163,9 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
       __ mov(eax,
              ContextSlotOperandCheckExtensions(potential_slot, slow));
       if (potential_slot->var()->mode() == Variable::CONST) {
-        __ cmp(eax, Factory::the_hole_value());
+        __ cmp(eax, isolate()->factory()->the_hole_value());
         __ j(not_equal, done);
-        __ mov(eax, Factory::undefined_value());
+        __ mov(eax, isolate()->factory()->undefined_value());
       }
       __ jmp(done);
     } else if (rewrite != NULL) {
@@ -1181,7 +1185,8 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
                  ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                    slow));
           __ mov(eax, Immediate(key_literal->handle()));
-          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          Handle<Code> ic(isolate()->builtins()->builtin(
+              Builtins::KeyedLoadIC_Initialize));
           EmitCallIC(ic, RelocInfo::CODE_TARGET);
           __ jmp(done);
         }
@@ -1204,7 +1209,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     // object on the stack.
     __ mov(eax, GlobalObjectOperand());
     __ mov(ecx, var->name());
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
     context()->Plug(eax);
 
@@ -1234,9 +1240,9 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
       NearLabel done;
       MemOperand slot_operand = EmitSlotSearch(slot, eax);
       __ mov(eax, slot_operand);
-      __ cmp(eax, Factory::the_hole_value());
+      __ cmp(eax, isolate()->factory()->the_hole_value());
       __ j(not_equal, &done);
-      __ mov(eax, Factory::undefined_value());
+      __ mov(eax, isolate()->factory()->undefined_value());
       __ bind(&done);
       context()->Plug(eax);
     } else {
@@ -1267,7 +1273,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     __ mov(eax, Immediate(key_literal->handle()));
 
     // Do a keyed property load.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
 
     // Drop key and object left on the stack by IC.
@@ -1289,7 +1296,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   int literal_offset =
       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
   __ mov(ebx, FieldOperand(ecx, literal_offset));
-  __ cmp(ebx, Factory::undefined_value());
+  __ cmp(ebx, isolate()->factory()->undefined_value());
   __ j(not_equal, &materialized);
 
   // Create regexp literal using runtime function
@@ -1372,7 +1379,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
             VisitForAccumulatorValue(value);
             __ mov(ecx, Immediate(key->handle()));
             __ mov(edx, Operand(esp, 0));
-            Handle<Code> ic(Builtins::builtin(
+            Handle<Code> ic(isolate()->builtins()->builtin(
                 is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                                  : Builtins::StoreIC_Initialize));
             EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1426,12 +1433,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(expr->literal_index())));
   __ push(Immediate(expr->constant_elements()));
-  if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (expr->constant_elements()->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
     ASSERT(expr->depth() == 1);
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     __ CallStub(&stub);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
   } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1616,14 +1624,16 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
   __ mov(ecx, Immediate(key->handle()));
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
 
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
@@ -1764,7 +1774,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
       __ mov(edx, eax);
       __ pop(eax);  // Restore value.
       __ mov(ecx, prop->key()->AsLiteral()->handle());
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1787,7 +1797,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
         __ pop(edx);
       }
       __ pop(eax);  // Restore value.
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1813,7 +1823,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
     // ecx, and the global object on the stack.
     __ mov(ecx, var->name());
     __ mov(edx, GlobalObjectOperand());
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(isolate()->builtins()->builtin(
         is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
@@ -1832,14 +1842,14 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
         break;
       case Slot::LOCAL:
         __ mov(edx, Operand(ebp, SlotOffset(slot)));
-        __ cmp(edx, Factory::the_hole_value());
+        __ cmp(edx, isolate()->factory()->the_hole_value());
         __ j(not_equal, &skip);
         __ mov(Operand(ebp, SlotOffset(slot)), eax);
         break;
       case Slot::CONTEXT: {
         __ mov(ecx, ContextOperand(esi, Context::FCONTEXT_INDEX));
         __ mov(edx, ContextOperand(ecx, slot->index()));
-        __ cmp(edx, Factory::the_hole_value());
+        __ cmp(edx, isolate()->factory()->the_hole_value());
         __ j(not_equal, &skip);
         __ mov(ContextOperand(ecx, slot->index()), eax);
         int offset = Context::SlotOffset(slot->index());
@@ -1916,7 +1926,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   } else {
     __ pop(edx);
   }
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                        : Builtins::StoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1956,7 +1966,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   }
   // Record source code position before IC call.
   SetSourcePosition(expr->position());
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                        : Builtins::KeyedStoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -2008,7 +2018,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
   // Record source position of the IC call.
   SetSourcePosition(expr->position());
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+      arg_count, in_loop);
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
   // Restore context register.
@@ -2040,7 +2051,8 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   // Record source position of the IC call.
   SetSourcePosition(expr->position());
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
+      arg_count, in_loop);
   __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));  // Key.
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
@@ -2077,7 +2089,7 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
   if (arg_count > 0) {
     __ push(Operand(esp, arg_count * kPointerSize));
   } else {
-    __ push(Immediate(Factory::undefined_value()));
+    __ push(Immediate(FACTORY->undefined_value()));
   }
 
   // Push the receiver of the enclosing function.
@@ -2113,7 +2125,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     { PreservePositionScope pos_scope(masm()->positions_recorder());
       VisitForStackValue(fun);
       // Reserved receiver slot.
-      __ push(Immediate(Factory::undefined_value()));
+      __ push(Immediate(isolate()->factory()->undefined_value()));
 
       // Push the arguments.
       for (int i = 0; i < arg_count; i++) {
@@ -2232,7 +2244,8 @@ void FullCodeGenerator::VisitCall(Call* expr) {
         // Record source code position for IC call.
         SetSourcePosition(prop->position());
 
-        Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+        Handle<Code> ic(isolate()->builtins()->builtin(
+            Builtins::KeyedLoadIC_Initialize));
         EmitCallIC(ic, RelocInfo::CODE_TARGET);
         // Push result (function).
         __ push(eax);
@@ -2253,7 +2266,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     // also use the full code generator.
     FunctionLiteral* lit = fun->AsFunctionLiteral();
     if (lit != NULL &&
-        lit->name()->Equals(Heap::empty_string()) &&
+        lit->name()->Equals(isolate()->heap()->empty_string()) &&
         loop_depth() == 0) {
       lit->set_try_full_codegen(true);
     }
@@ -2300,7 +2313,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Set(eax, Immediate(arg_count));
   __ mov(edi, Operand(esp, arg_count * kPointerSize));
 
-  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> construct_builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
   context()->Plug(eax);
 }
@@ -2360,7 +2374,7 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
 
   __ test(eax, Immediate(kSmiTagMask));
   __ j(zero, if_false);
-  __ cmp(eax, Factory::null_value());
+  __ cmp(eax, isolate()->factory()->null_value());
   __ j(equal, if_true);
   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
   // Undetectable objects behave like undefined when tested with typeof.
@@ -2639,17 +2653,17 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
 
   // Functions have class 'Function'.
   __ bind(&function);
-  __ mov(eax, Factory::function_class_symbol());
+  __ mov(eax, isolate()->factory()->function_class_symbol());
   __ jmp(&done);
 
   // Objects with a non-function constructor have class 'Object'.
   __ bind(&non_function_constructor);
-  __ mov(eax, Factory::Object_symbol());
+  __ mov(eax, isolate()->factory()->Object_symbol());
   __ jmp(&done);
 
   // Non-JS objects have class null.
   __ bind(&null);
-  __ mov(eax, Factory::null_value());
+  __ mov(eax, isolate()->factory()->null_value());
 
   // All done.
   __ bind(&done);
@@ -2675,7 +2689,7 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
   }
 #endif
   // Finally, we're expected to leave a value on the top of the stack.
-  __ mov(eax, Factory::undefined_value());
+  __ mov(eax, isolate()->factory()->undefined_value());
   context()->Plug(eax);
 }
 
@@ -2703,7 +2717,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   // This is implemented on both SSE2 and FPU.
-  if (CpuFeatures::IsSupported(SSE2)) {
+  if (isolate()->cpu_features()->IsSupported(SSE2)) {
     CpuFeatures::Scope fscope(SSE2);
     __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
     __ movd(xmm1, Operand(ebx));
@@ -2872,13 +2886,13 @@ void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
   __ bind(&index_out_of_range);
   // When the index is out of range, the spec requires us to return
   // NaN.
-  __ Set(result, Immediate(Factory::nan_value()));
+  __ Set(result, Immediate(isolate()->factory()->nan_value()));
   __ jmp(&done);
 
   __ bind(&need_conversion);
   // Move the undefined value into the result register, which will
   // trigger conversion.
-  __ Set(result, Immediate(Factory::undefined_value()));
+  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
   __ jmp(&done);
 
   NopRuntimeCallHelper call_helper;
@@ -2921,7 +2935,7 @@ void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
   __ bind(&index_out_of_range);
   // When the index is out of range, the spec requires us to return
   // the empty string.
-  __ Set(result, Immediate(Factory::empty_string()));
+  __ Set(result, Immediate(isolate()->factory()->empty_string()));
   __ jmp(&done);
 
   __ bind(&need_conversion);
@@ -3060,7 +3074,7 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
   // Check the object's elements are in fast case and writable.
   __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
   __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(isolate()->factory()->fixed_array_map()));
   __ j(not_equal, &slow_case);
 
   // Check that both indices are smis.
@@ -3098,7 +3112,7 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
   __ bind(&new_space);
   // We are done. Drop elements from the stack, and return undefined.
   __ add(Operand(esp), Immediate(3 * kPointerSize));
-  __ mov(eax, Factory::undefined_value());
+  __ mov(eax, isolate()->factory()->undefined_value());
   __ jmp(&done);
 
   __ bind(&slow_case);
@@ -3116,10 +3130,10 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      isolate()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
-    __ mov(eax, Factory::undefined_value());
+    __ mov(eax, isolate()->factory()->undefined_value());
     context()->Plug(eax);
     return;
   }
@@ -3176,7 +3190,8 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
   __ and_(Operand(tmp), right);
   __ test(Operand(tmp), Immediate(kSmiTagMask));
   __ j(zero, &fail);
-  __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
+  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
+  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
   __ j(not_equal, &fail);
   __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
   __ j(not_equal, &fail);
@@ -3184,10 +3199,10 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
   __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
   __ j(equal, &ok);
   __ bind(&fail);
-  __ mov(eax, Immediate(Factory::false_value()));
+  __ mov(eax, Immediate(isolate()->factory()->false_value()));
   __ jmp(&done);
   __ bind(&ok);
-  __ mov(eax, Immediate(Factory::true_value()));
+  __ mov(eax, Immediate(isolate()->factory()->true_value()));
   __ bind(&done);
 
   context()->Plug(eax);
@@ -3280,7 +3295,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
   __ SmiUntag(array_length);
   __ j(not_zero, &non_trivial_array);
-  __ mov(result_operand, Factory::empty_string());
+  __ mov(result_operand, FACTORY->empty_string());
   __ jmp(&done);
 
   // Save the array length.
@@ -3491,7 +3506,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
 
 
   __ bind(&bailout);
-  __ mov(result_operand, Factory::undefined_value());
+  __ mov(result_operand, FACTORY->undefined_value());
   __ bind(&done);
   __ mov(eax, result_operand);
   // Drop temp values from the stack, and restore context register.
@@ -3529,7 +3544,8 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
     // Call the JS runtime function via a call IC.
     __ Set(ecx, Immediate(expr->name()));
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+        arg_count, in_loop);
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     // Restore context register.
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -3595,7 +3611,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
     case Token::VOID: {
       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
       VisitForEffect(expr->expression());
-      context()->Plug(Factory::undefined_value());
+      context()->Plug(isolate()->factory()->undefined_value());
       break;
     }
 
@@ -3843,7 +3859,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case NAMED_PROPERTY: {
       __ mov(ecx, prop->key()->AsLiteral()->handle());
       __ pop(edx);
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3860,7 +3876,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case KEYED_PROPERTY: {
       __ pop(ecx);
       __ pop(edx);
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3888,7 +3904,8 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
     Comment cmnt(masm_, "Global variable");
     __ mov(eax, GlobalObjectOperand());
     __ mov(ecx, Immediate(proxy->name()));
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3941,12 +3958,12 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
   }
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
 
-  if (check->Equals(Heap::number_symbol())) {
+  if (check->Equals(isolate()->heap()->number_symbol())) {
     __ JumpIfSmi(eax, if_true);
     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           isolate()->factory()->heap_number_map());
     Split(equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::string_symbol())) {
+  } else if (check->Equals(isolate()->heap()->string_symbol())) {
     __ JumpIfSmi(eax, if_false);
     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
     __ j(above_equal, if_false);
@@ -3954,13 +3971,13 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
               1 << Map::kIsUndetectable);
     Split(zero, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::boolean_symbol())) {
-    __ cmp(eax, Factory::true_value());
+  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
+    __ cmp(eax, isolate()->factory()->true_value());
     __ j(equal, if_true);
-    __ cmp(eax, Factory::false_value());
+    __ cmp(eax, isolate()->factory()->false_value());
     Split(equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::undefined_symbol())) {
-    __ cmp(eax, Factory::undefined_value());
+  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
+    __ cmp(eax, isolate()->factory()->undefined_value());
     __ j(equal, if_true);
     __ JumpIfSmi(eax, if_false);
     // Check for undetectable objects => true.
@@ -3968,13 +3985,13 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
     __ test(ecx, Immediate(1 << Map::kIsUndetectable));
     Split(not_zero, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::function_symbol())) {
+  } else if (check->Equals(isolate()->heap()->function_symbol())) {
     __ JumpIfSmi(eax, if_false);
     __ CmpObjectType(eax, FIRST_FUNCTION_CLASS_TYPE, edx);
     Split(above_equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::object_symbol())) {
+  } else if (check->Equals(isolate()->heap()->object_symbol())) {
     __ JumpIfSmi(eax, if_false);
-    __ cmp(eax, Factory::null_value());
+    __ cmp(eax, isolate()->factory()->null_value());
     __ j(equal, if_true);
     __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edx);
     __ j(below, if_false);
@@ -4022,7 +4039,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       VisitForStackValue(expr->right());
       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
       PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
-      __ cmp(eax, Factory::true_value());
+      __ cmp(eax, isolate()->factory()->true_value());
       Split(equal, if_true, if_false, fall_through);
       break;
 
@@ -4115,12 +4132,12 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
   VisitForAccumulatorValue(expr->expression());
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
 
-  __ cmp(eax, Factory::null_value());
+  __ cmp(eax, isolate()->factory()->null_value());
   if (expr->is_strict()) {
     Split(equal, if_true, if_false, fall_through);
   } else {
     __ j(equal, if_true);
-    __ cmp(eax, Factory::undefined_value());
+    __ cmp(eax, isolate()->factory()->undefined_value());
     __ j(equal, if_true);
     __ test(eax, Immediate(kSmiTagMask));
     __ j(zero, if_false);
@@ -4155,16 +4172,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1);
+      __ IncrementCounter(isolate()->counters()->named_load_full(), 1);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1);
+      __ IncrementCounter(isolate()->counters()->keyed_load_full(), 1);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1);
+      __ IncrementCounter(isolate()->counters()->named_store_full(), 1);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1);
+      __ IncrementCounter(isolate()->counters()->keyed_store_full(), 1);
     default:
       break;
   }
@@ -4198,16 +4215,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1);
+      __ IncrementCounter(COUNTERS->named_load_full(), 1);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1);
+      __ IncrementCounter(COUNTERS->named_store_full(), 1);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
     default:
       break;
   }
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 98622ac52d1dce82050454bed0e828de6a4918e4..ae8ed6a638bd0c74cf5f4b6b8f5a550113c8a572 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -93,7 +93,7 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
   __ j(not_zero, miss, not_taken);
 
   __ mov(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
-  __ CheckMap(r0, Factory::hash_table_map(), miss, true);
+  __ CheckMap(r0, FACTORY->hash_table_map(), miss, true);
 }
 
 
@@ -475,7 +475,7 @@ static void GenerateFastArrayLoad(MacroAssembler* masm,
   __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
   if (not_fast_array != NULL) {
     // Check that the object is in fast mode and writable.
-    __ CheckMap(scratch, Factory::fixed_array_map(), not_fast_array, true);
+    __ CheckMap(scratch, FACTORY->fixed_array_map(), not_fast_array, true);
   } else {
     __ AssertFastElements(scratch);
   }
@@ -485,7 +485,7 @@ static void GenerateFastArrayLoad(MacroAssembler* masm,
   // Fast case: Do the load.
   ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
   __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
-  __ cmp(Operand(scratch), Immediate(Factory::the_hole_value()));
+  __ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value()));
   // In case the loaded value is the_hole we have to consult GetProperty
   // to ensure the prototype chain is searched.
   __ j(equal, out_of_range);
@@ -555,7 +555,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
                         eax,
                         NULL,
                         &slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1);
   __ ret(0);
 
   __ bind(&check_number_dictionary);
@@ -568,7 +568,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // ebx: untagged index
   // eax: key
   // ecx: elements
-  __ CheckMap(ecx, Factory::hash_table_map(), &slow, true);
+  __ CheckMap(ecx, FACTORY->hash_table_map(), &slow, true);
   Label slow_pop_receiver;
   // Push receiver on the stack to free up a register for the dictionary
   // probing.
@@ -593,7 +593,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // Slow case: jump to runtime.
   // edx: receiver
   // eax: key
-  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1);
   GenerateRuntimeGetProperty(masm);
 
   __ bind(&check_string);
@@ -606,7 +606,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // cache. Otherwise probe the dictionary.
   __ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-         Immediate(Factory::hash_table_map()));
+         Immediate(FACTORY->hash_table_map()));
   __ j(equal, &probe_dictionary);
 
   // Load the map of the receiver, compute the keyed lookup cache hash
@@ -648,7 +648,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
   __ add(ecx, Operand(edi));
   __ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);
 
   // Load property array property.
@@ -656,7 +656,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
   __ mov(eax, FieldOperand(eax, edi, times_pointer_size,
                            FixedArray::kHeaderSize));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);
 
   // Do a quick inline probe of the receiver's dictionary, if it
@@ -668,7 +668,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateGlobalInstanceTypeCheck(masm, ecx, &slow);
 
   GenerateDictionaryLoad(masm, &slow, ebx, eax, ecx, edi, eax);
-  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1);
   __ ret(0);
 
   __ bind(&index_string);
@@ -789,7 +789,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   // ecx: key (a smi)
   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   // Check that the object is in fast mode and writable.
-  __ CheckMap(edi, Factory::fixed_array_map(), &slow, true);
+  __ CheckMap(edi, FACTORY->fixed_array_map(), &slow, true);
   __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
   __ j(below, &fast, taken);
 
@@ -822,7 +822,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   // edx: receiver, a JSArray
   // ecx: key, a smi.
   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
-  __ CheckMap(edi, Factory::fixed_array_map(), &slow, true);
+  __ CheckMap(edi, FACTORY->fixed_array_map(), &slow, true);
 
   // Check the key against the length in the array, compute the
   // address to store into and fall through to fast case.
@@ -861,7 +861,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                          Code::kNoExtraICState,
                                          NORMAL,
                                          argc);
-  StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, eax);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
+                                                  eax);
 
   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
@@ -888,9 +889,9 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
 
   // Check for boolean.
   __ bind(&non_string);
-  __ cmp(edx, Factory::true_value());
+  __ cmp(edx, FACTORY->true_value());
   __ j(equal, &boolean, not_taken);
-  __ cmp(edx, Factory::false_value());
+  __ cmp(edx, FACTORY->false_value());
   __ j(not_equal, &miss, taken);
   __ bind(&boolean);
   StubCompiler::GenerateLoadGlobalFunctionPrototype(
@@ -898,7 +899,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
 
   // Probe the stub cache for the value object.
   __ bind(&probe);
-  StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
+                                                  no_reg);
   __ bind(&miss);
 }
 
@@ -965,9 +967,9 @@ static void GenerateCallMiss(MacroAssembler* masm,
   // -----------------------------------
 
   if (id == IC::kCallIC_Miss) {
-    __ IncrementCounter(&Counters::call_miss, 1);
+    __ IncrementCounter(COUNTERS->call_miss(), 1);
   } else {
-    __ IncrementCounter(&Counters::keyed_call_miss, 1);
+    __ IncrementCounter(COUNTERS->keyed_call_miss(), 1);
   }
 
   // Get the receiver of the function from the stack; 1 ~ return address.
@@ -1089,7 +1091,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
 
   GenerateFastArrayLoad(
       masm, edx, ecx, eax, edi, &check_number_dictionary, &slow_load);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1);
 
   __ bind(&do_call);
   // receiver in edx is not used after this point.
@@ -1101,14 +1103,14 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // eax: elements
   // ecx: smi key
   // Check whether the elements is a number dictionary.
-  __ CheckMap(eax, Factory::hash_table_map(), &slow_load, true);
+  __ CheckMap(eax, FACTORY->hash_table_map(), &slow_load, true);
   __ mov(ebx, ecx);
   __ SmiUntag(ebx);
   // ebx: untagged index
   // Receiver in edx will be clobbered, need to reload it on miss.
   GenerateNumberDictionaryLoad(
       masm, &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1);
   __ jmp(&do_call);
 
   __ bind(&slow_reload_receiver);
@@ -1117,7 +1119,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ bind(&slow_load);
   // This branch is taken when calling KeyedCallIC_Miss is neither required
   // nor beneficial.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1);
   __ EnterInternalFrame();
   __ push(ecx);  // save the key
   __ push(edx);  // pass the receiver
@@ -1139,14 +1141,14 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
       masm, edx, eax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
 
   __ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
-  __ CheckMap(ebx, Factory::hash_table_map(), &lookup_monomorphic_cache, true);
+  __ CheckMap(ebx, FACTORY->hash_table_map(), &lookup_monomorphic_cache, true);
 
   GenerateDictionaryLoad(masm, &slow_load, ebx, ecx, eax, edi, edi);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1);
   __ jmp(&do_call);
 
   __ bind(&lookup_monomorphic_cache);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1);
   GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
   // Fall through on miss.
 
@@ -1157,7 +1159,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // - the value loaded is not a function,
   // - there is hope that the runtime will create a monomorphic call stub
   //   that will get fetched next time.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1);
   GenerateMiss(masm, argc);
 
   __ bind(&index_string);
@@ -1212,7 +1214,8 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC);
-  StubCache::GenerateProbe(masm, flags, eax, ecx, ebx, edx);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, eax, ecx, ebx,
+                                                  edx);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1247,7 +1250,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- esp[0] : return address
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::load_miss, 1);
+  __ IncrementCounter(COUNTERS->load_miss(), 1);
 
   __ pop(ebx);
   __ push(eax);  // receiver
@@ -1375,7 +1378,7 @@ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
   // (-1) or we should be clearing the inlined version.
   ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
          *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == Heap::null_value()));
+         (offset == 0 && map == HEAP->null_value()));
   *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
 
   // Patch the offset in the write-barrier code. The offset is the
@@ -1385,7 +1388,7 @@ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
   // (-1) or we should be clearing the inlined version.
   ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
          *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == Heap::null_value()));
+         (offset == 0 && map == HEAP->null_value()));
   *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
 
   return true;
@@ -1433,7 +1436,7 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- esp[0] : return address
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::keyed_load_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_miss(), 1);
 
   __ pop(ebx);
   __ push(edx);  // receiver
@@ -1476,7 +1479,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC,
                                          strict_mode);
-  StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
+                                                  no_reg);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1580,14 +1584,14 @@ void StoreIC::GenerateNormal(MacroAssembler* masm) {
   __ push(edx);
   GenerateDictionaryStore(masm, &restore_miss, ebx, ecx, eax, edx, edi);
   __ Drop(1);
-  __ IncrementCounter(&Counters::store_normal_hit, 1);
+  __ IncrementCounter(COUNTERS->store_normal_hit(), 1);
   __ ret(0);
 
   __ bind(&restore_miss);
   __ pop(edx);
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::store_normal_miss, 1);
+  __ IncrementCounter(COUNTERS->store_normal_miss(), 1);
   GenerateMiss(masm);
 }
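
The ic-ia32.cc hunks above all apply the same mechanical rewrite: per-process statics (Counters::*, Factory::*, StubCache::*) become objects reached through the current isolate, with COUNTERS and FACTORY acting as shorthands for the isolate-scoped accessors. A minimal sketch of the resulting call shape, assuming COUNTERS/FACTORY expand to Isolate::Current()->counters()/factory() as the hunks suggest; the stub below is hypothetical and exists only to illustrate the pattern:

#define __ ACCESS_MASM(masm)

// Hypothetical stub, not part of the patch.
static void GenerateExampleProbe(MacroAssembler* masm) {
  Isolate* isolate = Isolate::Current();
  // Counters and heap roots now live on the isolate instead of in statics.
  __ IncrementCounter(isolate->counters()->load_miss(), 1);
  __ cmp(eax, isolate->factory()->undefined_value());
  // The stub cache is likewise a per-isolate object.
  Code::Flags flags =
      Code::ComputeFlags(Code::LOAD_IC, NOT_IN_LOOP, MONOMORPHIC);
  isolate->stub_cache()->GenerateProbe(masm, flags, eax, ecx, ebx, edx);
}

#undef __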
 
index 5df44ca9b7b9b6a9edf67cfe948d4c9e3dabe862..4bb79e4d712ba4fdad824570c87f5f4e9e0b3caa 100644 (file)
@@ -466,7 +466,7 @@ void LCodeGen::CallCode(Handle<Code> code,
 }
 
 
-void LCodeGen::CallRuntime(Runtime::Function* fun,
+void LCodeGen::CallRuntime(const Runtime::Function* fun,
                            int argc,
                            LInstruction* instr,
                            bool adjusted) {
@@ -586,14 +586,14 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   if (length == 0) return;
   ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
-      Factory::NewDeoptimizationInputData(length, TENURED);
+      factory()->NewDeoptimizationInputData(length, TENURED);
 
   Handle<ByteArray> translations = translations_.CreateByteArray();
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
   Handle<FixedArray> literals =
-      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
+      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
   for (int i = 0; i < deoptimization_literals_.length(); i++) {
     literals->set(i, *deoptimization_literals_[i]);
   }
@@ -1095,7 +1095,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
     uint64_t int_val = BitCast<uint64_t, double>(v);
     int32_t lower = static_cast<int32_t>(int_val);
     int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
-    if (CpuFeatures::IsSupported(SSE4_1)) {
+    if (isolate()->cpu_features()->IsSupported(SSE4_1)) {
       CpuFeatures::Scope scope(SSE4_1);
       if (lower != 0) {
         __ Set(temp, Immediate(lower));
@@ -1298,17 +1298,17 @@ void LCodeGen::DoBranch(LBranch* instr) {
     ASSERT(r.IsTagged());
     Register reg = ToRegister(instr->InputAt(0));
     if (instr->hydrogen()->type().IsBoolean()) {
-      __ cmp(reg, Factory::true_value());
+      __ cmp(reg, factory()->true_value());
       EmitBranch(true_block, false_block, equal);
     } else {
       Label* true_label = chunk_->GetAssemblyLabel(true_block);
       Label* false_label = chunk_->GetAssemblyLabel(false_block);
 
-      __ cmp(reg, Factory::undefined_value());
+      __ cmp(reg, factory()->undefined_value());
       __ j(equal, false_label);
-      __ cmp(reg, Factory::true_value());
+      __ cmp(reg, factory()->true_value());
       __ j(equal, true_label);
-      __ cmp(reg, Factory::false_value());
+      __ cmp(reg, factory()->false_value());
       __ j(equal, false_label);
       __ test(reg, Operand(reg));
       __ j(equal, false_label);
@@ -1318,7 +1318,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
       // Test for double values. Zero is false.
       NearLabel call_stub;
       __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
-             Factory::heap_number_map());
+             factory()->heap_number_map());
       __ j(not_equal, &call_stub);
       __ fldz();
       __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
@@ -1441,11 +1441,11 @@ void LCodeGen::DoCmpID(LCmpID* instr) {
 
   NearLabel done;
   Condition cc = TokenToCondition(instr->op(), instr->is_double());
-  __ mov(ToRegister(result), Factory::true_value());
+  __ mov(ToRegister(result), factory()->true_value());
   __ j(cc, &done);
 
   __ bind(&unordered);
-  __ mov(ToRegister(result), Factory::false_value());
+  __ mov(ToRegister(result), factory()->false_value());
   __ bind(&done);
 }
 
@@ -1476,10 +1476,10 @@ void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
   Register result = ToRegister(instr->result());
 
   __ cmp(left, Operand(right));
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
   NearLabel done;
   __ j(equal, &done);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ bind(&done);
 }
 
@@ -1502,17 +1502,17 @@ void LCodeGen::DoIsNull(LIsNull* instr) {
   // TODO(fsc): If the expression is known to be a smi, then it's
   // definitely not null. Materialize false.
 
-  __ cmp(reg, Factory::null_value());
+  __ cmp(reg, factory()->null_value());
   if (instr->is_strict()) {
-    __ mov(result, Factory::true_value());
+    __ mov(result, factory()->true_value());
     NearLabel done;
     __ j(equal, &done);
-    __ mov(result, Factory::false_value());
+    __ mov(result, factory()->false_value());
     __ bind(&done);
   } else {
     NearLabel true_value, false_value, done;
     __ j(equal, &true_value);
-    __ cmp(reg, Factory::undefined_value());
+    __ cmp(reg, factory()->undefined_value());
     __ j(equal, &true_value);
     __ test(reg, Immediate(kSmiTagMask));
     __ j(zero, &false_value);
@@ -1524,10 +1524,10 @@ void LCodeGen::DoIsNull(LIsNull* instr) {
     __ test(scratch, Immediate(1 << Map::kIsUndetectable));
     __ j(not_zero, &true_value);
     __ bind(&false_value);
-    __ mov(result, Factory::false_value());
+    __ mov(result, factory()->false_value());
     __ jmp(&done);
     __ bind(&true_value);
-    __ mov(result, Factory::true_value());
+    __ mov(result, factory()->true_value());
     __ bind(&done);
   }
 }
@@ -1542,14 +1542,14 @@ void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
   int true_block = chunk_->LookupDestination(instr->true_block_id());
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
-  __ cmp(reg, Factory::null_value());
+  __ cmp(reg, factory()->null_value());
   if (instr->is_strict()) {
     EmitBranch(true_block, false_block, equal);
   } else {
     Label* true_label = chunk_->GetAssemblyLabel(true_block);
     Label* false_label = chunk_->GetAssemblyLabel(false_block);
     __ j(equal, true_label);
-    __ cmp(reg, Factory::undefined_value());
+    __ cmp(reg, factory()->undefined_value());
     __ j(equal, true_label);
     __ test(reg, Immediate(kSmiTagMask));
     __ j(zero, false_label);
@@ -1576,7 +1576,7 @@ Condition LCodeGen::EmitIsObject(Register input,
   __ test(input, Immediate(kSmiTagMask));
   __ j(equal, is_not_object);
 
-  __ cmp(input, Factory::null_value());
+  __ cmp(input, isolate()->factory()->null_value());
   __ j(equal, is_object);
 
   __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
@@ -1603,11 +1603,11 @@ void LCodeGen::DoIsObject(LIsObject* instr) {
   __ j(true_cond, &is_true);
 
   __ bind(&is_false);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ jmp(&done);
 
   __ bind(&is_true);
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
 
   __ bind(&done);
 }
@@ -1635,10 +1635,10 @@ void LCodeGen::DoIsSmi(LIsSmi* instr) {
 
   ASSERT(instr->hydrogen()->value()->representation().IsTagged());
   __ test(input, Immediate(kSmiTagMask));
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
   NearLabel done;
   __ j(zero, &done);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ bind(&done);
 }
 
@@ -1684,10 +1684,10 @@ void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
   __ j(zero, &is_false);
   __ CmpObjectType(input, TestType(instr->hydrogen()), result);
   __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
   __ jmp(&done);
   __ bind(&is_false);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ bind(&done);
 }
 
@@ -1727,12 +1727,12 @@ void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
   Register result = ToRegister(instr->result());
 
   ASSERT(instr->hydrogen()->value()->representation().IsTagged());
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
   __ test(FieldOperand(input, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
   NearLabel done;
   __ j(zero, &done);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ bind(&done);
 }
 
@@ -1821,11 +1821,11 @@ void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
   __ j(not_equal, &is_false);
 
   __ bind(&is_true);
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
   __ jmp(&done);
 
   __ bind(&is_false);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ bind(&done);
 }
 
@@ -1873,10 +1873,10 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   NearLabel true_value, done;
   __ test(eax, Operand(eax));
   __ j(zero, &true_value);
-  __ mov(ToRegister(instr->result()), Factory::false_value());
+  __ mov(ToRegister(instr->result()), factory()->false_value());
   __ jmp(&done);
   __ bind(&true_value);
-  __ mov(ToRegister(instr->result()), Factory::true_value());
+  __ mov(ToRegister(instr->result()), factory()->true_value());
   __ bind(&done);
 }
 
@@ -1928,16 +1928,16 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   Register map = ToRegister(instr->TempAt(0));
   __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ cmp(map, Factory::the_hole_value());  // Patched to cached map.
+  __ cmp(map, factory()->the_hole_value());  // Patched to cached map.
   __ j(not_equal, &cache_miss, not_taken);
-  __ mov(eax, Factory::the_hole_value());  // Patched to either true or false.
+  __ mov(eax, factory()->the_hole_value());  // Patched to either true or false.
   __ jmp(&done);
 
   // The inlined call site cache did not match. Check for null and string
   // before calling the deferred code.
   __ bind(&cache_miss);
   // Null is not an instance of anything.
-  __ cmp(object, Factory::null_value());
+  __ cmp(object, factory()->null_value());
   __ j(equal, &false_result);
 
   // String values are not instances of anything.
@@ -1948,7 +1948,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   __ jmp(deferred->entry());
 
   __ bind(&false_result);
-  __ mov(ToRegister(instr->result()), Factory::false_value());
+  __ mov(ToRegister(instr->result()), factory()->false_value());
 
   // Here result has either true or false. Deferred code also produces true or
   // false object.
@@ -2020,10 +2020,10 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
   NearLabel true_value, done;
   __ test(eax, Operand(eax));
   __ j(condition, &true_value);
-  __ mov(ToRegister(instr->result()), Factory::false_value());
+  __ mov(ToRegister(instr->result()), factory()->false_value());
   __ jmp(&done);
   __ bind(&true_value);
-  __ mov(ToRegister(instr->result()), Factory::true_value());
+  __ mov(ToRegister(instr->result()), factory()->true_value());
   __ bind(&done);
 }
 
@@ -2067,7 +2067,7 @@ void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
   Register result = ToRegister(instr->result());
   __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
   if (instr->hydrogen()->check_hole_value()) {
-    __ cmp(result, Factory::the_hole_value());
+    __ cmp(result, factory()->the_hole_value());
     DeoptimizeIf(equal, instr->environment());
   }
 }
@@ -2082,7 +2082,7 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
   // to update the property details in the property dictionary to mark
   // it as no longer deleted. We deoptimize in that case.
   if (instr->hydrogen()->check_hole_value()) {
-    __ cmp(cell_operand, Factory::the_hole_value());
+    __ cmp(cell_operand, factory()->the_hole_value());
     DeoptimizeIf(equal, instr->environment());
   }
 
@@ -2128,7 +2128,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   __ mov(ecx, instr->name());
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(Builtins::LoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2153,7 +2153,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
 
   // Check that the function has a prototype or an initial map.
-  __ cmp(Operand(result), Immediate(Factory::the_hole_value()));
+  __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
   DeoptimizeIf(equal, instr->environment());
 
   // If the function does not have an initial map, we're done.
@@ -2182,13 +2182,13 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
   if (FLAG_debug_code) {
     NearLabel done;
     __ cmp(FieldOperand(result, HeapObject::kMapOffset),
-           Immediate(Factory::fixed_array_map()));
+           Immediate(factory()->fixed_array_map()));
     __ j(equal, &done);
     __ cmp(FieldOperand(result, HeapObject::kMapOffset),
-           Immediate(Factory::external_pixel_array_map()));
+           Immediate(factory()->external_pixel_array_map()));
     __ j(equal, &done);
     __ cmp(FieldOperand(result, HeapObject::kMapOffset),
-           Immediate(Factory::fixed_cow_array_map()));
+           Immediate(factory()->fixed_cow_array_map()));
     __ Check(equal, "Check for fast elements or pixel array failed.");
     __ bind(&done);
   }
@@ -2232,7 +2232,7 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
                               FixedArray::kHeaderSize));
 
   // Check for the hole value.
-  __ cmp(result, Factory::the_hole_value());
+  __ cmp(result, factory()->the_hole_value());
   DeoptimizeIf(equal, instr->environment());
 }
 
@@ -2253,7 +2253,8 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(edx));
   ASSERT(ToRegister(instr->key()).is(eax));
 
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2318,9 +2319,9 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
   // If the receiver is null or undefined, we have to pass the global object
   // as a receiver.
   NearLabel global_object, receiver_ok;
-  __ cmp(receiver, Factory::null_value());
+  __ cmp(receiver, factory()->null_value());
   __ j(equal, &global_object);
-  __ cmp(receiver, Factory::undefined_value());
+  __ cmp(receiver, factory()->undefined_value());
   __ j(equal, &global_object);
 
   // The receiver should be a JS object.
@@ -2458,7 +2459,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
   Register input_reg = ToRegister(instr->InputAt(0));
   __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         factory()->heap_number_map());
   DeoptimizeIf(not_equal, instr->environment());
 
   Label done;
@@ -2753,7 +2754,8 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->
+      ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2763,7 +2765,8 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->
+      ComputeCallInitialize(arity, NOT_IN_LOOP);
   __ mov(ecx, instr->name());
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
@@ -2785,7 +2788,8 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->
+      ComputeCallInitialize(arity, NOT_IN_LOOP);
   __ mov(ecx, instr->name());
   CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
 }
@@ -2803,7 +2807,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->constructor()).is(edi));
   ASSERT(ToRegister(instr->result()).is(eax));
 
-  Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ Set(eax, Immediate(instr->arity()));
   CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
 }
@@ -2850,7 +2855,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
   ASSERT(ToRegister(instr->value()).is(eax));
 
   __ mov(ecx, instr->name());
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -2920,7 +2925,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->key()).is(ecx));
   ASSERT(ToRegister(instr->value()).is(eax));
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
                          : Builtins::KeyedStoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -2982,7 +2987,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   // the case we would rather go to the runtime system now to flatten
   // the string.
   __ cmp(FieldOperand(string, ConsString::kSecondOffset),
-         Immediate(Factory::empty_string()));
+         Immediate(factory()->empty_string()));
   __ j(not_equal, deferred->entry());
   // Get the first of the two strings and load its instance type.
   __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
@@ -3087,11 +3092,11 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
 
   __ cmp(char_code, String::kMaxAsciiCharCode);
   __ j(above, deferred->entry());
-  __ Set(result, Immediate(Factory::single_character_string_cache()));
+  __ Set(result, Immediate(factory()->single_character_string_cache()));
   __ mov(result, FieldOperand(result,
                               char_code, times_pointer_size,
                               FixedArray::kHeaderSize));
-  __ cmp(result, Factory::undefined_value());
+  __ cmp(result, factory()->undefined_value());
   __ j(equal, deferred->entry());
   __ bind(deferred->exit());
 }
@@ -3269,10 +3274,10 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
 
   // Heap number map check.
   __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         factory()->heap_number_map());
   __ j(equal, &heap_number);
 
-  __ cmp(input_reg, Factory::undefined_value());
+  __ cmp(input_reg, factory()->undefined_value());
   DeoptimizeIf(not_equal, env);
 
   // Convert undefined to NaN.
@@ -3310,19 +3315,19 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
 
   // Heap number map check.
   __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
-         Factory::heap_number_map());
+         factory()->heap_number_map());
 
   if (instr->truncating()) {
     __ j(equal, &heap_number);
     // Check for undefined. Undefined is converted to zero for truncating
     // conversions.
-    __ cmp(input_reg, Factory::undefined_value());
+    __ cmp(input_reg, factory()->undefined_value());
     DeoptimizeIf(not_equal, instr->environment());
     __ mov(input_reg, 0);
     __ jmp(&done);
 
     __ bind(&heap_number);
-    if (CpuFeatures::IsSupported(SSE3)) {
+    if (isolate()->cpu_features()->IsSupported(SSE3)) {
       CpuFeatures::Scope scope(SSE3);
       NearLabel convert;
       // Use more powerful conversion when sse3 is available.
@@ -3432,7 +3437,7 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
     // the JS bitwise operations.
     __ cvttsd2si(result_reg, Operand(input_reg));
     __ cmp(result_reg, 0x80000000u);
-    if (CpuFeatures::IsSupported(SSE3)) {
+    if (isolate()->cpu_features()->IsSupported(SSE3)) {
     // This will deoptimize if the exponent of the input is out of range.
       CpuFeatures::Scope scope(SSE3);
       NearLabel convert, done;
@@ -3599,9 +3604,9 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
 
 
 void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
-  if (Heap::InNewSpace(*object)) {
+  if (isolate()->heap()->InNewSpace(*object)) {
     Handle<JSGlobalPropertyCell> cell =
-        Factory::NewJSGlobalPropertyCell(object);
+        isolate()->factory()->NewJSGlobalPropertyCell(object);
     __ mov(result, Operand::Cell(cell));
   } else {
     __ mov(result, object);
@@ -3694,7 +3699,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   int literal_offset = FixedArray::kHeaderSize +
       instr->hydrogen()->literal_index() * kPointerSize;
   __ mov(ebx, FieldOperand(ecx, literal_offset));
-  __ cmp(ebx, Factory::undefined_value());
+  __ cmp(ebx, factory()->undefined_value());
   __ j(not_equal, &materialized);
 
   // Create regexp literal using runtime function
@@ -3748,8 +3753,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
     __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
     __ push(Immediate(shared_info));
     __ push(Immediate(pretenure
-                      ? Factory::true_value()
-                      : Factory::false_value()));
+                      ? factory()->true_value()
+                      : factory()->false_value()));
     CallRuntime(Runtime::kNewClosure, 3, instr, false);
   }
 }
@@ -3779,11 +3784,11 @@ void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
                                                   instr->type_literal());
   __ j(final_branch_condition, &true_label);
   __ bind(&false_label);
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ jmp(&done);
 
   __ bind(&true_label);
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
 
   __ bind(&done);
 }
@@ -3810,13 +3815,13 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                  Register input,
                                  Handle<String> type_name) {
   Condition final_branch_condition = no_condition;
-  if (type_name->Equals(Heap::number_symbol())) {
+  if (type_name->Equals(heap()->number_symbol())) {
     __ JumpIfSmi(input, true_label);
     __ cmp(FieldOperand(input, HeapObject::kMapOffset),
-           Factory::heap_number_map());
+           factory()->heap_number_map());
     final_branch_condition = equal;
 
-  } else if (type_name->Equals(Heap::string_symbol())) {
+  } else if (type_name->Equals(heap()->string_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
     __ j(above_equal, false_label);
@@ -3824,14 +3829,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
               1 << Map::kIsUndetectable);
     final_branch_condition = zero;
 
-  } else if (type_name->Equals(Heap::boolean_symbol())) {
-    __ cmp(input, Factory::true_value());
+  } else if (type_name->Equals(heap()->boolean_symbol())) {
+    __ cmp(input, factory()->true_value());
     __ j(equal, true_label);
-    __ cmp(input, Factory::false_value());
+    __ cmp(input, factory()->false_value());
     final_branch_condition = equal;
 
-  } else if (type_name->Equals(Heap::undefined_symbol())) {
-    __ cmp(input, Factory::undefined_value());
+  } else if (type_name->Equals(heap()->undefined_symbol())) {
+    __ cmp(input, factory()->undefined_value());
     __ j(equal, true_label);
     __ JumpIfSmi(input, false_label);
     // Check for undetectable objects => true.
@@ -3840,7 +3845,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
               1 << Map::kIsUndetectable);
     final_branch_condition = not_zero;
 
-  } else if (type_name->Equals(Heap::function_symbol())) {
+  } else if (type_name->Equals(heap()->function_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
     __ j(equal, true_label);
@@ -3848,9 +3853,9 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
     __ CmpInstanceType(input, JS_REGEXP_TYPE);
     final_branch_condition = equal;
 
-  } else if (type_name->Equals(Heap::object_symbol())) {
+  } else if (type_name->Equals(heap()->object_symbol())) {
     __ JumpIfSmi(input, false_label);
-    __ cmp(input, Factory::null_value());
+    __ cmp(input, factory()->null_value());
     __ j(equal, true_label);
     // Regular expressions => 'function', not 'object'.
     __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
@@ -3881,11 +3886,11 @@ void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
   EmitIsConstructCall(result);
   __ j(equal, &true_label);
 
-  __ mov(result, Factory::false_value());
+  __ mov(result, factory()->false_value());
   __ jmp(&done);
 
   __ bind(&true_label);
-  __ mov(result, Factory::true_value());
+  __ mov(result, factory()->true_value());
 
   __ bind(&done);
 }
index c5c76c18b20b25ae4ce95962f9e99aa423b767cf..d06603c542f95f02b1fa6220b0584342ec7a2d14 100644 (file)
@@ -68,6 +68,9 @@ class LCodeGen BASE_EMBEDDED {
   // Simple accessors.
   MacroAssembler* masm() const { return masm_; }
   CompilationInfo* info() const { return info_; }
+  Isolate* isolate() const { return info_->isolate(); }
+  Factory* factory() const { return isolate()->factory(); }
+  Heap* heap() const { return isolate()->heap(); }
 
   // Support for converting LOperands to assembler types.
   Operand ToOperand(LOperand* op) const;
@@ -163,11 +166,11 @@ class LCodeGen BASE_EMBEDDED {
 
   void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr,
                 bool adjusted = true);
-  void CallRuntime(Runtime::Function* fun, int argc, LInstruction* instr,
+  void CallRuntime(const Runtime::Function* fun, int argc, LInstruction* instr,
                    bool adjusted = true);
   void CallRuntime(Runtime::FunctionId id, int argc, LInstruction* instr,
                    bool adjusted = true) {
-    Runtime::Function* function = Runtime::FunctionForId(id);
+    const Runtime::Function* function = Runtime::FunctionForId(id);
     CallRuntime(function, argc, instr, adjusted);
   }
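
The three accessors added to LCodeGen above (isolate(), factory(), heap()) are what let the lithium-codegen-ia32.cc hunks write factory()->... and heap()->... instead of the old static Factory::/Heap:: calls. A short usage sketch under that assumption; DoExampleBranch is hypothetical and only the accessor names come from the patch:

// Hypothetical helper, not part of the patch.
// (__ is the file's usual masm() shorthand.)
void LCodeGen::DoExampleBranch(Register reg, Label* is_true, Label* is_false) {
  // Heap roots are fetched through the compiling isolate's factory.
  __ cmp(reg, factory()->true_value());
  __ j(equal, is_true);
  __ cmp(reg, factory()->false_value());
  __ j(equal, is_false);
  // Map checks go through the same accessor chain.
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(not_equal, is_false);
}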
 
index 45c790f3f0d5a35374694b997ad9ecf1450c860b..eabfecc58c6264f81113feebf39c10c7ff429b7a 100644 (file)
@@ -25,6 +25,8 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
 #include "ia32/lithium-gap-resolver-ia32.h"
 #include "ia32/lithium-codegen-ia32.h"
 
index 3408451c0a3923f791cf59ac2b154e94e3057fa8..1f3ded4dbc8fb1b69fa8cb0596218b2c2fe25e73 100644 (file)
@@ -1633,8 +1633,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
       LOperand* value = UseRegister(instr->value());
       bool needs_check = !instr->value()->type().IsSmi();
       if (needs_check) {
+        CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
         LOperand* xmm_temp =
-            (instr->CanTruncateToInt32() && CpuFeatures::IsSupported(SSE3))
+            (instr->CanTruncateToInt32() && cpu_features->IsSupported(SSE3))
             ? NULL
             : FixedTemp(xmm1);
         LTaggedToI* res = new LTaggedToI(value, xmm_temp);
@@ -1655,7 +1656,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
     } else {
       ASSERT(to.IsInteger32());
       bool needs_temp = instr->CanTruncateToInt32() &&
-          !CpuFeatures::IsSupported(SSE3);
+          !Isolate::Current()->cpu_features()->IsSupported(SSE3);
       LOperand* value = needs_temp ?
           UseTempRegister(instr->value()) : UseRegister(instr->value());
       LOperand* temp = needs_temp ? TempRegister() : NULL;
index 3d0926749960e048eb7723ffefce67e7209e95f1..0c0dd5f6b8b536ec8b4aa29490eec173ca8ee31a 100644 (file)
@@ -1491,7 +1491,7 @@ class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
   DECLARE_HYDROGEN_ACCESSOR(CallRuntime)
 
-  Runtime::Function* function() const { return hydrogen()->function(); }
+  const Runtime::Function* function() const { return hydrogen()->function(); }
   int arity() const { return hydrogen()->argument_count(); }
 };
 
index d5c3f5333a7db215d595bc00ef27244a17f960f2..542533c041a84156254c90ace37a86910481b2ac 100644 (file)
@@ -45,7 +45,7 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
     : Assembler(buffer, size),
       generating_stub_(false),
       allow_stub_calls_(true),
-      code_object_(Heap::undefined_value()) {
+      code_object_(HEAP->undefined_value()) {
 }
 
 
@@ -231,7 +231,7 @@ void MacroAssembler::IsInstanceJSObjectType(Register map,
 
 
 void MacroAssembler::FCmp() {
-  if (CpuFeatures::IsSupported(CMOV)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(CMOV)) {
     fucomip();
     ffree(0);
     fincstp();
@@ -250,7 +250,7 @@ void MacroAssembler::AbortIfNotNumber(Register object) {
   test(object, Immediate(kSmiTagMask));
   j(zero, &ok);
   cmp(FieldOperand(object, HeapObject::kMapOffset),
-      Factory::heap_number_map());
+      FACTORY->heap_number_map());
   Assert(equal, "Operand not a number");
   bind(&ok);
 }
@@ -286,7 +286,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
   push(Immediate(Smi::FromInt(type)));
   push(Immediate(CodeObject()));
   if (emit_debug_code()) {
-    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
+    cmp(Operand(esp, 0), Immediate(FACTORY->undefined_value()));
     Check(not_equal, "code object not properly patched");
   }
 }
@@ -316,8 +316,8 @@ void MacroAssembler::EnterExitFramePrologue() {
   push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.
 
   // Save the frame pointer and the context in top.
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
-  ExternalReference context_address(Top::k_context_address);
+  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
+  ExternalReference context_address(Isolate::k_context_address);
   mov(Operand::StaticVariable(c_entry_fp_address), ebp);
   mov(Operand::StaticVariable(context_address), esi);
 }
@@ -339,7 +339,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
   }
 
   // Get the required frame alignment for the OS.
-  static const int kFrameAlignment = OS::ActivationFrameAlignment();
+  const int kFrameAlignment = OS::ActivationFrameAlignment();
   if (kFrameAlignment > 0) {
     ASSERT(IsPowerOf2(kFrameAlignment));
     and_(esp, -kFrameAlignment);
@@ -358,7 +358,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles) {
   mov(edi, Operand(eax));
   lea(esi, Operand(ebp, eax, times_4, offset));
 
-  EnterExitFrameEpilogue(2, save_doubles);
+  // Reserve space for argc, argv and isolate.
+  EnterExitFrameEpilogue(3, save_doubles);
 }
 
 
@@ -394,14 +395,14 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) {
 
 void MacroAssembler::LeaveExitFrameEpilogue() {
   // Restore current context from top and clear it in debug mode.
-  ExternalReference context_address(Top::k_context_address);
+  ExternalReference context_address(Isolate::k_context_address);
   mov(esi, Operand::StaticVariable(context_address));
 #ifdef DEBUG
   mov(Operand::StaticVariable(context_address), Immediate(0));
 #endif
 
   // Clear the top frame.
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
+  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
   mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
 }
 
@@ -435,15 +436,16 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
     push(Immediate(0));  // NULL frame pointer.
   }
   // Save the current handler as the next handler.
-  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
+  push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)));
   // Link this handler as the new current one.
-  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
+  mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)),
+      esp);
 }
 
 
 void MacroAssembler::PopTryHandler() {
   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
-  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
+  pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)));
   add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
 }
 
@@ -458,7 +460,7 @@ void MacroAssembler::Throw(Register value) {
   }
 
   // Drop the sp to the top of the handler.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
   mov(esp, Operand::StaticVariable(handler_address));
 
   // Restore next handler and frame pointer, discard handler state.
@@ -494,7 +496,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   }
 
   // Drop sp to the top stack handler.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
   mov(esp, Operand::StaticVariable(handler_address));
 
   // Unwind the handlers until the ENTRY handler is found.
@@ -516,12 +518,13 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
 
   if (type == OUT_OF_MEMORY) {
     // Set external caught exception to false.
-    ExternalReference external_caught(Top::k_external_caught_exception_address);
+    ExternalReference external_caught(
+        Isolate::k_external_caught_exception_address);
     mov(eax, false);
     mov(Operand::StaticVariable(external_caught), eax);
 
     // Set pending exception and eax to out of memory exception.
-    ExternalReference pending_exception(Top::k_pending_exception_address);
+    ExternalReference pending_exception(Isolate::k_pending_exception_address);
     mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
     mov(Operand::StaticVariable(pending_exception), eax);
   }
@@ -564,7 +567,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
     push(scratch);
     // Read the first word and compare to global_context_map.
     mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
-    cmp(scratch, Factory::global_context_map());
+    cmp(scratch, FACTORY->global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
     pop(scratch);
   }
@@ -585,13 +588,13 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
 
   // Check the context is a global context.
   if (emit_debug_code()) {
-    cmp(holder_reg, Factory::null_value());
+    cmp(holder_reg, FACTORY->null_value());
     Check(not_equal, "JSGlobalProxy::context() should not be null.");
 
     push(holder_reg);
     // Read the first word and compare to global_context_map().
     mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
-    cmp(holder_reg, Factory::global_context_map());
+    cmp(holder_reg, FACTORY->global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
     pop(holder_reg);
   }
@@ -830,7 +833,7 @@ void MacroAssembler::AllocateHeapNumber(Register result,
 
   // Set the map.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::heap_number_map()));
+      Immediate(FACTORY->heap_number_map()));
 }
 
 
@@ -860,7 +863,7 @@ void MacroAssembler::AllocateTwoByteString(Register result,
 
   // Set the map, length and hash field.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::string_map()));
+      Immediate(FACTORY->string_map()));
   mov(scratch1, length);
   SmiTag(scratch1);
   mov(FieldOperand(result, String::kLengthOffset), scratch1);
@@ -895,7 +898,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
 
   // Set the map, length and hash field.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::ascii_string_map()));
+      Immediate(FACTORY->ascii_string_map()));
   mov(scratch1, length);
   SmiTag(scratch1);
   mov(FieldOperand(result, String::kLengthOffset), scratch1);
@@ -921,7 +924,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
 
   // Set the map, length and hash field.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::ascii_string_map()));
+      Immediate(FACTORY->ascii_string_map()));
   mov(FieldOperand(result, String::kLengthOffset),
       Immediate(Smi::FromInt(length)));
   mov(FieldOperand(result, String::kHashFieldOffset),
@@ -943,7 +946,7 @@ void MacroAssembler::AllocateConsString(Register result,
 
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::cons_string_map()));
+      Immediate(FACTORY->cons_string_map()));
 }
 
 
@@ -961,7 +964,7 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
 
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
-      Immediate(Factory::cons_ascii_string_map()));
+      Immediate(FACTORY->cons_ascii_string_map()));
 }
 
 
@@ -1079,7 +1082,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
   // If the prototype or initial map is the hole, don't return it and
   // simply miss the cache instead. This will allow us to allocate a
   // prototype object on-demand in the runtime system.
-  cmp(Operand(result), Immediate(Factory::the_hole_value()));
+  cmp(Operand(result), Immediate(FACTORY->the_hole_value()));
   j(equal, miss, not_taken);
 
   // If the function does not have an initial map, we're done.
@@ -1145,7 +1148,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) {
   if (num_arguments > 0) {
     add(Operand(esp), Immediate(num_arguments * kPointerSize));
   }
-  mov(eax, Immediate(Factory::undefined_value()));
+  mov(eax, Immediate(FACTORY->undefined_value()));
 }
 
 
@@ -1174,7 +1177,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
 
 
 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
-  Runtime::Function* function = Runtime::FunctionForId(id);
+  const Runtime::Function* function = Runtime::FunctionForId(id);
   Set(eax, Immediate(function->nargs));
   mov(ebx, Immediate(ExternalReference(function)));
   CEntryStub ces(1);
@@ -1189,7 +1192,8 @@ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
 }
 
 
-void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
+void MacroAssembler::CallRuntime(const Runtime::Function* f,
+                                 int num_arguments) {
   // If the expected number of arguments of the runtime function is
   // constant, we check that the actual number of arguments matches the
   // expectation.
@@ -1209,13 +1213,13 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
 }
 
 
-MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
+MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                             int num_arguments) {
   if (f->nargs >= 0 && f->nargs != num_arguments) {
     IllegalOperation(num_arguments);
     // Since we did not call the stub, there was no allocation failure.
     // Return some non-failure object.
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   // TODO(1236192): Most runtime routines don't need the number of
@@ -1375,7 +1379,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
   ExternalReference scheduled_exception_address =
       ExternalReference::scheduled_exception_address();
   cmp(Operand::StaticVariable(scheduled_exception_address),
-      Immediate(Factory::the_hole_value()));
+      Immediate(FACTORY->the_hole_value()));
   j(not_equal, &promote_scheduled_exception, not_taken);
   LeaveApiExitFrame();
   ret(stack_space * kPointerSize);
@@ -1387,13 +1391,14 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
   }
   bind(&empty_handle);
   // It was zero; the result is undefined.
-  mov(eax, Factory::undefined_value());
+  mov(eax, FACTORY->undefined_value());
   jmp(&prologue);
 
   // HandleScope limit has changed. Delete allocated extensions.
   bind(&delete_allocated_handles);
   mov(Operand::StaticVariable(limit_address), edi);
   mov(edi, eax);
+  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
   mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
   call(Operand(eax));
   mov(eax, edi);
@@ -1467,7 +1472,8 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
 
   if (!definitely_matches) {
     Handle<Code> adaptor =
-        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::ArgumentsAdaptorTrampoline));
     if (!code_constant.is_null()) {
       mov(edx, Immediate(code_constant));
       add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -1645,7 +1651,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   if (emit_debug_code()) {
     Label ok, fail;
-    CheckMap(map, Factory::meta_map(), &fail, false);
+    CheckMap(map, FACTORY->meta_map(), &fail, false);
     jmp(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
@@ -1795,10 +1801,10 @@ void MacroAssembler::AssertFastElements(Register elements) {
   if (emit_debug_code()) {
     Label ok;
     cmp(FieldOperand(elements, HeapObject::kMapOffset),
-        Immediate(Factory::fixed_array_map()));
+        Immediate(FACTORY->fixed_array_map()));
     j(equal, &ok);
     cmp(FieldOperand(elements, HeapObject::kMapOffset),
-        Immediate(Factory::fixed_cow_array_map()));
+        Immediate(FACTORY->fixed_cow_array_map()));
     j(equal, &ok);
     Abort("JSObject with fast elements map has slow elements");
     bind(&ok);
@@ -1863,7 +1869,7 @@ void MacroAssembler::JumpIfNotNumber(Register reg,
   if (emit_debug_code()) AbortIfSmi(reg);
   if (!info.IsNumber()) {
     cmp(FieldOperand(reg, HeapObject::kMapOffset),
-        Factory::heap_number_map());
+        FACTORY->heap_number_map());
     j(not_equal, on_not_number);
   }
 }
@@ -1968,6 +1974,9 @@ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
 
 
 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
+  // Reserve space for the Isolate address, always passed as the last parameter.
+  num_arguments += 1;
+
   int frameAlignment = OS::ActivationFrameAlignment();
   if (frameAlignment != 0) {
     // Make stack end at alignment and make room for num_arguments words
@@ -1993,6 +2002,11 @@ void MacroAssembler::CallCFunction(ExternalReference function,
 
 void MacroAssembler::CallCFunction(Register function,
                                    int num_arguments) {
+  // Pass current isolate address as additional parameter.
+  mov(Operand(esp, num_arguments * kPointerSize),
+      Immediate(ExternalReference::isolate_address()));
+  num_arguments += 1;
+
   // Check stack alignment.
   if (emit_debug_code()) {
     CheckStackAlignment();
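
With the two CallCFunction hunks above, every C function reached from generated ia32 code now receives the current isolate as one extra trailing stack argument: PrepareCallCFunction reserves the extra slot and CallCFunction writes ExternalReference::isolate_address() into it before the call. A sketch of the receiving side under that convention; the helper name and body are illustrative, not from the patch:

// Hypothetical C helper: the ordinary arguments keep their positions and the
// isolate arrives last, mirroring the regexp stack-guard code further below.
extern "C" int32_t ExampleRuntimeHelper(int32_t left, int32_t right,
                                        Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  return left + right;
}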
index d7cfeb56c75caa82b9e5e60bde35c710b0ddf2ac..3addba924d367594380e903b087451fe9f842c49 100644 (file)
@@ -474,13 +474,13 @@ class MacroAssembler: public Assembler {
   void StubReturn(int argc);
 
   // Call a runtime routine.
-  void CallRuntime(Runtime::Function* f, int num_arguments);
+  void CallRuntime(const Runtime::Function* f, int num_arguments);
   void CallRuntimeSaveDoubles(Runtime::FunctionId id);
 
   // Call a runtime function, returning the CodeStub object called.
   // Try to generate the stub code if necessary.  Do not perform a GC
   // but instead return a retry after GC failure.
-  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
+  MUST_USE_RESULT MaybeObject* TryCallRuntime(const Runtime::Function* f,
                                               int num_arguments);
 
   // Convenience function: Same as above, but takes the fid instead.
@@ -702,7 +702,7 @@ void MacroAssembler::InNewSpace(Register object,
     int32_t new_space_start = reinterpret_cast<int32_t>(
         ExternalReference::new_space_start().address());
     lea(scratch, Operand(object, -new_space_start));
-    and_(scratch, Heap::NewSpaceMask());
+    and_(scratch, HEAP->NewSpaceMask());
     j(cc, branch);
   }
 }
index 121344884102c26479edbc41ada8283e233c4487..a25aabc97baa988cbe7fe409f83714cc79caaf86 100644 (file)
@@ -56,6 +56,7 @@ namespace internal {
  *
  * Each call to a public method should retain this convention.
  * The stack will have the following structure:
+ *       - Isolate* isolate     (Address of the current isolate)
  *       - direct_call          (if 1, direct call from JavaScript code, if 0
  *                               call through the runtime system)
  *       - stack_area_base      (High end of the memory area to use as
@@ -866,10 +867,12 @@ Handle<Object> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
 
   CodeDesc code_desc;
   masm_->GetCode(&code_desc);
-  Handle<Code> code = Factory::NewCode(code_desc,
-                                       Code::ComputeFlags(Code::REGEXP),
-                                       masm_->CodeObject());
-  PROFILE(RegExpCodeCreateEvent(*code, *source));
+  Isolate* isolate = ISOLATE;
+  Handle<Code> code =
+      isolate->factory()->NewCode(code_desc,
+                                  Code::ComputeFlags(Code::REGEXP),
+                                  masm_->CodeObject());
+  PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
   return Handle<Object>::cast(code);
 }
 
@@ -1039,8 +1042,10 @@ static T& frame_entry(Address re_frame, int frame_offset) {
 int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
                                                    Code* re_code,
                                                    Address re_frame) {
-  if (StackGuard::IsStackOverflow()) {
-    Top::StackOverflow();
+  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
+  ASSERT(isolate == Isolate::Current());
+  if (isolate->stack_guard()->IsStackOverflow()) {
+    isolate->StackOverflow();
     return EXCEPTION;
   }
 
index 51e2cb01a1d26e92654a6e1f854ffd2310138e28..0af61f2139072ece32b9dde260048859d2f0210b 100644 (file)
@@ -126,6 +126,7 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
   static const int kRegisterOutput = kInputEnd + kPointerSize;
   static const int kStackHighEnd = kRegisterOutput + kPointerSize;
   static const int kDirectCall = kStackHighEnd + kPointerSize;
+  static const int kIsolate = kDirectCall + kPointerSize;
   // Below the frame pointer - local stack variables.
   // When adding local variables remember to push space for them in
   // the frame in GetCode.
index d840c0cc5c62cb59f43fb73ddc333f9ece067669..6db13d47085a93383355eec01dd4a6d07b16e9c0 100644 (file)
@@ -42,12 +42,14 @@ namespace internal {
 void Result::ToRegister() {
   ASSERT(is_valid());
   if (is_constant()) {
-    Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate();
+    CodeGenerator* code_generator =
+        CodeGeneratorScope::Current(Isolate::Current());
+    Result fresh = code_generator->allocator()->Allocate();
     ASSERT(fresh.is_valid());
     if (is_untagged_int32()) {
       fresh.set_untagged_int32(true);
       if (handle()->IsSmi()) {
-      CodeGeneratorScope::Current()->masm()->Set(
+      code_generator->masm()->Set(
           fresh.reg(),
           Immediate(Smi::cast(*handle())->value()));
       } else if (handle()->IsHeapNumber()) {
@@ -56,25 +58,23 @@ void Result::ToRegister() {
         if (double_value == 0 && signbit(double_value)) {
           // Negative zero must not be converted to an int32 unless
           // the context allows it.
-          CodeGeneratorScope::Current()->unsafe_bailout_->Branch(equal);
-          CodeGeneratorScope::Current()->unsafe_bailout_->Branch(not_equal);
+          code_generator->unsafe_bailout_->Branch(equal);
+          code_generator->unsafe_bailout_->Branch(not_equal);
         } else if (double_value == value) {
-          CodeGeneratorScope::Current()->masm()->Set(
-              fresh.reg(), Immediate(value));
+          code_generator->masm()->Set(fresh.reg(), Immediate(value));
         } else {
-          CodeGeneratorScope::Current()->unsafe_bailout_->Branch(equal);
-          CodeGeneratorScope::Current()->unsafe_bailout_->Branch(not_equal);
+          code_generator->unsafe_bailout_->Branch(equal);
+          code_generator->unsafe_bailout_->Branch(not_equal);
         }
       } else {
         // Constant is not a number.  This was not predicted by AST analysis.
-        CodeGeneratorScope::Current()->unsafe_bailout_->Branch(equal);
-        CodeGeneratorScope::Current()->unsafe_bailout_->Branch(not_equal);
+        code_generator->unsafe_bailout_->Branch(equal);
+        code_generator->unsafe_bailout_->Branch(not_equal);
       }
-    } else if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
-      CodeGeneratorScope::Current()->MoveUnsafeSmi(fresh.reg(), handle());
+    } else if (code_generator->IsUnsafeSmi(handle())) {
+      code_generator->MoveUnsafeSmi(fresh.reg(), handle());
     } else {
-      CodeGeneratorScope::Current()->masm()->Set(fresh.reg(),
-                                                 Immediate(handle()));
+      code_generator->masm()->Set(fresh.reg(), Immediate(handle()));
     }
     // This result becomes a copy of the fresh one.
     fresh.set_type_info(type_info());
@@ -85,17 +85,19 @@ void Result::ToRegister() {
 
 
 void Result::ToRegister(Register target) {
+  CodeGenerator* code_generator =
+      CodeGeneratorScope::Current(Isolate::Current());
   ASSERT(is_valid());
   if (!is_register() || !reg().is(target)) {
-    Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate(target);
+    Result fresh = code_generator->allocator()->Allocate(target);
     ASSERT(fresh.is_valid());
     if (is_register()) {
-      CodeGeneratorScope::Current()->masm()->mov(fresh.reg(), reg());
+      code_generator->masm()->mov(fresh.reg(), reg());
     } else {
       ASSERT(is_constant());
       if (is_untagged_int32()) {
         if (handle()->IsSmi()) {
-          CodeGeneratorScope::Current()->masm()->Set(
+          code_generator->masm()->Set(
               fresh.reg(),
               Immediate(Smi::cast(*handle())->value()));
         } else {
@@ -105,22 +107,20 @@ void Result::ToRegister(Register target) {
           if (double_value == 0 && signbit(double_value)) {
             // Negative zero must not be converted to an int32 unless
             // the context allows it.
-            CodeGeneratorScope::Current()->unsafe_bailout_->Branch(equal);
-            CodeGeneratorScope::Current()->unsafe_bailout_->Branch(not_equal);
+            code_generator->unsafe_bailout_->Branch(equal);
+            code_generator->unsafe_bailout_->Branch(not_equal);
           } else if (double_value == value) {
-            CodeGeneratorScope::Current()->masm()->Set(
-                fresh.reg(), Immediate(value));
+            code_generator->masm()->Set(fresh.reg(), Immediate(value));
           } else {
-            CodeGeneratorScope::Current()->unsafe_bailout_->Branch(equal);
-            CodeGeneratorScope::Current()->unsafe_bailout_->Branch(not_equal);
+            code_generator->unsafe_bailout_->Branch(equal);
+            code_generator->unsafe_bailout_->Branch(not_equal);
           }
         }
       } else {
-        if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
-          CodeGeneratorScope::Current()->MoveUnsafeSmi(fresh.reg(), handle());
+        if (code_generator->IsUnsafeSmi(handle())) {
+          code_generator->MoveUnsafeSmi(fresh.reg(), handle());
         } else {
-          CodeGeneratorScope::Current()->masm()->Set(fresh.reg(),
-                                                     Immediate(handle()));
+          code_generator->masm()->Set(fresh.reg(), Immediate(handle()));
         }
       }
     }
@@ -128,9 +128,9 @@ void Result::ToRegister(Register target) {
     fresh.set_untagged_int32(is_untagged_int32());
     *this = fresh;
   } else if (is_register() && reg().is(target)) {
-    ASSERT(CodeGeneratorScope::Current()->has_valid_frame());
-    CodeGeneratorScope::Current()->frame()->Spill(target);
-    ASSERT(CodeGeneratorScope::Current()->allocator()->count(target) == 1);
+    ASSERT(code_generator->has_valid_frame());
+    code_generator->frame()->Spill(target);
+    ASSERT(code_generator->allocator()->count(target) == 1);
   }
   ASSERT(is_register());
   ASSERT(reg().is(target));
index 43b7ea3b0cc907a8e0abf9c9ca04d4e391aaab8d..cb660cd326e589dca2752a7ff0969e14ff39b41d 100644
--- a/src/ia32/simulator-ia32.h
+++ b/src/ia32/simulator-ia32.h
@@ -40,12 +40,12 @@ namespace internal {
 
 
 typedef int (*regexp_matcher)(String*, int, const byte*,
-                              const byte*, int*, Address, int);
+                              const byte*, int*, Address, int, Isolate*);
 
 // Call the generated regexp code directly. The code at the entry address should
-// expect seven int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
-  (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
+// expect eight int/pointer sized arguments and return an int.
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+  (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
 
 
 #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
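For context, the generated regexp entry point now receives the isolate as one extra trailing argument, and every caller of CALL_GENERATED_REGEXP_CODE threads it through. The standalone sketch below (not V8 source; all names are illustrative) shows the same pattern of widening a function-pointer typedef together with its call macro:

#include <cstdio>

struct FakeIsolate { const char* name; };

// Old shape took two arguments; the new shape adds a trailing context pointer.
typedef int (*matcher_fn)(int subject_length, int start_index, FakeIsolate* isolate);

#define CALL_GENERATED_MATCHER(entry, p0, p1, p2) \
  (reinterpret_cast<matcher_fn>(entry)(p0, p1, p2))

static int DummyMatcher(int subject_length, int start_index, FakeIsolate* isolate) {
  std::printf("matching [%d..%d) for isolate '%s'\n",
              start_index, subject_length, isolate->name);
  return subject_length - start_index;  // pretend "characters consumed"
}

int main() {
  FakeIsolate isolate = {"main"};
  void* entry = reinterpret_cast<void*>(&DummyMatcher);
  // The caller now passes the isolate as the last argument, mirroring the
  // eight-argument regexp_matcher signature above.
  int matched = CALL_GENERATED_MATCHER(entry, 10, 2, &isolate);
  return matched == 8 ? 0 : 1;
}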
index 7e86a887d801ea97ea16403fdf39bc24eb2ca660..bc64085f4492ee0b7f48b8fc87caa176d555596b 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -39,14 +39,15 @@ namespace internal {
 #define __ ACCESS_MASM(masm)
 
 
-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
                        Register offset,
                        Register extra) {
-  ExternalReference key_offset(SCTableReference::keyReference(table));
-  ExternalReference value_offset(SCTableReference::valueReference(table));
+  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
 
   Label miss;
 
@@ -113,8 +114,8 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                              Register r0,
                                              Register r1) {
   ASSERT(name->IsSymbol());
-  __ IncrementCounter(&Counters::negative_lookups, 1);
-  __ IncrementCounter(&Counters::negative_lookups_miss, 1);
+  __ IncrementCounter(COUNTERS->negative_lookups(), 1);
+  __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1);
 
   Label done;
   __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
@@ -137,7 +138,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
 
   // Check that the properties array is a dictionary.
   __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
-         Immediate(Factory::hash_table_map()));
+         Immediate(FACTORY->hash_table_map()));
   __ j(not_equal, miss_label);
 
   // Compute the capacity mask.
@@ -177,7 +178,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
     ASSERT_EQ(kSmiTagSize, 1);
     __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
-    __ cmp(entity_name, Factory::undefined_value());
+    __ cmp(entity_name, FACTORY->undefined_value());
     if (i != kProbes - 1) {
       __ j(equal, &done, taken);
 
@@ -197,7 +198,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
   }
 
   __ bind(&done);
-  __ DecrementCounter(&Counters::negative_lookups_miss, 1);
+  __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
 }
 
 
@@ -208,6 +209,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
                               Register scratch,
                               Register extra,
                               Register extra2) {
+  Isolate* isolate = Isolate::Current();
   Label miss;
   USE(extra2);  // The register extra2 is not used on the ia32 platform.
 
@@ -240,7 +242,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
   __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
 
   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch, extra);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);
 
   // Primary miss: Compute hash for secondary probe.
   __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -252,7 +254,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
   __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
 
   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch, extra);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);
 
   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
@@ -274,10 +276,11 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
     MacroAssembler* masm, int index, Register prototype, Label* miss) {
   // Check we're still in the same context.
   __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
-         Top::global());
+         Isolate::Current()->global());
   __ j(not_equal, miss);
   // Get the global function with the given index.
-  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
+  JSFunction* function = JSFunction::cast(
+      Isolate::Current()->global_context()->get(index));
   // Load its initial map. The global functions all have initial maps.
   __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
   // Load the prototype from the initial map.
@@ -395,7 +398,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
                                      JSObject* holder_obj) {
   __ push(name);
   InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
-  ASSERT(!Heap::InNewSpace(interceptor));
+  ASSERT(!HEAP->InNewSpace(interceptor));
   Register scratch = name;
   __ mov(scratch, Immediate(Handle<Object>(interceptor)));
   __ push(scratch);
@@ -480,7 +483,7 @@ static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
   __ mov(Operand(esp, 2 * kPointerSize), edi);
   Object* call_data = optimization.api_call_info()->data();
   Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
-  if (Heap::InNewSpace(call_data)) {
+  if (HEAP->InNewSpace(call_data)) {
     __ mov(ecx, api_call_info_handle);
     __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
     __ mov(Operand(esp, 3 * kPointerSize), ebx);
@@ -574,7 +577,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                      name,
                      holder,
                      miss);
-      return Heap::undefined_value();  // Success.
+      return HEAP->undefined_value();  // Success.
     }
   }
 
@@ -610,10 +613,10 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                              (depth2 != kInvalidProtoDepth);
     }
 
-    __ IncrementCounter(&Counters::call_const_interceptor, 1);
+    __ IncrementCounter(COUNTERS->call_const_interceptor(), 1);
 
     if (can_do_fast_api_call) {
-      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1);
+      __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1);
       ReserveSpaceForFastApiCall(masm, scratch1);
     }
 
@@ -672,7 +675,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
       FreeSpaceForFastApiCall(masm, scratch1);
     }
 
-    return Heap::undefined_value();  // Success.
+    return HEAP->undefined_value();  // Success.
   }
 
   void CompileRegular(MacroAssembler* masm,
@@ -728,7 +731,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
     __ pop(receiver);  // Restore the holder.
     __ LeaveInternalFrame();
 
-    __ cmp(eax, Factory::no_interceptor_result_sentinel());
+    __ cmp(eax, FACTORY->no_interceptor_result_sentinel());
     __ j(not_equal, interceptor_succeeded);
   }
 
@@ -742,9 +745,9 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
   Code* code = NULL;
   if (kind == Code::LOAD_IC) {
-    code = Builtins::builtin(Builtins::LoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
   } else {
-    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
   }
 
   Handle<Code> ic(code);
@@ -851,10 +854,10 @@ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
   if (Serializer::enabled()) {
     __ mov(scratch, Immediate(Handle<Object>(cell)));
     __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
-           Immediate(Factory::the_hole_value()));
+           Immediate(FACTORY->the_hole_value()));
   } else {
     __ cmp(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
-           Immediate(Factory::the_hole_value()));
+           Immediate(FACTORY->the_hole_value()));
   }
   __ j(not_equal, miss, not_taken);
   return cell;
@@ -930,7 +933,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
         !current->IsJSGlobalObject() &&
         !current->IsJSGlobalProxy()) {
       if (!name->IsSymbol()) {
-        MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
+        MaybeObject* maybe_lookup_result = HEAP->LookupSymbol(name);
         Object* lookup_result = NULL;  // Initialization to please compiler.
         if (!maybe_lookup_result->ToObject(&lookup_result)) {
           set_failure(Failure::cast(maybe_lookup_result));
@@ -950,7 +953,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
       __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
       reg = holder_reg;  // from now the object is in holder_reg
       __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
-    } else if (Heap::InNewSpace(prototype)) {
+    } else if (HEAP->InNewSpace(prototype)) {
       // Get the map of the current object.
       __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
       __ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map())));
@@ -997,7 +1000,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
   ASSERT(current == holder);
 
   // Log the check depth.
-  LOG(IntEvent("check-maps-depth", depth + 1));
+  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
 
   // Check the holder map.
   __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
@@ -1080,7 +1083,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
   ASSERT(!scratch2.is(reg));
   __ push(reg);  // holder
   // Push data from AccessorInfo.
-  if (Heap::InNewSpace(callback_handle->data())) {
+  if (HEAP->InNewSpace(callback_handle->data())) {
     __ mov(scratch1, Immediate(callback_handle));
     __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));
   } else {
@@ -1204,7 +1207,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
     // Check if interceptor provided a value for property.  If it's
     // the case, return immediately.
     Label interceptor_failed;
-    __ cmp(eax, Factory::no_interceptor_result_sentinel());
+    __ cmp(eax, FACTORY->no_interceptor_result_sentinel());
     __ j(equal, &interceptor_failed);
     __ LeaveInternalFrame();
     __ ret(0);
@@ -1325,7 +1328,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
   }
 
   // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
+  if (HEAP->InNewSpace(function)) {
     // We can't embed a pointer to a function in new space so we have
     // to verify that the shared function info is unchanged. This has
     // the nice side effect that multiple closures based on the same
@@ -1348,8 +1351,9 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
 
 
 MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
-                                                      kind_);
+  MaybeObject* maybe_obj =
+      Isolate::Current()->stub_cache()->ComputeCallMiss(
+          arguments().immediate(), kind_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1429,7 +1433,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss;
 
@@ -1459,7 +1463,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
 
     // Check that the elements are in fast mode and writable.
     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-           Immediate(Factory::fixed_array_map()));
+           Immediate(FACTORY->fixed_array_map()));
     __ j(not_equal, &call_builtin);
 
     if (argc == 1) {  // Otherwise fall through to call builtin.
@@ -1535,7 +1539,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
       // ... and fill the rest with holes.
       for (int i = 1; i < kAllocationDelta; i++) {
         __ mov(Operand(edx, i * kPointerSize),
-               Immediate(Factory::the_hole_value()));
+               Immediate(FACTORY->the_hole_value()));
       }
 
       // Restore receiver to edx as finish sequence assumes it's here.
@@ -1581,7 +1585,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss, return_undefined, call_builtin;
 
@@ -1603,7 +1607,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
 
   // Check that the elements are in fast mode and writable.
   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(FACTORY->fixed_array_map()));
   __ j(not_equal, &call_builtin);
 
   // Get the array's length into ecx and calculate new length.
@@ -1617,7 +1621,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   __ mov(eax, FieldOperand(ebx,
                            ecx, times_half_pointer_size,
                            FixedArray::kHeaderSize));
-  __ cmp(Operand(eax), Immediate(Factory::the_hole_value()));
+  __ cmp(Operand(eax), Immediate(FACTORY->the_hole_value()));
   __ j(equal, &call_builtin);
 
   // Set the array's length.
@@ -1627,11 +1631,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   __ mov(FieldOperand(ebx,
                       ecx, times_half_pointer_size,
                       FixedArray::kHeaderSize),
-         Immediate(Factory::the_hole_value()));
+         Immediate(FACTORY->the_hole_value()));
   __ ret((argc + 1) * kPointerSize);
 
   __ bind(&return_undefined);
-  __ mov(eax, Immediate(Factory::undefined_value()));
+  __ mov(eax, Immediate(FACTORY->undefined_value()));
   __ ret((argc + 1) * kPointerSize);
 
   __ bind(&call_builtin);
@@ -1665,7 +1669,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1697,7 +1701,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
   if (argc > 0) {
     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   } else {
-    __ Set(index, Immediate(Factory::undefined_value()));
+    __ Set(index, Immediate(FACTORY->undefined_value()));
   }
 
   StringCharCodeAtGenerator char_code_at_generator(receiver,
@@ -1716,7 +1720,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
 
   if (index_out_of_range.is_linked()) {
     __ bind(&index_out_of_range);
-    __ Set(eax, Immediate(Factory::nan_value()));
+    __ Set(eax, Immediate(FACTORY->nan_value()));
     __ ret((argc + 1) * kPointerSize);
   }
 
@@ -1749,7 +1753,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1782,7 +1786,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
   if (argc > 0) {
     __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
   } else {
-    __ Set(index, Immediate(Factory::undefined_value()));
+    __ Set(index, Immediate(FACTORY->undefined_value()));
   }
 
   StringCharAtGenerator char_at_generator(receiver,
@@ -1802,7 +1806,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
 
   if (index_out_of_range.is_linked()) {
     __ bind(&index_out_of_range);
-    __ Set(eax, Immediate(Factory::empty_string()));
+    __ Set(eax, Immediate(FACTORY->empty_string()));
     __ ret((argc + 1) * kPointerSize);
   }
 
@@ -1838,7 +1842,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1908,14 +1912,15 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
 
-  if (!CpuFeatures::IsSupported(SSE2)) return Heap::undefined_value();
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2))
+    return HEAP->undefined_value();
   CpuFeatures::Scope use_sse2(SSE2);
 
   const int argc = arguments().immediate();
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1946,7 +1951,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
 
   // Check if the argument is a heap number and load its value into xmm0.
   Label slow;
-  __ CheckMap(eax, Factory::heap_number_map(), &slow, true);
+  __ CheckMap(eax, FACTORY->heap_number_map(), &slow, true);
   __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
 
   // Check if the argument is strictly positive. Note this also
@@ -2039,7 +2044,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -2090,7 +2095,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
   // Check if the argument is a heap number and load its exponent and
   // sign into ebx.
   __ bind(&not_smi);
-  __ CheckMap(eax, Factory::heap_number_map(), &slow, true);
+  __ CheckMap(eax, FACTORY->heap_number_map(), &slow, true);
   __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));
 
   // Check the sign of the argument. If the argument is positive,
@@ -2137,11 +2142,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   ASSERT(optimization.is_simple_api_call());
   // Bail out if object is a global object as we don't want to
   // repatch it to global receiver.
-  if (object->IsGlobalObject()) return Heap::undefined_value();
-  if (cell != NULL) return Heap::undefined_value();
+  if (object->IsGlobalObject()) return HEAP->undefined_value();
+  if (cell != NULL) return HEAP->undefined_value();
   int depth = optimization.GetPrototypeDepthOfExpectedType(
             JSObject::cast(object), holder);
-  if (depth == kInvalidProtoDepth) return Heap::undefined_value();
+  if (depth == kInvalidProtoDepth) return HEAP->undefined_value();
 
   Label miss, miss_before_stack_reserved;
 
@@ -2155,8 +2160,8 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   __ test(edx, Immediate(kSmiTagMask));
   __ j(zero, &miss_before_stack_reserved, not_taken);
 
-  __ IncrementCounter(&Counters::call_const, 1);
-  __ IncrementCounter(&Counters::call_const_fast_api, 1);
+  __ IncrementCounter(COUNTERS->call_const(), 1);
+  __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
 
   // Allocate space for v8::Arguments implicit values. Must be initialized
   // before calling any runtime function.
@@ -2232,7 +2237,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
   SharedFunctionInfo* function_info = function->shared();
   switch (check) {
     case RECEIVER_MAP_CHECK:
-      __ IncrementCounter(&Counters::call_const, 1);
+      __ IncrementCounter(COUNTERS->call_const(), 1);
 
       // Check that the maps haven't changed.
       CheckPrototypes(JSObject::cast(object), edx, holder,
@@ -2293,9 +2298,9 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
       } else {
         Label fast;
         // Check that the object is a boolean.
-        __ cmp(edx, Factory::true_value());
+        __ cmp(edx, FACTORY->true_value());
         __ j(equal, &fast, taken);
-        __ cmp(edx, Factory::false_value());
+        __ cmp(edx, FACTORY->false_value());
         __ j(not_equal, &miss, not_taken);
         __ bind(&fast);
         // Check that the maps starting from the prototype haven't changed.
@@ -2436,7 +2441,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
 
   // Jump to the cached code (tail call).
-  __ IncrementCounter(&Counters::call_global_inline, 1);
+  __ IncrementCounter(COUNTERS->call_global_inline(), 1);
   ASSERT(function->is_compiled());
   ParameterCount expected(function->shared()->formal_parameter_count());
   if (V8::UseCrankshaft()) {
@@ -2453,7 +2458,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
 
   // Handle call cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::call_global_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1);
   Object* obj;
   { MaybeObject* maybe_obj = GenerateMissBranch();
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -2487,7 +2492,8 @@ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
   // Handle store cache miss.
   __ bind(&miss);
   __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2538,7 +2544,8 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2588,7 +2595,8 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2624,20 +2632,21 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
   // cell could have been deleted and reintroducing the global needs
   // to update the property details in the property dictionary of the
   // global object. We bail out to the runtime system to do that.
-  __ cmp(cell_operand, Factory::the_hole_value());
+  __ cmp(cell_operand, FACTORY->the_hole_value());
   __ j(equal, &miss);
 
   // Store the value in the cell.
   __ mov(cell_operand, eax);
 
   // Return the value (register eax).
-  __ IncrementCounter(&Counters::named_store_global_inline, 1);
+  __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
   __ ret(0);
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2657,7 +2666,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_store_field, 1);
+  __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
@@ -2673,8 +2682,9 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_store_field, 1);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedStoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2708,7 +2718,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
   // Get the elements array and make sure it is a fast element array, not 'cow'.
   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
   __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
-         Immediate(Factory::fixed_array_map()));
+         Immediate(FACTORY->fixed_array_map()));
   __ j(not_equal, &miss, not_taken);
 
   // Check that the key is within bounds.
@@ -2731,7 +2741,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  Handle<Code> ic(
+      Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2776,14 +2787,14 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
 
   // Return undefined if maps of the full prototype chain are still the
   // same and no global property with this name contains a value.
-  __ mov(eax, Factory::undefined_value());
+  __ mov(eax, FACTORY->undefined_value());
   __ ret(0);
 
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, Heap::empty_string());
+  return GetCode(NONEXISTENT, HEAP->empty_string());
 }
 
 
@@ -2920,19 +2931,19 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
 
   // Check for deleted property if property can actually be deleted.
   if (!is_dont_delete) {
-    __ cmp(ebx, Factory::the_hole_value());
+    __ cmp(ebx, FACTORY->the_hole_value());
     __ j(equal, &miss, not_taken);
   } else if (FLAG_debug_code) {
-    __ cmp(ebx, Factory::the_hole_value());
+    __ cmp(ebx, FACTORY->the_hole_value());
     __ Check(not_equal, "DontDelete cells can't contain the hole");
   }
 
-  __ IncrementCounter(&Counters::named_load_global_stub, 1);
+  __ IncrementCounter(COUNTERS->named_load_global_stub(), 1);
   __ mov(eax, ebx);
   __ ret(0);
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
+  __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
@@ -2951,7 +2962,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_field, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -2960,7 +2971,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
   GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
 
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_field, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2980,7 +2991,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_callback, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -2995,7 +3006,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
 
   __ bind(&miss);
 
-  __ DecrementCounter(&Counters::keyed_load_callback, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3014,7 +3025,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_constant_function(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -3023,7 +3034,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
                        value, name, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3041,7 +3052,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_interceptor(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -3060,7 +3071,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                           name,
                           &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3076,7 +3087,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_array_length, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -3084,7 +3095,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
 
   GenerateLoadArrayLength(masm(), edx, ecx, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_array_length, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3100,7 +3111,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_string_length, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -3108,7 +3119,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
 
   GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_string_length, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3124,7 +3135,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
 
   // Check that the name has not changed.
   __ cmp(Operand(eax), Immediate(Handle<String>(name)));
@@ -3132,7 +3143,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
 
   GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3172,7 +3183,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
   // Load the result and make sure it's not the hole.
   __ mov(ebx, Operand(ecx, eax, times_2,
                       FixedArray::kHeaderSize - kHeapObjectTag));
-  __ cmp(ebx, Factory::the_hole_value());
+  __ cmp(ebx, FACTORY->the_hole_value());
   __ j(equal, &miss, not_taken);
   __ mov(eax, ebx);
   __ ret(0);
@@ -3201,7 +3212,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   // code for the function thereby hitting the break points.
   __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
-  __ cmp(ebx, Factory::undefined_value());
+  __ cmp(ebx, FACTORY->undefined_value());
   __ j(not_equal, &generic_stub_call, not_taken);
 #endif
 
@@ -3238,7 +3249,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   // ebx: initial map
   // edx: JSObject (untagged)
   __ mov(Operand(edx, JSObject::kMapOffset), ebx);
-  __ mov(ebx, Factory::empty_fixed_array());
+  __ mov(ebx, FACTORY->empty_fixed_array());
   __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx);
   __ mov(Operand(edx, JSObject::kElementsOffset), ebx);
 
@@ -3255,7 +3266,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize));
 
   // Use edi for holding undefined which is used in several places below.
-  __ mov(edi, Factory::undefined_value());
+  __ mov(edi, FACTORY->undefined_value());
 
   // eax: argc
   // ecx: first argument
@@ -3272,7 +3283,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
       int arg_number = shared->GetThisPropertyAssignmentArgument(i);
       __ mov(ebx, edi);
       __ cmp(eax, arg_number);
-      if (CpuFeatures::IsSupported(CMOV)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(CMOV)) {
         CpuFeatures::Scope use_cmov(CMOV);
         __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
       } else {
@@ -3307,14 +3318,15 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   __ pop(ecx);
   __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
   __ push(ecx);
-  __ IncrementCounter(&Counters::constructed_objects, 1);
-  __ IncrementCounter(&Counters::constructed_objects_stub, 1);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1);
+  __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1);
   __ ret(0);
 
   // Jump to the generic stub in case the specialized code cannot handle the
   // construction.
   __ bind(&generic_stub_call);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
 
@@ -3455,7 +3467,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
 
   // Slow case: Jump to runtime.
   __ bind(&slow);
-  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1);
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3568,7 +3580,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
     // edi: elements array
     // ebx: untagged index
     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
-           Immediate(Factory::heap_number_map()));
+           Immediate(FACTORY->heap_number_map()));
     __ j(not_equal, &slow);
 
     // The WebGL specification leaves the behavior of storing NaN and
@@ -3589,10 +3601,10 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
       // processors that don't support SSE2. The code in IntegerConvert
       // (code-stubs-ia32.cc) is roughly what is needed here though the
       // conversion failure case does not need to be handled.
-      if (CpuFeatures::IsSupported(SSE2)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
         if (array_type != kExternalIntArray &&
             array_type != kExternalUnsignedIntArray) {
-          ASSERT(CpuFeatures::IsSupported(SSE2));
+          ASSERT(Isolate::Current()->cpu_features()->IsSupported(SSE2));
           CpuFeatures::Scope scope(SSE2);
           __ cvttsd2si(ecx, FieldOperand(eax, HeapNumber::kValueOffset));
           // ecx: untagged integer value
@@ -3620,7 +3632,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
               break;
           }
         } else {
-          if (CpuFeatures::IsSupported(SSE3)) {
+          if (Isolate::Current()->cpu_features()->IsSupported(SSE3)) {
             CpuFeatures::Scope scope(SSE3);
             // fisttp stores values as signed integers. To represent the
             // entire range of int and unsigned int arrays, store as a
@@ -3633,7 +3645,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
             __ pop(ecx);
             __ add(Operand(esp), Immediate(kPointerSize));
           } else {
-            ASSERT(CpuFeatures::IsSupported(SSE2));
+            ASSERT(Isolate::Current()->cpu_features()->IsSupported(SSE2));
             CpuFeatures::Scope scope(SSE2);
             // We can easily implement the correct rounding behavior for the
             // range [0, 2^31-1]. For the time being, to keep this code simple,
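For context, the recurring substitution in this file, where Counters::foo, Factory::bar and Heap::baz become COUNTERS->foo(), FACTORY->bar() and HEAP->baz(), reaches formerly process-wide singletons through the current isolate instead of through class-level statics. A minimal standalone sketch of that accessor pattern, assuming a thread-local "current isolate" pointer (illustrative names only, not V8 source):

#include <cassert>
#include <cstdio>

struct Counters {
  int negative_lookups = 0;
};

class Isolate {
 public:
  static Isolate* Current() {
    assert(current_ != nullptr && "no isolate entered on this thread");
    return current_;
  }
  void Enter() { current_ = this; }
  Counters* counters() { return &counters_; }

 private:
  static thread_local Isolate* current_;
  Counters counters_;
};

thread_local Isolate* Isolate::current_ = nullptr;

// Convenience macro in the spirit of COUNTERS / FACTORY / HEAP.
#define COUNTERS (Isolate::Current()->counters())

int main() {
  Isolate a, b;
  a.Enter();
  COUNTERS->negative_lookups++;   // bumps isolate a's counter
  b.Enter();
  COUNTERS->negative_lookups++;   // bumps isolate b's counter
  std::printf("a=%d b=%d\n",
              a.counters()->negative_lookups,
              b.counters()->negative_lookups);
  return 0;
}

Each isolate carries its own Counters instance, so the same call site updates different state depending on which isolate the thread has entered.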
index 93d711e9362b03cc9967491af063f43a2e7a1179..317028ddcea5902a5418eafc9d840e9718408f93 100644
--- a/src/ia32/virtual-frame-ia32.cc
+++ b/src/ia32/virtual-frame-ia32.cc
@@ -501,7 +501,7 @@ void VirtualFrame::AllocateStackSlots() {
     // them later.  First sync everything above the stack pointer so we can
     // use pushes to allocate and initialize the locals.
     SyncRange(stack_pointer_ + 1, element_count() - 1);
-    Handle<Object> undefined = Factory::undefined_value();
+    Handle<Object> undefined = FACTORY->undefined_value();
     FrameElement initial_value =
         FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
     if (count == 1) {
@@ -824,11 +824,11 @@ void VirtualFrame::UntaggedPushFrameSlotAt(int index) {
         __ bind(&not_smi);
         if (!original.type_info().IsNumber()) {
           __ cmp(FieldOperand(fresh_reg, HeapObject::kMapOffset),
-                 Factory::heap_number_map());
+                 FACTORY->heap_number_map());
           cgen()->unsafe_bailout_->Branch(not_equal);
         }
 
-        if (!CpuFeatures::IsSupported(SSE2)) {
+        if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
           UNREACHABLE();
         } else {
           CpuFeatures::Scope use_sse2(SSE2);
@@ -931,7 +931,7 @@ Result VirtualFrame::CallJSFunction(int arg_count) {
 }
 
 
-Result VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
+Result VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
   PrepareForCall(arg_count, arg_count);
   ASSERT(cgen()->HasValidEntryRegisters());
   __ CallRuntime(f, arg_count);
@@ -1016,7 +1016,8 @@ Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
   PrepareForCall(0, 0);  // No stack arguments.
   MoveResultsToRegisters(&name, &receiver, ecx, eax);
 
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   return RawCallCodeObject(ic, mode);
 }
 
@@ -1028,7 +1029,8 @@ Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
   PrepareForCall(0, 0);
   MoveResultsToRegisters(&key, &receiver, eax, edx);
 
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   return RawCallCodeObject(ic, mode);
 }
 
@@ -1038,7 +1040,7 @@ Result VirtualFrame::CallStoreIC(Handle<String> name,
                                  StrictModeFlag strict_mode) {
   // Value and (if not contextual) receiver are on top of the frame.
   // The IC expects name in ecx, value in eax, and receiver in edx.
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                    : Builtins::StoreIC_Initialize));
 
@@ -1105,7 +1107,7 @@ Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
     receiver.Unuse();
   }
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                    : Builtins::KeyedStoreIC_Initialize));
   return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
@@ -1119,7 +1121,8 @@ Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
   // The IC expects the name in ecx and the rest on the stack and
   // drops them all.
   InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+  Handle<Code> ic = Isolate::Current()->stub_cache()->ComputeCallInitialize(
+      arg_count, in_loop);
   // Spill args, receiver, and function.  The call will drop args and
   // receiver.
   Result name = Pop();
@@ -1137,7 +1140,9 @@ Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
   // The IC expects the name in ecx and the rest on the stack and
   // drops them all.
   InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      Isolate::Current()->stub_cache()->ComputeKeyedCallInitialize(arg_count,
+                                                                   in_loop);
   // Spill args, receiver, and function.  The call will drop args and
   // receiver.
   Result name = Pop();
@@ -1152,7 +1157,8 @@ Result VirtualFrame::CallConstructor(int arg_count) {
   // Arguments, receiver, and function are on top of the frame.  The
   // IC expects arg count in eax, function in edi, and the arguments
   // and receiver on the stack.
-  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructCall));
   // Duplicate the function before preparing the frame.
   PushElementAt(arg_count);
   Result function = Pop();
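For context, CallRuntime now takes a const Runtime::Function*, consistent with the runtime descriptors being able to live in a single read-only table shared by all isolates rather than in mutable per-process state. A standalone sketch of that shape (illustrative names and table contents, not V8 source):

#include <cstdio>

struct RuntimeFunction {
  const char* name;
  int nargs;
};

// One immutable table for the whole process; no per-isolate copy required.
static const RuntimeFunction kRuntimeFunctions[] = {
    {"Runtime_StringAdd", 2},
    {"Runtime_NumberToString", 1},
};

// Callers receive a pointer to const, mirroring
// VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count).
void CallRuntime(const RuntimeFunction* f, int arg_count) {
  std::printf("calling %s with %d args (expects %d)\n",
              f->name, arg_count, f->nargs);
}

int main() {
  CallRuntime(&kRuntimeFunctions[0], 2);
  return 0;
}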
index 51874309d5c5ad1452b4f4ab5f0dc6fc2672f5d8..504a8fc33c9fcc56623e9a78ca3cc33197adaeb3 100644
--- a/src/ia32/virtual-frame-ia32.h
+++ b/src/ia32/virtual-frame-ia32.h
@@ -67,7 +67,9 @@ class VirtualFrame: public ZoneObject {
    private:
     bool previous_state_;
 
-    CodeGenerator* cgen() {return CodeGeneratorScope::Current();}
+    CodeGenerator* cgen() {
+      return CodeGeneratorScope::Current(Isolate::Current());
+    }
   };
 
   // An illegal index into the virtual frame.
@@ -79,7 +81,9 @@ class VirtualFrame: public ZoneObject {
   // Construct a virtual frame as a clone of an existing one.
   explicit inline VirtualFrame(VirtualFrame* original);
 
-  CodeGenerator* cgen() { return CodeGeneratorScope::Current(); }
+  CodeGenerator* cgen() {
+    return CodeGeneratorScope::Current(Isolate::Current());
+  }
 
   MacroAssembler* masm() { return cgen()->masm(); }
 
@@ -344,7 +348,7 @@ class VirtualFrame: public ZoneObject {
 
   // Call runtime given the number of arguments expected on (and
   // removed from) the stack.
-  Result CallRuntime(Runtime::Function* f, int arg_count);
+  Result CallRuntime(const Runtime::Function* f, int arg_count);
   Result CallRuntime(Runtime::FunctionId id, int arg_count);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
index 9d358eddb31a3dc887078c4ce784e0aff4d3fe6f..b4f789cb44b97b575ea73268973f8d220946c690 100644
--- a/src/ic-inl.h
+++ b/src/ic-inl.h
@@ -41,13 +41,14 @@ Address IC::address() {
   Address result = pc() - Assembler::kCallTargetAddressOffset;
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
+  Debug* debug = Isolate::Current()->debug();
   // First check if any break points are active if not just return the address
   // of the call.
-  if (!Debug::has_break_points()) return result;
+  if (!debug->has_break_points()) return result;
 
   // At least one break point is active perform additional test to ensure that
   // break point locations are updated correctly.
-  if (Debug::IsDebugBreak(Assembler::target_address_at(result))) {
+  if (debug->IsDebugBreak(Assembler::target_address_at(result))) {
     // If the call site is a call to debug break then return the address in
     // the original code instead of the address in the running code. This will
     // cause the original code to be updated and keeps the breakpoint active in
index 38585a9df8618cc4a496810fe6970f82fbb7bf9b..7e87b1b5fb45c315116a10cd16ce7cc6e8180ffa 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -65,8 +65,8 @@ void IC::TraceIC(const char* type,
                  const char* extra_info) {
   if (FLAG_trace_ic) {
     State new_state = StateFrom(new_target,
-                                Heap::undefined_value(),
-                                Heap::undefined_value());
+                                HEAP->undefined_value(),
+                                HEAP->undefined_value());
     PrintF("[%s (%c->%c)%s", type,
            TransitionMarkFromState(old_state),
            TransitionMarkFromState(new_state),
@@ -78,11 +78,13 @@ void IC::TraceIC(const char* type,
 #endif
 
 
-IC::IC(FrameDepth depth) {
+IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) {
+  ASSERT(isolate == Isolate::Current());
   // To improve the performance of the (much used) IC code, we unfold
   // a few levels of the stack frame iteration code. This yields a
   // ~35% speedup when running DeltaBlue with the '--nouse-ic' flag.
-  const Address entry = Top::c_entry_fp(Top::GetCurrentThread());
+  const Address entry =
+      Isolate::c_entry_fp(isolate->thread_local_top());
   Address* pc_address =
       reinterpret_cast<Address*>(entry + ExitFrameConstants::kCallerPCOffset);
   Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
@@ -136,9 +138,11 @@ Address IC::OriginalCodeAddress() {
 #endif
 
 
-static bool HasNormalObjectsInPrototypeChain(LookupResult* lookup,
+static bool HasNormalObjectsInPrototypeChain(Isolate* isolate,
+                                             LookupResult* lookup,
                                              Object* receiver) {
-  Object* end = lookup->IsProperty() ? lookup->holder() : Heap::null_value();
+  Object* end = lookup->IsProperty()
+      ? lookup->holder() : isolate->heap()->null_value();
   for (Object* current = receiver;
        current != end;
        current = current->GetPrototype()) {
@@ -231,7 +235,7 @@ IC::State IC::StateFrom(Code* target, Object* receiver, Object* name) {
 
 RelocInfo::Mode IC::ComputeMode() {
   Address addr = address();
-  Code* code = Code::cast(Heap::FindCodeObject(addr));
+  Code* code = Code::cast(isolate()->heap()->FindCodeObject(addr));
   for (RelocIterator it(code, RelocInfo::kCodeTargetMask);
        !it.done(); it.next()) {
     RelocInfo* info = it.rinfo();
@@ -245,18 +249,19 @@ RelocInfo::Mode IC::ComputeMode() {
 Failure* IC::TypeError(const char* type,
                        Handle<Object> object,
                        Handle<Object> key) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   Handle<Object> args[2] = { key, object };
-  Handle<Object> error = Factory::NewTypeError(type, HandleVector(args, 2));
-  return Top::Throw(*error);
+  Handle<Object> error = isolate()->factory()->NewTypeError(
+      type, HandleVector(args, 2));
+  return isolate()->Throw(*error);
 }
 
 
 Failure* IC::ReferenceError(const char* type, Handle<String> name) {
-  HandleScope scope;
-  Handle<Object> error =
-      Factory::NewReferenceError(type, HandleVector(&name, 1));
-  return Top::Throw(*error);
+  HandleScope scope(isolate());
+  Handle<Object> error = isolate()->factory()->NewReferenceError(
+      type, HandleVector(&name, 1));
+  return isolate()->Throw(*error);
 }
 
 
@@ -292,9 +297,10 @@ void CallICBase::Clear(Address address, Code* target) {
   State state = target->ic_state();
   if (state == UNINITIALIZED) return;
   Code* code =
-      StubCache::FindCallInitialize(target->arguments_count(),
-                                    target->ic_in_loop(),
-                                    target->kind());
+      Isolate::Current()->stub_cache()->FindCallInitialize(
+          target->arguments_count(),
+          target->ic_in_loop(),
+          target->kind());
   SetTargetAtAddress(address, code);
 }
 
@@ -302,7 +308,7 @@ void CallICBase::Clear(Address address, Code* target) {
 void KeyedLoadIC::ClearInlinedVersion(Address address) {
   // Insert null as the map to check for to make sure the map check fails
   // sending control flow to the IC instead of the inlined version.
-  PatchInlinedLoad(address, Heap::null_value());
+  PatchInlinedLoad(address, HEAP->null_value());
 }
 
 
@@ -320,10 +326,11 @@ void LoadIC::ClearInlinedVersion(Address address) {
   // Reset the map check of the inlined inobject property load (if
   // present) to guarantee failure by holding an invalid map (the null
   // value).  The offset can be patched to anything.
-  PatchInlinedLoad(address, Heap::null_value(), 0);
+  Heap* heap = HEAP;
+  PatchInlinedLoad(address, heap->null_value(), 0);
   PatchInlinedContextualLoad(address,
-                             Heap::null_value(),
-                             Heap::null_value(),
+                             heap->null_value(),
+                             heap->null_value(),
                              true);
 }
 
@@ -339,7 +346,7 @@ void StoreIC::ClearInlinedVersion(Address address) {
   // Reset the map check of the inlined inobject property store (if
   // present) to guarantee failure by holding an invalid map (the null
   // value).  The offset can be patched to anything.
-  PatchInlinedStore(address, Heap::null_value(), 0);
+  PatchInlinedStore(address, HEAP->null_value(), 0);
 }
 
 
@@ -357,14 +364,14 @@ void KeyedStoreIC::ClearInlinedVersion(Address address) {
   // Insert null as the elements map to check for.  This will make
   // sure that the elements fast-case map check fails so that control
   // flows to the IC instead of the inlined version.
-  PatchInlinedStore(address, Heap::null_value());
+  PatchInlinedStore(address, HEAP->null_value());
 }
 
 
 void KeyedStoreIC::RestoreInlinedVersion(Address address) {
   // Restore the fast-case elements map check so that the inlined
   // version can be used again.
-  PatchInlinedStore(address, Heap::fixed_array_map());
+  PatchInlinedStore(address, HEAP->fixed_array_map());
 }
 
 
@@ -423,8 +430,8 @@ static void LookupForRead(Object* object,
 
 
 Object* CallICBase::TryCallAsFunction(Object* object) {
-  HandleScope scope;
-  Handle<Object> target(object);
+  HandleScope scope(isolate());
+  Handle<Object> target(object, isolate());
   Handle<Object> delegate = Execution::GetFunctionDelegate(target);
 
   if (delegate->IsJSFunction()) {
@@ -459,7 +466,7 @@ void CallICBase::ReceiverToObjectIfRequired(Handle<Object> callee,
     StackFrameLocator locator;
     JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
     int index = frame->ComputeExpressionsCount() - (argc + 1);
-    frame->SetExpression(index, *Factory::ToObject(object));
+    frame->SetExpression(index, *isolate()->factory()->ToObject(object));
   }
 }
 
@@ -531,7 +538,7 @@ MaybeObject* CallICBase::LoadFunction(State state,
 
   ASSERT(!result->IsTheHole());
 
-  HandleScope scope;
+  HandleScope scope(isolate());
   // Wrap the result in a handle because ReceiverToObjectIfRequired may
   // allocate a new object and cause a GC.
   Handle<Object> result_handle(result);
@@ -543,11 +550,12 @@ MaybeObject* CallICBase::LoadFunction(State state,
   if (result_handle->IsJSFunction()) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
     // Handle stepping into a function if step into is active.
-    if (Debug::StepInActive()) {
+    Debug* debug = isolate()->debug();
+    if (debug->StepInActive()) {
       // Protect the result in a handle as the debugger can allocate and might
       // cause GC.
-      Handle<JSFunction> function(JSFunction::cast(*result_handle));
-      Debug::HandleStepIn(function, object, fp(), false);
+      Handle<JSFunction> function(JSFunction::cast(*result_handle), isolate());
+      debug->HandleStepIn(function, object, fp(), false);
       return *function;
     }
 #endif
@@ -573,7 +581,7 @@ bool CallICBase::TryUpdateExtraICState(LookupResult* lookup,
 
   // Fetch the arguments passed to the called function.
   const int argc = target()->arguments_count();
-  Address entry = Top::c_entry_fp(Top::GetCurrentThread());
+  Address entry = isolate()->c_entry_fp(isolate()->thread_local_top());
   Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
   Arguments args(argc + 1,
                  &Memory::Object_at(fp +
@@ -623,13 +631,13 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
   switch (lookup->type()) {
     case FIELD: {
       int index = lookup->GetFieldIndex();
-      maybe_code = StubCache::ComputeCallField(argc,
-                                               in_loop,
-                                               kind_,
-                                               *name,
-                                               *object,
-                                               lookup->holder(),
-                                               index);
+      maybe_code = isolate()->stub_cache()->ComputeCallField(argc,
+                                                             in_loop,
+                                                             kind_,
+                                                             *name,
+                                                             *object,
+                                                             lookup->holder(),
+                                                             index);
       break;
     }
     case CONSTANT_FUNCTION: {
@@ -637,14 +645,15 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
       // call; used for rewriting to monomorphic state and making sure
       // that the code stub is in the stub cache.
       JSFunction* function = lookup->GetConstantFunction();
-      maybe_code = StubCache::ComputeCallConstant(argc,
-                                                  in_loop,
-                                                  kind_,
-                                                  extra_ic_state,
-                                                  *name,
-                                                  *object,
-                                                  lookup->holder(),
-                                                  function);
+      maybe_code =
+          isolate()->stub_cache()->ComputeCallConstant(argc,
+                                                       in_loop,
+                                                       kind_,
+                                                       extra_ic_state,
+                                                       *name,
+                                                       *object,
+                                                       lookup->holder(),
+                                                       function);
       break;
     }
     case NORMAL: {
@@ -657,35 +666,36 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
             JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
         if (!cell->value()->IsJSFunction()) return NULL;
         JSFunction* function = JSFunction::cast(cell->value());
-        maybe_code = StubCache::ComputeCallGlobal(argc,
-                                                  in_loop,
-                                                  kind_,
-                                                  *name,
-                                                  *receiver,
-                                                  global,
-                                                  cell,
-                                                  function);
+        maybe_code = isolate()->stub_cache()->ComputeCallGlobal(argc,
+                                                                in_loop,
+                                                                kind_,
+                                                                *name,
+                                                                *receiver,
+                                                                global,
+                                                                cell,
+                                                                function);
       } else {
         // There is only one shared stub for calling normalized
         // properties. It does not traverse the prototype chain, so the
         // property must be found in the receiver for the stub to be
         // applicable.
         if (lookup->holder() != *receiver) return NULL;
-        maybe_code = StubCache::ComputeCallNormal(argc,
-                                                  in_loop,
-                                                  kind_,
-                                                  *name,
-                                                  *receiver);
+        maybe_code = isolate()->stub_cache()->ComputeCallNormal(argc,
+                                                                in_loop,
+                                                                kind_,
+                                                                *name,
+                                                                *receiver);
       }
       break;
     }
     case INTERCEPTOR: {
       ASSERT(HasInterceptorGetter(lookup->holder()));
-      maybe_code = StubCache::ComputeCallInterceptor(argc,
-                                                     kind_,
-                                                     *name,
-                                                     *object,
-                                                     lookup->holder());
+      maybe_code = isolate()->stub_cache()->ComputeCallInterceptor(
+          argc,
+          kind_,
+          *name,
+          *object,
+          lookup->holder());
       break;
     }
     default:
@@ -705,7 +715,8 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
   if (!lookup->IsProperty() || !lookup->IsCacheable()) return;
 
   if (lookup->holder() != *object &&
-      HasNormalObjectsInPrototypeChain(lookup, object->GetPrototype())) {
+      HasNormalObjectsInPrototypeChain(
+          isolate(), lookup, object->GetPrototype())) {
     // Suppress optimization for prototype chains with slow properties objects
     // in the middle.
     return;
@@ -720,7 +731,9 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
     // This is the first time we execute this inline cache.
     // Set the target to the pre monomorphic stub to delay
     // setting the monomorphic state.
-    maybe_code = StubCache::ComputeCallPreMonomorphic(argc, in_loop, kind_);
+    maybe_code = isolate()->stub_cache()->ComputeCallPreMonomorphic(argc,
+                                                                    in_loop,
+                                                                    kind_);
   } else if (state == MONOMORPHIC) {
     if (kind_ == Code::CALL_IC &&
         TryUpdateExtraICState(lookup, object, &extra_ic_state)) {
@@ -740,7 +753,9 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
                                           object,
                                           name);
     } else {
-      maybe_code = StubCache::ComputeCallMegamorphic(argc, in_loop, kind_);
+      maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(argc,
+                                                                   in_loop,
+                                                                   kind_);
     }
   } else {
     maybe_code = ComputeMonomorphicStub(lookup,
@@ -768,7 +783,7 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
                               object->GetPrototype())->map();
 
     // Update the stub cache.
-    StubCache::Set(*name, map, Code::cast(code));
+    isolate()->stub_cache()->Set(*name, map, Code::cast(code));
   }
 
   USE(had_proto_failure);
@@ -797,7 +812,7 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
   if (FLAG_use_ic && state != MEGAMORPHIC && !object->IsAccessCheckNeeded()) {
     int argc = target()->arguments_count();
     InLoopFlag in_loop = target()->ic_in_loop();
-    MaybeObject* maybe_code = StubCache::ComputeCallMegamorphic(
+    MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(
         argc, in_loop, Code::KEYED_CALL_IC);
     Object* code;
     if (maybe_code->ToObject(&code)) {
@@ -809,9 +824,9 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
     }
   }
 
-  HandleScope scope;
+  HandleScope scope(isolate());
   Handle<Object> result = GetProperty(object, key);
-  RETURN_IF_EMPTY_HANDLE(result);
+  RETURN_IF_EMPTY_HANDLE(isolate(), result);
 
   // Make receiver an object if the callee requires it. Strict mode or builtin
   // functions do not wrap the receiver, non-strict functions and objects
@@ -852,8 +867,8 @@ MaybeObject* LoadIC::Load(State state,
     // objects is read-only and therefore always returns the length of
     // the underlying string value.  See ECMA-262 15.5.5.1.
     if ((object->IsString() || object->IsStringWrapper()) &&
-        name->Equals(Heap::length_symbol())) {
-      HandleScope scope;
+        name->Equals(isolate()->heap()->length_symbol())) {
+      HandleScope scope(isolate());
 #ifdef DEBUG
       if (FLAG_trace_ic) PrintF("[LoadIC : +#length /string]\n");
 #endif
@@ -862,24 +877,29 @@ MaybeObject* LoadIC::Load(State state,
           Map* map = HeapObject::cast(*object)->map();
           const int offset = String::kLengthOffset;
           PatchInlinedLoad(address(), map, offset);
-          set_target(Builtins::builtin(Builtins::LoadIC_StringLength));
+          set_target(isolate()->builtins()->builtin(
+              Builtins::LoadIC_StringLength));
         } else {
-          set_target(Builtins::builtin(Builtins::LoadIC_StringWrapperLength));
+          set_target(isolate()->builtins()->builtin(
+              Builtins::LoadIC_StringWrapperLength));
         }
       } else if (state == MONOMORPHIC && object->IsStringWrapper()) {
-        set_target(Builtins::builtin(Builtins::LoadIC_StringWrapperLength));
+        set_target(isolate()->builtins()->builtin(
+            Builtins::LoadIC_StringWrapperLength));
       } else {
         set_target(non_monomorphic_stub);
       }
       // Get the string if we have a string wrapper object.
       if (object->IsJSValue()) {
-        object = Handle<Object>(Handle<JSValue>::cast(object)->value());
+        object = Handle<Object>(Handle<JSValue>::cast(object)->value(),
+                                isolate());
       }
       return Smi::FromInt(String::cast(*object)->length());
     }
 
     // Use specialized code for getting the length of arrays.
-    if (object->IsJSArray() && name->Equals(Heap::length_symbol())) {
+    if (object->IsJSArray() &&
+        name->Equals(isolate()->heap()->length_symbol())) {
 #ifdef DEBUG
       if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
 #endif
@@ -887,7 +907,8 @@ MaybeObject* LoadIC::Load(State state,
         Map* map = HeapObject::cast(*object)->map();
         const int offset = JSArray::kLengthOffset;
         PatchInlinedLoad(address(), map, offset);
-        set_target(Builtins::builtin(Builtins::LoadIC_ArrayLength));
+        set_target(isolate()->builtins()->builtin(
+            Builtins::LoadIC_ArrayLength));
       } else {
         set_target(non_monomorphic_stub);
       }
@@ -895,13 +916,15 @@ MaybeObject* LoadIC::Load(State state,
     }
 
     // Use specialized code for getting prototype of functions.
-    if (object->IsJSFunction() && name->Equals(Heap::prototype_symbol()) &&
+    if (object->IsJSFunction() &&
+        name->Equals(isolate()->heap()->prototype_symbol()) &&
         JSFunction::cast(*object)->should_have_prototype()) {
 #ifdef DEBUG
       if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
 #endif
       if (state == PREMONOMORPHIC) {
-        set_target(Builtins::builtin(Builtins::LoadIC_FunctionPrototype));
+        set_target(isolate()->builtins()->builtin(
+            Builtins::LoadIC_FunctionPrototype));
       } else {
         set_target(non_monomorphic_stub);
       }
@@ -923,7 +946,7 @@ MaybeObject* LoadIC::Load(State state,
     if (FLAG_strict || IsContextual(object)) {
       return ReferenceError("not_defined", name);
     }
-    LOG(SuspectReadEvent(*name, *object));
+    LOG(isolate(), SuspectReadEvent(*name, *object));
   }
 
   bool can_be_inlined_precheck =
@@ -974,7 +997,7 @@ MaybeObject* LoadIC::Load(State state,
                                    lookup.IsDontDelete())) {
       set_target(megamorphic_stub());
       TRACE_IC_NAMED("[LoadIC : inline contextual patch %s]\n", name);
-      ASSERT(cell->value() != Heap::the_hole_value());
+      ASSERT(cell->value() != isolate()->heap()->the_hole_value());
       return cell->value();
     }
   } else {
@@ -1021,7 +1044,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
   if (!object->IsJSObject()) return;
   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
 
-  if (HasNormalObjectsInPrototypeChain(lookup, *object)) return;
+  if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;
 
   // Compute the code stub for this load.
   MaybeObject* maybe_code = NULL;
@@ -1033,20 +1056,23 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
     maybe_code = pre_monomorphic_stub();
   } else if (!lookup->IsProperty()) {
     // Nonexistent property. The result is undefined.
-    maybe_code = StubCache::ComputeLoadNonexistent(*name, *receiver);
+    maybe_code = isolate()->stub_cache()->ComputeLoadNonexistent(*name,
+                                                                 *receiver);
   } else {
     // Compute monomorphic stub.
     switch (lookup->type()) {
       case FIELD: {
-        maybe_code = StubCache::ComputeLoadField(*name, *receiver,
-                                                 lookup->holder(),
-                                                 lookup->GetFieldIndex());
+        maybe_code = isolate()->stub_cache()->ComputeLoadField(
+            *name,
+            *receiver,
+            lookup->holder(),
+            lookup->GetFieldIndex());
         break;
       }
       case CONSTANT_FUNCTION: {
         Object* constant = lookup->GetConstantFunction();
-        maybe_code = StubCache::ComputeLoadConstant(*name, *receiver,
-                                                    lookup->holder(), constant);
+        maybe_code = isolate()->stub_cache()->ComputeLoadConstant(
+            *name, *receiver, lookup->holder(), constant);
         break;
       }
       case NORMAL: {
@@ -1054,7 +1080,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
           GlobalObject* global = GlobalObject::cast(lookup->holder());
           JSGlobalPropertyCell* cell =
               JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
-          maybe_code = StubCache::ComputeLoadGlobal(*name,
+          maybe_code = isolate()->stub_cache()->ComputeLoadGlobal(*name,
                                                     *receiver,
                                                     global,
                                                     cell,
@@ -1065,7 +1091,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
           // property must be found in the receiver for the stub to be
           // applicable.
           if (lookup->holder() != *receiver) return;
-          maybe_code = StubCache::ComputeLoadNormal();
+          maybe_code = isolate()->stub_cache()->ComputeLoadNormal();
         }
         break;
       }
@@ -1074,14 +1100,14 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
         AccessorInfo* callback =
             AccessorInfo::cast(lookup->GetCallbackObject());
         if (v8::ToCData<Address>(callback->getter()) == 0) return;
-        maybe_code = StubCache::ComputeLoadCallback(*name, *receiver,
-                                                    lookup->holder(), callback);
+        maybe_code = isolate()->stub_cache()->ComputeLoadCallback(
+            *name, *receiver, lookup->holder(), callback);
         break;
       }
       case INTERCEPTOR: {
         ASSERT(HasInterceptorGetter(lookup->holder()));
-        maybe_code = StubCache::ComputeLoadInterceptor(*name, *receiver,
-                                                       lookup->holder());
+        maybe_code = isolate()->stub_cache()->ComputeLoadInterceptor(
+            *name, *receiver, lookup->holder());
         break;
       }
       default:
@@ -1105,7 +1131,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
     Map* map = JSObject::cast(object->IsJSObject() ? *object :
                               object->GetPrototype())->map();
 
-    StubCache::Set(*name, map, Code::cast(code));
+    isolate()->stub_cache()->Set(*name, map, Code::cast(code));
   }
 
 #ifdef DEBUG
@@ -1130,11 +1156,13 @@ MaybeObject* KeyedLoadIC::Load(State state,
       // TODO(1073): don't ignore the current stub state.
 
       // Use specialized code for getting the length of strings.
-      if (object->IsString() && name->Equals(Heap::length_symbol())) {
+      if (object->IsString() &&
+          name->Equals(isolate()->heap()->length_symbol())) {
         Handle<String> string = Handle<String>::cast(object);
         Object* code = NULL;
         { MaybeObject* maybe_code =
-              StubCache::ComputeKeyedLoadStringLength(*name, *string);
+              isolate()->stub_cache()->ComputeKeyedLoadStringLength(*name,
+                                                                    *string);
           if (!maybe_code->ToObject(&code)) return maybe_code;
         }
         set_target(Code::cast(code));
@@ -1145,11 +1173,13 @@ MaybeObject* KeyedLoadIC::Load(State state,
       }
 
       // Use specialized code for getting the length of arrays.
-      if (object->IsJSArray() && name->Equals(Heap::length_symbol())) {
+      if (object->IsJSArray() &&
+          name->Equals(isolate()->heap()->length_symbol())) {
         Handle<JSArray> array = Handle<JSArray>::cast(object);
         Object* code;
         { MaybeObject* maybe_code =
-              StubCache::ComputeKeyedLoadArrayLength(*name, *array);
+              isolate()->stub_cache()->ComputeKeyedLoadArrayLength(*name,
+                                                                   *array);
           if (!maybe_code->ToObject(&code)) return maybe_code;
         }
         set_target(Code::cast(code));
@@ -1160,12 +1190,14 @@ MaybeObject* KeyedLoadIC::Load(State state,
       }
 
       // Use specialized code for getting prototype of functions.
-      if (object->IsJSFunction() && name->Equals(Heap::prototype_symbol()) &&
+      if (object->IsJSFunction() &&
+          name->Equals(isolate()->heap()->prototype_symbol()) &&
         JSFunction::cast(*object)->should_have_prototype()) {
         Handle<JSFunction> function = Handle<JSFunction>::cast(object);
         Object* code;
         { MaybeObject* maybe_code =
-              StubCache::ComputeKeyedLoadFunctionPrototype(*name, *function);
+              isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype(
+                  *name, *function);
           if (!maybe_code->ToObject(&code)) return maybe_code;
         }
         set_target(Code::cast(code));
@@ -1180,10 +1212,10 @@ MaybeObject* KeyedLoadIC::Load(State state,
     // the element or char if so.
     uint32_t index = 0;
     if (name->AsArrayIndex(&index)) {
-      HandleScope scope;
+      HandleScope scope(isolate());
       // Rewrite to the generic keyed load stub.
       if (FLAG_use_ic) set_target(generic_stub());
-      return Runtime::GetElementOrCharAt(object, index);
+      return Runtime::GetElementOrCharAt(isolate(), object, index);
     }
 
     // Named lookup.
@@ -1233,9 +1265,8 @@ MaybeObject* KeyedLoadIC::Load(State state,
         Handle<JSObject> receiver = Handle<JSObject>::cast(object);
         if (receiver->HasExternalArrayElements()) {
           MaybeObject* probe =
-              StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver,
-                                                              false,
-                                                              kNonStrictMode);
+              isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray(
+                  *receiver, false, kNonStrictMode);
           stub = probe->IsFailure() ?
               NULL : Code::cast(probe->ToObjectUnchecked());
         } else if (receiver->HasIndexedInterceptor()) {
@@ -1243,7 +1274,7 @@ MaybeObject* KeyedLoadIC::Load(State state,
         } else if (key->IsSmi() &&
                    receiver->map()->has_fast_elements()) {
           MaybeObject* probe =
-              StubCache::ComputeKeyedLoadSpecialized(*receiver);
+              isolate()->stub_cache()->ComputeKeyedLoadSpecialized(*receiver);
           stub = probe->IsFailure() ?
               NULL : Code::cast(probe->ToObjectUnchecked());
         }
@@ -1269,7 +1300,7 @@ MaybeObject* KeyedLoadIC::Load(State state,
   }
 
   // Get the property.
-  return Runtime::GetObjectProperty(object, key);
+  return Runtime::GetObjectProperty(isolate(), object, key);
 }
 
 
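In the same spirit, free helpers such as Runtime::GetObjectProperty and Runtime::SetObjectProperty now take the Isolate as an explicit first argument instead of reaching for a global "current" isolate, presumably so hot paths avoid repeated thread-local lookups. A tiny hypothetical sketch of that threading, with invented names:

    // Invented example: the isolate is passed explicitly instead of being
    // fetched from a global "current" pointer inside the helper.
    #include <cstdio>

    struct IsolateSketch { const char* name; };

    static void GetObjectPropertySketch(IsolateSketch* isolate,
                                        const char* key) {
      std::printf("[%s] load property '%s'\n", isolate->name, key);
    }

    int main() {
      IsolateSketch isolate = { "main-isolate" };
      GetObjectPropertySketch(&isolate, "length");
      return 0;
    }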
@@ -1281,7 +1312,7 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup, State state,
   if (!object->IsJSObject()) return;
   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
 
-  if (HasNormalObjectsInPrototypeChain(lookup, *object)) return;
+  if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;
 
   // Compute the code stub for this load.
   MaybeObject* maybe_code = NULL;
@@ -1296,17 +1327,14 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup, State state,
     // Compute a monomorphic stub.
     switch (lookup->type()) {
       case FIELD: {
-        maybe_code = StubCache::ComputeKeyedLoadField(*name, *receiver,
-                                                      lookup->holder(),
-                                                      lookup->GetFieldIndex());
+        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadField(
+            *name, *receiver, lookup->holder(), lookup->GetFieldIndex());
         break;
       }
       case CONSTANT_FUNCTION: {
         Object* constant = lookup->GetConstantFunction();
-        maybe_code = StubCache::ComputeKeyedLoadConstant(*name,
-                                                         *receiver,
-                                                         lookup->holder(),
-                                                         constant);
+        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadConstant(
+            *name, *receiver, lookup->holder(), constant);
         break;
       }
       case CALLBACKS: {
@@ -1314,16 +1342,14 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup, State state,
         AccessorInfo* callback =
             AccessorInfo::cast(lookup->GetCallbackObject());
         if (v8::ToCData<Address>(callback->getter()) == 0) return;
-        maybe_code = StubCache::ComputeKeyedLoadCallback(*name,
-                                                         *receiver,
-                                                         lookup->holder(),
-                                                         callback);
+        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
+            *name, *receiver, lookup->holder(), callback);
         break;
       }
       case INTERCEPTOR: {
         ASSERT(HasInterceptorGetter(lookup->holder()));
-        maybe_code = StubCache::ComputeKeyedLoadInterceptor(*name, *receiver,
-                                                            lookup->holder());
+        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor(
+            *name, *receiver, lookup->holder());
         break;
       }
       default: {
@@ -1399,7 +1425,7 @@ MaybeObject* StoreIC::Store(State state,
   if (!object->IsJSObject()) {
     // The length property of string values is read-only. Throw in strict mode.
     if (strict_mode == kStrictMode && object->IsString() &&
-        name->Equals(Heap::length_symbol())) {
+        name->Equals(isolate()->heap()->length_symbol())) {
       return TypeError("strict_read_only_property", object, name);
     }
     // Ignore stores where the receiver is not a JSObject.
@@ -1411,7 +1437,7 @@ MaybeObject* StoreIC::Store(State state,
   // Check if the given name is an array index.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
-    HandleScope scope;
+    HandleScope scope(isolate());
     Handle<Object> result = SetElement(receiver, index, value, strict_mode);
     if (result.is_null()) return Failure::Exception();
     return *value;
@@ -1419,7 +1445,7 @@ MaybeObject* StoreIC::Store(State state,
 
   // Use specialized code for setting the length of arrays.
   if (receiver->IsJSArray()
-      && name->Equals(Heap::length_symbol())
+      && name->Equals(isolate()->heap()->length_symbol())
       && receiver->AllowsSetElementsLength()) {
 #ifdef DEBUG
     if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
@@ -1427,7 +1453,7 @@ MaybeObject* StoreIC::Store(State state,
     Builtins::Name target = (strict_mode == kStrictMode)
         ? Builtins::StoreIC_ArrayLength_Strict
         : Builtins::StoreIC_ArrayLength;
-    set_target(Builtins::builtin(target));
+    set_target(isolate()->builtins()->builtin(target));
     return receiver->SetProperty(*name, *value, NONE, strict_mode);
   }
 
@@ -1543,17 +1569,17 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
   Object* code = NULL;
   switch (type) {
     case FIELD: {
-      maybe_code = StubCache::ComputeStoreField(
+      maybe_code = isolate()->stub_cache()->ComputeStoreField(
           *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
       break;
     }
     case MAP_TRANSITION: {
       if (lookup->GetAttributes() != NONE) return;
-      HandleScope scope;
+      HandleScope scope(isolate());
       ASSERT(type == MAP_TRANSITION);
       Handle<Map> transition(lookup->GetTransitionMap());
       int index = transition->PropertyIndexFor(*name);
-      maybe_code = StubCache::ComputeStoreField(
+      maybe_code = isolate()->stub_cache()->ComputeStoreField(
           *name, *receiver, index, *transition, strict_mode);
       break;
     }
@@ -1565,11 +1591,11 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
         Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
         JSGlobalPropertyCell* cell =
             JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
-        maybe_code = StubCache::ComputeStoreGlobal(
+        maybe_code = isolate()->stub_cache()->ComputeStoreGlobal(
             *name, *global, cell, strict_mode);
       } else {
         if (lookup->holder() != *receiver) return;
-        maybe_code = StubCache::ComputeStoreNormal(strict_mode);
+        maybe_code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
       }
       break;
     }
@@ -1577,13 +1603,13 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
       if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
       AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
       if (v8::ToCData<Address>(callback->setter()) == 0) return;
-      maybe_code = StubCache::ComputeStoreCallback(
+      maybe_code = isolate()->stub_cache()->ComputeStoreCallback(
           *name, *receiver, callback, strict_mode);
       break;
     }
     case INTERCEPTOR: {
       ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
-      maybe_code = StubCache::ComputeStoreInterceptor(
+      maybe_code = isolate()->stub_cache()->ComputeStoreInterceptor(
           *name, *receiver, strict_mode);
       break;
     }
@@ -1607,7 +1633,9 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
     }
   } else if (state == MEGAMORPHIC) {
     // Update the stub cache.
-    StubCache::Set(*name, receiver->map(), Code::cast(code));
+    isolate()->stub_cache()->Set(*name,
+                                 receiver->map(),
+                                 Code::cast(code));
   }
 
 #ifdef DEBUG
@@ -1637,7 +1665,7 @@ MaybeObject* KeyedStoreIC::Store(State state,
     // Check if the given name is an array index.
     uint32_t index;
     if (name->AsArrayIndex(&index)) {
-      HandleScope scope;
+      HandleScope scope(isolate());
       Handle<Object> result = SetElement(receiver, index, value, strict_mode);
       if (result.is_null()) return Failure::Exception();
       return *value;
@@ -1669,13 +1697,14 @@ MaybeObject* KeyedStoreIC::Store(State state,
         Handle<JSObject> receiver = Handle<JSObject>::cast(object);
         if (receiver->HasExternalArrayElements()) {
           MaybeObject* probe =
-              StubCache::ComputeKeyedLoadOrStoreExternalArray(
+              isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray(
                   *receiver, true, strict_mode);
           stub = probe->IsFailure() ?
               NULL : Code::cast(probe->ToObjectUnchecked());
         } else if (key->IsSmi() && receiver->map()->has_fast_elements()) {
           MaybeObject* probe =
-              StubCache::ComputeKeyedStoreSpecialized(*receiver, strict_mode);
+              isolate()->stub_cache()->ComputeKeyedStoreSpecialized(
+                  *receiver, strict_mode);
           stub = probe->IsFailure() ?
               NULL : Code::cast(probe->ToObjectUnchecked());
         }
@@ -1685,7 +1714,8 @@ MaybeObject* KeyedStoreIC::Store(State state,
   }
 
   // Set the property.
-  return Runtime::SetObjectProperty(object, key, value, NONE, strict_mode);
+  return Runtime::SetObjectProperty(
+      isolate(), object, key, value, NONE, strict_mode);
 }
 
 
@@ -1718,17 +1748,17 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
 
   switch (type) {
     case FIELD: {
-      maybe_code = StubCache::ComputeKeyedStoreField(
+      maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField(
           *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
       break;
     }
     case MAP_TRANSITION: {
       if (lookup->GetAttributes() == NONE) {
-        HandleScope scope;
+        HandleScope scope(isolate());
         ASSERT(type == MAP_TRANSITION);
         Handle<Map> transition(lookup->GetTransitionMap());
         int index = transition->PropertyIndexFor(*name);
-        maybe_code = StubCache::ComputeKeyedStoreField(
+        maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField(
             *name, *receiver, index, *transition, strict_mode);
         break;
       }
@@ -1769,11 +1799,12 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
 // Static IC stub generators.
 //
 
-static JSFunction* CompileFunction(JSFunction* function,
+static JSFunction* CompileFunction(Isolate* isolate,
+                                   JSFunction* function,
                                    InLoopFlag in_loop) {
   // Compile now with optimization.
-  HandleScope scope;
-  Handle<JSFunction> function_handle(function);
+  HandleScope scope(isolate);
+  Handle<JSFunction> function_handle(function, isolate);
   if (in_loop == IN_LOOP) {
     CompileLazyInLoop(function_handle, CLEAR_EXCEPTION);
   } else {
@@ -1784,10 +1815,11 @@ static JSFunction* CompileFunction(JSFunction* function,
 
 
 // Used from ic-<arch>.cc.
-MUST_USE_RESULT MaybeObject* CallIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* CallIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
-  CallIC ic;
+  CallIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
   MaybeObject* maybe_result = ic.LoadFunction(state,
@@ -1807,15 +1839,18 @@ MUST_USE_RESULT MaybeObject* CallIC_Miss(Arguments args) {
   if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) {
     return result;
   }
-  return CompileFunction(JSFunction::cast(result), ic.target()->ic_in_loop());
+  return CompileFunction(isolate,
+                         JSFunction::cast(result),
+                         ic.target()->ic_in_loop());
 }
 
 
 // Used from ic-<arch>.cc.
-MUST_USE_RESULT MaybeObject* KeyedCallIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* KeyedCallIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
-  KeyedCallIC ic;
+  KeyedCallIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   Object* result;
   { MaybeObject* maybe_result =
@@ -1826,35 +1861,40 @@ MUST_USE_RESULT MaybeObject* KeyedCallIC_Miss(Arguments args) {
   if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) {
     return result;
   }
-  return CompileFunction(JSFunction::cast(result), ic.target()->ic_in_loop());
+  return CompileFunction(isolate,
+                         JSFunction::cast(result),
+                         ic.target()->ic_in_loop());
 }
 
 
 // Used from ic-<arch>.cc.
-MUST_USE_RESULT MaybeObject* LoadIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* LoadIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
-  LoadIC ic;
+  LoadIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   return ic.Load(state, args.at<Object>(0), args.at<String>(1));
 }
 
 
 // Used from ic-<arch>.cc
-MUST_USE_RESULT MaybeObject* KeyedLoadIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* KeyedLoadIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
-  KeyedLoadIC ic;
+  KeyedLoadIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   return ic.Load(state, args.at<Object>(0), args.at<Object>(1));
 }
 
 
 // Used from ic-<arch>.cc.
-MUST_USE_RESULT MaybeObject* StoreIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* StoreIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 3);
-  StoreIC ic;
+  StoreIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
   return ic.Store(state,
@@ -1865,7 +1905,8 @@ MUST_USE_RESULT MaybeObject* StoreIC_Miss(Arguments args) {
 }
 
 
-MUST_USE_RESULT MaybeObject* StoreIC_ArrayLength(Arguments args) {
+MUST_USE_RESULT MaybeObject* StoreIC_ArrayLength(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation nha;
 
   ASSERT(args.length() == 2);
@@ -1886,7 +1927,9 @@ MUST_USE_RESULT MaybeObject* StoreIC_ArrayLength(Arguments args) {
 // Extend storage is called in a store inline cache when
 // it is necessary to extend the properties array of a
 // JSObject.
-MUST_USE_RESULT MaybeObject* SharedStoreIC_ExtendStorage(Arguments args) {
+MUST_USE_RESULT MaybeObject* SharedStoreIC_ExtendStorage(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 3);
 
@@ -1920,10 +1963,11 @@ MUST_USE_RESULT MaybeObject* SharedStoreIC_ExtendStorage(Arguments args) {
 
 
 // Used from ic-<arch>.cc.
-MUST_USE_RESULT MaybeObject* KeyedStoreIC_Miss(Arguments args) {
+MUST_USE_RESULT MaybeObject* KeyedStoreIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 3);
-  KeyedStoreIC ic;
+  KeyedStoreIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
   Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
   return ic.Store(state,
@@ -1993,10 +2037,11 @@ BinaryOpIC::TypeInfo BinaryOpIC::GetTypeInfo(Object* left,
 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info);
 
 
-MUST_USE_RESULT MaybeObject* BinaryOp_Patch(Arguments args) {
+MUST_USE_RESULT MaybeObject* BinaryOp_Patch(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 5);
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<Object> left = args.at<Object>(0);
   Handle<Object> right = args.at<Object>(1);
   int key = Smi::cast(args[2])->value();
@@ -2007,7 +2052,7 @@ MUST_USE_RESULT MaybeObject* BinaryOp_Patch(Arguments args) {
   BinaryOpIC::TypeInfo type = BinaryOpIC::GetTypeInfo(*left, *right);
   Handle<Code> code = GetBinaryOpStub(key, type);
   if (!code.is_null()) {
-    BinaryOpIC ic;
+    BinaryOpIC ic(isolate);
     ic.patch(*code);
     if (FLAG_trace_ic) {
       PrintF("[BinaryOpIC (%s->%s)#%s]\n",
@@ -2017,7 +2062,8 @@ MUST_USE_RESULT MaybeObject* BinaryOp_Patch(Arguments args) {
     }
   }
 
-  Handle<JSBuiltinsObject> builtins = Top::builtins();
+  Handle<JSBuiltinsObject> builtins = Handle<JSBuiltinsObject>(
+      isolate->thread_local_top()->context_->builtins(), isolate);
   Object* builtin = NULL;  // Initialization calms down the compiler.
   switch (op) {
     case Token::ADD:
@@ -2057,7 +2103,8 @@ MUST_USE_RESULT MaybeObject* BinaryOp_Patch(Arguments args) {
       UNREACHABLE();
   }
 
-  Handle<JSFunction> builtin_function(JSFunction::cast(builtin));
+  Handle<JSFunction> builtin_function(JSFunction::cast(builtin),
+                                      isolate);
 
   bool caught_exception;
   Object** builtin_args[] = { right.location() };
@@ -2156,10 +2203,11 @@ Handle<Code> GetTypeRecordingBinaryOpStub(int key,
                                           TRBinaryOpIC::TypeInfo result_type);
 
 
-MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
+MaybeObject* TypeRecordingBinaryOp_Patch(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 5);
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<Object> left = args.at<Object>(0);
   Handle<Object> right = args.at<Object>(1);
   int key = Smi::cast(args[2])->value();
@@ -2201,7 +2249,7 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
              TRBinaryOpIC::GetName(result_type),
              Token::Name(op));
     }
-    TRBinaryOpIC ic;
+    TRBinaryOpIC ic(isolate);
     ic.patch(*code);
 
     // Activate inlined smi code.
@@ -2210,7 +2258,8 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
     }
   }
 
-  Handle<JSBuiltinsObject> builtins = Top::builtins();
+  Handle<JSBuiltinsObject> builtins = Handle<JSBuiltinsObject>(
+      isolate->thread_local_top()->context_->builtins(), isolate);
   Object* builtin = NULL;  // Initialization calms down the compiler.
   switch (op) {
     case Token::ADD:
@@ -2250,7 +2299,7 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
       UNREACHABLE();
   }
 
-  Handle<JSFunction> builtin_function(JSFunction::cast(builtin));
+  Handle<JSFunction> builtin_function(JSFunction::cast(builtin), isolate);
 
   bool caught_exception;
   Object** builtin_args[] = { right.location() };
@@ -2310,16 +2359,17 @@ CompareIC::State CompareIC::TargetState(State state,
 
 
 // Used from ic_<arch>.cc.
-Code* CompareIC_Miss(Arguments args) {
+Code* CompareIC_Miss(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 3);
-  CompareIC ic(static_cast<Token::Value>(Smi::cast(args[2])->value()));
+  CompareIC ic(isolate, static_cast<Token::Value>(Smi::cast(args[2])->value()));
   ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1));
   return ic.target();
 }
 
 
-static Address IC_utilities[] = {
+static const Address IC_utilities[] = {
 #define ADDR(name) FUNCTION_ADDR(name),
     IC_UTIL_LIST(ADDR)
     NULL
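The *_Miss and *_Patch entry points above switch from a plain Arguments parameter to the RUNTIME_CALLING_CONVENTION / RUNTIME_GET_ISOLATE macro pair, which puts the current Isolate in scope for the handler body. The actual macro definitions are introduced elsewhere in this commit; the sketch below only guesses at the general shape, using invented *_SKETCH names so as not to overstate it:

    // Sketch only: the *_SKETCH macros are invented stand-ins, not the actual
    // V8 definitions.
    #include <cassert>
    #include <cstdio>

    struct IsolateSketch { int id; };
    struct ArgumentsSketch { int length; };

    // One plausible expansion: pass the isolate explicitly next to the args.
    #define RUNTIME_CALLING_CONVENTION_SKETCH \
        ArgumentsSketch args, IsolateSketch* isolate
    #define RUNTIME_GET_ISOLATE_SKETCH assert(isolate != 0)

    static int CallIC_Miss_Sketch(RUNTIME_CALLING_CONVENTION_SKETCH) {
      RUNTIME_GET_ISOLATE_SKETCH;
      std::printf("IC miss: %d args in isolate %d\n",
                  args.length, isolate->id);
      return 0;
    }

    int main() {
      IsolateSketch isolate = { 7 };
      ArgumentsSketch args = { 2 };
      return CallIC_Miss_Sketch(args, &isolate);
    }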
index e12cbaf8ef26d5c3b128a3c80d7f2d65cb0fa8b5..91a626f3a505c819b7b80e30cb745655e8bab2d4 100644 (file)
--- a/src/ic.h
+++ b/src/ic.h
@@ -86,7 +86,7 @@ class IC {
 
   // Construct the IC structure with the given number of extra
   // JavaScript frames on the stack.
-  explicit IC(FrameDepth depth);
+  IC(FrameDepth depth, Isolate* isolate);
 
   // Get the call-site target; used for determining the state.
   Code* target() { return GetTargetAtAddress(address()); }
@@ -130,6 +130,7 @@ class IC {
  protected:
   Address fp() const { return fp_; }
   Address pc() const { return *pc_address_; }
+  Isolate* isolate() const { return isolate_; }
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Computes the address in the original code when the code running is
@@ -148,10 +149,10 @@ class IC {
                       const char* extra_info = "");
 #endif
 
-  static Failure* TypeError(const char* type,
-                            Handle<Object> object,
-                            Handle<Object> key);
-  static Failure* ReferenceError(const char* type, Handle<String> name);
+  Failure* TypeError(const char* type,
+                     Handle<Object> object,
+                     Handle<Object> key);
+  Failure* ReferenceError(const char* type, Handle<String> name);
 
   // Access the target code for the given IC address.
   static inline Code* GetTargetAtAddress(Address address);
@@ -167,6 +168,8 @@ class IC {
   // invoke the garbage collector.
   Address* pc_address_;
 
+  Isolate* isolate_;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(IC);
 };
 
@@ -189,7 +192,8 @@ class IC_Utility {
 
 class CallICBase: public IC {
  protected:
-  explicit CallICBase(Code::Kind kind) : IC(EXTRA_CALL_FRAME), kind_(kind) {}
+  CallICBase(Code::Kind kind, Isolate* isolate)
+      : IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {}
 
  public:
   MUST_USE_RESULT MaybeObject* LoadFunction(State state,
@@ -233,7 +237,9 @@ class CallICBase: public IC {
 
 class CallIC: public CallICBase {
  public:
-  CallIC() : CallICBase(Code::CALL_IC) { ASSERT(target()->is_call_stub()); }
+  explicit CallIC(Isolate* isolate) : CallICBase(Code::CALL_IC, isolate) {
+    ASSERT(target()->is_call_stub());
+  }
 
   // Code generator routines.
   static void GenerateInitialize(MacroAssembler* masm, int argc) {
@@ -247,7 +253,8 @@ class CallIC: public CallICBase {
 
 class KeyedCallIC: public CallICBase {
  public:
-  KeyedCallIC() : CallICBase(Code::KEYED_CALL_IC) {
+  explicit KeyedCallIC(Isolate* isolate)
+      : CallICBase(Code::KEYED_CALL_IC, isolate) {
     ASSERT(target()->is_keyed_call_stub());
   }
 
@@ -267,7 +274,9 @@ class KeyedCallIC: public CallICBase {
 
 class LoadIC: public IC {
  public:
-  LoadIC() : IC(NO_EXTRA_FRAME) { ASSERT(target()->is_load_stub()); }
+  explicit LoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {
+    ASSERT(target()->is_load_stub());
+  }
 
   MUST_USE_RESULT MaybeObject* Load(State state,
                                     Handle<Object> object,
@@ -305,14 +314,17 @@ class LoadIC: public IC {
                     Handle<String> name);
 
   // Stub accessors.
-  static Code* megamorphic_stub() {
-    return Builtins::builtin(Builtins::LoadIC_Megamorphic);
+  Code* megamorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::LoadIC_Megamorphic);
   }
   static Code* initialize_stub() {
-    return Builtins::builtin(Builtins::LoadIC_Initialize);
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::LoadIC_Initialize);
   }
-  static Code* pre_monomorphic_stub() {
-    return Builtins::builtin(Builtins::LoadIC_PreMonomorphic);
+  Code* pre_monomorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::LoadIC_PreMonomorphic);
   }
 
   static void Clear(Address address, Code* target);
@@ -330,7 +342,9 @@ class LoadIC: public IC {
 
 class KeyedLoadIC: public IC {
  public:
-  KeyedLoadIC() : IC(NO_EXTRA_FRAME) { ASSERT(target()->is_keyed_load_stub()); }
+  explicit KeyedLoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {
+    ASSERT(target()->is_keyed_load_stub());
+  }
 
   MUST_USE_RESULT MaybeObject* Load(State state,
                                     Handle<Object> object,
@@ -367,23 +381,29 @@ class KeyedLoadIC: public IC {
 
   // Stub accessors.
   static Code* initialize_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_Initialize);
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize);
   }
-  static Code* megamorphic_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_Generic);
+  Code* megamorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Generic);
   }
-  static Code* generic_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_Generic);
+  Code* generic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Generic);
   }
-  static Code* pre_monomorphic_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic);
+  Code* pre_monomorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_PreMonomorphic);
   }
-  static Code* string_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_String);
+  Code* string_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_String);
   }
 
-  static Code* indexed_interceptor_stub() {
-    return Builtins::builtin(Builtins::KeyedLoadIC_IndexedInterceptor);
+  Code* indexed_interceptor_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_IndexedInterceptor);
   }
 
   static void Clear(Address address, Code* target);
@@ -398,7 +418,9 @@ class KeyedLoadIC: public IC {
 
 class StoreIC: public IC {
  public:
-  StoreIC() : IC(NO_EXTRA_FRAME) { ASSERT(target()->is_store_stub()); }
+  explicit StoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {
+    ASSERT(target()->is_store_stub());
+  }
 
   MUST_USE_RESULT MaybeObject* Store(State state,
                                      StrictModeFlag strict_mode,
@@ -441,23 +463,29 @@ class StoreIC: public IC {
   }
 
   // Stub accessors.
-  static Code* megamorphic_stub() {
-    return Builtins::builtin(Builtins::StoreIC_Megamorphic);
+  Code* megamorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::StoreIC_Megamorphic);
   }
-  static Code* megamorphic_stub_strict() {
-    return Builtins::builtin(Builtins::StoreIC_Megamorphic_Strict);
+  Code* megamorphic_stub_strict() {
+    return isolate()->builtins()->builtin(
+        Builtins::StoreIC_Megamorphic_Strict);
   }
   static Code* initialize_stub() {
-    return Builtins::builtin(Builtins::StoreIC_Initialize);
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::StoreIC_Initialize);
   }
   static Code* initialize_stub_strict() {
-    return Builtins::builtin(Builtins::StoreIC_Initialize_Strict);
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::StoreIC_Initialize_Strict);
   }
-  static Code* global_proxy_stub() {
-    return Builtins::builtin(Builtins::StoreIC_GlobalProxy);
+  Code* global_proxy_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::StoreIC_GlobalProxy);
   }
-  static Code* global_proxy_stub_strict() {
-    return Builtins::builtin(Builtins::StoreIC_GlobalProxy_Strict);
+  Code* global_proxy_stub_strict() {
+    return isolate()->builtins()->builtin(
+        Builtins::StoreIC_GlobalProxy_Strict);
   }
 
   static void Clear(Address address, Code* target);
@@ -472,7 +500,7 @@ class StoreIC: public IC {
 
 class KeyedStoreIC: public IC {
  public:
-  KeyedStoreIC() : IC(NO_EXTRA_FRAME) { }
+  explicit KeyedStoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
 
   MUST_USE_RESULT MaybeObject* Store(State state,
                                      StrictModeFlag strict_mode,
@@ -511,22 +539,28 @@ class KeyedStoreIC: public IC {
 
   // Stub accessors.
   static Code* initialize_stub() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Initialize);
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Initialize);
   }
-  static Code* initialize_stub_strict() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Initialize_Strict);
+  Code* megamorphic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Generic);
   }
-  static Code* megamorphic_stub() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
+  static Code* initialize_stub_strict() {
+    return Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Initialize_Strict);
   }
-  static Code* megamorphic_stub_strict() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Generic_Strict);
+  Code* megamorphic_stub_strict() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Generic_Strict);
   }
-  static Code* generic_stub() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
+  Code* generic_stub() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Generic);
   }
-  static Code* generic_stub_strict() {
-    return Builtins::builtin(Builtins::KeyedStoreIC_Generic_Strict);
+  Code* generic_stub_strict() {
+    return isolate()->builtins()->builtin(
+        Builtins::KeyedStoreIC_Generic_Strict);
   }
 
   static void Clear(Address address, Code* target);
@@ -555,7 +589,7 @@ class BinaryOpIC: public IC {
     GENERIC   // Non-specialized case (processes any type combination).
   };
 
-  BinaryOpIC() : IC(NO_EXTRA_FRAME) { }
+  explicit BinaryOpIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
 
   void patch(Code* code);
 
@@ -580,7 +614,7 @@ class TRBinaryOpIC: public IC {
     GENERIC
   };
 
-  TRBinaryOpIC() : IC(NO_EXTRA_FRAME) { }
+  explicit TRBinaryOpIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
 
   void patch(Code* code);
 
@@ -604,7 +638,8 @@ class CompareIC: public IC {
     GENERIC
   };
 
-  explicit CompareIC(Token::Value op) : IC(EXTRA_CALL_FRAME), op_(op) { }
+  CompareIC(Isolate* isolate, Token::Value op)
+      : IC(EXTRA_CALL_FRAME, isolate), op_(op) { }
 
   // Update the inline cache for the given operands.
   void UpdateCaches(Handle<Object> x, Handle<Object> y);
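On the header side, every IC subclass constructor now takes an Isolate* and forwards it to the IC base class, and most stub accessors turn from static functions into instance methods that go through isolate()->builtins(); only the initialize_stub() variants remain static and fall back to Isolate::Current(). A compact sketch of the constructor-injection shape, with invented names standing in for the real hierarchy:

    // Invented classes illustrating constructor injection of the isolate.
    #include <cstdio>

    struct IsolateSketch { int id; };

    class ICSketch {
     public:
      explicit ICSketch(IsolateSketch* isolate) : isolate_(isolate) {}
     protected:
      IsolateSketch* isolate() const { return isolate_; }
     private:
      IsolateSketch* isolate_;
    };

    class LoadICSketch : public ICSketch {
     public:
      explicit LoadICSketch(IsolateSketch* isolate) : ICSketch(isolate) {}
      void Load() { std::printf("load in isolate %d\n", isolate()->id); }
    };

    int main() {
      IsolateSketch isolate = { 3 };
      LoadICSketch ic(&isolate);
      ic.Load();
      return 0;
    }

The protected isolate() accessor added to IC in the hunk above plays the same role as the accessor in this sketch.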
index c9c3cc4c0eb712837d3cd40893851e3c2751c2e1..1c6c52ca89711049d13f8ff3c8285a1166b12ab1 100644 (file)
--- a/src/interpreter-irregexp.cc
+++ b/src/interpreter-irregexp.cc
@@ -40,10 +40,10 @@ namespace v8 {
 namespace internal {
 
 
-static unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize;
+typedef unibrow::Mapping<unibrow::Ecma262Canonicalize> Canonicalize;
 
-
-static bool BackRefMatchesNoCase(int from,
+static bool BackRefMatchesNoCase(Canonicalize* interp_canonicalize,
+                                 int from,
                                  int current,
                                  int len,
                                  Vector<const uc16> subject) {
@@ -53,8 +53,8 @@ static bool BackRefMatchesNoCase(int from,
     if (old_char == new_char) continue;
     unibrow::uchar old_string[1] = { old_char };
     unibrow::uchar new_string[1] = { new_char };
-    interp_canonicalize.get(old_char, '\0', old_string);
-    interp_canonicalize.get(new_char, '\0', new_string);
+    interp_canonicalize->get(old_char, '\0', old_string);
+    interp_canonicalize->get(new_char, '\0', new_string);
     if (old_string[0] != new_string[0]) {
       return false;
     }
@@ -63,7 +63,8 @@ static bool BackRefMatchesNoCase(int from,
 }
 
 
-static bool BackRefMatchesNoCase(int from,
+static bool BackRefMatchesNoCase(Canonicalize* interp_canonicalize,
+                                 int from,
                                  int current,
                                  int len,
                                  Vector<const char> subject) {
@@ -150,11 +151,11 @@ static int32_t Load16Aligned(const byte* pc) {
 // matching terminates.
 class BacktrackStack {
  public:
-  explicit BacktrackStack() {
-    if (cache_ != NULL) {
+  explicit BacktrackStack(Isolate* isolate) : isolate_(isolate) {
+    if (isolate->irregexp_interpreter_backtrack_stack_cache() != NULL) {
       // If the cache is not empty reuse the previously allocated stack.
-      data_ = cache_;
-      cache_ = NULL;
+      data_ = isolate->irregexp_interpreter_backtrack_stack_cache();
+      isolate->set_irregexp_interpreter_backtrack_stack_cache(NULL);
     } else {
       // Cache was empty. Allocate a new backtrack stack.
       data_ = NewArray<int>(kBacktrackStackSize);
@@ -162,9 +163,9 @@ class BacktrackStack {
   }
 
   ~BacktrackStack() {
-    if (cache_ == NULL) {
+    if (isolate_->irregexp_interpreter_backtrack_stack_cache() == NULL) {
       // The cache is empty. Keep this backtrack stack around.
-      cache_ = data_;
+      isolate_->set_irregexp_interpreter_backtrack_stack_cache(data_);
     } else {
       // A backtrack stack was already cached, just release this one.
       DeleteArray(data_);
@@ -179,16 +180,15 @@ class BacktrackStack {
   static const int kBacktrackStackSize = 10000;
 
   int* data_;
-  static int* cache_;
+  Isolate* isolate_;
 
   DISALLOW_COPY_AND_ASSIGN(BacktrackStack);
 };
 
-int* BacktrackStack::cache_ = NULL;
-
 
 template <typename Char>
-static bool RawMatch(const byte* code_base,
+static bool RawMatch(Isolate* isolate,
+                     const byte* code_base,
                      Vector<const Char> subject,
                      int* registers,
                      int current,
@@ -197,7 +197,7 @@ static bool RawMatch(const byte* code_base,
   // BacktrackStack ensures that the memory allocated for the backtracking stack
   // is returned to the system or cached if there is no stack being cached at
   // the moment.
-  BacktrackStack backtrack_stack;
+  BacktrackStack backtrack_stack(isolate);
   int* backtrack_stack_base = backtrack_stack.data();
   int* backtrack_sp = backtrack_stack_base;
   int backtrack_stack_space = backtrack_stack.max_size();
@@ -584,7 +584,8 @@ static bool RawMatch(const byte* code_base,
           pc = code_base + Load32Aligned(pc + 4);
           break;
         } else {
-          if (BackRefMatchesNoCase(from, current, len, subject)) {
+          if (BackRefMatchesNoCase(isolate->interp_canonicalize_mapping(),
+                                   from, current, len, subject)) {
             current += len;
             pc += BC_CHECK_NOT_BACK_REF_NO_CASE_LENGTH;
           } else {
@@ -624,7 +625,8 @@ static bool RawMatch(const byte* code_base,
 }
 
 
-bool IrregexpInterpreter::Match(Handle<ByteArray> code_array,
+bool IrregexpInterpreter::Match(Isolate* isolate,
+                                Handle<ByteArray> code_array,
                                 Handle<String> subject,
                                 int* registers,
                                 int start_position) {
@@ -636,7 +638,8 @@ bool IrregexpInterpreter::Match(Handle<ByteArray> code_array,
   if (subject->IsAsciiRepresentation()) {
     Vector<const char> subject_vector = subject->ToAsciiVector();
     if (start_position != 0) previous_char = subject_vector[start_position - 1];
-    return RawMatch(code_base,
+    return RawMatch(isolate,
+                    code_base,
                     subject_vector,
                     registers,
                     start_position,
@@ -644,7 +647,8 @@ bool IrregexpInterpreter::Match(Handle<ByteArray> code_array,
   } else {
     Vector<const uc16> subject_vector = subject->ToUC16Vector();
     if (start_position != 0) previous_char = subject_vector[start_position - 1];
-    return RawMatch(code_base,
+    return RawMatch(isolate,
+                    code_base,
                     subject_vector,
                     registers,
                     start_position,
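The BacktrackStack change above trades a static cache member, shared by the whole process, for a buffer cached on the owning Isolate, so independent isolates no longer contend for (or clobber) the same scratch stack. A minimal sketch of the idea with invented names:

    // Invented illustration of moving a reusable scratch buffer from a static
    // member into the owning isolate.
    #include <cstdio>

    struct IsolateSketch {
      int* cached_stack = nullptr;  // previously a static member of the stack
    };

    class BacktrackStackSketch {
     public:
      explicit BacktrackStackSketch(IsolateSketch* isolate) : isolate_(isolate) {
        if (isolate_->cached_stack != nullptr) {
          data_ = isolate_->cached_stack;  // reuse the isolate's cached buffer
          isolate_->cached_stack = nullptr;
        } else {
          data_ = new int[kSize];          // otherwise allocate a fresh one
        }
      }
      ~BacktrackStackSketch() {
        if (isolate_->cached_stack == nullptr) {
          isolate_->cached_stack = data_;  // hand the buffer back to the isolate
        } else {
          delete[] data_;
        }
      }
     private:
      static const int kSize = 16;
      int* data_;
      IsolateSketch* isolate_;
    };

    int main() {
      IsolateSketch isolate;
      { BacktrackStackSketch a(&isolate); }  // allocates, caches on destruction
      { BacktrackStackSketch b(&isolate); }  // reuses the cached buffer
      std::printf("cached buffer still held by isolate: %s\n",
                  isolate.cached_stack != nullptr ? "yes" : "no");
      delete[] isolate.cached_stack;         // cleanup for the sketch
      return 0;
    }

The real code keeps the same reuse-or-allocate behaviour but stores the cached buffer via isolate->irregexp_interpreter_backtrack_stack_cache(), as the hunks above show.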
index 0ad8846d79d45b52744bc2a255ca7b0bd8fca6f2..076f0c5081a4d64b94e99fba02378b85f0f9cc50 100644 (file)
--- a/src/interpreter-irregexp.h
+++ b/src/interpreter-irregexp.h
@@ -36,7 +36,8 @@ namespace internal {
 
 class IrregexpInterpreter {
  public:
-  static bool Match(Handle<ByteArray> code,
+  static bool Match(Isolate* isolate,
+                    Handle<ByteArray> code,
                     Handle<String> subject,
                     int* captures,
                     int start_position);
diff --git a/src/isolate.cc b/src/isolate.cc
new file mode 100644 (file)
index 0000000..846debf
--- /dev/null
+++ b/src/isolate.cc
@@ -0,0 +1,825 @@
+// Copyright 2006-2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <stdlib.h>
+
+#include "v8.h"
+
+#include "ast.h"
+#include "bootstrapper.h"
+#include "codegen.h"
+#include "compilation-cache.h"
+#include "debug.h"
+#include "deoptimizer.h"
+#include "heap-profiler.h"
+#include "hydrogen.h"
+#include "isolate.h"
+#include "lithium-allocator.h"
+#include "log.h"
+#include "regexp-stack.h"
+#include "runtime-profiler.h"
+#include "scanner.h"
+#include "scopeinfo.h"
+#include "serialize.h"
+#include "simulator.h"
+#include "spaces.h"
+#include "stub-cache.h"
+#include "version.h"
+
+
+namespace v8 {
+namespace internal {
+
+
+// Create a dummy thread that will wait forever on a semaphore. The only
+// purpose for this thread is to have some stack area to save essential data
+// into for use by a stacks-only core dump (aka minidump).
+class PreallocatedMemoryThread: public Thread {
+ public:
+  char* data() {
+    if (data_ready_semaphore_ != NULL) {
+      // Initial access is guarded until the data has been published.
+      data_ready_semaphore_->Wait();
+      delete data_ready_semaphore_;
+      data_ready_semaphore_ = NULL;
+    }
+    return data_;
+  }
+
+  unsigned length() {
+    if (data_ready_semaphore_ != NULL) {
+      // Initial access is guarded until the data has been published.
+      data_ready_semaphore_->Wait();
+      delete data_ready_semaphore_;
+      data_ready_semaphore_ = NULL;
+    }
+    return length_;
+  }
+
+  // Stop the PreallocatedMemoryThread and release its resources.
+  void StopThread() {
+    keep_running_ = false;
+    wait_for_ever_semaphore_->Signal();
+
+    // Wait for the thread to terminate.
+    Join();
+
+    if (data_ready_semaphore_ != NULL) {
+      delete data_ready_semaphore_;
+      data_ready_semaphore_ = NULL;
+    }
+
+    delete wait_for_ever_semaphore_;
+    wait_for_ever_semaphore_ = NULL;
+  }
+
+ protected:
+  // When the thread starts running it will allocate a fixed number of bytes
+  // on the stack and publish the location of this memory for others to use.
+  void Run() {
+    EmbeddedVector<char, 15 * 1024> local_buffer;
+
+    // Initialize the buffer with a known good value.
+    OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
+                local_buffer.length());
+
+    // Publish the local buffer and signal its availability.
+    data_ = local_buffer.start();
+    length_ = local_buffer.length();
+    data_ready_semaphore_->Signal();
+
+    while (keep_running_) {
+      // This thread will wait here until the end of time.
+      wait_for_ever_semaphore_->Wait();
+    }
+
+    // Make sure we access the buffer after the wait to remove all possibility
+    // of it being optimized away.
+    OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
+                local_buffer.length());
+  }
+
+
+ private:
+  explicit PreallocatedMemoryThread(Isolate* isolate)
+      : Thread(isolate, "v8:PreallocMem"),
+        keep_running_(true),
+        wait_for_ever_semaphore_(OS::CreateSemaphore(0)),
+        data_ready_semaphore_(OS::CreateSemaphore(0)),
+        data_(NULL),
+        length_(0) {
+  }
+
+  // Used to make sure that the thread keeps looping even for spurious wakeups.
+  bool keep_running_;
+
+  // This semaphore is used by the PreallocatedMemoryThread to wait forever.
+  Semaphore* wait_for_ever_semaphore_;
+  // Semaphore to signal that the data has been initialized.
+  Semaphore* data_ready_semaphore_;
+
+  // Location and size of the preallocated memory block.
+  char* data_;
+  unsigned length_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
+};
+
+
+void Isolate::PreallocatedMemoryThreadStart() {
+  if (preallocated_memory_thread_ != NULL) return;
+  preallocated_memory_thread_ = new PreallocatedMemoryThread(this);
+  preallocated_memory_thread_->Start();
+}
+
+
+void Isolate::PreallocatedMemoryThreadStop() {
+  if (preallocated_memory_thread_ == NULL) return;
+  preallocated_memory_thread_->StopThread();
+  // Done with the thread entirely.
+  delete preallocated_memory_thread_;
+  preallocated_memory_thread_ = NULL;
+}
+
+
+Isolate* Isolate::default_isolate_ = NULL;
+Thread::LocalStorageKey Isolate::isolate_key_;
+Thread::LocalStorageKey Isolate::thread_id_key_;
+Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
+Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex();
+Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
+Isolate::ThreadId Isolate::highest_thread_id_ = 0;
+
+
+class IsolateInitializer {
+ public:
+  IsolateInitializer() {
+    Isolate::EnsureDefaultIsolate();
+  }
+};
+
+static IsolateInitializer* EnsureDefaultIsolateAllocated() {
+  // TODO(isolates): Use the system threading API to do this once?
+  static IsolateInitializer static_initializer;
+  return &static_initializer;
+}
+
+// This variable is only needed to trigger static initialization.
+static IsolateInitializer* static_initializer = EnsureDefaultIsolateAllocated();
+
+
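+// Thread ids start at 1; a thread-local id of 0 means that no id has been
+// allocated for the thread yet (see FindOrAllocatePerThreadDataForThisThread).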
+Isolate::ThreadId Isolate::AllocateThreadId() {
+  ThreadId new_id;
+  {
+    ScopedLock lock(process_wide_mutex_);
+    new_id = ++highest_thread_id_;
+  }
+  return new_id;
+}
+
+
+Isolate::PerIsolateThreadData* Isolate::AllocatePerIsolateThreadData(
+    ThreadId thread_id) {
+  ASSERT(thread_id != 0);
+  ASSERT(Thread::GetThreadLocalInt(thread_id_key_) == thread_id);
+  PerIsolateThreadData* per_thread = new PerIsolateThreadData(this, thread_id);
+  {
+    ScopedLock lock(process_wide_mutex_);
+    ASSERT(thread_data_table_->Lookup(this, thread_id) == NULL);
+    thread_data_table_->Insert(per_thread);
+    ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread);
+  }
+  return per_thread;
+}
+
+
+Isolate::PerIsolateThreadData*
+    Isolate::FindOrAllocatePerThreadDataForThisThread() {
+  ThreadId thread_id = Thread::GetThreadLocalInt(thread_id_key_);
+  if (thread_id == 0) {
+    thread_id = AllocateThreadId();
+    Thread::SetThreadLocalInt(thread_id_key_, thread_id);
+  }
+  PerIsolateThreadData* per_thread = NULL;
+  {
+    ScopedLock lock(process_wide_mutex_);
+    per_thread = thread_data_table_->Lookup(this, thread_id);
+    if (per_thread == NULL) {
+      per_thread = AllocatePerIsolateThreadData(thread_id);
+    }
+  }
+  return per_thread;
+}
+
+
+void Isolate::EnsureDefaultIsolate() {
+  ScopedLock lock(process_wide_mutex_);
+  if (default_isolate_ == NULL) {
+    isolate_key_ = Thread::CreateThreadLocalKey();
+    thread_id_key_ = Thread::CreateThreadLocalKey();
+    per_isolate_thread_data_key_ = Thread::CreateThreadLocalKey();
+    thread_data_table_ = new Isolate::ThreadDataTable();
+    default_isolate_ = new Isolate();
+  }
+  // Can't use SetIsolateThreadLocals(default_isolate_, NULL) here
+  // because non-null thread data may already be set.
+  Thread::SetThreadLocal(isolate_key_, default_isolate_);
+  CHECK(default_isolate_->PreInit());
+}
+
+
+Debugger* Isolate::GetDefaultIsolateDebugger() {
+  EnsureDefaultIsolate();
+  return default_isolate_->debugger();
+}
+
+
+StackGuard* Isolate::GetDefaultIsolateStackGuard() {
+  EnsureDefaultIsolate();
+  return default_isolate_->stack_guard();
+}
+
+
+void Isolate::EnterDefaultIsolate() {
+  EnsureDefaultIsolate();
+  ASSERT(default_isolate_ != NULL);
+
+  PerIsolateThreadData* data = CurrentPerIsolateThreadData();
+  // If not yet in default isolate - enter it.
+  if (data == NULL || data->isolate() != default_isolate_) {
+    default_isolate_->Enter();
+  }
+}
+
+
+Isolate* Isolate::GetDefaultIsolateForLocking() {
+  EnsureDefaultIsolate();
+  return default_isolate_;
+}
+
+
+Isolate::ThreadDataTable::ThreadDataTable()
+    : list_(NULL) {
+}
+
+
+Isolate::PerIsolateThreadData*
+    Isolate::ThreadDataTable::Lookup(Isolate* isolate, ThreadId thread_id) {
+  for (PerIsolateThreadData* data = list_; data != NULL; data = data->next_) {
+    if (data->Matches(isolate, thread_id)) return data;
+  }
+  return NULL;
+}
+
+
+void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
+  if (list_ != NULL) list_->prev_ = data;
+  data->next_ = list_;
+  list_ = data;
+}
+
+
+void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
+  if (list_ == data) list_ = data->next_;
+  if (data->next_ != NULL) data->next_->prev_ = data->prev_;
+  if (data->prev_ != NULL) data->prev_->next_ = data->next_;
+}
+
+
+void Isolate::ThreadDataTable::Remove(Isolate* isolate, ThreadId thread_id) {
+  PerIsolateThreadData* data = Lookup(isolate, thread_id);
+  if (data != NULL) {
+    Remove(data);
+  }
+}
+
+
+#ifdef DEBUG
+#define TRACE_ISOLATE(tag)                                              \
+  do {                                                                  \
+    if (FLAG_trace_isolates) {                                          \
+      PrintF("Isolate %p " #tag "\n", reinterpret_cast<void*>(this));   \
+    }                                                                   \
+  } while (false)
+#else
+#define TRACE_ISOLATE(tag)
+#endif
+
+
+Isolate::Isolate()
+    : state_(UNINITIALIZED),
+      entry_stack_(NULL),
+      stack_trace_nesting_level_(0),
+      incomplete_message_(NULL),
+      preallocated_memory_thread_(NULL),
+      preallocated_message_space_(NULL),
+      bootstrapper_(NULL),
+      runtime_profiler_(NULL),
+      compilation_cache_(NULL),
+      counters_(new Counters()),
+      cpu_features_(NULL),
+      code_range_(NULL),
+      break_access_(OS::CreateMutex()),
+      logger_(new Logger()),
+      stats_table_(new StatsTable()),
+      stub_cache_(NULL),
+      deoptimizer_data_(NULL),
+      capture_stack_trace_for_uncaught_exceptions_(false),
+      stack_trace_for_uncaught_exceptions_frame_limit_(0),
+      stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
+      transcendental_cache_(NULL),
+      memory_allocator_(NULL),
+      keyed_lookup_cache_(NULL),
+      context_slot_cache_(NULL),
+      descriptor_lookup_cache_(NULL),
+      handle_scope_implementer_(NULL),
+      scanner_constants_(NULL),
+      in_use_list_(0),
+      free_list_(0),
+      preallocated_storage_preallocated_(false),
+      pc_to_code_cache_(NULL),
+      write_input_buffer_(NULL),
+      global_handles_(NULL),
+      context_switcher_(NULL),
+      thread_manager_(NULL),
+      ast_sentinels_(NULL),
+      string_tracker_(NULL),
+      regexp_stack_(NULL),
+      frame_element_constant_list_(0),
+      result_constant_list_(0) {
+  TRACE_ISOLATE(constructor);
+
+  memset(isolate_addresses_, 0,
+      sizeof(isolate_addresses_[0]) * (k_isolate_address_count + 1));
+
+  heap_.isolate_ = this;
+  zone_.isolate_ = this;
+  stack_guard_.isolate_ = this;
+
+#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__)
+  simulator_initialized_ = false;
+  simulator_i_cache_ = NULL;
+  simulator_redirection_ = NULL;
+#endif
+
+#ifdef DEBUG
+  // heap_histograms_ initializes itself.
+  memset(&js_spill_information_, 0, sizeof(js_spill_information_));
+  memset(code_kind_statistics_, 0,
+         sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS);
+#endif
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  debug_ = NULL;
+  debugger_ = NULL;
+#endif
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  producer_heap_profile_ = NULL;
+#endif
+
+  handle_scope_data_.Initialize();
+
+#define ISOLATE_INIT_EXECUTE(type, name, initial_value)                        \
+  name##_ = (initial_value);
+  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
+#undef ISOLATE_INIT_EXECUTE
+
+#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length)                         \
+  memset(name##_, 0, sizeof(type) * length);
+  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
+#undef ISOLATE_INIT_ARRAY_EXECUTE
+}
+
+
+void Isolate::TearDown() {
+  TRACE_ISOLATE(tear_down);
+
+  // Temporarily set this isolate as current so that various parts of
+  // the isolate can access it in their destructors without having a
+  // direct pointer. We don't use Enter/Exit here to avoid
+  // initializing the thread data.
+  PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
+  Isolate* saved_isolate = UncheckedCurrent();
+  SetIsolateThreadLocals(this, NULL);
+
+  Deinit();
+
+  if (!IsDefaultIsolate()) {
+    delete this;
+  }
+
+  // Restore the previous current isolate.
+  SetIsolateThreadLocals(saved_isolate, saved_data);
+}
+
+
+void Isolate::Deinit() {
+  if (state_ == INITIALIZED) {
+    TRACE_ISOLATE(deinit);
+
+    if (FLAG_hydrogen_stats) HStatistics::Instance()->Print();
+
+    // We must stop the logger before we tear down other components.
+    logger_->EnsureTickerStopped();
+
+    delete deoptimizer_data_;
+    deoptimizer_data_ = NULL;
+    if (FLAG_preemption) {
+      v8::Locker locker;
+      v8::Locker::StopPreemption();
+    }
+    builtins_.TearDown();
+    bootstrapper_->TearDown();
+
+    // Remove the external reference to the preallocated stack memory.
+    delete preallocated_message_space_;
+    preallocated_message_space_ = NULL;
+    PreallocatedMemoryThreadStop();
+
+    HeapProfiler::TearDown();
+    CpuProfiler::TearDown();
+    if (runtime_profiler_ != NULL) {
+      runtime_profiler_->TearDown();
+      delete runtime_profiler_;
+      runtime_profiler_ = NULL;
+    }
+    heap_.TearDown();
+    logger_->TearDown();
+
+    // The default isolate is re-initializable due to legacy API.
+    state_ = PREINITIALIZED;
+  }
+}
+
+
+void Isolate::SetIsolateThreadLocals(Isolate* isolate,
+                                     PerIsolateThreadData* data) {
+  Thread::SetThreadLocal(isolate_key_, isolate);
+  Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
+}
+
+
+Isolate::~Isolate() {
+  TRACE_ISOLATE(destructor);
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  delete producer_heap_profile_;
+  producer_heap_profile_ = NULL;
+#endif
+
+  delete scanner_constants_;
+  scanner_constants_ = NULL;
+
+  delete regexp_stack_;
+  regexp_stack_ = NULL;
+
+  delete ast_sentinels_;
+  ast_sentinels_ = NULL;
+
+  delete descriptor_lookup_cache_;
+  descriptor_lookup_cache_ = NULL;
+  delete context_slot_cache_;
+  context_slot_cache_ = NULL;
+  delete keyed_lookup_cache_;
+  keyed_lookup_cache_ = NULL;
+
+  delete transcendental_cache_;
+  transcendental_cache_ = NULL;
+  delete stub_cache_;
+  stub_cache_ = NULL;
+  delete stats_table_;
+  stats_table_ = NULL;
+
+  delete logger_;
+  logger_ = NULL;
+
+  delete counters_;
+  counters_ = NULL;
+  delete cpu_features_;
+  cpu_features_ = NULL;
+
+  delete handle_scope_implementer_;
+  handle_scope_implementer_ = NULL;
+  delete break_access_;
+  break_access_ = NULL;
+
+  delete compilation_cache_;
+  compilation_cache_ = NULL;
+  delete bootstrapper_;
+  bootstrapper_ = NULL;
+  delete pc_to_code_cache_;
+  pc_to_code_cache_ = NULL;
+  delete write_input_buffer_;
+  write_input_buffer_ = NULL;
+
+  delete context_switcher_;
+  context_switcher_ = NULL;
+  delete thread_manager_;
+  thread_manager_ = NULL;
+
+  delete string_tracker_;
+  string_tracker_ = NULL;
+
+  delete memory_allocator_;
+  memory_allocator_ = NULL;
+  delete code_range_;
+  code_range_ = NULL;
+  delete global_handles_;
+  global_handles_ = NULL;
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  delete debugger_;
+  debugger_ = NULL;
+  delete debug_;
+  debug_ = NULL;
+#endif
+}
+
+
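+// PreInit allocates the heap-independent components of the isolate and moves
+// it from UNINITIALIZED to PREINITIALIZED; Init() below finishes the job
+// (heap, builtins, bootstrapper) and moves it to INITIALIZED.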
+bool Isolate::PreInit() {
+  if (state_ != UNINITIALIZED) return true;
+
+  TRACE_ISOLATE(preinit);
+
+  ASSERT(Isolate::Current() == this);
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  debug_ = new Debug(this);
+  debugger_ = new Debugger();
+  debugger_->isolate_ = this;
+#endif
+
+  memory_allocator_ = new MemoryAllocator();
+  memory_allocator_->isolate_ = this;
+  code_range_ = new CodeRange();
+  code_range_->isolate_ = this;
+
+  // Safe after setting Heap::isolate_, initializing StackGuard and
+  // ensuring that Isolate::Current() == this.
+  heap_.SetStackLimits();
+
+#ifdef DEBUG
+  DisallowAllocationFailure disallow_allocation_failure;
+#endif
+
+#define C(name) isolate_addresses_[Isolate::k_##name] =                        \
+    reinterpret_cast<Address>(name());
+  ISOLATE_ADDRESS_LIST(C)
+  ISOLATE_ADDRESS_LIST_PROF(C)
+#undef C
+
+  string_tracker_ = new StringTracker();
+  string_tracker_->isolate_ = this;
+  thread_manager_ = new ThreadManager();
+  thread_manager_->isolate_ = this;
+  compilation_cache_ = new CompilationCache();
+  transcendental_cache_ = new TranscendentalCache();
+  keyed_lookup_cache_ = new KeyedLookupCache();
+  context_slot_cache_ = new ContextSlotCache();
+  descriptor_lookup_cache_ = new DescriptorLookupCache();
+  scanner_constants_ = new ScannerConstants();
+  pc_to_code_cache_ = new PcToCodeCache(this);
+  write_input_buffer_ = new StringInputBuffer();
+  global_handles_ = new GlobalHandles(this);
+  bootstrapper_ = new Bootstrapper();
+  cpu_features_ = new CpuFeatures();
+  handle_scope_implementer_ = new HandleScopeImplementer();
+  stub_cache_ = new StubCache(this);
+  ast_sentinels_ = new AstSentinels();
+  regexp_stack_ = new RegExpStack();
+  regexp_stack_->isolate_ = this;
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  producer_heap_profile_ = new ProducerHeapProfile();
+  producer_heap_profile_->isolate_ = this;
+#endif
+
+  state_ = PREINITIALIZED;
+  return true;
+}
+
+
+void Isolate::InitializeThreadLocal() {
+  thread_local_top_.Initialize();
+  clear_pending_exception();
+  clear_pending_message();
+  clear_scheduled_exception();
+}
+
+
+bool Isolate::Init(Deserializer* des) {
+  ASSERT(state_ != INITIALIZED);
+
+  TRACE_ISOLATE(init);
+
+  bool create_heap_objects = des == NULL;
+
+#ifdef DEBUG
+  // The initialization process does not handle memory exhaustion.
+  DisallowAllocationFailure disallow_allocation_failure;
+#endif
+
+  if (state_ == UNINITIALIZED && !PreInit()) return false;
+
+  // Enable logging before setting up the heap
+  logger_->Setup();
+
+  CpuProfiler::Setup();
+  HeapProfiler::Setup();
+
+  // Setup the platform OS support.
+  OS::Setup();
+
+  // Initialize other runtime facilities
+#if defined(USE_SIMULATOR)
+#if defined(V8_TARGET_ARCH_ARM)
+  Simulator::Initialize();
+#elif defined(V8_TARGET_ARCH_MIPS)
+  ::assembler::mips::Simulator::Initialize();
+#endif
+#endif
+
+  { // NOLINT
+    // Ensure that the thread has a valid stack guard.  The v8::Locker object
+    // will ensure this too, but we don't have to use lockers if we are only
+    // using one thread.
+    ExecutionAccess lock(this);
+    stack_guard_.InitThread(lock);
+  }
+
+  // Setup the object heap
+  ASSERT(!heap_.HasBeenSetup());
+  if (!heap_.Setup(create_heap_objects)) {
+    V8::SetFatalError();
+    return false;
+  }
+
+  bootstrapper_->Initialize(create_heap_objects);
+  builtins_.Setup(create_heap_objects);
+
+  InitializeThreadLocal();
+
+  // Only preallocate on the first initialization.
+  if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
+    // Start the thread which will set aside some memory.
+    PreallocatedMemoryThreadStart();
+    preallocated_message_space_ =
+        new NoAllocationStringAllocator(
+            preallocated_memory_thread_->data(),
+            preallocated_memory_thread_->length());
+    PreallocatedStorageInit(preallocated_memory_thread_->length() / 4);
+  }
+
+  if (FLAG_preemption) {
+    v8::Locker locker;
+    v8::Locker::StartPreemption(100);
+  }
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  debug_->Setup(create_heap_objects);
+#endif
+  stub_cache_->Initialize(create_heap_objects);
+
+  // If we are deserializing, read the state into the now-empty heap.
+  if (des != NULL) {
+    des->Deserialize();
+    stub_cache_->Clear();
+  }
+
+  // Deserializing may put strange things in the root array's copy of the
+  // stack guard.
+  heap_.SetStackLimits();
+
+  // Setup the CPU support. Must be done after heap setup and after
+  // any deserialization because we have to have the initial heap
+  // objects in place for creating the code object used for probing.
+  CPU::Setup();
+
+  deoptimizer_data_ = new DeoptimizerData;
+  runtime_profiler_ = new RuntimeProfiler(this);
+  runtime_profiler_->Setup();
+
+  // If we are deserializing, log non-function code objects and compiled
+  // functions found in the snapshot.
+  if (des != NULL && FLAG_log_code) {
+    HandleScope scope;
+    LOG(this, LogCodeObjects());
+    LOG(this, LogCompiledFunctions());
+  }
+
+  state_ = INITIALIZED;
+  return true;
+}
+
+
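+// Enter/Exit calls nest: if the current thread re-enters the isolate it is
+// already running, only entry_stack_->entry_count is bumped; otherwise a new
+// EntryStackItem is pushed recording the previously current isolate and
+// thread data so that Exit() can restore them.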
+void Isolate::Enter() {
+  Isolate* current_isolate = NULL;
+  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
+  if (current_data != NULL) {
+    current_isolate = current_data->isolate_;
+    ASSERT(current_isolate != NULL);
+    if (current_isolate == this) {
+      ASSERT(Current() == this);
+      ASSERT(entry_stack_ != NULL);
+      ASSERT(entry_stack_->previous_thread_data == NULL ||
+             entry_stack_->previous_thread_data->thread_id() ==
+                 Thread::GetThreadLocalInt(thread_id_key_));
+      // Same thread re-enters the isolate, no need to re-init anything.
+      entry_stack_->entry_count++;
+      return;
+    }
+  }
+
+  // Threads can have the default isolate set in TLS as Current but not yet
+  // have PerIsolateThreadData for it, since that requires a later phase of
+  // initialization. For example, a thread might be the one the system used
+  // for static initializers - in this case the default isolate is set in TLS
+  // but the thread has not yet entered the isolate. If PerIsolateThreadData
+  // is not there, use the isolate set in TLS.
+  if (current_isolate == NULL) {
+    current_isolate = Isolate::UncheckedCurrent();
+  }
+
+  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
+  ASSERT(data != NULL);
+  ASSERT(data->isolate_ == this);
+
+  EntryStackItem* item = new EntryStackItem(current_data,
+                                            current_isolate,
+                                            entry_stack_);
+  entry_stack_ = item;
+
+  SetIsolateThreadLocals(this, data);
+
+  CHECK(PreInit());
+
+  // In case it's the first time some thread enters the isolate.
+  set_thread_id(data->thread_id());
+}
+
+
+void Isolate::Exit() {
+  ASSERT(entry_stack_ != NULL);
+  ASSERT(entry_stack_->previous_thread_data == NULL ||
+         entry_stack_->previous_thread_data->thread_id() ==
+             Thread::GetThreadLocalInt(thread_id_key_));
+
+  if (--entry_stack_->entry_count > 0) return;
+
+  ASSERT(CurrentPerIsolateThreadData() != NULL);
+  ASSERT(CurrentPerIsolateThreadData()->isolate_ == this);
+
+  // Pop the stack.
+  EntryStackItem* item = entry_stack_;
+  entry_stack_ = item->previous_item;
+
+  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
+  Isolate* previous_isolate = item->previous_isolate;
+
+  delete item;
+
+  // Reinit the current thread for the isolate it was running in before this one.
+  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
+}
+
+
+void Isolate::ResetEagerOptimizingData() {
+  compilation_cache_->ResetEagerOptimizingData();
+}
+
+
+#ifdef DEBUG
+#define ISOLATE_FIELD_OFFSET(type, name, ignored)                       \
+const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
+ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
+ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
+#undef ISOLATE_FIELD_OFFSET
+#endif
+
+} }  // namespace v8::internal
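Most of the formerly process-global state is now reached through the isolate in thread-local storage; a minimal sketch of the access pattern, using only accessors declared in src/isolate.h below:

    v8::internal::Isolate* isolate = v8::internal::Isolate::Current();
    isolate->counters();           // per-isolate counters instead of statics
    isolate->compilation_cache();  // per-isolate compilation cache
    isolate->stub_cache();         // per-isolate stub cache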
diff --git a/src/isolate.h b/src/isolate.h
new file mode 100644 (file)
index 0000000..13ffc13
--- /dev/null
@@ -0,0 +1,1304 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ISOLATE_H_
+#define V8_ISOLATE_H_
+
+#include "../include/v8-debug.h"
+#include "allocation.h"
+#include "apiutils.h"
+#include "atomicops.h"
+#include "builtins.h"
+#include "contexts.h"
+#include "execution.h"
+#include "frames.h"
+#include "global-handles.h"
+#include "handles.h"
+#include "heap.h"
+#include "regexp-stack.h"
+#include "runtime-profiler.h"
+#include "runtime.h"
+#include "zone.h"
+
+namespace v8 {
+namespace internal {
+
+class AstSentinels;
+class Bootstrapper;
+class CodeGenerator;
+class CodeRange;
+class CompilationCache;
+class ContextSlotCache;
+class ContextSwitcher;
+class Counters;
+class CpuFeatures;
+class CpuProfiler;
+class DeoptimizerData;
+class Deserializer;
+class EmptyStatement;
+class ExternalReferenceTable;
+class Factory;
+class FunctionInfoListener;
+class HandleScopeImplementer;
+class HeapProfiler;
+class InlineRuntimeFunctionsTable;
+class NoAllocationStringAllocator;
+class PcToCodeCache;
+class PreallocatedMemoryThread;
+class ProducerHeapProfile;
+class RegExpStack;
+class SaveContext;
+class ScannerConstants;
+class StringInputBuffer;
+class StringTracker;
+class StubCache;
+class ThreadManager;
+class ThreadState;
+class ThreadVisitor;  // Defined in v8threads.h
+class VMState;
+
+// 'void function pointer', used to roundtrip the
+// ExternalReference::ExternalReferenceRedirector since we cannot include
+// assembler.h (where it is defined) here.
+typedef void* ExternalReferenceRedirectorPointer();
+
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+class Debug;
+class Debugger;
+class DebuggerAgent;
+#endif
+
+#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM)
+class Redirection;
+class Simulator;
+#endif
+
+// Static indirection table for handles to constants.  If a frame element or
+// a Result represents a constant, the data contains an index into this table
+// of handles to the actual constants.
+typedef ZoneList<Handle<Object> > ZoneObjectList;
+
+#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)    \
+  if (isolate->has_scheduled_exception())         \
+      return isolate->PromoteScheduledException()
+
+#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
+  if (call.is_null()) {                                    \
+    ASSERT(isolate->has_pending_exception());              \
+    return value;                                          \
+  }
+
+#define RETURN_IF_EMPTY_HANDLE(isolate, call)                       \
+  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())
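+
+// Typical use in runtime code: wrap a handle-returning call so that an empty
+// handle (which implies a pending exception on the isolate) propagates a
+// Failure::Exception() to the caller, e.g.
+//   RETURN_IF_EMPTY_HANDLE(isolate, handle_returning_call);
+// where handle_returning_call stands for any Handle-producing expression.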
+
+#define ISOLATE_ADDRESS_LIST(C)            \
+  C(handler_address)                       \
+  C(c_entry_fp_address)                    \
+  C(context_address)                       \
+  C(pending_exception_address)             \
+  C(external_caught_exception_address)
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+#define ISOLATE_ADDRESS_LIST_PROF(C)       \
+  C(js_entry_sp_address)
+#else
+#define ISOLATE_ADDRESS_LIST_PROF(C)
+#endif
+
+
+class ThreadLocalTop BASE_EMBEDDED {
+ public:
+  // Initialize the thread data.
+  void Initialize();
+
+  // Get the top C++ try catch handler or NULL if none are registered.
+  //
+  // This method is not guaranteed to return an address that can be
+  // used for comparison with addresses into the JS stack.  If such an
+  // address is needed, use try_catch_handler_address.
+  v8::TryCatch* TryCatchHandler();
+
+  // Get the address of the top C++ try catch handler or NULL if
+  // none are registered.
+  //
+  // This method always returns an address that can be compared to
+  // pointers into the JavaScript stack.  When running on actual
+  // hardware, try_catch_handler_address and TryCatchHandler return
+  // the same pointer.  When running on a simulator with a separate JS
+  // stack, try_catch_handler_address returns a JS stack address that
+  // corresponds to the place on the JS stack where the C++ handler
+  // would have been if the stack were not separate.
+  inline Address try_catch_handler_address() {
+    return try_catch_handler_address_;
+  }
+
+  // Set the address of the top C++ try catch handler.
+  inline void set_try_catch_handler_address(Address address) {
+    try_catch_handler_address_ = address;
+  }
+
+  void Free() {
+    ASSERT(!has_pending_message_);
+    ASSERT(!external_caught_exception_);
+    ASSERT(try_catch_handler_address_ == NULL);
+  }
+
+  // The context in which the current execution method was created and which
+  // is used for variable lookups.
+  Context* context_;
+  int thread_id_;
+  MaybeObject* pending_exception_;
+  bool has_pending_message_;
+  const char* pending_message_;
+  Object* pending_message_obj_;
+  Script* pending_message_script_;
+  int pending_message_start_pos_;
+  int pending_message_end_pos_;
+  // Use a separate value for scheduled exceptions to preserve the
+  // invariants that hold about pending_exception.  We may want to
+  // unify them later.
+  MaybeObject* scheduled_exception_;
+  bool external_caught_exception_;
+  SaveContext* save_context_;
+  v8::TryCatch* catcher_;
+
+  // Stack.
+  Address c_entry_fp_;  // the frame pointer of the top c entry frame
+  Address handler_;   // try-blocks are chained through the stack
+
+#ifdef USE_SIMULATOR
+#ifdef V8_TARGET_ARCH_ARM
+  Simulator* simulator_;
+#elif V8_TARGET_ARCH_MIPS
+  assembler::mips::Simulator* simulator_;
+#endif
+#endif  // USE_SIMULATOR
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  Address js_entry_sp_;  // the stack pointer of the bottom js entry frame
+  Address external_callback_;  // the external callback we're currently in
+#endif
+
+#ifdef ENABLE_VMSTATE_TRACKING
+  StateTag current_vm_state_;
+#endif
+
+  // Generated code scratch locations.
+  int32_t formal_count_;
+
+  // Call back function to report unsafe JS accesses.
+  v8::FailedAccessCheckCallback failed_access_check_callback_;
+
+ private:
+  Address try_catch_handler_address_;
+};
+
+#if defined(V8_TARGET_ARCH_ARM)
+
+#define ISOLATE_PLATFORM_INIT_LIST(V)                                          \
+  /* VirtualFrame::SpilledScope state */                                       \
+  V(bool, is_virtual_frame_in_spilled_scope, false)                            \
+  /* CodeGenerator::EmitNamedStore state */                                    \
+  V(int, inlined_write_barrier_size, -1)
+
+#if !defined(__arm__)
+class HashMap;
+#endif
+
+#else
+
+#define ISOLATE_PLATFORM_INIT_LIST(V)
+
+#endif
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+
+#define ISOLATE_DEBUGGER_INIT_LIST(V)                                          \
+  V(v8::Debug::EventCallback, debug_event_callback, NULL)                      \
+  V(DebuggerAgent*, debugger_agent_instance, NULL)
+#else
+
+#define ISOLATE_DEBUGGER_INIT_LIST(V)
+
+#endif
+
+#ifdef DEBUG
+
+#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
+  V(CommentStatistic, paged_space_comments_statistics,                         \
+      CommentStatistic::kMaxComments + 1)
+#else
+
+#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
+
+#endif
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+
+#define ISOLATE_LOGGING_INIT_LIST(V)                                           \
+  V(CpuProfiler*, cpu_profiler, NULL)                                          \
+  V(HeapProfiler*, heap_profiler, NULL)
+
+#else
+
+#define ISOLATE_LOGGING_INIT_LIST(V)
+
+#endif
+
+#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
+  /* SerializerDeserializer state. */                                          \
+  V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity)  \
+  V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize)     \
+  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
+  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
+  V(int, suffix_table, (kBMMaxShift + 1))                                      \
+  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
+
+typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
+
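+// Each V(type, name, initial_value) entry below becomes a name##_ field on
+// Isolate, set to initial_value in the Isolate constructor, together with
+// name()/set_name() accessors generated by GLOBAL_ACCESSOR further down.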
+#define ISOLATE_INIT_LIST(V)                                                   \
+  /* AssertNoZoneAllocation state. */                                          \
+  V(bool, zone_allow_allocation, true)                                         \
+  /* SerializerDeserializer state. */                                          \
+  V(int, serialize_partial_snapshot_cache_length, 0)                           \
+  /* Assembler state. */                                                       \
+  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
+  V(byte*, assembler_spare_buffer, NULL)                                       \
+  /*This static counter ensures that NativeAllocationCheckers can be nested.*/ \
+  V(int, allocation_disallowed, 0)                                             \
+  V(FatalErrorCallback, exception_behavior, NULL)                              \
+  V(v8::Debug::MessageHandler, message_handler, NULL)                          \
+  /* To distinguish the function templates, so that we can find them in the */ \
+  /* function cache of the global context. */                                  \
+  V(int, next_serial_number, 0)                                                \
+  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
+  V(bool, always_allow_natives_syntax, false)                                  \
+  /* Part of the state of liveedit. */                                         \
+  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
+  /* State for Relocatable. */                                                 \
+  V(Relocatable*, relocatable_top, NULL)                                       \
+  /* State for CodeEntry in profile-generator. */                              \
+  V(CodeGenerator*, current_code_generator, NULL)                              \
+  V(bool, jump_target_compiling_deferred_code, false)                          \
+  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
+  V(Object*, string_stream_current_security_token, NULL)                       \
+  /* TODO(isolates): Release this on destruction? */                           \
+  V(int*, irregexp_interpreter_backtrack_stack_cache, NULL)                    \
+  /* Serializer state. */                                                      \
+  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
+  /* AstNode state. */                                                         \
+  V(unsigned, ast_node_id, 0)                                                  \
+  V(unsigned, ast_node_count, 0)                                               \
+  ISOLATE_PLATFORM_INIT_LIST(V)                                                \
+  ISOLATE_LOGGING_INIT_LIST(V)                                                 \
+  ISOLATE_DEBUGGER_INIT_LIST(V)
+
+class Isolate {
+  // These forward declarations are required to make the friend declarations in
+  // PerIsolateThreadData work on some older versions of gcc.
+  class ThreadDataTable;
+  class EntryStackItem;
+ public:
+  ~Isolate();
+
+  typedef int ThreadId;
+
+  // A thread has a PerIsolateThreadData instance for each isolate that it has
+  // entered. That instance is allocated when the isolate is initially entered
+  // and reused on subsequent entries.
+  class PerIsolateThreadData {
+   public:
+    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
+        : isolate_(isolate),
+          thread_id_(thread_id),
+          stack_limit_(0),
+          thread_state_(NULL),
+#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM)
+          simulator_(NULL),
+#endif
+          next_(NULL),
+          prev_(NULL) { }
+    Isolate* isolate() const { return isolate_; }
+    ThreadId thread_id() const { return thread_id_; }
+    void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
+    uintptr_t stack_limit() const { return stack_limit_; }
+    ThreadState* thread_state() const { return thread_state_; }
+    void set_thread_state(ThreadState* value) { thread_state_ = value; }
+
+#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM)
+    Simulator* simulator() const { return simulator_; }
+    void set_simulator(Simulator* simulator) {
+      simulator_ = simulator;
+    }
+#endif
+
+    bool Matches(Isolate* isolate, ThreadId thread_id) const {
+      return isolate_ == isolate && thread_id_ == thread_id;
+    }
+
+   private:
+    Isolate* isolate_;
+    ThreadId thread_id_;
+    uintptr_t stack_limit_;
+    ThreadState* thread_state_;
+
+#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM)
+    Simulator* simulator_;
+#endif
+
+    PerIsolateThreadData* next_;
+    PerIsolateThreadData* prev_;
+
+    friend class Isolate;
+    friend class ThreadDataTable;
+    friend class EntryStackItem;
+
+    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
+  };
+
+
+  enum AddressId {
+#define C(name) k_##name,
+    ISOLATE_ADDRESS_LIST(C)
+    ISOLATE_ADDRESS_LIST_PROF(C)
+#undef C
+    k_isolate_address_count
+  };
+
+  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
+  // not currently set).
+  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
+    return reinterpret_cast<PerIsolateThreadData*>(
+        Thread::GetThreadLocal(per_isolate_thread_data_key_));
+  }
+
+  // Returns the isolate inside which the current thread is running.
+  INLINE(static Isolate* Current()) {
+    Isolate* isolate = UncheckedCurrent();
+    ASSERT(isolate != NULL);
+    return isolate;
+  }
+
+  INLINE(static Isolate* UncheckedCurrent()) {
+    return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
+  }
+
+  bool Init(Deserializer* des);
+
+  bool IsInitialized() { return state_ == INITIALIZED; }
+
+  // True if at least one thread Enter'ed this isolate.
+  bool IsInUse() { return entry_stack_ != NULL; }
+
+  // Destroys the non-default isolates.
+  // Sets the default isolate into a "has_been_disposed" state rather than
+  // destroying it, for legacy API reasons.
+  void TearDown();
+
+  bool IsDefaultIsolate() const { return this == default_isolate_; }
+
+  // Ensures that process-wide resources and the default isolate have been
+  // allocated. It is only necessary to call this method in rare cases, for
+  // example if you are using V8 from within the body of a static initializer.
+  // Safe to call multiple times.
+  static void EnsureDefaultIsolate();
+
+  // Get the debugger from the default isolate. Preinitializes the
+  // default isolate if needed.
+  static Debugger* GetDefaultIsolateDebugger();
+
+  // Get the stack guard from the default isolate. Preinitializes the
+  // default isolate if needed.
+  static StackGuard* GetDefaultIsolateStackGuard();
+
+  // Returns the key used to store the pointer to the current isolate.
+  // Used internally for V8 threads that do not execute JavaScript but still
+  // are part of the domain of an isolate (like the context switcher).
+  static Thread::LocalStorageKey isolate_key() {
+    return isolate_key_;
+  }
+
+  // Returns the key used to store process-wide thread IDs.
+  static Thread::LocalStorageKey thread_id_key() {
+    return thread_id_key_;
+  }
+
+  // Atomically allocates a new thread ID.
+  static ThreadId AllocateThreadId();
+
+  // If a client attempts to create a Locker without specifying an isolate,
+  // we assume that the client is using legacy behavior. Set up the current
+  // thread to be inside the implicit isolate (or fail a check if we have
+  // switched to non-legacy behavior).
+  static void EnterDefaultIsolate();
+
+  // Debug.
+  // Mutex for serializing access to break control structures.
+  Mutex* break_access() { return break_access_; }
+
+  Address get_address_from_id(AddressId id);
+
+  // Access to top context (where the current function object was created).
+  Context* context() { return thread_local_top_.context_; }
+  void set_context(Context* context) {
+    thread_local_top_.context_ = context;
+  }
+  Context** context_address() { return &thread_local_top_.context_; }
+
+  SaveContext* save_context() {return thread_local_top_.save_context_; }
+  void set_save_context(SaveContext* save) {
+    thread_local_top_.save_context_ = save;
+  }
+
+  // Access to current thread id.
+  int thread_id() { return thread_local_top_.thread_id_; }
+  void set_thread_id(int id) { thread_local_top_.thread_id_ = id; }
+
+  // Interface to pending exception.
+  MaybeObject* pending_exception() {
+    ASSERT(has_pending_exception());
+    return thread_local_top_.pending_exception_;
+  }
+  bool external_caught_exception() {
+    return thread_local_top_.external_caught_exception_;
+  }
+  void set_pending_exception(MaybeObject* exception) {
+    thread_local_top_.pending_exception_ = exception;
+  }
+  void clear_pending_exception() {
+    thread_local_top_.pending_exception_ = heap_.the_hole_value();
+  }
+  MaybeObject** pending_exception_address() {
+    return &thread_local_top_.pending_exception_;
+  }
+  bool has_pending_exception() {
+    return !thread_local_top_.pending_exception_->IsTheHole();
+  }
+  void clear_pending_message() {
+    thread_local_top_.has_pending_message_ = false;
+    thread_local_top_.pending_message_ = NULL;
+    thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
+    thread_local_top_.pending_message_script_ = NULL;
+  }
+  v8::TryCatch* try_catch_handler() {
+    return thread_local_top_.TryCatchHandler();
+  }
+  Address try_catch_handler_address() {
+    return thread_local_top_.try_catch_handler_address();
+  }
+  bool* external_caught_exception_address() {
+    return &thread_local_top_.external_caught_exception_;
+  }
+
+  MaybeObject** scheduled_exception_address() {
+    return &thread_local_top_.scheduled_exception_;
+  }
+  MaybeObject* scheduled_exception() {
+    ASSERT(has_scheduled_exception());
+    return thread_local_top_.scheduled_exception_;
+  }
+  bool has_scheduled_exception() {
+    return !thread_local_top_.scheduled_exception_->IsTheHole();
+  }
+  void clear_scheduled_exception() {
+    thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
+  }
+
+  bool IsExternallyCaught();
+
+  bool is_catchable_by_javascript(MaybeObject* exception) {
+    return (exception != Failure::OutOfMemoryException()) &&
+        (exception != heap()->termination_exception());
+  }
+
+  // JS execution stack (see frames.h).
+  static Address c_entry_fp(ThreadLocalTop* thread) {
+    return thread->c_entry_fp_;
+  }
+  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
+
+  inline Address* c_entry_fp_address() {
+    return &thread_local_top_.c_entry_fp_;
+  }
+  inline Address* handler_address() { return &thread_local_top_.handler_; }
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  // Bottom JS entry (see StackTracer::Trace in log.cc).
+  static Address js_entry_sp(ThreadLocalTop* thread) {
+    return thread->js_entry_sp_;
+  }
+  inline Address* js_entry_sp_address() {
+    return &thread_local_top_.js_entry_sp_;
+  }
+#endif
+
+  // Generated code scratch locations.
+  void* formal_count_address() { return &thread_local_top_.formal_count_; }
+
+  // Returns the global object of the current context. It could be
+  // a builtin object, or a js global object.
+  Handle<GlobalObject> global() {
+    return Handle<GlobalObject>(context()->global());
+  }
+
+  // Returns the global proxy object of the current context.
+  Object* global_proxy() {
+    return context()->global_proxy();
+  }
+
+  Handle<JSBuiltinsObject> js_builtins_object() {
+    return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
+  }
+
+  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
+  void FreeThreadResources() { thread_local_top_.Free(); }
+
+  // This method is called by the API after operations that may throw
+  // exceptions.  If an exception was thrown and not handled by an external
+  // handler, the exception is scheduled to be rethrown when we return to
+  // running JavaScript code.  If an exception is scheduled, true is returned.
+  bool OptionalRescheduleException(bool is_bottom_call);
+
+  void SetCaptureStackTraceForUncaughtExceptions(
+      bool capture,
+      int frame_limit,
+      StackTrace::StackTraceOptions options);
+
+  // Tells whether the current context has experienced an out of memory
+  // exception.
+  bool is_out_of_memory();
+
+  void PrintCurrentStackTrace(FILE* out);
+  void PrintStackTrace(FILE* out, char* thread_data);
+  void PrintStack(StringStream* accumulator);
+  void PrintStack();
+  Handle<String> StackTraceString();
+  Handle<JSArray> CaptureCurrentStackTrace(
+      int frame_limit,
+      StackTrace::StackTraceOptions options);
+
+  // Returns whether the top context may access the given global object. If
+  // the result is false, the pending exception is guaranteed to be
+  // set.
+  bool MayNamedAccess(JSObject* receiver,
+                      Object* key,
+                      v8::AccessType type);
+  bool MayIndexedAccess(JSObject* receiver,
+                        uint32_t index,
+                        v8::AccessType type);
+
+  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
+  void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
+
+  // Exception throwing support. The caller should use the result
+  // of Throw() as its return value.
+  Failure* Throw(Object* exception, MessageLocation* location = NULL);
+  // Re-throw an exception.  This involves no error reporting since
+  // error reporting was handled when the exception was thrown
+  // originally.
+  Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
+  void ScheduleThrow(Object* exception);
+  void ReportPendingMessages();
+  Failure* ThrowIllegalOperation();
+
+  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
+  Failure* PromoteScheduledException();
+  void DoThrow(MaybeObject* exception,
+               MessageLocation* location,
+               const char* message);
+  // Checks if exception should be reported and finds out if it's
+  // caught externally.
+  bool ShouldReportException(bool* can_be_caught_externally,
+                             bool catchable_by_javascript);
+
+  // Attempts to compute the current source location, storing the
+  // result in the target out parameter.
+  void ComputeLocation(MessageLocation* target);
+
+  // Override command line flag.
+  void TraceException(bool flag);
+
+  // Out of resource exception helpers.
+  Failure* StackOverflow();
+  Failure* TerminateExecution();
+
+  // Administration
+  void Iterate(ObjectVisitor* v);
+  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
+  char* Iterate(ObjectVisitor* v, char* t);
+  void IterateThread(ThreadVisitor* v);
+  void IterateThread(ThreadVisitor* v, char* t);
+
+
+  // Returns the current global context.
+  Handle<Context> global_context();
+
+  // Returns the global context of the calling JavaScript code.  That
+  // is, the global context of the top-most JavaScript frame.
+  Handle<Context> GetCallingGlobalContext();
+
+  void RegisterTryCatchHandler(v8::TryCatch* that);
+  void UnregisterTryCatchHandler(v8::TryCatch* that);
+
+  char* ArchiveThread(char* to);
+  char* RestoreThread(char* from);
+
+  static const char* const kStackOverflowMessage;
+
+  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
+  static const int kBMMaxShift = 250;        // See StringSearchBase.
+
+  // Accessors.
+#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
+  inline type name() const {                                            \
+    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
+    return name##_;                                                     \
+  }                                                                     \
+  inline void set_##name(type value) {                                  \
+    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
+    name##_ = value;                                                    \
+  }
+  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
+#undef GLOBAL_ACCESSOR
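+
+  // For example, the ISOLATE_INIT_LIST entry
+  //   V(bool, always_allow_natives_syntax, false)
+  // expands here into always_allow_natives_syntax() and
+  // set_always_allow_natives_syntax(bool) accessors backed by the
+  // always_allow_natives_syntax_ field.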
+
+#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
+  inline type* name() {                                                 \
+    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
+    return &(name##_)[0];                                               \
+  }
+  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
+#undef GLOBAL_ARRAY_ACCESSOR
+
+#define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name)      \
+  Handle<type> name() {                                       \
+    return Handle<type>(context()->global_context()->name()); \
+  }
+  GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
+#undef GLOBAL_CONTEXT_FIELD_ACCESSOR
+
+  Bootstrapper* bootstrapper() { return bootstrapper_; }
+  Counters* counters() { return counters_; }
+  // TODO(isolates): Having CPU features per isolate is probably too
+  // flexible. We only really need to have the set of currently
+  // enabled features for asserts in DEBUG builds.
+  CpuFeatures* cpu_features() { return cpu_features_; }
+  CodeRange* code_range() { return code_range_; }
+  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
+  CompilationCache* compilation_cache() { return compilation_cache_; }
+  Logger* logger() { return logger_; }
+  StackGuard* stack_guard() { return &stack_guard_; }
+  Heap* heap() { return &heap_; }
+  StatsTable* stats_table() { return stats_table_; }
+  StubCache* stub_cache() { return stub_cache_; }
+  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
+  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
+
+  TranscendentalCache* transcendental_cache() const {
+    return transcendental_cache_;
+  }
+
+  MemoryAllocator* memory_allocator() {
+    return memory_allocator_;
+  }
+
+  KeyedLookupCache* keyed_lookup_cache() {
+    return keyed_lookup_cache_;
+  }
+
+  ContextSlotCache* context_slot_cache() {
+    return context_slot_cache_;
+  }
+
+  DescriptorLookupCache* descriptor_lookup_cache() {
+    return descriptor_lookup_cache_;
+  }
+
+  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
+    return &handle_scope_data_;
+  }
+  HandleScopeImplementer* handle_scope_implementer() {
+    ASSERT(handle_scope_implementer_);
+    return handle_scope_implementer_;
+  }
+  Zone* zone() { return &zone_; }
+
+  ScannerConstants* scanner_constants() {
+    return scanner_constants_;
+  }
+
+  PcToCodeCache* pc_to_code_cache() { return pc_to_code_cache_; }
+
+  StringInputBuffer* write_input_buffer() { return write_input_buffer_; }
+
+  GlobalHandles* global_handles() { return global_handles_; }
+
+  ThreadManager* thread_manager() { return thread_manager_; }
+
+  ContextSwitcher* context_switcher() { return context_switcher_; }
+
+  void set_context_switcher(ContextSwitcher* switcher) {
+    context_switcher_ = switcher;
+  }
+
+  StringTracker* string_tracker() { return string_tracker_; }
+
+  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
+    return &jsregexp_uncanonicalize_;
+  }
+
+  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
+    return &jsregexp_canonrange_;
+  }
+
+  StringInputBuffer* objects_string_compare_buffer_a() {
+    return &objects_string_compare_buffer_a_;
+  }
+
+  StringInputBuffer* objects_string_compare_buffer_b() {
+    return &objects_string_compare_buffer_b_;
+  }
+
+  StaticResource<StringInputBuffer>* objects_string_input_buffer() {
+    return &objects_string_input_buffer_;
+  }
+
+  AstSentinels* ast_sentinels() { return ast_sentinels_; }
+
+  RuntimeState* runtime_state() { return &runtime_state_; }
+
+  StringInputBuffer* liveedit_compare_substrings_buf1() {
+    return &liveedit_compare_substrings_buf1_;
+  }
+
+  StringInputBuffer* liveedit_compare_substrings_buf2() {
+    return &liveedit_compare_substrings_buf2_;
+  }
+
+  StaticResource<SafeStringInputBuffer>* compiler_safe_string_input_buffer() {
+    return &compiler_safe_string_input_buffer_;
+  }
+
+  Builtins* builtins() { return &builtins_; }
+
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
+      regexp_macro_assembler_canonicalize() {
+    return &regexp_macro_assembler_canonicalize_;
+  }
+
+  RegExpStack* regexp_stack() { return regexp_stack_; }
+
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
+      interp_canonicalize_mapping() {
+    return &interp_canonicalize_mapping_;
+  }
+
+  ZoneObjectList* frame_element_constant_list() {
+    return &frame_element_constant_list_;
+  }
+
+  ZoneObjectList* result_constant_list() {
+    return &result_constant_list_;
+  }
+
+  void* PreallocatedStorageNew(size_t size);
+  void PreallocatedStorageDelete(void* p);
+  void PreallocatedStorageInit(size_t size);
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  Debugger* debugger() { return debugger_; }
+  Debug* debug() { return debug_; }
+#endif
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  ProducerHeapProfile* producer_heap_profile() {
+    return producer_heap_profile_;
+  }
+#endif
+
+#ifdef DEBUG
+  HistogramInfo* heap_histograms() { return heap_histograms_; }
+
+  JSObject::SpillInformation* js_spill_information() {
+    return &js_spill_information_;
+  }
+
+  int* code_kind_statistics() { return code_kind_statistics_; }
+#endif
+
+#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__)
+  bool simulator_initialized() { return simulator_initialized_; }
+  void set_simulator_initialized(bool initialized) {
+    simulator_initialized_ = initialized;
+  }
+
+  HashMap* simulator_i_cache() { return simulator_i_cache_; }
+  void set_simulator_i_cache(HashMap* hash_map) {
+    simulator_i_cache_ = hash_map;
+  }
+
+  Redirection* simulator_redirection() {
+    return simulator_redirection_;
+  }
+  void set_simulator_redirection(Redirection* redirection) {
+    simulator_redirection_ = redirection;
+  }
+#endif
+
+  Factory* factory() { return reinterpret_cast<Factory*>(this); }
+
+  // SerializerDeserializer state.
+  static const int kPartialSnapshotCacheCapacity = 1400;
+
+  static const int kJSRegexpStaticOffsetsVectorSize = 50;
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  Address external_callback() {
+    return thread_local_top_.external_callback_;
+  }
+  void set_external_callback(Address callback) {
+    thread_local_top_.external_callback_ = callback;
+  }
+#endif
+
+#ifdef ENABLE_VMSTATE_TRACKING
+  StateTag current_vm_state() {
+    return thread_local_top_.current_vm_state_;
+  }
+
+  void SetCurrentVMState(StateTag state) {
+    if (RuntimeProfiler::IsEnabled()) {
+      if (state == JS) {
+        // JS or non-JS -> JS transition.
+        RuntimeProfiler::IsolateEnteredJS(this);
+      } else if (thread_local_top_.current_vm_state_ == JS) {
+        // JS -> non-JS transition.
+        ASSERT(RuntimeProfiler::IsSomeIsolateInJS());
+        RuntimeProfiler::IsolateExitedJS(this);
+      }
+    }
+    thread_local_top_.current_vm_state_ = state;
+  }
+#endif
+
+  void ResetEagerOptimizingData();
+
+ private:
+  Isolate();
+
+  // The per-process lock should be acquired before the ThreadDataTable is
+  // modified.
+  class ThreadDataTable {
+   public:
+    ThreadDataTable();
+    ~ThreadDataTable();
+
+    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
+    void Insert(PerIsolateThreadData* data);
+    void Remove(Isolate* isolate, ThreadId thread_id);
+    void Remove(PerIsolateThreadData* data);
+
+   private:
+    PerIsolateThreadData* list_;
+  };
+
+  // These items form a stack that tracks threads Enter'ing and Exit'ing the
+  // Isolate. The top of the stack points to the thread that is currently
+  // running the Isolate. When the stack is empty, the Isolate is considered
+  // not entered by any thread and can be Disposed.
+  // If the same thread enters the Isolate more than once, the entry_count_
+  // is incremented rather than a new item being pushed onto the stack.
+  class EntryStackItem {
+   public:
+    EntryStackItem(PerIsolateThreadData* previous_thread_data,
+                   Isolate* previous_isolate,
+                   EntryStackItem* previous_item)
+        : entry_count(1),
+          previous_thread_data(previous_thread_data),
+          previous_isolate(previous_isolate),
+          previous_item(previous_item) { }
+
+    int entry_count;
+    PerIsolateThreadData* previous_thread_data;
+    Isolate* previous_isolate;
+    EntryStackItem* previous_item;
+
+    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
+  };
+
+  // This mutex protects highest_thread_id_, thread_data_table_ and
+  // default_isolate_.
+  static Mutex* process_wide_mutex_;
+
+  static Thread::LocalStorageKey per_isolate_thread_data_key_;
+  static Thread::LocalStorageKey isolate_key_;
+  static Thread::LocalStorageKey thread_id_key_;
+  static Isolate* default_isolate_;
+  static ThreadDataTable* thread_data_table_;
+  static ThreadId highest_thread_id_;
+
+  bool PreInit();
+
+  void Deinit();
+
+  static void SetIsolateThreadLocals(Isolate* isolate,
+                                     PerIsolateThreadData* data);
+
+  enum State {
+    UNINITIALIZED,    // Some components may not have been allocated.
+    PREINITIALIZED,   // Components have been allocated but not initialized.
+    INITIALIZED       // All components are fully initialized.
+  };
+
+  State state_;
+  EntryStackItem* entry_stack_;
+
+  // Allocate and insert PerIsolateThreadData into the ThreadDataTable
+  // (regardless of whether such data already exists).
+  PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);
+
+  // Find the PerThread for this particular (isolate, thread) combination.
+  // If one does not yet exist, allocate a new one.
+  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
+
+  // PreInits and returns a default isolate. Needed when a new thread tries
+  // to create a Locker for the first time (the lock itself is in the isolate).
+  static Isolate* GetDefaultIsolateForLocking();
+
+  // Initializes the current thread to run this Isolate.
+  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
+  // at the same time; this should be prevented using external locking.
+  void Enter();
+
+  // Exits the current thread. The previously entered Isolate is restored
+  // for the thread.
+  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
+  // at the same time; this should be prevented using external locking.
+  void Exit();
+
+  void PreallocatedMemoryThreadStart();
+  void PreallocatedMemoryThreadStop();
+  void InitializeThreadLocal();
+
+  void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
+  void MarkCompactPrologue(bool is_compacting,
+                           ThreadLocalTop* archived_thread_data);
+  void MarkCompactEpilogue(bool is_compacting,
+                           ThreadLocalTop* archived_thread_data);
+
+  void FillCache();
+
+  int stack_trace_nesting_level_;
+  StringStream* incomplete_message_;
+  // The preallocated memory thread singleton.
+  PreallocatedMemoryThread* preallocated_memory_thread_;
+  Address isolate_addresses_[k_isolate_address_count + 1];  // NOLINT
+  NoAllocationStringAllocator* preallocated_message_space_;
+
+  Bootstrapper* bootstrapper_;
+  RuntimeProfiler* runtime_profiler_;
+  CompilationCache* compilation_cache_;
+  Counters* counters_;
+  CpuFeatures* cpu_features_;
+  CodeRange* code_range_;
+  Mutex* break_access_;
+  Heap heap_;
+  Logger* logger_;
+  StackGuard stack_guard_;
+  StatsTable* stats_table_;
+  StubCache* stub_cache_;
+  DeoptimizerData* deoptimizer_data_;
+  ThreadLocalTop thread_local_top_;
+  bool capture_stack_trace_for_uncaught_exceptions_;
+  int stack_trace_for_uncaught_exceptions_frame_limit_;
+  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
+  TranscendentalCache* transcendental_cache_;
+  MemoryAllocator* memory_allocator_;
+  KeyedLookupCache* keyed_lookup_cache_;
+  ContextSlotCache* context_slot_cache_;
+  DescriptorLookupCache* descriptor_lookup_cache_;
+  v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
+  HandleScopeImplementer* handle_scope_implementer_;
+  ScannerConstants* scanner_constants_;
+  Zone zone_;
+  PreallocatedStorage in_use_list_;
+  PreallocatedStorage free_list_;
+  bool preallocated_storage_preallocated_;
+  PcToCodeCache* pc_to_code_cache_;
+  StringInputBuffer* write_input_buffer_;
+  GlobalHandles* global_handles_;
+  ContextSwitcher* context_switcher_;
+  ThreadManager* thread_manager_;
+  AstSentinels* ast_sentinels_;
+  RuntimeState runtime_state_;
+  StringInputBuffer liveedit_compare_substrings_buf1_;
+  StringInputBuffer liveedit_compare_substrings_buf2_;
+  StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
+  Builtins builtins_;
+  StringTracker* string_tracker_;
+  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
+  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
+  StringInputBuffer objects_string_compare_buffer_a_;
+  StringInputBuffer objects_string_compare_buffer_b_;
+  StaticResource<StringInputBuffer> objects_string_input_buffer_;
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>
+      regexp_macro_assembler_canonicalize_;
+  RegExpStack* regexp_stack_;
+  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
+  ZoneObjectList frame_element_constant_list_;
+  ZoneObjectList result_constant_list_;
+
+#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__)
+  bool simulator_initialized_;
+  HashMap* simulator_i_cache_;
+  Redirection* simulator_redirection_;
+#endif
+
+#ifdef DEBUG
+  // A static array of histogram info for each type.
+  HistogramInfo heap_histograms_[LAST_TYPE + 1];
+  JSObject::SpillInformation js_spill_information_;
+  int code_kind_statistics_[Code::NUMBER_OF_KINDS];
+#endif
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  Debugger* debugger_;
+  Debug* debug_;
+#endif
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  ProducerHeapProfile* producer_heap_profile_;
+#endif
+
+#define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
+  type name##_;
+  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
+#undef GLOBAL_BACKING_STORE
+
+#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
+  type name##_[length];
+  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
+#undef GLOBAL_ARRAY_BACKING_STORE
+
+#ifdef DEBUG
+  // This class is huge and has a number of fields controlled by
+  // preprocessor defines. Make sure the offsets of these fields agree
+  // between compilation units.
+#define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
+  static const intptr_t name##_debug_offset_;
+  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
+  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
+#undef ISOLATE_FIELD_OFFSET
+#endif
+
+  friend class ExecutionAccess;
+  friend class IsolateInitializer;
+  friend class v8::Isolate;
+  friend class v8::Locker;
+
+  DISALLOW_COPY_AND_ASSIGN(Isolate);
+};
+
+
+// If the GCC version is 4.1.x or 4.2.x, an additional field is added to the
+// class as a workaround for a bug in the generated code found with these
+// versions of GCC. See V8 issue 122 for details.
+class SaveContext BASE_EMBEDDED {
+ public:
+  explicit SaveContext(Isolate* isolate) : prev_(isolate->save_context()) {
+    if (isolate->context() != NULL) {
+      context_ = Handle<Context>(isolate->context());
+#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
+      dummy_ = Handle<Context>(isolate->context());
+#endif
+    }
+    isolate->set_save_context(this);
+
+    // If there is no JS frame under the current C frame, use the value 0.
+    JavaScriptFrameIterator it;
+    js_sp_ = it.done() ? 0 : it.frame()->sp();
+  }
+
+  ~SaveContext() {
+    if (context_.is_null()) {
+      Isolate* isolate = Isolate::Current();
+      isolate->set_context(NULL);
+      isolate->set_save_context(prev_);
+    } else {
+      Isolate* isolate = context_->GetIsolate();
+      isolate->set_context(*context_);
+      isolate->set_save_context(prev_);
+    }
+  }
+
+  Handle<Context> context() { return context_; }
+  SaveContext* prev() { return prev_; }
+
+  // Returns true if this save context is below a given JavaScript frame.
+  bool below(JavaScriptFrame* frame) {
+    return (js_sp_ == 0) || (frame->sp() < js_sp_);
+  }
+
+ private:
+  Handle<Context> context_;
+#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
+  Handle<Context> dummy_;
+#endif
+  SaveContext* prev_;
+  Address js_sp_;  // The top JS frame's sp when saving context.
+};
+
+
+class AssertNoContextChange BASE_EMBEDDED {
+#ifdef DEBUG
+ public:
+  AssertNoContextChange() :
+      scope_(Isolate::Current()),
+      context_(Isolate::Current()->context(), Isolate::Current()) {
+  }
+
+  ~AssertNoContextChange() {
+    ASSERT(Isolate::Current()->context() == *context_);
+  }
+
+ private:
+  HandleScope scope_;
+  Handle<Context> context_;
+#else
+ public:
+  AssertNoContextChange() { }
+#endif
+};
+
+
+class ExecutionAccess BASE_EMBEDDED {
+ public:
+  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
+    Lock(isolate);
+  }
+  ~ExecutionAccess() { Unlock(isolate_); }
+
+  static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
+  static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }
+
+  static bool TryLock(Isolate* isolate) {
+    return isolate->break_access_->TryLock();
+  }
+
+ private:
+  Isolate* isolate_;
+};
+
+
+// Support for checking for stack overflows in C++ code.
+class StackLimitCheck BASE_EMBEDDED {
+ public:
+  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
+
+  bool HasOverflowed() const {
+    StackGuard* stack_guard = isolate_->stack_guard();
+    // The stack has overflowed in C++ code only if the stack pointer exceeds
+    // the C++ stack guard and the limits are not set to interrupt values.
+    // TODO(214): Stack overflows are ignored if an interrupt is pending. This
+    // code should probably always use the initial C++ limit.
+    return (reinterpret_cast<uintptr_t>(this) < stack_guard->climit()) &&
+           stack_guard->IsStackOverflow();
+  }
+ private:
+  Isolate* isolate_;
+};
+
+
+// Support for temporarily postponing interrupts. When the outermost
+// postpone scope is left, interrupts are re-enabled and any interrupts
+// that occurred while in the scope are taken into account.
+class PostponeInterruptsScope BASE_EMBEDDED {
+ public:
+  explicit PostponeInterruptsScope(Isolate* isolate)
+      : stack_guard_(isolate->stack_guard()) {
+    stack_guard_->thread_local_.postpone_interrupts_nesting_++;
+    stack_guard_->DisableInterrupts();
+  }
+
+  ~PostponeInterruptsScope() {
+    if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
+      stack_guard_->EnableInterrupts();
+    }
+  }
+ private:
+  StackGuard* stack_guard_;
+};
+
+
+// Temporary macros for accessing the current isolate and its subobjects.
+// They provide better readability, especially in code that uses them heavily.
+#define HEAP (v8::internal::Isolate::Current()->heap())
+#define FACTORY (v8::internal::Isolate::Current()->factory())
+#define ISOLATE (v8::internal::Isolate::Current())
+#define ZONE (v8::internal::Isolate::Current()->zone())
+#define LOGGER (v8::internal::Isolate::Current()->logger())
+
+
+// Tells whether the global context is marked as out of memory.
+inline bool Context::has_out_of_memory() {
+  return global_context()->out_of_memory()->IsTrue();
+}
+
+
+// Mark the global context as out of memory.
+inline void Context::mark_out_of_memory() {
+  global_context()->set_out_of_memory(HEAP->true_value());
+}
+
+
+// Temporary macro to be used to flag definitions that are indeed static
+// and not per-isolate. (It would be great to be able to grep for [static]!)
+#define RLYSTC static
+
+
+// Temporary macro to be used to flag classes that should be static.
+#define STATIC_CLASS class
+
+
+// Temporary macro to be used to flag classes that are completely converted
+// to be isolate-friendly. Their mix of static/nonstatic methods/fields is
+// correct.
+#define ISOLATED_CLASS class
+
+} }  // namespace v8::internal
+
+// TODO(isolates): Get rid of these -inl.h includes and place them only where
+//                 they're needed.
+#include "allocation-inl.h"
+#include "zone-inl.h"
+#include "frames-inl.h"
+
+#endif  // V8_ISOLATE_H_
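For orientation, a minimal sketch of how callers are expected to use the per-isolate components declared above in place of the old static singletons. The helper and its arguments are illustrative; only the accessors and scope classes from this header are assumed:

static Handle<FixedArray> MakePair(Isolate* isolate,
                                   Handle<Object> first,
                                   Handle<Object> second) {
  // Interrupts stay postponed while this scope is live; scopes nest, and the
  // outermost one re-enables interrupts on destruction.
  PostponeInterruptsScope postpone(isolate);
  // The per-isolate factory replaces the former static Factory::NewFixedArray.
  Handle<FixedArray> elements = isolate->factory()->NewFixedArray(2);
  elements->set(0, *first);
  elements->set(1, *second);
  return elements;
}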
index 8a4f894167f3b7397fb893f08bcd4836f5dc7768..2db8ddff9fde0a716ac28fe31e000fea386a0162 100644 (file)
@@ -35,7 +35,6 @@
 #include "platform.h"
 #include "string-search.h"
 #include "runtime.h"
-#include "top.h"
 #include "compilation-cache.h"
 #include "string-stream.h"
 #include "parser.h"
@@ -62,7 +61,6 @@
 namespace v8 {
 namespace internal {
 
-
 Handle<Object> RegExpImpl::CreateRegExpLiteral(Handle<JSFunction> constructor,
                                                Handle<String> pattern,
                                                Handle<String> flags,
@@ -97,12 +95,14 @@ static inline void ThrowRegExpException(Handle<JSRegExp> re,
                                         Handle<String> pattern,
                                         Handle<String> error_text,
                                         const char* message) {
-  Handle<FixedArray> elements = Factory::NewFixedArray(2);
+  Isolate* isolate = re->GetIsolate();
+  Factory* factory = isolate->factory();
+  Handle<FixedArray> elements = factory->NewFixedArray(2);
   elements->set(0, *pattern);
   elements->set(1, *error_text);
-  Handle<JSArray> array = Factory::NewJSArrayWithElements(elements);
-  Handle<Object> regexp_err = Factory::NewSyntaxError(message, array);
-  Top::Throw(*regexp_err);
+  Handle<JSArray> array = factory->NewJSArrayWithElements(elements);
+  Handle<Object> regexp_err = factory->NewSyntaxError(message, array);
+  isolate->Throw(*regexp_err);
 }
 
 
@@ -112,10 +112,12 @@ static inline void ThrowRegExpException(Handle<JSRegExp> re,
 Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
                                    Handle<String> pattern,
                                    Handle<String> flag_str) {
+  Isolate* isolate = re->GetIsolate();
   JSRegExp::Flags flags = RegExpFlagsFromString(flag_str);
-  Handle<FixedArray> cached = CompilationCache::LookupRegExp(pattern, flags);
+  CompilationCache* compilation_cache = isolate->compilation_cache();
+  Handle<FixedArray> cached = compilation_cache->LookupRegExp(pattern, flags);
   bool in_cache = !cached.is_null();
-  LOG(RegExpCompileEvent(re, in_cache));
+  LOG(isolate, RegExpCompileEvent(re, in_cache));
 
   Handle<Object> result;
   if (in_cache) {
@@ -124,9 +126,9 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
   }
   pattern = FlattenGetString(pattern);
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate);
   RegExpCompileData parse_result;
-  FlatStringReader reader(pattern);
+  FlatStringReader reader(isolate, pattern);
   if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(),
                                  &parse_result)) {
     // Throw an exception if we fail to parse the pattern.
@@ -145,7 +147,8 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
       parse_result.capture_count == 0) {
     RegExpAtom* atom = parse_result.tree->AsAtom();
     Vector<const uc16> atom_pattern = atom->data();
-    Handle<String> atom_string = Factory::NewStringFromTwoByte(atom_pattern);
+    Handle<String> atom_string =
+        isolate->factory()->NewStringFromTwoByte(atom_pattern);
     AtomCompile(re, pattern, flags, atom_string);
   } else {
     IrregexpInitialize(re, pattern, flags, parse_result.capture_count);
@@ -154,7 +157,7 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
   // Compilation succeeded so the data is set on the regexp
   // and we can store it in the cache.
   Handle<FixedArray> data(FixedArray::cast(re->data()));
-  CompilationCache::PutRegExp(pattern, flags, data);
+  compilation_cache->PutRegExp(pattern, flags, data);
 
   return re;
 }
@@ -170,7 +173,7 @@ Handle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp,
     case JSRegExp::IRREGEXP: {
       Handle<Object> result =
           IrregexpExec(regexp, subject, index, last_match_info);
-      ASSERT(!result.is_null() || Top::has_pending_exception());
+      ASSERT(!result.is_null() || Isolate::Current()->has_pending_exception());
       return result;
     }
     default:
@@ -187,11 +190,11 @@ void RegExpImpl::AtomCompile(Handle<JSRegExp> re,
                              Handle<String> pattern,
                              JSRegExp::Flags flags,
                              Handle<String> match_pattern) {
-  Factory::SetRegExpAtomData(re,
-                             JSRegExp::ATOM,
-                             pattern,
-                             flags,
-                             match_pattern);
+  re->GetIsolate()->factory()->SetRegExpAtomData(re,
+                                                 JSRegExp::ATOM,
+                                                 pattern,
+                                                 flags,
+                                                 match_pattern);
 }
 
 
@@ -224,6 +227,8 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
                                     Handle<String> subject,
                                     int index,
                                     Handle<JSArray> last_match_info) {
+  Isolate* isolate = re->GetIsolate();
+
   ASSERT(0 <= index);
   ASSERT(index <= subject->length());
 
@@ -237,24 +242,30 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
   int needle_len = needle->length();
 
   if (needle_len != 0) {
-    if (index + needle_len > subject->length()) return Factory::null_value();
+    if (index + needle_len > subject->length())
+        return isolate->factory()->null_value();
+
     // dispatch on type of strings
     index = (needle->IsAsciiRepresentation()
              ? (seq_sub->IsAsciiRepresentation()
-                ? SearchString(seq_sub->ToAsciiVector(),
+                ? SearchString(isolate,
+                               seq_sub->ToAsciiVector(),
                                needle->ToAsciiVector(),
                                index)
-                : SearchString(seq_sub->ToUC16Vector(),
+                : SearchString(isolate,
+                               seq_sub->ToUC16Vector(),
                                needle->ToAsciiVector(),
                                index))
              : (seq_sub->IsAsciiRepresentation()
-                ? SearchString(seq_sub->ToAsciiVector(),
+                ? SearchString(isolate,
+                               seq_sub->ToAsciiVector(),
                                needle->ToUC16Vector(),
                                index)
-                : SearchString(seq_sub->ToUC16Vector(),
+                : SearchString(isolate,
+                               seq_sub->ToUC16Vector(),
                                needle->ToUC16Vector(),
                                index)));
-    if (index == -1) return Factory::null_value();
+    if (index == -1) return FACTORY->null_value();
   }
   ASSERT(last_match_info->HasFastElements());
 
@@ -288,13 +299,14 @@ bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
 
 bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
   // Compile the RegExp.
+  Isolate* isolate = re->GetIsolate();
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate);
   Object* entry = re->DataAt(JSRegExp::code_index(is_ascii));
   if (entry->IsJSObject()) {
     // If it's a JSObject, a previous compilation failed and threw this object.
     // Re-throw the object without trying again.
-    Top::Throw(entry);
+    isolate->Throw(entry);
     return false;
   }
   ASSERT(entry->IsTheHole());
@@ -307,7 +319,7 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
   }
 
   RegExpCompileData compile_data;
-  FlatStringReader reader(pattern);
+  FlatStringReader reader(isolate, pattern);
   if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(),
                                  &compile_data)) {
     // Throw an exception if we fail to parse the pattern.
@@ -326,15 +338,16 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
                             is_ascii);
   if (result.error_message != NULL) {
     // Unable to compile regexp.
-    Handle<FixedArray> elements = Factory::NewFixedArray(2);
+    Factory* factory = isolate->factory();
+    Handle<FixedArray> elements = factory->NewFixedArray(2);
     elements->set(0, *pattern);
     Handle<String> error_message =
-        Factory::NewStringFromUtf8(CStrVector(result.error_message));
+        factory->NewStringFromUtf8(CStrVector(result.error_message));
     elements->set(1, *error_message);
-    Handle<JSArray> array = Factory::NewJSArrayWithElements(elements);
+    Handle<JSArray> array = factory->NewJSArrayWithElements(elements);
     Handle<Object> regexp_err =
-        Factory::NewSyntaxError("malformed_regexp", array);
-    Top::Throw(*regexp_err);
+        factory->NewSyntaxError("malformed_regexp", array);
+    isolate->Throw(*regexp_err);
     re->SetDataAt(JSRegExp::code_index(is_ascii), *regexp_err);
     return false;
   }
@@ -386,11 +399,11 @@ void RegExpImpl::IrregexpInitialize(Handle<JSRegExp> re,
                                     JSRegExp::Flags flags,
                                     int capture_count) {
   // Initialize compiled code entries to null.
-  Factory::SetRegExpIrregexpData(re,
-                                 JSRegExp::IRREGEXP,
-                                 pattern,
-                                 flags,
-                                 capture_count);
+  re->GetIsolate()->factory()->SetRegExpIrregexpData(re,
+                                                     JSRegExp::IRREGEXP,
+                                                     pattern,
+                                                     flags,
+                                                     capture_count);
 }
 
 
@@ -428,7 +441,9 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
     Handle<String> subject,
     int index,
     Vector<int> output) {
-  Handle<FixedArray> irregexp(FixedArray::cast(regexp->data()));
+  Isolate* isolate = regexp->GetIsolate();
+
+  Handle<FixedArray> irregexp(FixedArray::cast(regexp->data()), isolate);
 
   ASSERT(index >= 0);
   ASSERT(index <= subject->length());
@@ -436,24 +451,24 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
 
   // A flat ASCII string might have a two-byte first part.
   if (subject->IsConsString()) {
-    subject = Handle<String>(ConsString::cast(*subject)->first());
+    subject = Handle<String>(ConsString::cast(*subject)->first(), isolate);
   }
 
 #ifndef V8_INTERPRETED_REGEXP
-  ASSERT(output.length() >=
-      (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
+  ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
   do {
     bool is_ascii = subject->IsAsciiRepresentation();
-    Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii));
+    Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
     NativeRegExpMacroAssembler::Result res =
         NativeRegExpMacroAssembler::Match(code,
                                           subject,
                                           output.start(),
                                           output.length(),
-                                          index);
+                                          index,
+                                          isolate);
     if (res != NativeRegExpMacroAssembler::RETRY) {
       ASSERT(res != NativeRegExpMacroAssembler::EXCEPTION ||
-             Top::has_pending_exception());
+             isolate->has_pending_exception());
       STATIC_ASSERT(
           static_cast<int>(NativeRegExpMacroAssembler::SUCCESS) == RE_SUCCESS);
       STATIC_ASSERT(
@@ -484,7 +499,7 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
   for (int i = number_of_capture_registers - 1; i >= 0; i--) {
     register_vector[i] = -1;
   }
-  Handle<ByteArray> byte_codes(IrregexpByteCode(*irregexp, is_ascii));
+  Handle<ByteArray> byte_codes(IrregexpByteCode(*irregexp, is_ascii), isolate);
 
   if (IrregexpInterpreter::Match(byte_codes,
                                  subject,
@@ -516,7 +531,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
   int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject);
   if (required_registers < 0) {
     // Compiling failed with an exception.
-    ASSERT(Top::has_pending_exception());
+    ASSERT(Isolate::Current()->has_pending_exception());
     return Handle<Object>::null();
   }
 
@@ -542,11 +557,11 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
     return last_match_info;
   }
   if (res == RE_EXCEPTION) {
-    ASSERT(Top::has_pending_exception());
+    ASSERT(Isolate::Current()->has_pending_exception());
     return Handle<Object>::null();
   }
   ASSERT(res == RE_FAILURE);
-  return Factory::null_value();
+  return Isolate::Current()->factory()->null_value();
 }
 
 
@@ -1306,16 +1321,14 @@ void ChoiceNode::GenerateGuard(RegExpMacroAssembler* macro_assembler,
 }
 
 
-static unibrow::Mapping<unibrow::Ecma262UnCanonicalize> uncanonicalize;
-static unibrow::Mapping<unibrow::CanonicalizationRange> canonrange;
-
-
 // Returns the number of characters in the equivalence class, omitting those
 // that cannot occur in the source string because it is ASCII.
-static int GetCaseIndependentLetters(uc16 character,
+static int GetCaseIndependentLetters(Isolate* isolate,
+                                     uc16 character,
                                      bool ascii_subject,
                                      unibrow::uchar* letters) {
-  int length = uncanonicalize.get(character, '\0', letters);
+  int length =
+      isolate->jsregexp_uncanonicalize()->get(character, '\0', letters);
   // Unibrow returns 0 or 1 for characters where case independence is
   // trivial.
   if (length == 0) {
@@ -1331,7 +1344,8 @@ static int GetCaseIndependentLetters(uc16 character,
 }
 
 
-static inline bool EmitSimpleCharacter(RegExpCompiler* compiler,
+static inline bool EmitSimpleCharacter(Isolate* isolate,
+                                       RegExpCompiler* compiler,
                                        uc16 c,
                                        Label* on_failure,
                                        int cp_offset,
@@ -1353,7 +1367,8 @@ static inline bool EmitSimpleCharacter(RegExpCompiler* compiler,
 
 // Only emits non-letters (things that don't have case).  Only used for case
 // independent matches.
-static inline bool EmitAtomNonLetter(RegExpCompiler* compiler,
+static inline bool EmitAtomNonLetter(Isolate* isolate,
+                                     RegExpCompiler* compiler,
                                      uc16 c,
                                      Label* on_failure,
                                      int cp_offset,
@@ -1362,7 +1377,7 @@ static inline bool EmitAtomNonLetter(RegExpCompiler* compiler,
   RegExpMacroAssembler* macro_assembler = compiler->macro_assembler();
   bool ascii = compiler->ascii();
   unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
-  int length = GetCaseIndependentLetters(c, ascii, chars);
+  int length = GetCaseIndependentLetters(isolate, c, ascii, chars);
   if (length < 1) {
     // This can't match.  Must be an ASCII subject and a non-ASCII character.
     // We do not need to do anything since the ASCII pass already handled this.
@@ -1424,7 +1439,8 @@ static bool ShortCutEmitCharacterPair(RegExpMacroAssembler* macro_assembler,
 }
 
 
-typedef bool EmitCharacterFunction(RegExpCompiler* compiler,
+typedef bool EmitCharacterFunction(Isolate* isolate,
+                                   RegExpCompiler* compiler,
                                    uc16 c,
                                    Label* on_failure,
                                    int cp_offset,
@@ -1433,7 +1449,8 @@ typedef bool EmitCharacterFunction(RegExpCompiler* compiler,
 
 // Only emits letters (things that have case).  Only used for case independent
 // matches.
-static inline bool EmitAtomLetter(RegExpCompiler* compiler,
+static inline bool EmitAtomLetter(Isolate* isolate,
+                                  RegExpCompiler* compiler,
                                   uc16 c,
                                   Label* on_failure,
                                   int cp_offset,
@@ -1442,7 +1459,7 @@ static inline bool EmitAtomLetter(RegExpCompiler* compiler,
   RegExpMacroAssembler* macro_assembler = compiler->macro_assembler();
   bool ascii = compiler->ascii();
   unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
-  int length = GetCaseIndependentLetters(c, ascii, chars);
+  int length = GetCaseIndependentLetters(isolate, c, ascii, chars);
   if (length <= 1) return false;
   // We may not need to check against the end of the input string
   // if this character lies before a character that matched.
@@ -1880,6 +1897,7 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
                                     RegExpCompiler* compiler,
                                     int characters_filled_in,
                                     bool not_at_start) {
+  Isolate* isolate = Isolate::Current();
   ASSERT(characters_filled_in < details->characters());
   int characters = details->characters();
   int char_mask;
@@ -1910,7 +1928,8 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
         }
         if (compiler->ignore_case()) {
           unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
-          int length = GetCaseIndependentLetters(c, compiler->ascii(), chars);
+          int length = GetCaseIndependentLetters(isolate, c, compiler->ascii(),
+                                                 chars);
           ASSERT(length != 0);  // Can only happen if c > char_mask (see above).
           if (length == 1) {
             // This letter has no case equivalents, so it's nice and simple
@@ -2410,6 +2429,7 @@ void TextNode::TextEmitPass(RegExpCompiler* compiler,
                             Trace* trace,
                             bool first_element_checked,
                             int* checked_up_to) {
+  Isolate* isolate = Isolate::Current();
   RegExpMacroAssembler* assembler = compiler->macro_assembler();
   bool ascii = compiler->ascii();
   Label* backtrack = trace->backtrack();
@@ -2445,7 +2465,8 @@ void TextNode::TextEmitPass(RegExpCompiler* compiler,
             break;
         }
         if (emit_function != NULL) {
-          bool bound_checked = emit_function(compiler,
+          bool bound_checked = emit_function(isolate,
+                                             compiler,
                                              quarks[j],
                                              backtrack,
                                              cp_offset + j,
@@ -4085,13 +4106,15 @@ void CharacterRange::Split(ZoneList<CharacterRange>* base,
 }
 
 
-static void AddUncanonicals(ZoneList<CharacterRange>* ranges,
+static void AddUncanonicals(Isolate* isolate,
+                            ZoneList<CharacterRange>* ranges,
                             int bottom,
                             int top);
 
 
 void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
                                         bool is_ascii) {
+  Isolate* isolate = Isolate::Current();
   uc16 bottom = from();
   uc16 top = to();
   if (is_ascii) {
@@ -4101,7 +4124,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
   unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
   if (top == bottom) {
     // If this is a singleton we just expand the one character.
-    int length = uncanonicalize.get(bottom, '\0', chars);
+    int length = isolate->jsregexp_uncanonicalize()->get(bottom, '\0', chars);
     for (int i = 0; i < length; i++) {
       uc32 chr = chars[i];
       if (chr != bottom) {
@@ -4130,7 +4153,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
     unibrow::uchar range[unibrow::Ecma262UnCanonicalize::kMaxWidth];
     int pos = bottom;
     while (pos < top) {
-      int length = canonrange.get(pos, '\0', range);
+      int length = isolate->jsregexp_canonrange()->get(pos, '\0', range);
       uc16 block_end;
       if (length == 0) {
         block_end = pos;
@@ -4139,7 +4162,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
         block_end = range[0];
       }
       int end = (block_end > top) ? top : block_end;
-      length = uncanonicalize.get(block_end, '\0', range);
+      length = isolate->jsregexp_uncanonicalize()->get(block_end, '\0', range);
       for (int i = 0; i < length; i++) {
         uc32 c = range[i];
         uc16 range_from = c - (block_end - pos);
@@ -4249,7 +4272,8 @@ SetRelation CharacterRange::WordCharacterRelation(
 }
 
 
-static void AddUncanonicals(ZoneList<CharacterRange>* ranges,
+static void AddUncanonicals(Isolate* isolate,
+                            ZoneList<CharacterRange>* ranges,
                             int bottom,
                             int top) {
   unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
@@ -4281,8 +4305,8 @@ static void AddUncanonicals(ZoneList<CharacterRange>* ranges,
   // case mappings.
   for (int i = 0; i < boundary_count; i++) {
     if (bottom < boundaries[i] && top >= boundaries[i]) {
-      AddUncanonicals(ranges, bottom, boundaries[i] - 1);
-      AddUncanonicals(ranges, boundaries[i], top);
+      AddUncanonicals(isolate, ranges, bottom, boundaries[i] - 1);
+      AddUncanonicals(isolate, ranges, boundaries[i], top);
       return;
     }
   }
@@ -4293,7 +4317,8 @@ static void AddUncanonicals(ZoneList<CharacterRange>* ranges,
 #ifdef DEBUG
       for (int j = bottom; j <= top; j++) {
         unsigned current_char = j;
-        int length = uncanonicalize.get(current_char, '\0', chars);
+        int length = isolate->jsregexp_uncanonicalize()->get(current_char,
+                                                             '\0', chars);
         for (int k = 0; k < length; k++) {
           ASSERT(chars[k] == current_char);
         }
@@ -4306,7 +4331,7 @@ static void AddUncanonicals(ZoneList<CharacterRange>* ranges,
   // Step through the range finding equivalent characters.
   ZoneList<unibrow::uchar> *characters = new ZoneList<unibrow::uchar>(100);
   for (int i = bottom; i <= top; i++) {
-    int length = uncanonicalize.get(i, '\0', chars);
+    int length = isolate->jsregexp_uncanonicalize()->get(i, '\0', chars);
     for (int j = 0; j < length; j++) {
       uc32 chr = chars[j];
       if (chr != i && (chr < bottom || chr > top)) {
@@ -4828,7 +4853,7 @@ OutSet* DispatchTable::Get(uc16 value) {
 
 
 void Analysis::EnsureAnalyzed(RegExpNode* that) {
-  StackLimitCheck check;
+  StackLimitCheck check(Isolate::Current());
   if (check.HasOverflowed()) {
     fail("Stack overflow");
     return;
@@ -5338,7 +5363,4 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
 }
 
 
-int OffsetsVector::static_offsets_vector_[
-    OffsetsVector::kStaticOffsetsVectorSize];
-
 }}  // namespace v8::internal
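The file-level uncanonicalize/canonrange statics removed earlier in this file now live on the isolate. A minimal sketch of the resulting call pattern; the wrapper function is hypothetical, and only the jsregexp_uncanonicalize() accessor used in the hunks above is assumed:

static int UncanonicalizedLength(Isolate* isolate, uc16 character) {
  unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
  // Per-isolate canonicalization cache instead of a process-wide static.
  return isolate->jsregexp_uncanonicalize()->get(character, '\0', chars);
}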
index af28a872268b59c13bcf4d095d4c5183929527a0..3ed5a7e43da743b3c4101d72a2e6192fff0755e6 100644 (file)
@@ -1424,7 +1424,7 @@ class RegExpEngine: public AllStatic {
   struct CompilationResult {
     explicit CompilationResult(const char* error_message)
         : error_message(error_message),
-          code(Heap::the_hole_value()),
+          code(HEAP->the_hole_value()),
           num_registers(0) {}
     CompilationResult(Object* code, int registers)
       : error_message(NULL),
@@ -1449,14 +1449,14 @@ class OffsetsVector {
  public:
   inline OffsetsVector(int num_registers)
       : offsets_vector_length_(num_registers) {
-    if (offsets_vector_length_ > kStaticOffsetsVectorSize) {
+    if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
       vector_ = NewArray<int>(offsets_vector_length_);
     } else {
-      vector_ = static_offsets_vector_;
+      vector_ = Isolate::Current()->jsregexp_static_offsets_vector();
     }
   }
   inline ~OffsetsVector() {
-    if (offsets_vector_length_ > kStaticOffsetsVectorSize) {
+    if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
       DeleteArray(vector_);
       vector_ = NULL;
     }
@@ -1467,13 +1467,12 @@ class OffsetsVector {
   static const int kStaticOffsetsVectorSize = 50;
 
  private:
-  static Address static_offsets_vector_address() {
-    return reinterpret_cast<Address>(&static_offsets_vector_);
+  static Address static_offsets_vector_address(Isolate* isolate) {
+    return reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector());
   }
 
   int* vector_;
   int offsets_vector_length_;
-  static int static_offsets_vector_[kStaticOffsetsVectorSize];
 
   friend class ExternalReference;
 };
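With the static backing array gone, offset vectors that fit within Isolate::kJSRegexpStaticOffsetsVectorSize reuse the isolate's preallocated buffer, while larger ones are heap-allocated. A minimal usage sketch under that assumption:

// Fits within the per-isolate buffer (50 ints), so no allocation happens.
OffsetsVector small(8);
// Exceeds the threshold, so the constructor calls NewArray<int> and the
// destructor releases the storage with DeleteArray.
OffsetsVector large(4 * Isolate::kJSRegexpStaticOffsetsVectorSize);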
index c3c22f1ac63abfb6b9647912b045754d449717c0..28e22c013b3effe3770cad50bdbaef04fc3bc101 100644 (file)
@@ -35,9 +35,6 @@ namespace v8 {
 namespace internal {
 
 
-bool JumpTarget::compiling_deferred_code_ = false;
-
-
 void JumpTarget::Jump(Result* arg) {
   ASSERT(cgen()->has_valid_frame());
 
@@ -143,9 +140,9 @@ void JumpTarget::ComputeEntryFrame() {
   // the directionality of the block.  Compute: an entry frame for the
   // block.
 
-  Counters::compute_entry_frame.Increment();
+  COUNTERS->compute_entry_frame()->Increment();
 #ifdef DEBUG
-  if (compiling_deferred_code_) {
+  if (Isolate::Current()->jump_target_compiling_deferred_code()) {
     ASSERT(reaching_frames_.length() > 1);
     VirtualFrame* frame = reaching_frames_[0];
     bool all_identical = true;
@@ -413,15 +410,15 @@ void BreakTarget::Branch(Condition cc, Hint hint) {
 
 
 DeferredCode::DeferredCode()
-    : masm_(CodeGeneratorScope::Current()->masm()),
+    : masm_(CodeGeneratorScope::Current(Isolate::Current())->masm()),
       statement_position_(masm_->positions_recorder()->
                           current_statement_position()),
       position_(masm_->positions_recorder()->current_position()),
-      frame_state_(CodeGeneratorScope::Current()->frame()) {
+      frame_state_(CodeGeneratorScope::Current(Isolate::Current())->frame()) {
   ASSERT(statement_position_ != RelocInfo::kNoPosition);
   ASSERT(position_ != RelocInfo::kNoPosition);
 
-  CodeGeneratorScope::Current()->AddDeferred(this);
+  CodeGeneratorScope::Current(Isolate::Current())->AddDeferred(this);
 #ifdef DEBUG
   comment_ = "";
 #endif
index 8cec86926ace3013f0c35a04d8e73ecc4f205b60..bf977563b38fe82eef8b9ddad37bfd2fef0900ab 100644 (file)
@@ -135,10 +135,6 @@ class JumpTarget : public ZoneObject {  // Shadows are dynamically allocated.
   // after the call is the same as the frame before the call.
   void Call();
 
-  static void set_compiling_deferred_code(bool flag) {
-    compiling_deferred_code_ = flag;
-  }
-
  protected:
   // Directionality flag set at initialization time.
   Directionality direction_;
@@ -164,8 +160,6 @@ class JumpTarget : public ZoneObject {  // Shadows are dynamically allocated.
   void DoBind();
 
  private:
-  static bool compiling_deferred_code_;
-
   // Add a virtual frame reaching this labeled block via a forward jump,
   // and a corresponding merge code label.
   void AddReachingFrame(VirtualFrame* frame);
index 4c9ee5bc435659bf977b5103854ed0117e64f406..545328c40c6820b530b6ba0790e3d3c96c39f938 100644 (file)
@@ -40,7 +40,7 @@ namespace v8 {
 namespace internal {
 
 CodeGenerator* JumpTarget::cgen() {
-  return CodeGeneratorScope::Current();
+  return CodeGeneratorScope::Current(Isolate::Current());
 }
 
 } }  // namespace v8::internal
index 36dc176bce7faa4b4edeb9847fdaa74de2e20eb9..1d8947459afb24158d08af81fc9ebaaaf3d51168 100644 (file)
@@ -35,15 +35,15 @@ namespace internal {
 
 
 DeferredCode::DeferredCode()
-    : masm_(CodeGeneratorScope::Current()->masm()),
+    : masm_(CodeGeneratorScope::Current(Isolate::Current())->masm()),
       statement_position_(masm_->positions_recorder()->
                           current_statement_position()),
       position_(masm_->positions_recorder()->current_position()),
-      frame_state_(*CodeGeneratorScope::Current()->frame()) {
+      frame_state_(*CodeGeneratorScope::Current(Isolate::Current())->frame()) {
   ASSERT(statement_position_ != RelocInfo::kNoPosition);
   ASSERT(position_ != RelocInfo::kNoPosition);
 
-  CodeGeneratorScope::Current()->AddDeferred(this);
+  CodeGeneratorScope::Current(Isolate::Current())->AddDeferred(this);
 
 #ifdef DEBUG
   comment_ = "";
index 198339090be6762f82c326887eb8ee988a22bf00..965a226fc98c316dd36143e3dfd4d4cabd0f2e63 100644 (file)
@@ -25,6 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
 #include "lithium-allocator-inl.h"
 
 #include "hydrogen.h"
@@ -44,13 +45,18 @@ namespace v8 {
 namespace internal {
 
 
-#define DEFINE_OPERAND_CACHE(name, type)            \
-  name name::cache[name::kNumCachedOperands];       \
-  void name::SetupCache() {                         \
-    for (int i = 0; i < kNumCachedOperands; i++) {  \
-      cache[i].ConvertTo(type, i);                  \
-    }                                               \
-  }
+#define DEFINE_OPERAND_CACHE(name, type)                      \
+  name name::cache[name::kNumCachedOperands];                 \
+  void name::SetupCache() {                                   \
+    for (int i = 0; i < kNumCachedOperands; i++) {            \
+      cache[i].ConvertTo(type, i);                            \
+    }                                                         \
+  }                                                           \
+  static bool name##_initialize() {                           \
+    name::SetupCache();                                       \
+    return true;                                              \
+  }                                                           \
+  static bool name##_cache_initialized = name##_initialize();
 
 DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
 DEFINE_OPERAND_CACHE(LStackSlot,       STACK_SLOT)
@@ -1550,15 +1556,6 @@ void LAllocator::AllocateRegisters() {
 }
 
 
-void LAllocator::Setup() {
-  LConstantOperand::SetupCache();
-  LStackSlot::SetupCache();
-  LDoubleStackSlot::SetupCache();
-  LRegister::SetupCache();
-  LDoubleRegister::SetupCache();
-}
-
-
 const char* LAllocator::RegisterName(int allocation_index) {
   ASSERT(mode_ != NONE);
   if (mode_ == GENERAL_REGISTERS) {
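The expanded DEFINE_OPERAND_CACHE macro above registers each operand cache through a file-local static initializer, which is why the explicit LAllocator::Setup() shown as deleted could go away. A standalone sketch of that self-registration trick, with illustrative names:

struct ExampleCache {
  static int cache[4];
  static void SetupCache() {
    for (int i = 0; i < 4; i++) cache[i] = i;  // fill the table before use
  }
};
int ExampleCache::cache[4];

static bool example_cache_initialize() {
  ExampleCache::SetupCache();
  return true;
}
// Initializing this global runs SetupCache() during static initialization,
// so no runtime Setup() call is needed.
static bool example_cache_initialized = example_cache_initialize();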
index 14a0201a8a6d2e56d7f9a44568aea1e5c5e048b5..f109c454913b44d415a0b52fb1aa92330d54ae4a 100644 (file)
@@ -430,7 +430,6 @@ class LAllocator BASE_EMBEDDED {
  public:
   LAllocator(int first_virtual_register, HGraph* graph);
 
-  static void Setup();
   static void TraceAlloc(const char* msg, ...);
 
   // Lithium translation support.
index e829f2f049a06579f57a29aecefebef5eb8e9f77..aeac2db562ab01296b2c63680cf05a1ba49d8fb2 100644 (file)
@@ -25,6 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
 #include "lithium.h"
 
 namespace v8 {
index 161b762385f58611f0492b36390f541686d6b7b2..4d44bf861cef1b0a1fc0a9e345082e2e197455c7 100644 (file)
@@ -268,10 +268,10 @@ void Comparator::CalculateDifference(Comparator::Input* input,
 }
 
 
-static bool CompareSubstrings(Handle<String> s1, int pos1,
+static bool CompareSubstrings(Isolate* isolate, Handle<String> s1, int pos1,
                               Handle<String> s2, int pos2, int len) {
-  static StringInputBuffer buf1;
-  static StringInputBuffer buf2;
+  StringInputBuffer& buf1 = *isolate->liveedit_compare_substrings_buf1();
+  StringInputBuffer& buf2 = *isolate->liveedit_compare_substrings_buf2();
   buf1.Reset(*s1);
   buf1.Seek(pos1);
   buf2.Reset(*s2);
@@ -291,7 +291,7 @@ static bool CompareSubstrings(Handle<String> s1, int pos1,
 class CompareOutputArrayWriter {
  public:
   CompareOutputArrayWriter()
-      : array_(Factory::NewJSArray(10)), current_size_(0) {}
+      : array_(FACTORY->NewJSArray(10)), current_size_(0) {}
 
   Handle<JSArray> GetResult() {
     return array_;
@@ -410,9 +410,10 @@ class LineEndsWrapper {
 // Represents 2 strings as 2 arrays of lines.
 class LineArrayCompareInput : public Comparator::Input {
  public:
-  LineArrayCompareInput(Handle<String> s1, Handle<String> s2,
+  LineArrayCompareInput(Isolate* isolate, Handle<String> s1, Handle<String> s2,
                         LineEndsWrapper line_ends1, LineEndsWrapper line_ends2)
-      : s1_(s1), s2_(s2), line_ends1_(line_ends1), line_ends2_(line_ends2) {
+      : isolate_(isolate), s1_(s1), s2_(s2), line_ends1_(line_ends1),
+        line_ends2_(line_ends2) {
   }
   int getLength1() {
     return line_ends1_.length();
@@ -430,10 +431,12 @@ class LineArrayCompareInput : public Comparator::Input {
     if (len1 != len2) {
       return false;
     }
-    return CompareSubstrings(s1_, line_start1, s2_, line_start2, len1);
+    return CompareSubstrings(isolate_, s1_, line_start1, s2_, line_start2,
+                             len1);
   }
 
  private:
+  Isolate* isolate_;
   Handle<String> s1_;
   Handle<String> s2_;
   LineEndsWrapper line_ends1_;
@@ -492,7 +495,8 @@ Handle<JSArray> LiveEdit::CompareStrings(Handle<String> s1,
   LineEndsWrapper line_ends1(s1);
   LineEndsWrapper line_ends2(s2);
 
-  LineArrayCompareInput input(s1, s2, line_ends1, line_ends2);
+  LineArrayCompareInput
+      input(Isolate::Current(), s1, s2, line_ends1, line_ends2);
   TokenizingLineArrayCompareOutput output(line_ends1, line_ends2, s1, s2);
 
   Comparator::CalculateDifference(&input, &output);
@@ -501,21 +505,21 @@ Handle<JSArray> LiveEdit::CompareStrings(Handle<String> s1,
 }
 
 
-static void CompileScriptForTracker(Handle<Script> script) {
+static void CompileScriptForTracker(Isolate* isolate, Handle<Script> script) {
   // TODO(635): support extensions.
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(isolate);
 
   // Build AST.
   CompilationInfo info(script);
   info.MarkAsGlobal();
   if (ParserApi::Parse(&info)) {
     // Compile the code.
-    LiveEditFunctionTracker tracker(info.function());
+    LiveEditFunctionTracker tracker(info.isolate(), info.function());
     if (Compiler::MakeCodeForLiveEdit(&info)) {
       ASSERT(!info.code().is_null());
       tracker.RecordRootFunctionInfo(info.code());
     } else {
-      Top::StackOverflow();
+      info.isolate()->StackOverflow();
     }
   }
 }
@@ -530,9 +534,10 @@ static Handle<Object> UnwrapJSValue(Handle<JSValue> jsValue) {
 // Wraps any object into a OpaqueReference, that will hide the object
 // from JavaScript.
 static Handle<JSValue> WrapInJSValue(Object* object) {
-  Handle<JSFunction> constructor = Top::opaque_reference_function();
+  Handle<JSFunction> constructor =
+      Isolate::Current()->opaque_reference_function();
   Handle<JSValue> result =
-      Handle<JSValue>::cast(Factory::NewJSObject(constructor));
+      Handle<JSValue>::cast(FACTORY->NewJSObject(constructor));
   result->set_value(object);
   return result;
 }
@@ -545,7 +550,7 @@ template<typename S>
 class JSArrayBasedStruct {
  public:
   static S Create() {
-    Handle<JSArray> array = Factory::NewJSArray(S::kSize_);
+    Handle<JSArray> array = FACTORY->NewJSArray(S::kSize_);
     return S(array);
   }
   static S cast(Object* object) {
@@ -695,7 +700,7 @@ class FunctionInfoListener {
   FunctionInfoListener() {
     current_parent_index_ = -1;
     len_ = 0;
-    result_ = Factory::NewJSArray(10);
+    result_ = FACTORY->NewJSArray(10);
   }
 
   void FunctionStarted(FunctionLiteral* fun) {
@@ -723,7 +728,7 @@ class FunctionInfoListener {
     FunctionInfoWrapper info =
         FunctionInfoWrapper::cast(
             result_->GetElementNoExceptionThrown(current_parent_index_));
-    info.SetFunctionCode(function_code, Handle<Object>(Heap::null_value()));
+    info.SetFunctionCode(function_code, Handle<Object>(HEAP->null_value()));
   }
 
   // Saves full information about a function: its code, its scope info
@@ -749,7 +754,7 @@ class FunctionInfoListener {
   Object* SerializeFunctionScope(Scope* scope) {
     HandleScope handle_scope;
 
-    Handle<JSArray> scope_info_list = Factory::NewJSArray(10);
+    Handle<JSArray> scope_info_list = FACTORY->NewJSArray(10);
     int scope_info_length = 0;
 
     // Saves some description of scope. It stores name and indexes of
@@ -757,7 +762,7 @@ class FunctionInfoListener {
     // scopes of this chain.
     Scope* outer_scope = scope->outer_scope();
     if (outer_scope == NULL) {
-      return Heap::undefined_value();
+      return HEAP->undefined_value();
     }
     do {
       ZoneList<Variable*> list(10);
@@ -797,7 +802,7 @@ class FunctionInfoListener {
       }
       SetElementNonStrict(scope_info_list,
                           scope_info_length,
-                          Handle<Object>(Heap::null_value()));
+                          Handle<Object>(HEAP->null_value()));
       scope_info_length++;
 
       outer_scope = outer_scope->outer_scope();
@@ -812,18 +817,17 @@ class FunctionInfoListener {
 };
 
 
-static FunctionInfoListener* active_function_info_listener = NULL;
-
 JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script,
                                      Handle<String> source) {
+  Isolate* isolate = Isolate::Current();
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
 
   FunctionInfoListener listener;
   Handle<Object> original_source = Handle<Object>(script->source());
   script->set_source(*source);
-  active_function_info_listener = &listener;
-  CompileScriptForTracker(script);
-  active_function_info_listener = NULL;
+  isolate->set_active_function_info_listener(&listener);
+  CompileScriptForTracker(isolate, script);
+  isolate->set_active_function_info_listener(NULL);
   script->set_source(*original_source);
 
   return *(listener.GetResult());
@@ -905,7 +909,7 @@ class ReferenceCollectorVisitor : public ObjectVisitor {
 
 // Finds all references to original and replaces them with substitution.
 static void ReplaceCodeObject(Code* original, Code* substitution) {
-  ASSERT(!Heap::InNewSpace(substitution));
+  ASSERT(!HEAP->InNewSpace(substitution));
 
   AssertNoAllocation no_allocations_please;
 
@@ -918,7 +922,7 @@ static void ReplaceCodeObject(Code* original, Code* substitution) {
   // so temporary replace the pointers with offset numbers
   // in prologue/epilogue.
   {
-    Heap::IterateStrongRoots(&visitor, VISIT_ALL);
+    HEAP->IterateStrongRoots(&visitor, VISIT_ALL);
   }
 
   // Now iterate over all pointers of all objects, including code_target
@@ -948,7 +952,7 @@ static bool IsInlined(JSFunction* function, SharedFunctionInfo* candidate) {
   DeoptimizationInputData* data =
       DeoptimizationInputData::cast(function->code()->deoptimization_data());
 
-  if (data == Heap::empty_fixed_array()) return false;
+  if (data == HEAP->empty_fixed_array()) return false;
 
   FixedArray* literals = data->LiteralArray();
 
@@ -1000,7 +1004,7 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
   HandleScope scope;
 
   if (!SharedInfoWrapper::IsInstance(shared_info_array)) {
-    return Top::ThrowIllegalOperation();
+    return Isolate::Current()->ThrowIllegalOperation();
   }
 
   FunctionInfoWrapper compile_info_wrapper(new_compile_info_array);
@@ -1020,7 +1024,7 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
   if (shared_info->debug_info()->IsDebugInfo()) {
     Handle<DebugInfo> debug_info(DebugInfo::cast(shared_info->debug_info()));
     Handle<Code> new_original_code =
-        Factory::CopyCode(compile_info_wrapper.GetFunctionCode());
+        FACTORY->CopyCode(compile_info_wrapper.GetFunctionCode());
     debug_info->set_original_code(*new_original_code);
   }
 
@@ -1028,12 +1032,13 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
   shared_info->set_end_position(compile_info_wrapper.GetEndPosition());
 
   shared_info->set_construct_stub(
-      Builtins::builtin(Builtins::JSConstructStubGeneric));
+      Isolate::Current()->builtins()->builtin(
+          Builtins::JSConstructStubGeneric));
 
   DeoptimizeDependentFunctions(*shared_info);
-  CompilationCache::Remove(shared_info);
+  Isolate::Current()->compilation_cache()->Remove(shared_info);
 
-  return Heap::undefined_value();
+  return HEAP->undefined_value();
 }
 
 
@@ -1042,16 +1047,16 @@ MaybeObject* LiveEdit::FunctionSourceUpdated(
   HandleScope scope;
 
   if (!SharedInfoWrapper::IsInstance(shared_info_array)) {
-    return Top::ThrowIllegalOperation();
+    return Isolate::Current()->ThrowIllegalOperation();
   }
 
   SharedInfoWrapper shared_info_wrapper(shared_info_array);
   Handle<SharedFunctionInfo> shared_info = shared_info_wrapper.GetInfo();
 
   DeoptimizeDependentFunctions(*shared_info);
-  CompilationCache::Remove(shared_info);
+  Isolate::Current()->compilation_cache()->Remove(shared_info);
 
-  return Heap::undefined_value();
+  return HEAP->undefined_value();
 }
 
 
@@ -1061,7 +1066,7 @@ void LiveEdit::SetFunctionScript(Handle<JSValue> function_wrapper,
       Handle<SharedFunctionInfo>::cast(UnwrapJSValue(function_wrapper));
   shared_info->set_script(*script_handle);
 
-  CompilationCache::Remove(shared_info);
+  Isolate::Current()->compilation_cache()->Remove(shared_info);
 }
 
 
@@ -1209,7 +1214,7 @@ static Handle<Code> PatchPositionsInCode(Handle<Code> code,
     // Relocation info section now has different size. We cannot simply
     // rewrite it inside code object. Instead we have to create a new
     // code object.
-    Handle<Code> result(Factory::CopyCode(code, buffer));
+    Handle<Code> result(FACTORY->CopyCode(code, buffer));
     return result;
   }
 }
@@ -1219,7 +1224,7 @@ MaybeObject* LiveEdit::PatchFunctionPositions(
     Handle<JSArray> shared_info_array, Handle<JSArray> position_change_array) {
 
   if (!SharedInfoWrapper::IsInstance(shared_info_array)) {
-    return Top::ThrowIllegalOperation();
+    return Isolate::Current()->ThrowIllegalOperation();
   }
 
   SharedInfoWrapper shared_info_wrapper(shared_info_array);
@@ -1250,14 +1255,14 @@ MaybeObject* LiveEdit::PatchFunctionPositions(
     }
   }
 
-  return Heap::undefined_value();
+  return HEAP->undefined_value();
 }
 
 
 static Handle<Script> CreateScriptCopy(Handle<Script> original) {
   Handle<String> original_source(String::cast(original->source()));
 
-  Handle<Script> copy = Factory::NewScript(original_source);
+  Handle<Script> copy = FACTORY->NewScript(original_source);
 
   copy->set_name(original->name());
   copy->set_line_offset(original->line_offset());
@@ -1282,15 +1287,16 @@ Object* LiveEdit::ChangeScriptSource(Handle<Script> original_script,
     Handle<Script> old_script = CreateScriptCopy(original_script);
     old_script->set_name(String::cast(*old_script_name));
     old_script_object = old_script;
-    Debugger::OnAfterCompile(old_script, Debugger::SEND_WHEN_DEBUGGING);
+    Isolate::Current()->debugger()->OnAfterCompile(
+        old_script, Debugger::SEND_WHEN_DEBUGGING);
   } else {
-    old_script_object = Handle<Object>(Heap::null_value());
+    old_script_object = Handle<Object>(HEAP->null_value());
   }
 
   original_script->set_source(*new_source);
 
   // Drop line ends so that they will be recalculated.
-  original_script->set_line_ends(Heap::undefined_value());
+  original_script->set_line_ends(HEAP->undefined_value());
 
   return *old_script_object;
 }
@@ -1351,7 +1357,8 @@ static bool CheckActivation(Handle<JSArray> shared_info_array,
 static bool FixTryCatchHandler(StackFrame* top_frame,
                                StackFrame* bottom_frame) {
   Address* pointer_address =
-      &Memory::Address_at(Top::get_address_from_id(Top::k_handler_address));
+      &Memory::Address_at(Isolate::Current()->get_address_from_id(
+          Isolate::k_handler_address));
 
   while (*pointer_address < top_frame->sp()) {
     pointer_address = &Memory::Address_at(*pointer_address);
@@ -1386,19 +1393,22 @@ static const char* DropFrames(Vector<StackFrame*> frames,
   ASSERT(bottom_js_frame->is_java_script());
 
   // Check the nature of the top frame.
-  if (pre_top_frame->code()->is_inline_cache_stub() &&
-      pre_top_frame->code()->ic_state() == DEBUG_BREAK) {
+  Code* pre_top_frame_code = pre_top_frame->LookupCode(Isolate::Current());
+  if (pre_top_frame_code->is_inline_cache_stub() &&
+      pre_top_frame_code->ic_state() == DEBUG_BREAK) {
     // OK, we can drop inline cache calls.
     *mode = Debug::FRAME_DROPPED_IN_IC_CALL;
-  } else if (pre_top_frame->code() == Debug::debug_break_slot()) {
+  } else if (pre_top_frame_code ==
+             Isolate::Current()->debug()->debug_break_slot()) {
     // OK, we can drop debug break slot.
     *mode = Debug::FRAME_DROPPED_IN_DEBUG_SLOT_CALL;
-  } else if (pre_top_frame->code() ==
-      Builtins::builtin(Builtins::FrameDropper_LiveEdit)) {
+  } else if (pre_top_frame_code ==
+      Isolate::Current()->builtins()->builtin(
+          Builtins::FrameDropper_LiveEdit)) {
     // OK, we can drop our own code.
     *mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
-  } else if (pre_top_frame->code()->kind() == Code::STUB &&
-      pre_top_frame->code()->major_key()) {
+  } else if (pre_top_frame_code->kind() == Code::STUB &&
+      pre_top_frame_code->major_key()) {
     // Entry from our unit tests, it's fine, we support this case.
     *mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
   } else {
@@ -1420,7 +1430,8 @@ static const char* DropFrames(Vector<StackFrame*> frames,
   // Make sure FixTryCatchHandler is idempotent.
   ASSERT(!FixTryCatchHandler(pre_top_frame, bottom_js_frame));
 
-  Handle<Code> code(Builtins::builtin(Builtins::FrameDropper_LiveEdit));
+  Handle<Code> code(Isolate::Current()->builtins()->builtin(
+      Builtins::FrameDropper_LiveEdit));
   top_frame->set_pc(code->entry());
   pre_top_frame->SetCallerFp(bottom_js_frame->fp());
 
@@ -1447,7 +1458,7 @@ static bool IsDropableFrame(StackFrame* frame) {
 // removing all listed function if possible and if do_drop is true.
 static const char* DropActivationsInActiveThread(
     Handle<JSArray> shared_info_array, Handle<JSArray> result, bool do_drop) {
-
+  Debug* debug = Isolate::Current()->debug();
   ZoneScope scope(DELETE_ON_EXIT);
   Vector<StackFrame*> frames = CreateStackMap();
 
@@ -1457,7 +1468,7 @@ static const char* DropActivationsInActiveThread(
   int frame_index = 0;
   for (; frame_index < frames.length(); frame_index++) {
     StackFrame* frame = frames[frame_index];
-    if (frame->id() == Debug::break_frame_id()) {
+    if (frame->id() == debug->break_frame_id()) {
       top_frame_index = frame_index;
       break;
     }
@@ -1534,7 +1545,7 @@ static const char* DropActivationsInActiveThread(
       break;
     }
   }
-  Debug::FramesHaveBeenDropped(new_id, drop_mode,
+  debug->FramesHaveBeenDropped(new_id, drop_mode,
                                restarter_frame_function_pointer);
 
   // Replace "blocked on active" with "replaced on active" status.
@@ -1579,7 +1590,7 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
     Handle<JSArray> shared_info_array, bool do_drop) {
   int len = Smi::cast(shared_info_array->length())->value();
 
-  Handle<JSArray> result = Factory::NewJSArray(len);
+  Handle<JSArray> result = FACTORY->NewJSArray(len);
 
   // Fill the default values.
   for (int i = 0; i < len; i++) {
@@ -1593,7 +1604,8 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
   // First check inactive threads. Fail if some functions are blocked there.
   InactiveThreadActivationsChecker inactive_threads_checker(shared_info_array,
                                                             result);
-  ThreadManager::IterateArchivedThreads(&inactive_threads_checker);
+  Isolate::Current()->thread_manager()->IterateArchivedThreads(
+      &inactive_threads_checker);
   if (inactive_threads_checker.HasBlockedFunctions()) {
     return result;
   }
@@ -1604,42 +1616,44 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
   if (error_message != NULL) {
     // Add error message as an array extra element.
     Vector<const char> vector_message(error_message, StrLength(error_message));
-    Handle<String> str = Factory::NewStringFromAscii(vector_message);
+    Handle<String> str = FACTORY->NewStringFromAscii(vector_message);
     SetElementNonStrict(result, len, str);
   }
   return result;
 }
 
 
-LiveEditFunctionTracker::LiveEditFunctionTracker(FunctionLiteral* fun) {
-  if (active_function_info_listener != NULL) {
-    active_function_info_listener->FunctionStarted(fun);
+LiveEditFunctionTracker::LiveEditFunctionTracker(Isolate* isolate,
+                                                 FunctionLiteral* fun)
+    : isolate_(isolate) {
+  if (isolate_->active_function_info_listener() != NULL) {
+    isolate_->active_function_info_listener()->FunctionStarted(fun);
   }
 }
 
 
 LiveEditFunctionTracker::~LiveEditFunctionTracker() {
-  if (active_function_info_listener != NULL) {
-    active_function_info_listener->FunctionDone();
+  if (isolate_->active_function_info_listener() != NULL) {
+    isolate_->active_function_info_listener()->FunctionDone();
   }
 }
 
 
 void LiveEditFunctionTracker::RecordFunctionInfo(
     Handle<SharedFunctionInfo> info, FunctionLiteral* lit) {
-  if (active_function_info_listener != NULL) {
-    active_function_info_listener->FunctionInfo(info, lit->scope());
+  if (isolate_->active_function_info_listener() != NULL) {
+    isolate_->active_function_info_listener()->FunctionInfo(info, lit->scope());
   }
 }
 
 
 void LiveEditFunctionTracker::RecordRootFunctionInfo(Handle<Code> code) {
-  active_function_info_listener->FunctionCode(code);
+  isolate_->active_function_info_listener()->FunctionCode(code);
 }
 
 
-bool LiveEditFunctionTracker::IsActive() {
-  return active_function_info_listener != NULL;
+bool LiveEditFunctionTracker::IsActive(Isolate* isolate) {
+  return isolate->active_function_info_listener() != NULL;
 }
 
 
@@ -1647,7 +1661,8 @@ bool LiveEditFunctionTracker::IsActive() {
 
 // This ifdef-else-endif section provides working or stub implementation of
 // LiveEditFunctionTracker.
-LiveEditFunctionTracker::LiveEditFunctionTracker(FunctionLiteral* fun) {
+LiveEditFunctionTracker::LiveEditFunctionTracker(Isolate* isolate,
+                                                 FunctionLiteral* fun) {
 }
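Note on the pattern: the liveedit.cc hunks above all apply one mechanical substitution -- code that used to reach process-wide static classes (Top::, Heap::, Factory::, CompilationCache::, Builtins::builtin(...)) now goes through the current isolate (Isolate::Current()->..., or the HEAP/FACTORY shorthands). Below is a standalone analogy of that shape, assuming HEAP expands to Isolate::Current()->heap() as the diff's usage suggests; the types are stand-ins, not the real V8 classes.

#include <cassert>

struct Heap {
  int* undefined_value() { return &undefined_; }  // stands in for the real sentinel value
  int undefined_ = 0;
};

class Isolate {
 public:
  static Isolate* Current() { return current_; }      // stand-in for the real TLS lookup
  static void Enter(Isolate* isolate) { current_ = isolate; }
  Heap* heap() { return &heap_; }
 private:
  static Isolate* current_;
  Heap heap_;                                          // formerly static state, now per isolate
};

Isolate* Isolate::current_ = nullptr;

#define HEAP (Isolate::Current()->heap())              // mirrors the shorthand used above

int main() {
  Isolate a, b;
  Isolate::Enter(&a);
  assert(HEAP == a.heap());   // every former Heap::... call now picks the entered isolate's heap
  Isolate::Enter(&b);
  assert(HEAP == b.heap());
  return 0;
}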
 
 
index 5f2c99c3d737bf78d7932e4e090e1fe9e77fd8a1..36c2c760677c8e9c80d74d7b701a725245de3172 100644
@@ -65,13 +65,18 @@ namespace internal {
 // also collects compiled function codes.
 class LiveEditFunctionTracker {
  public:
-  explicit LiveEditFunctionTracker(FunctionLiteral* fun);
+  explicit LiveEditFunctionTracker(Isolate* isolate, FunctionLiteral* fun);
   ~LiveEditFunctionTracker();
   void RecordFunctionInfo(Handle<SharedFunctionInfo> info,
                           FunctionLiteral* lit);
   void RecordRootFunctionInfo(Handle<Code> code);
 
-  static bool IsActive();
+  static bool IsActive(Isolate* isolate);
+
+ private:
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  Isolate* isolate_;
+#endif
 };
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
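For context, LiveEditFunctionTracker keeps its RAII shape; it just reads the active listener off the isolate it is handed instead of a process-wide static. A minimal standalone sketch of that pattern, with simplified stand-in types (not the real V8 classes):

#include <cstdio>

struct Listener {
  void FunctionStarted() { std::puts("started"); }
  void FunctionDone()    { std::puts("done"); }
};

struct Isolate {
  Listener* active_function_info_listener() { return listener_; }
  Listener* listener_ = nullptr;   // stand-in for the isolate-owned listener slot
};

class Tracker {
 public:
  explicit Tracker(Isolate* isolate) : isolate_(isolate) {
    if (isolate_->active_function_info_listener() != nullptr)
      isolate_->active_function_info_listener()->FunctionStarted();
  }
  ~Tracker() {
    if (isolate_->active_function_info_listener() != nullptr)
      isolate_->active_function_info_listener()->FunctionDone();
  }
 private:
  Isolate* isolate_;
};

int main() {
  Isolate isolate;
  Listener listener;
  isolate.listener_ = &listener;
  { Tracker t(&isolate); /* function compilation would happen here */ }  // prints started/done
}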
index 423f8f0d744b254dccdb8ad35c1111d1329413fe..23e418d6d86a8bc0a19dfbf993c1cf90da3217ec 100644
@@ -273,28 +273,28 @@ class LiveObjectList {
   inline static void ProcessNonLive(HeapObject* obj) {}
   inline static void UpdateReferencesForScavengeGC() {}
 
-  inline static MaybeObject* Capture() { return Heap::undefined_value(); }
+  inline static MaybeObject* Capture() { return HEAP->undefined_value(); }
   inline static bool Delete(int id) { return false; }
   inline static MaybeObject* Dump(int id1,
                                   int id2,
                                   int start_idx,
                                   int dump_limit,
                                   Handle<JSObject> filter_obj) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
   inline static MaybeObject* Info(int start_idx, int dump_limit) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
   inline static MaybeObject* Summarize(int id1,
                                        int id2,
                                        Handle<JSObject> filter_obj) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   inline static void Reset() {}
-  inline static Object* GetObj(int obj_id) { return Heap::undefined_value(); }
+  inline static Object* GetObj(int obj_id) { return HEAP->undefined_value(); }
   inline static Object* GetObjId(Handle<String> address) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
   inline static MaybeObject* GetObjRetainers(int obj_id,
                                              Handle<JSObject> instance_filter,
@@ -302,15 +302,15 @@ class LiveObjectList {
                                              int start,
                                              int count,
                                              Handle<JSObject> filter_obj) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   inline static Object* GetPath(int obj_id1,
                                 int obj_id2,
                                 Handle<JSObject> instance_filter) {
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
-  inline static Object* PrintObj(int obj_id) { return Heap::undefined_value(); }
+  inline static Object* PrintObj(int obj_id) { return HEAP->undefined_value(); }
 };
 
 
index 9a498ec0ff95826a42a1edc69e6d272a047da603..a854ade0541380ac2c57ead13c93cf4dbe2d90e9 100644
@@ -28,6 +28,7 @@
 #include "v8.h"
 
 #include "log-utils.h"
+#include "string-stream.h"
 
 namespace v8 {
 namespace internal {
@@ -118,29 +119,117 @@ int LogDynamicBuffer::WriteInternal(const char* data, int data_size) {
   return data_size;
 }
 
-
-bool Log::is_stopped_ = false;
-Log::WritePtr Log::Write = NULL;
-FILE* Log::output_handle_ = NULL;
-FILE* Log::output_code_handle_ = NULL;
-LogDynamicBuffer* Log::output_buffer_ = NULL;
 // Must be the same message as in Logger::PauseProfiler.
-const char* Log::kDynamicBufferSeal = "profiler,\"pause\"\n";
-Mutex* Log::mutex_ = NULL;
-char* Log::message_buffer_ = NULL;
+const char* const Log::kDynamicBufferSeal = "profiler,\"pause\"\n";
+
+Log::Log(Logger* logger)
+  : write_to_file_(false),
+    is_stopped_(false),
+    output_handle_(NULL),
+    output_code_handle_(NULL),
+    output_buffer_(NULL),
+    mutex_(NULL),
+    message_buffer_(NULL),
+    logger_(logger) {
+}
+
+
+static void AddIsolateIdIfNeeded(StringStream* stream) {
+  Isolate* isolate = Isolate::Current();
+  if (isolate->IsDefaultIsolate()) return;
+  stream->Add("isolate-%p-", isolate);
+}
 
 
-void Log::Init() {
+void Log::Initialize() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
   mutex_ = OS::CreateMutex();
   message_buffer_ = NewArray<char>(kMessageBufferSize);
+
+  // --log-all enables all the log flags.
+  if (FLAG_log_all) {
+    FLAG_log_runtime = true;
+    FLAG_log_api = true;
+    FLAG_log_code = true;
+    FLAG_log_gc = true;
+    FLAG_log_suspect = true;
+    FLAG_log_handles = true;
+    FLAG_log_regexp = true;
+  }
+
+  // --prof implies --log-code.
+  if (FLAG_prof) FLAG_log_code = true;
+
+  // --prof_lazy controls --log-code, implies --noprof_auto.
+  if (FLAG_prof_lazy) {
+    FLAG_log_code = false;
+    FLAG_prof_auto = false;
+  }
+
+  bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
+      || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
+      || FLAG_log_regexp || FLAG_log_state_changes;
+
+  bool open_log_file = start_logging || FLAG_prof_lazy;
+
+  // If we're logging anything, we need to open the log file.
+  if (open_log_file) {
+    if (strcmp(FLAG_logfile, "-") == 0) {
+      OpenStdout();
+    } else if (strcmp(FLAG_logfile, "*") == 0) {
+      OpenMemoryBuffer();
+    } else  {
+      if (strchr(FLAG_logfile, '%') != NULL ||
+          !Isolate::Current()->IsDefaultIsolate()) {
+        // If there's a '%' in the log file name we have to expand
+        // placeholders.
+        HeapStringAllocator allocator;
+        StringStream stream(&allocator);
+        AddIsolateIdIfNeeded(&stream);
+        for (const char* p = FLAG_logfile; *p; p++) {
+          if (*p == '%') {
+            p++;
+            switch (*p) {
+              case '\0':
+                // If there's a % at the end of the string we back up
+                // one character so we can escape the loop properly.
+                p--;
+                break;
+              case 't': {
+                // %t expands to the current time in milliseconds.
+                double time = OS::TimeCurrentMillis();
+                stream.Add("%.0f", FmtElm(time));
+                break;
+              }
+              case '%':
+                // %% expands (contracts really) to %.
+                stream.Put('%');
+                break;
+              default:
+                // All other %'s expand to themselves.
+                stream.Put('%');
+                stream.Put(*p);
+                break;
+            }
+          } else {
+            stream.Put(*p);
+          }
+        }
+        SmartPointer<const char> expanded = stream.ToCString();
+        OpenFile(*expanded);
+      } else {
+        OpenFile(FLAG_logfile);
+      }
+    }
+  }
+#endif
 }
 
 
 void Log::OpenStdout() {
   ASSERT(!IsEnabled());
   output_handle_ = stdout;
-  Write = WriteToFile;
-  Init();
+  write_to_file_ = true;
 }
 
 
@@ -150,6 +239,7 @@ static const char kCodeLogExt[] = ".code";
 void Log::OpenFile(const char* name) {
   ASSERT(!IsEnabled());
   output_handle_ = OS::FOpen(name, OS::LogFileOpenMode);
+  write_to_file_ = true;
   if (FLAG_ll_prof) {
     // Open a file for logging the contents of code objects so that
     // they can be disassembled later.
@@ -160,8 +250,6 @@ void Log::OpenFile(const char* name) {
     memcpy(code_name.start() + name_len, kCodeLogExt, sizeof(kCodeLogExt));
     output_code_handle_ = OS::FOpen(code_name.start(), OS::LogFileOpenMode);
   }
-  Write = WriteToFile;
-  Init();
 }
 
 
@@ -170,24 +258,20 @@ void Log::OpenMemoryBuffer() {
   output_buffer_ = new LogDynamicBuffer(
       kDynamicBufferBlockSize, kMaxDynamicBufferSize,
       kDynamicBufferSeal, StrLength(kDynamicBufferSeal));
-  Write = WriteToMemory;
-  Init();
+  write_to_file_ = false;
 }
 
 
 void Log::Close() {
-  if (Write == WriteToFile) {
+  if (write_to_file_) {
     if (output_handle_ != NULL) fclose(output_handle_);
     output_handle_ = NULL;
     if (output_code_handle_ != NULL) fclose(output_code_handle_);
     output_code_handle_ = NULL;
-  } else if (Write == WriteToMemory) {
+  } else {
     delete output_buffer_;
     output_buffer_ = NULL;
-  } else {
-    ASSERT(Write == NULL);
   }
-  Write = NULL;
 
   DeleteArray(message_buffer_);
   message_buffer_ = NULL;
@@ -200,7 +284,7 @@ void Log::Close() {
 
 
 int Log::GetLogLines(int from_pos, char* dest_buf, int max_size) {
-  if (Write != WriteToMemory) return 0;
+  if (write_to_file_) return 0;
   ASSERT(output_buffer_ != NULL);
   ASSERT(from_pos >= 0);
   ASSERT(max_size >= 0);
@@ -220,17 +304,16 @@ int Log::GetLogLines(int from_pos, char* dest_buf, int max_size) {
 }
 
 
-LogMessageBuilder::WriteFailureHandler
-    LogMessageBuilder::write_failure_handler = NULL;
-
-
-LogMessageBuilder::LogMessageBuilder(): sl(Log::mutex_), pos_(0) {
-  ASSERT(Log::message_buffer_ != NULL);
+LogMessageBuilder::LogMessageBuilder(Logger* logger)
+  : log_(logger->log_),
+    sl(log_->mutex_),
+    pos_(0) {
+  ASSERT(log_->message_buffer_ != NULL);
 }
 
 
 void LogMessageBuilder::Append(const char* format, ...) {
-  Vector<char> buf(Log::message_buffer_ + pos_,
+  Vector<char> buf(log_->message_buffer_ + pos_,
                    Log::kMessageBufferSize - pos_);
   va_list args;
   va_start(args, format);
@@ -241,7 +324,7 @@ void LogMessageBuilder::Append(const char* format, ...) {
 
 
 void LogMessageBuilder::AppendVA(const char* format, va_list args) {
-  Vector<char> buf(Log::message_buffer_ + pos_,
+  Vector<char> buf(log_->message_buffer_ + pos_,
                    Log::kMessageBufferSize - pos_);
   int result = v8::internal::OS::VSNPrintF(buf, format, args);
 
@@ -257,7 +340,7 @@ void LogMessageBuilder::AppendVA(const char* format, va_list args) {
 
 void LogMessageBuilder::Append(const char c) {
   if (pos_ < Log::kMessageBufferSize) {
-    Log::message_buffer_[pos_++] = c;
+    log_->message_buffer_[pos_++] = c;
   }
   ASSERT(pos_ <= Log::kMessageBufferSize);
 }
@@ -315,7 +398,7 @@ void LogMessageBuilder::AppendStringPart(const char* str, int len) {
     ASSERT(len >= 0);
     if (len == 0) return;
   }
-  Vector<char> buf(Log::message_buffer_ + pos_,
+  Vector<char> buf(log_->message_buffer_ + pos_,
                    Log::kMessageBufferSize - pos_);
   OS::StrNCpy(buf, str, len);
   pos_ += len;
@@ -325,12 +408,16 @@ void LogMessageBuilder::AppendStringPart(const char* str, int len) {
 
 void LogMessageBuilder::WriteToLogFile() {
   ASSERT(pos_ <= Log::kMessageBufferSize);
-  const int written = Log::Write(Log::message_buffer_, pos_);
-  if (written != pos_ && write_failure_handler != NULL) {
-    write_failure_handler();
+  const int written = log_->write_to_file_ ?
+      log_->WriteToFile(log_->message_buffer_, pos_) :
+      log_->WriteToMemory(log_->message_buffer_, pos_);
+  if (written != pos_) {
+    log_->stop();
+    log_->logger_->LogFailure();
   }
 }
 
+
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 } }  // namespace v8::internal
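Restated outside of V8's StringStream machinery, the log-file-name expansion introduced in Log::Initialize above behaves roughly like the standalone function below (my naming; the real code additionally prefixes "isolate-<ptr>-" for non-default isolates via AddIsolateIdIfNeeded).

#include <chrono>
#include <cstdio>
#include <string>

// "%t" -> current time in ms, "%%" -> "%", any other "%x" is kept verbatim,
// and a lone trailing "%" is dropped (mirroring the '\0' case above).
std::string ExpandLogfileName(const std::string& name, double time_ms) {
  std::string out;
  for (size_t i = 0; i < name.size(); ++i) {
    if (name[i] != '%') { out += name[i]; continue; }
    if (i + 1 == name.size()) break;          // trailing '%': nothing emitted
    ++i;
    switch (name[i]) {
      case 't': {
        char buf[32];
        std::snprintf(buf, sizeof(buf), "%.0f", time_ms);
        out += buf;
        break;
      }
      case '%':
        out += '%';
        break;
      default:
        out += '%';
        out += name[i];
        break;
    }
  }
  return out;
}

int main() {
  double now_ms = std::chrono::duration<double, std::milli>(
      std::chrono::system_clock::now().time_since_epoch()).count();
  std::printf("%s\n", ExpandLogfileName("v8-%t-100%%.log", now_ms).c_str());
}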
index 719d37030e12d85ae521792f793973a2a489d45f..255c73c9bc5c972fd3ca968fedb801ecaabd6185 100644
@@ -33,6 +33,8 @@ namespace internal {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
+class Logger;
+
 // A memory buffer that increments its size as you write in it.  Size
 // is incremented with 'block_size' steps, never exceeding 'max_size'.
 // During growth, memory contents are never copied.  At the end of the
@@ -89,28 +91,23 @@ class LogDynamicBuffer {
 
 
 // Functions and data for performing output of log messages.
-class Log : public AllStatic {
+class Log {
  public:
-  // Opens stdout for logging.
-  static void OpenStdout();
 
-  // Opens file for logging.
-  static void OpenFile(const char* name);
-
-  // Opens memory buffer for logging.
-  static void OpenMemoryBuffer();
+  // Performs process-wide initialization.
+  void Initialize();
 
   // Disables logging, but preserves acquired resources.
-  static void stop() { is_stopped_ = true; }
+  void stop() { is_stopped_ = true; }
 
-  // Frees all resources acquired in Open... functions.
-  static void Close();
+  // Frees all resources acquired in Initialize and Open... functions.
+  void Close();
 
   // See description in include/v8.h.
-  static int GetLogLines(int from_pos, char* dest_buf, int max_size);
+  int GetLogLines(int from_pos, char* dest_buf, int max_size);
 
   // Returns whether logging is enabled.
-  static bool IsEnabled() {
+  bool IsEnabled() {
     return !is_stopped_ && (output_handle_ != NULL || output_buffer_ != NULL);
   }
 
@@ -118,16 +115,19 @@ class Log : public AllStatic {
   static const int kMessageBufferSize = v8::V8::kMinimumSizeForLogLinesBuffer;
 
  private:
-  typedef int (*WritePtr)(const char* msg, int length);
+  explicit Log(Logger* logger);
+
+  // Opens stdout for logging.
+  void OpenStdout();
 
-  // Initialization function called from Open... functions.
-  static void Init();
+  // Opens file for logging.
+  void OpenFile(const char* name);
 
-  // Write functions assume that mutex_ is acquired by the caller.
-  static WritePtr Write;
+  // Opens memory buffer for logging.
+  void OpenMemoryBuffer();
 
   // Implementation of writing to a log file.
-  static int WriteToFile(const char* msg, int length) {
+  int WriteToFile(const char* msg, int length) {
     ASSERT(output_handle_ != NULL);
     size_t rv = fwrite(msg, 1, length, output_handle_);
     ASSERT(static_cast<size_t>(length) == rv);
@@ -137,25 +137,27 @@ class Log : public AllStatic {
   }
 
   // Implementation of writing to a memory buffer.
-  static int WriteToMemory(const char* msg, int length) {
+  int WriteToMemory(const char* msg, int length) {
     ASSERT(output_buffer_ != NULL);
     return output_buffer_->Write(msg, length);
   }
 
+  bool write_to_file_;
+
   // Whether logging is stopped (e.g. due to insufficient resources).
-  static bool is_stopped_;
+  bool is_stopped_;
 
   // When logging is active, either output_handle_ or output_buffer_ is used
   // to store a pointer to log destination. If logging was opened via OpenStdout
   // or OpenFile, then output_handle_ is used. If logging was opened
   // via OpenMemoryBuffer, then output_buffer_ is used.
   // mutex_ should be acquired before using output_handle_ or output_buffer_.
-  static FILE* output_handle_;
+  FILE* output_handle_;
 
   // Used when low-level profiling is active to save code object contents.
-  static FILE* output_code_handle_;
+  FILE* output_code_handle_;
 
-  static LogDynamicBuffer* output_buffer_;
+  LogDynamicBuffer* output_buffer_;
 
   // Size of dynamic buffer block (and dynamic buffer initial size).
   static const int kDynamicBufferBlockSize = 65536;
@@ -164,15 +166,17 @@ class Log : public AllStatic {
   static const int kMaxDynamicBufferSize = 50 * 1024 * 1024;
 
   // Message to "seal" dynamic buffer with.
-  static const char* kDynamicBufferSeal;
+  static const char* const kDynamicBufferSeal;
 
   // mutex_ is a Mutex used for enforcing exclusive
   // access to the formatting buffer and the log file or log memory buffer.
-  static Mutex* mutex_;
+  Mutex* mutex_;
 
   // Buffer used for formatting log messages. This is a singleton buffer and
   // mutex_ should be acquired before using it.
-  static char* message_buffer_;
+  char* message_buffer_;
+
+  Logger* logger_;
 
   friend class Logger;
   friend class LogMessageBuilder;
@@ -185,7 +189,7 @@ class LogMessageBuilder BASE_EMBEDDED {
  public:
   // Create a message builder starting from position 0. This acquires the mutex
   // in the log as well.
-  explicit LogMessageBuilder();
+  explicit LogMessageBuilder(Logger* logger);
   ~LogMessageBuilder() { }
 
   // Append string data to the log message.
@@ -211,16 +215,9 @@ class LogMessageBuilder BASE_EMBEDDED {
   // Write the log message to the log file currently opened.
   void WriteToLogFile();
 
-  // A handler that is called when Log::Write fails.
-  typedef void (*WriteFailureHandler)();
-
-  static void set_write_failure_handler(WriteFailureHandler handler) {
-    write_failure_handler = handler;
-  }
-
  private:
-  static WriteFailureHandler write_failure_handler;
 
+  Log* log_;
   ScopedLock sl;
   int pos_;
 };
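The header change above pins down the new ownership: each Logger owns a Log, and every LogMessageBuilder is bound to that Log, holding its mutex for the builder's lifetime and formatting into its buffer; the old static WritePtr dispatch is replaced by the write_to_file_ flag. A simplified standalone sketch of that arrangement (stand-in types, not the real classes):

#include <cstring>
#include <mutex>
#include <string>
#include <vector>

struct Log {
  bool write_to_file = false;          // replaces the old static WritePtr dispatch
  std::string file_output;             // stands in for output_handle_
  std::vector<char> memory_output;     // stands in for output_buffer_
  std::mutex mutex;                    // stands in for mutex_
  char buffer[2048];                   // stands in for message_buffer_

  int Write(const char* msg, int n) {
    if (write_to_file) { file_output.append(msg, n); return n; }
    memory_output.insert(memory_output.end(), msg, msg + n);
    return n;
  }
};

class MessageBuilder {
 public:
  explicit MessageBuilder(Log* log) : log_(log), lock_(log->mutex), pos_(0) {}
  void Append(const char* s) {
    int n = static_cast<int>(std::strlen(s));
    std::memcpy(log_->buffer + pos_, s, n);   // no bounds check: sketch only
    pos_ += n;
  }
  void WriteToLog() { log_->Write(log_->buffer, pos_); }
 private:
  Log* log_;
  std::lock_guard<std::mutex> lock_;   // held while the shared buffer is in use
  int pos_;
};

int main() {
  Log log;                             // one Log per Logger, one Logger per isolate
  MessageBuilder msg(&log);
  msg.Append("profiler,\"begin\"\n");
  msg.WriteToLog();
}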
index b43b685219fc33db6be8ed6203d41611372a61ae..6013506a154cf38a3cfd4752b45047053c06a34d 100644
@@ -64,12 +64,12 @@ class SlidingStateWindow {
 
 
   void IncrementStateCounter(StateTag state) {
-    Counters::state_counters[state].Increment();
+    COUNTERS->state_counters(state)->Increment();
   }
 
 
   void DecrementStateCounter(StateTag state) {
-    Counters::state_counters[state].Decrement();
+    COUNTERS->state_counters(state)->Decrement();
   }
 };
 
@@ -82,7 +82,7 @@ class SlidingStateWindow {
 //
 class Profiler: public Thread {
  public:
-  Profiler();
+  explicit Profiler(Isolate* isolate);
   void Engage();
   void Disengage();
 
@@ -113,9 +113,9 @@ class Profiler: public Thread {
   void Run();
 
   // Pause and Resume TickSample data collection.
-  static bool paused() { return paused_; }
-  static void pause() { paused_ = true; }
-  static void resume() { paused_ = false; }
+  bool paused() const { return paused_; }
+  void pause() { paused_ = true; }
+  void resume() { paused_ = false; }
 
  private:
   // Returns the next index in the cyclic buffer.
@@ -137,23 +137,24 @@ class Profiler: public Thread {
   bool running_;
 
   // Tells whether we are currently recording tick samples.
-  static bool paused_;
+  bool paused_;
 };
 
-bool Profiler::paused_ = false;
-
 
 //
 // StackTracer implementation
 //
-void StackTracer::Trace(TickSample* sample) {
+void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
+  ASSERT(isolate->IsInitialized());
+
   sample->tos = NULL;
   sample->frames_count = 0;
 
   // Avoid collecting traces while doing GC.
   if (sample->state == GC) return;
 
-  const Address js_entry_sp = Top::js_entry_sp(Top::GetCurrentThread());
+  const Address js_entry_sp =
+      Isolate::js_entry_sp(isolate->thread_local_top());
   if (js_entry_sp == 0) {
     // Not executing JS now.
     return;
@@ -164,7 +165,7 @@ void StackTracer::Trace(TickSample* sample) {
   sample->tos = Memory::Address_at(sample->sp);
 
   int i = 0;
-  const Address callback = Top::external_callback();
+  const Address callback = isolate->external_callback();
   // Surprisingly, PC can point _exactly_ to callback start, with good
   // probability, and this will result in reporting fake nested
   // callback call.
@@ -172,7 +173,8 @@ void StackTracer::Trace(TickSample* sample) {
     sample->stack[i++] = callback;
   }
 
-  SafeStackTraceFrameIterator it(sample->fp, sample->sp,
+  SafeStackTraceFrameIterator it(isolate,
+                                 sample->fp, sample->sp,
                                  sample->sp, js_entry_sp);
   while (!it.done() && i < TickSample::kMaxFramesCount) {
     sample->stack[i++] = it.frame()->pc();
@@ -188,8 +190,8 @@ void StackTracer::Trace(TickSample* sample) {
 //
 class Ticker: public Sampler {
  public:
-  explicit Ticker(int interval) :
-      Sampler(interval),
+  explicit Ticker(Isolate* isolate, int interval):
+      Sampler(isolate, interval),
       window_(NULL),
       profiler_(NULL) {}
 
@@ -225,7 +227,7 @@ class Ticker: public Sampler {
 
  protected:
   virtual void DoSampleStack(TickSample* sample) {
-    StackTracer::Trace(sample);
+    StackTracer::Trace(isolate(), sample);
   }
 
  private:
@@ -241,12 +243,12 @@ SlidingStateWindow::SlidingStateWindow(): current_index_(0), is_full_(false) {
   for (int i = 0; i < kBufferSize; i++) {
     buffer_[i] = static_cast<byte>(OTHER);
   }
-  Logger::ticker_->SetWindow(this);
+  LOGGER->ticker_->SetWindow(this);
 }
 
 
 SlidingStateWindow::~SlidingStateWindow() {
-  Logger::ticker_->ClearWindow();
+  LOGGER->ticker_->ClearWindow();
 }
 
 
@@ -266,14 +268,15 @@ void SlidingStateWindow::AddState(StateTag state) {
 //
 // Profiler implementation.
 //
-Profiler::Profiler()
-    : Thread("v8:Profiler"),
+Profiler::Profiler(Isolate* isolate)
+    : Thread(isolate, "v8:Profiler"),
       head_(0),
       tail_(0),
       overflow_(false),
       buffer_semaphore_(OS::CreateSemaphore(0)),
       engaged_(false),
-      running_(false) {
+      running_(false),
+      paused_(false) {
 }
 
 
@@ -292,9 +295,9 @@ void Profiler::Engage() {
   Start();
 
   // Register to get ticks.
-  Logger::ticker_->SetProfiler(this);
+  LOGGER->ticker_->SetProfiler(this);
 
-  Logger::ProfilerBeginEvent();
+  LOGGER->ProfilerBeginEvent();
 }
 
 
@@ -302,7 +305,7 @@ void Profiler::Disengage() {
   if (!engaged_) return;
 
   // Stop receiving ticks.
-  Logger::ticker_->ClearProfiler();
+  LOGGER->ticker_->ClearProfiler();
 
   // Terminate the worker thread by setting running_ to false,
   // inserting a fake element in the queue and then wait for
@@ -314,15 +317,16 @@ void Profiler::Disengage() {
   Insert(&sample);
   Join();
 
-  LOG(UncheckedStringEvent("profiler", "end"));
+  LOG(ISOLATE, UncheckedStringEvent("profiler", "end"));
 }
 
 
 void Profiler::Run() {
   TickSample sample;
   bool overflow = Remove(&sample);
+  i::Isolate* isolate = ISOLATE;
   while (running_) {
-    LOG(TickEvent(&sample, overflow));
+    LOG(isolate, TickEvent(&sample, overflow));
     overflow = Remove(&sample);
   }
 }
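Call sites such as LOG(ISOLATE, ...) and PROFILE(ISOLATE, ...) above show the other half of the conversion: the logging macros now take an isolate and dispatch to that isolate's Logger instance. A hypothetical, standalone sketch of the macro shape (the real definition lives in log.h and also checks whether logging is enabled):

#include <cstdio>

struct Logger {
  void StringEvent(const char* name, const char* value) {
    std::printf("%s,\"%s\"\n", name, value);
  }
};

struct Isolate {
  Logger* logger() { return &logger_; }
  Logger logger_;                      // per-isolate logger instead of static Logger::
};

// Hypothetical shape of the updated macro, for illustration only.
#define LOG(isolate, Call)            \
  do {                                \
    (isolate)->logger()->Call;        \
  } while (false)

int main() {
  Isolate isolate;
  LOG(&isolate, StringEvent("profiler", "end"));
}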
@@ -331,23 +335,38 @@ void Profiler::Run() {
 //
 // Logger class implementation.
 //
-Ticker* Logger::ticker_ = NULL;
-Profiler* Logger::profiler_ = NULL;
-SlidingStateWindow* Logger::sliding_state_window_ = NULL;
-int Logger::logging_nesting_ = 0;
-int Logger::cpu_profiler_nesting_ = 0;
-int Logger::heap_profiler_nesting_ = 0;
+
+Logger::Logger()
+  : ticker_(NULL),
+    profiler_(NULL),
+    sliding_state_window_(NULL),
+    log_events_(NULL),
+    logging_nesting_(0),
+    cpu_profiler_nesting_(0),
+    heap_profiler_nesting_(0),
+    log_(new Log(this)),
+    is_initialized_(false),
+    last_address_(NULL),
+    prev_sp_(NULL),
+    prev_function_(NULL),
+    prev_to_(NULL),
+    prev_code_(NULL) {
+}
+
+Logger::~Logger() {
+  delete log_;
+}
 
 #define DECLARE_EVENT(ignore1, name) name,
-const char* kLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
+static const char* const kLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
   LOG_EVENTS_AND_TAGS_LIST(DECLARE_EVENT)
 };
 #undef DECLARE_EVENT
 
 
 void Logger::ProfilerBeginEvent() {
-  if (!Log::IsEnabled()) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled()) return;
+  LogMessageBuilder msg(this);
   msg.Append("profiler,\"begin\",%d\n", kSamplingIntervalMs);
   msg.WriteToLogFile();
 }
@@ -364,8 +383,8 @@ void Logger::StringEvent(const char* name, const char* value) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::UncheckedStringEvent(const char* name, const char* value) {
-  if (!Log::IsEnabled()) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled()) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,\"%s\"\n", name, value);
   msg.WriteToLogFile();
 }
@@ -388,8 +407,8 @@ void Logger::IntPtrTEvent(const char* name, intptr_t value) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::UncheckedIntEvent(const char* name, int value) {
-  if (!Log::IsEnabled()) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled()) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%d\n", name, value);
   msg.WriteToLogFile();
 }
@@ -398,8 +417,8 @@ void Logger::UncheckedIntEvent(const char* name, int value) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
-  if (!Log::IsEnabled()) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled()) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
   msg.WriteToLogFile();
 }
@@ -408,8 +427,8 @@ void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
 
 void Logger::HandleEvent(const char* name, Object** location) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_handles) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_handles) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,0x%" V8PRIxPTR "\n", name, location);
   msg.WriteToLogFile();
 #endif
@@ -421,8 +440,8 @@ void Logger::HandleEvent(const char* name, Object** location) {
 // caller's responsibility to ensure that log is enabled and that
 // FLAG_log_api is true.
 void Logger::ApiEvent(const char* format, ...) {
-  ASSERT(Log::IsEnabled() && FLAG_log_api);
-  LogMessageBuilder msg;
+  ASSERT(log_->IsEnabled() && FLAG_log_api);
+  LogMessageBuilder msg(this);
   va_list ap;
   va_start(ap, format);
   msg.AppendVA(format, ap);
@@ -434,7 +453,7 @@ void Logger::ApiEvent(const char* format, ...) {
 
 void Logger::ApiNamedSecurityCheck(Object* key) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
   if (key->IsString()) {
     SmartPointer<char> str =
         String::cast(key)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
@@ -452,8 +471,8 @@ void Logger::SharedLibraryEvent(const char* library_path,
                                 uintptr_t start,
                                 uintptr_t end) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_prof) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_prof) return;
+  LogMessageBuilder msg(this);
   msg.Append("shared-library,\"%s\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
              library_path,
              start,
@@ -467,8 +486,8 @@ void Logger::SharedLibraryEvent(const wchar_t* library_path,
                                 uintptr_t start,
                                 uintptr_t end) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_prof) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_prof) return;
+  LogMessageBuilder msg(this);
   msg.Append("shared-library,\"%ls\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
              library_path,
              start,
@@ -482,7 +501,7 @@ void Logger::SharedLibraryEvent(const wchar_t* library_path,
 void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
   // Prints "/" + re.source + "/" +
   //      (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"")
-  LogMessageBuilder msg;
+  LogMessageBuilder msg(this);
 
   Handle<Object> source = GetProperty(regexp, "source");
   if (!source->IsString()) {
@@ -524,8 +543,8 @@ void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
 
 void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_regexp) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_regexp) return;
+  LogMessageBuilder msg(this);
   msg.Append("regexp-compile,");
   LogRegExpSource(regexp);
   msg.Append(in_cache ? ",hit\n" : ",miss\n");
@@ -536,9 +555,9 @@ void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) {
 
 void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_runtime) return;
+  if (!log_->IsEnabled() || !FLAG_log_runtime) return;
   HandleScope scope;
-  LogMessageBuilder msg;
+  LogMessageBuilder msg(this);
   for (int i = 0; i < format.length(); i++) {
     char c = format[i];
     if (c == '%' && i <= format.length() - 2) {
@@ -582,7 +601,7 @@ void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
 
 void Logger::ApiIndexedSecurityCheck(uint32_t index) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
   ApiEvent("api,check-security,%u\n", index);
 #endif
 }
@@ -593,13 +612,13 @@ void Logger::ApiNamedPropertyAccess(const char* tag,
                                     Object* name) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   ASSERT(name->IsString());
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
   String* class_name_obj = holder->class_name();
   SmartPointer<char> class_name =
       class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   SmartPointer<char> property_name =
       String::cast(name)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
-  Logger::ApiEvent("api,%s,\"%s\",\"%s\"\n", tag, *class_name, *property_name);
+  LOGGER->ApiEvent("api,%s,\"%s\",\"%s\"\n", tag, *class_name, *property_name);
 #endif
 }
 
@@ -607,37 +626,37 @@ void Logger::ApiIndexedPropertyAccess(const char* tag,
                                       JSObject* holder,
                                       uint32_t index) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
   String* class_name_obj = holder->class_name();
   SmartPointer<char> class_name =
       class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
-  Logger::ApiEvent("api,%s,\"%s\",%u\n", tag, *class_name, index);
+  LOGGER->ApiEvent("api,%s,\"%s\",%u\n", tag, *class_name, index);
 #endif
 }
 
 void Logger::ApiObjectAccess(const char* tag, JSObject* object) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
   String* class_name_obj = object->class_name();
   SmartPointer<char> class_name =
       class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
-  Logger::ApiEvent("api,%s,\"%s\"\n", tag, *class_name);
+  LOGGER->ApiEvent("api,%s,\"%s\"\n", tag, *class_name);
 #endif
 }
 
 
 void Logger::ApiEntryCall(const char* name) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_api) return;
-  Logger::ApiEvent("api,%s\n", name);
+  if (!log_->IsEnabled() || !FLAG_log_api) return;
+  LOGGER->ApiEvent("api,%s\n", name);
 #endif
 }
 
 
 void Logger::NewEvent(const char* name, void* object, size_t size) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log) return;
+  LogMessageBuilder msg(this);
   msg.Append("new,%s,0x%" V8PRIxPTR ",%u\n", name, object,
              static_cast<unsigned int>(size));
   msg.WriteToLogFile();
@@ -647,19 +666,28 @@ void Logger::NewEvent(const char* name, void* object, size_t size) {
 
 void Logger::DeleteEvent(const char* name, void* object) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log) return;
+  LogMessageBuilder msg(this);
   msg.Append("delete,%s,0x%" V8PRIxPTR "\n", name, object);
   msg.WriteToLogFile();
 #endif
 }
 
 
+void Logger::NewEventStatic(const char* name, void* object, size_t size) {
+  LOGGER->NewEvent(name, object, size);
+}
+
+
+void Logger::DeleteEventStatic(const char* name, void* object) {
+  LOGGER->DeleteEvent(name, object);
+}
+
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::CallbackEventInternal(const char* prefix, const char* name,
                                    Address entry_point) {
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%s,",
              kLogEventsNames[CODE_CREATION_EVENT],
              kLogEventsNames[CALLBACK_TAG]);
@@ -673,7 +701,7 @@ void Logger::CallbackEventInternal(const char* prefix, const char* name,
 
 void Logger::CallbackEvent(String* name, Address entry_point) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
   SmartPointer<char> str =
       name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   CallbackEventInternal("", *str, entry_point);
@@ -683,7 +711,7 @@ void Logger::CallbackEvent(String* name, Address entry_point) {
 
 void Logger::GetterCallbackEvent(String* name, Address entry_point) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
   SmartPointer<char> str =
       name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   CallbackEventInternal("get ", *str, entry_point);
@@ -693,7 +721,7 @@ void Logger::GetterCallbackEvent(String* name, Address entry_point) {
 
 void Logger::SetterCallbackEvent(String* name, Address entry_point) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
   SmartPointer<char> str =
       name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   CallbackEventInternal("set ", *str, entry_point);
@@ -705,8 +733,8 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
                              Code* code,
                              const char* comment) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%s,",
              kLogEventsNames[CODE_CREATION_EVENT],
              kLogEventsNames[tag]);
@@ -758,9 +786,12 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
                              SharedFunctionInfo* shared,
                              String* name) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  if (code == Builtins::builtin(Builtins::LazyCompile)) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  if (code == Isolate::Current()->builtins()->builtin(
+      Builtins::LazyCompile))
+    return;
+
+  LogMessageBuilder msg(this);
   SmartPointer<char> str =
       name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   msg.Append("%s,%s,",
@@ -785,8 +816,8 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
                              SharedFunctionInfo* shared,
                              String* source, int line) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   SmartPointer<char> name =
       shared->DebugName()->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   SmartPointer<char> sourcestr =
@@ -811,8 +842,8 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
 
 void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%s,",
              kLogEventsNames[CODE_CREATION_EVENT],
              kLogEventsNames[tag]);
@@ -827,8 +858,8 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
 
 void Logger::CodeMovingGCEvent() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s\n", kLogEventsNames[CODE_MOVING_GC]);
   msg.WriteToLogFile();
   OS::SignalCodeMovingGC();
@@ -838,8 +869,8 @@ void Logger::CodeMovingGCEvent() {
 
 void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%s,",
              kLogEventsNames[CODE_CREATION_EVENT],
              kLogEventsNames[REG_EXP_TAG]);
@@ -870,8 +901,8 @@ void Logger::CodeDeleteEvent(Address from) {
 
 void Logger::SnapshotPositionEvent(Address addr, int pos) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_snapshot_positions) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_snapshot_positions) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,", kLogEventsNames[SNAPSHOT_POSITION_EVENT]);
   msg.AppendAddress(addr);
   msg.Append(",%d", pos);
@@ -892,8 +923,8 @@ void Logger::SharedFunctionInfoMoveEvent(Address from, Address to) {
 void Logger::MoveEventInternal(LogEventsAndTags event,
                                Address from,
                                Address to) {
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,", kLogEventsNames[event]);
   msg.AppendAddress(from);
   msg.Append(',');
@@ -906,8 +937,8 @@ void Logger::MoveEventInternal(LogEventsAndTags event,
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
-  if (!Log::IsEnabled() || !FLAG_log_code) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_code) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,", kLogEventsNames[event]);
   msg.AppendAddress(from);
   msg.Append('\n');
@@ -918,8 +949,8 @@ void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
 
 void Logger::ResourceEvent(const char* name, const char* tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,%s,", name, tag);
 
   uint32_t sec, usec;
@@ -936,11 +967,11 @@ void Logger::ResourceEvent(const char* name, const char* tag) {
 
 void Logger::SuspectReadEvent(String* name, Object* obj) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_suspect) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_suspect) return;
+  LogMessageBuilder msg(this);
   String* class_name = obj->IsJSObject()
                        ? JSObject::cast(obj)->class_name()
-                       : Heap::empty_string();
+                       : HEAP->empty_string();
   msg.Append("suspect-read,");
   msg.Append(class_name);
   msg.Append(',');
@@ -955,8 +986,8 @@ void Logger::SuspectReadEvent(String* name, Object* obj) {
 
 void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   // Using non-relative system time in order to be able to synchronize with
   // external memory profiling events (e.g. DOM memory size).
   msg.Append("heap-sample-begin,\"%s\",\"%s\",%.0f\n",
@@ -969,8 +1000,8 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
 void Logger::HeapSampleStats(const char* space, const char* kind,
                              intptr_t capacity, intptr_t used) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   msg.Append("heap-sample-stats,\"%s\",\"%s\","
                  "%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
              space, kind, capacity, used);
@@ -981,8 +1012,8 @@ void Logger::HeapSampleStats(const char* space, const char* kind,
 
 void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   msg.Append("heap-sample-end,\"%s\",\"%s\"\n", space, kind);
   msg.WriteToLogFile();
 #endif
@@ -991,8 +1022,8 @@ void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
 
 void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   msg.Append("heap-sample-item,%s,%d,%d\n", type, number, bytes);
   msg.WriteToLogFile();
 #endif
@@ -1002,32 +1033,32 @@ void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
 void Logger::HeapSampleJSConstructorEvent(const char* constructor,
                                           int number, int bytes) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
   msg.WriteToLogFile();
 #endif
 }
 
+// Event starts with comma, so we don't have it in the format string.
+static const char kEventText[] = "heap-js-ret-item,%s";
+// We take placeholder strings into account, but it's OK to be conservative.
+static const int kEventTextLen = sizeof(kEventText)/sizeof(kEventText[0]);
 
 void Logger::HeapSampleJSRetainersEvent(
     const char* constructor, const char* event) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  // Event starts with comma, so we don't have it in the format string.
-  static const char* event_text = "heap-js-ret-item,%s";
-  // We take placeholder strings into account, but it's OK to be conservative.
-  static const int event_text_len = StrLength(event_text);
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
   const int cons_len = StrLength(constructor);
   const int event_len = StrLength(event);
   int pos = 0;
   // Retainer lists can be long. We may need to split them into multiple events.
   do {
-    LogMessageBuilder msg;
-    msg.Append(event_text, constructor);
+    LogMessageBuilder msg(this);
+    msg.Append(kEventText, constructor);
     int to_write = event_len - pos;
-    if (to_write > Log::kMessageBufferSize - (cons_len + event_text_len)) {
-      int cut_pos = pos + Log::kMessageBufferSize - (cons_len + event_text_len);
+    if (to_write > Log::kMessageBufferSize - (cons_len + kEventTextLen)) {
+      int cut_pos = pos + Log::kMessageBufferSize - (cons_len + kEventTextLen);
       ASSERT(cut_pos < event_len);
       while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
       if (event[cut_pos] != ',') {
@@ -1053,8 +1084,8 @@ void Logger::HeapSampleJSRetainersEvent(
 void Logger::HeapSampleJSProducerEvent(const char* constructor,
                                        Address* stack) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_gc) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log_gc) return;
+  LogMessageBuilder msg(this);
   msg.Append("heap-js-prod-item,%s", constructor);
   while (*stack != NULL) {
     msg.Append(",0x%" V8PRIxPTR, *stack++);
@@ -1067,8 +1098,8 @@ void Logger::HeapSampleJSProducerEvent(const char* constructor,
 
 void Logger::DebugTag(const char* call_site_tag) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_log) return;
+  LogMessageBuilder msg(this);
   msg.Append("debug-tag,%s\n", call_site_tag);
   msg.WriteToLogFile();
 #endif
@@ -1077,13 +1108,13 @@ void Logger::DebugTag(const char* call_site_tag) {
 
 void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log) return;
+  if (!log_->IsEnabled() || !FLAG_log) return;
   StringBuilder s(parameter.length() + 1);
   for (int i = 0; i < parameter.length(); ++i) {
     s.AddCharacter(static_cast<char>(parameter[i]));
   }
   char* parameter_string = s.Finalize();
-  LogMessageBuilder msg;
+  LogMessageBuilder msg(this);
   msg.Append("debug-queue-event,%s,%15.3f,%s\n",
              event_type,
              OS::TimeCurrentMillis(),
@@ -1096,8 +1127,8 @@ void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 void Logger::TickEvent(TickSample* sample, bool overflow) {
-  if (!Log::IsEnabled() || !FLAG_prof) return;
-  LogMessageBuilder msg;
+  if (!log_->IsEnabled() || !FLAG_prof) return;
+  LogMessageBuilder msg(this);
   msg.Append("%s,", kLogEventsNames[TICK_EVENT]);
   msg.AppendAddress(sample->pc);
   msg.Append(',');
@@ -1130,7 +1161,7 @@ int Logger::GetActiveProfilerModules() {
 
 
 void Logger::PauseProfiler(int flags, int tag) {
-  if (!Log::IsEnabled()) return;
+  if (!log_->IsEnabled()) return;
   if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) {
     // It is OK to have negative nesting.
     if (--cpu_profiler_nesting_ == 0) {
@@ -1141,7 +1172,7 @@ void Logger::PauseProfiler(int flags, int tag) {
         }
         FLAG_log_code = false;
         // Must be the same message as Log::kDynamicBufferSeal.
-        LOG(UncheckedStringEvent("profiler", "pause"));
+        LOG(ISOLATE, UncheckedStringEvent("profiler", "pause"));
       }
       --logging_nesting_;
     }
@@ -1160,7 +1191,7 @@ void Logger::PauseProfiler(int flags, int tag) {
 
 
 void Logger::ResumeProfiler(int flags, int tag) {
-  if (!Log::IsEnabled()) return;
+  if (!log_->IsEnabled()) return;
   if (tag != 0) {
     UncheckedIntEvent("open-tag", tag);
   }
@@ -1169,7 +1200,7 @@ void Logger::ResumeProfiler(int flags, int tag) {
       ++logging_nesting_;
       if (FLAG_prof_lazy) {
         profiler_->Engage();
-        LOG(UncheckedStringEvent("profiler", "resume"));
+        LOG(ISOLATE, UncheckedStringEvent("profiler", "resume"));
         FLAG_log_code = true;
         LogCompiledFunctions();
         LogAccessorCallbacks();
@@ -1192,8 +1223,7 @@ void Logger::ResumeProfiler(int flags, int tag) {
 
 // This function can be called when Log's mutex is acquired,
 // either from main or Profiler's thread.
-void Logger::StopLoggingAndProfiling() {
-  Log::stop();
+void Logger::LogFailure() {
   PauseProfiler(PROFILER_MODULE_CPU, 0);
 }
 
@@ -1204,7 +1234,7 @@ bool Logger::IsProfilerSamplerActive() {
 
 
 int Logger::GetLogLines(int from_pos, char* dest_buf, int max_size) {
-  return Log::GetLogLines(from_pos, dest_buf, max_size);
+  return log_->GetLogLines(from_pos, dest_buf, max_size);
 }
 
 
@@ -1326,14 +1356,14 @@ void Logger::LogCodeObject(Object* object) {
         tag = Logger::KEYED_CALL_IC_TAG;
         break;
     }
-    PROFILE(CodeCreateEvent(tag, code_object, description));
+    PROFILE(ISOLATE, CodeCreateEvent(tag, code_object, description));
   }
 }
 
 
 void Logger::LogCodeInfo() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (!Log::IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
+  if (!log_->IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
 #if V8_TARGET_ARCH_IA32
   const char arch[] = "ia32";
 #elif V8_TARGET_ARCH_X64
@@ -1343,7 +1373,7 @@ void Logger::LogCodeInfo() {
 #else
   const char arch[] = "unknown";
 #endif
-  LogMessageBuilder msg;
+  LogMessageBuilder msg(this);
   msg.Append("code-info,%s,%d\n", arch, Code::kHeaderSize);
   msg.WriteToLogFile();
 #endif  // ENABLE_LOGGING_AND_PROFILING
@@ -1351,10 +1381,10 @@ void Logger::LogCodeInfo() {
 
 
 void Logger::LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg) {
-  if (!FLAG_ll_prof || Log::output_code_handle_ == NULL) return;
-  int pos = static_cast<int>(ftell(Log::output_code_handle_));
+  if (!FLAG_ll_prof || log_->output_code_handle_ == NULL) return;
+  int pos = static_cast<int>(ftell(log_->output_code_handle_));
   size_t rv = fwrite(code->instruction_start(), 1, code->instruction_size(),
-                     Log::output_code_handle_);
+                     log_->output_code_handle_);
   ASSERT(static_cast<size_t>(code->instruction_size()) == rv);
   USE(rv);
   msg->Append(",%d", pos);
@@ -1380,7 +1410,9 @@ void Logger::LogCompiledFunctions() {
   // During iteration, there can be heap allocation due to
   // GetScriptLineNumber call.
   for (int i = 0; i < compiled_funcs_count; ++i) {
-    if (*code_objects[i] == Builtins::builtin(Builtins::LazyCompile)) continue;
+    if (*code_objects[i] == Isolate::Current()->builtins()->builtin(
+        Builtins::LazyCompile))
+      continue;
     Handle<SharedFunctionInfo> shared = sfis[i];
     Handle<String> func_name(shared->DebugName());
     if (shared->script()->IsScript()) {
@@ -1389,20 +1421,23 @@ void Logger::LogCompiledFunctions() {
         Handle<String> script_name(String::cast(script->name()));
         int line_num = GetScriptLineNumber(script, shared->start_position());
         if (line_num > 0) {
-          PROFILE(CodeCreateEvent(
-              Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
-              *code_objects[i], *shared,
-              *script_name, line_num + 1));
+          PROFILE(ISOLATE,
+                  CodeCreateEvent(
+                    Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
+                    *code_objects[i], *shared,
+                    *script_name, line_num + 1));
         } else {
           // Can't distinguish eval and script here, so always use Script.
-          PROFILE(CodeCreateEvent(
-              Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
-              *code_objects[i], *shared, *script_name));
+          PROFILE(ISOLATE,
+                  CodeCreateEvent(
+                      Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
+                      *code_objects[i], *shared, *script_name));
         }
       } else {
-        PROFILE(CodeCreateEvent(
-            Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
-            *code_objects[i], *shared, *func_name));
+        PROFILE(ISOLATE,
+                CodeCreateEvent(
+                    Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
+                    *code_objects[i], *shared, *func_name));
       }
     } else if (shared->IsApiFunction()) {
       // API function.
@@ -1412,11 +1447,13 @@ void Logger::LogCompiledFunctions() {
         CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
         Object* callback_obj = call_data->callback();
         Address entry_point = v8::ToCData<Address>(callback_obj);
-        PROFILE(CallbackEvent(*func_name, entry_point));
+        PROFILE(ISOLATE, CallbackEvent(*func_name, entry_point));
       }
     } else {
-      PROFILE(CodeCreateEvent(
-          Logger::LAZY_COMPILE_TAG, *code_objects[i], *shared, *func_name));
+      PROFILE(ISOLATE,
+              CodeCreateEvent(
+                  Logger::LAZY_COMPILE_TAG, *code_objects[i],
+                  *shared, *func_name));
     }
   }
 }
@@ -1425,6 +1462,7 @@ void Logger::LogCompiledFunctions() {
 void Logger::LogAccessorCallbacks() {
   AssertNoAllocation no_alloc;
   HeapIterator iterator;
+  i::Isolate* isolate = ISOLATE;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
     if (!obj->IsAccessorInfo()) continue;
     AccessorInfo* ai = AccessorInfo::cast(obj);
@@ -1432,11 +1470,11 @@ void Logger::LogAccessorCallbacks() {
     String* name = String::cast(ai->name());
     Address getter_entry = v8::ToCData<Address>(ai->getter());
     if (getter_entry != 0) {
-      PROFILE(GetterCallbackEvent(name, getter_entry));
+      PROFILE(isolate, GetterCallbackEvent(name, getter_entry));
     }
     Address setter_entry = v8::ToCData<Address>(ai->setter());
     if (setter_entry != 0) {
-      PROFILE(SetterCallbackEvent(name, setter_entry));
+      PROFILE(isolate, SetterCallbackEvent(name, setter_entry));
     }
   }
 }
@@ -1446,19 +1484,9 @@ void Logger::LogAccessorCallbacks() {
 
 bool Logger::Setup() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  // --log-all enables all the log flags.
-  if (FLAG_log_all) {
-    FLAG_log_runtime = true;
-    FLAG_log_api = true;
-    FLAG_log_code = true;
-    FLAG_log_gc = true;
-    FLAG_log_suspect = true;
-    FLAG_log_handles = true;
-    FLAG_log_regexp = true;
-  }
-
-  // --prof implies --log-code.
-  if (FLAG_prof) FLAG_log_code = true;
+  // Tests and EnsureInitialize() can call this twice in a row. It's harmless.
+  if (is_initialized_) return true;
+  is_initialized_ = true;
 
   // --ll-prof implies --log-code and --log-snapshot-positions.
   if (FLAG_ll_prof) {
@@ -1472,73 +1500,30 @@ bool Logger::Setup() {
     FLAG_prof_auto = false;
   }
 
-  bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
-      || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
-      || FLAG_log_regexp || FLAG_log_state_changes;
-
-  bool open_log_file = start_logging || FLAG_prof_lazy;
-
-  // If we're logging anything, we need to open the log file.
-  if (open_log_file) {
-    if (strcmp(FLAG_logfile, "-") == 0) {
-      Log::OpenStdout();
-    } else if (strcmp(FLAG_logfile, "*") == 0) {
-      Log::OpenMemoryBuffer();
-    } else if (strchr(FLAG_logfile, '%') != NULL) {
-      // If there's a '%' in the log file name we have to expand
-      // placeholders.
-      HeapStringAllocator allocator;
-      StringStream stream(&allocator);
-      for (const char* p = FLAG_logfile; *p; p++) {
-        if (*p == '%') {
-          p++;
-          switch (*p) {
-            case '\0':
-              // If there's a % at the end of the string we back up
-              // one character so we can escape the loop properly.
-              p--;
-              break;
-            case 't': {
-              // %t expands to the current time in milliseconds.
-              double time = OS::TimeCurrentMillis();
-              stream.Add("%.0f", FmtElm(time));
-              break;
-            }
-            case '%':
-              // %% expands (contracts really) to %.
-              stream.Put('%');
-              break;
-            default:
-              // All other %'s expand to themselves.
-              stream.Put('%');
-              stream.Put(*p);
-              break;
-          }
-        } else {
-          stream.Put(*p);
-        }
-      }
-      SmartPointer<const char> expanded = stream.ToCString();
-      Log::OpenFile(*expanded);
-    } else {
-      Log::OpenFile(FLAG_logfile);
-    }
-  }
+  // TODO(isolates): this assert introduces a cyclic dependency (logger
+  // -> thread local top -> heap -> logger).
+  // ASSERT(VMState::is_outermost_external());
+
+  log_->Initialize();
 
   if (FLAG_ll_prof) LogCodeInfo();
 
-  ticker_ = new Ticker(kSamplingIntervalMs);
+  ticker_ = new Ticker(Isolate::Current(), kSamplingIntervalMs);
 
   if (FLAG_sliding_state_window && sliding_state_window_ == NULL) {
     sliding_state_window_ = new SlidingStateWindow();
   }
 
+  bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
+    || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
+    || FLAG_log_regexp || FLAG_log_state_changes;
+
   if (start_logging) {
     logging_nesting_ = 1;
   }
 
   if (FLAG_prof) {
-    profiler_ = new Profiler();
+    profiler_ = new Profiler(Isolate::Current());
     if (!FLAG_prof_auto) {
       profiler_->pause();
     } else {
@@ -1549,7 +1534,6 @@ bool Logger::Setup() {
     }
   }
 
-  LogMessageBuilder::set_write_failure_handler(StopLoggingAndProfiling);
   return true;
 
 #else
@@ -1558,6 +1542,11 @@ bool Logger::Setup() {
 }
 
 
+Sampler* Logger::sampler() {
+  return ticker_;
+}
+
+
 void Logger::EnsureTickerStarted() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   ASSERT(ticker_ != NULL);
@@ -1575,7 +1564,8 @@ void Logger::EnsureTickerStopped() {
 
 void Logger::TearDown() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  LogMessageBuilder::set_write_failure_handler(NULL);
+  if (!is_initialized_) return;
+  is_initialized_ = false;
 
   // Stop the profiler before closing the file.
   if (profiler_ != NULL) {
@@ -1590,7 +1580,7 @@ void Logger::TearDown() {
   delete ticker_;
   ticker_ = NULL;
 
-  Log::Close();
+  log_->Close();
 #endif
 }
 
@@ -1613,4 +1603,56 @@ void Logger::EnableSlidingStateWindow() {
 #endif
 }
 
+
+Mutex* SamplerRegistry::mutex_ = OS::CreateMutex();
+List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
+
+
+bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
+  ScopedLock lock(mutex_);
+  for (int i = 0;
+       ActiveSamplersExist() && i < active_samplers_->length();
+       ++i) {
+    func(active_samplers_->at(i), param);
+  }
+  return ActiveSamplersExist();
+}
+
+
+static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
+  bool* flag = reinterpret_cast<bool*>(flag_ptr);
+  *flag |= sampler->IsProfiling();
+}
+
+
+SamplerRegistry::State SamplerRegistry::GetState() {
+  bool flag = false;
+  if (!IterateActiveSamplers(&ComputeCpuProfiling, &flag)) {
+    return HAS_NO_SAMPLERS;
+  }
+  return flag ? HAS_CPU_PROFILING_SAMPLERS : HAS_SAMPLERS;
+}
+
+
+void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
+  ASSERT(sampler->IsActive());
+  ScopedLock lock(mutex_);
+  if (active_samplers_ == NULL) {
+    active_samplers_ = new List<Sampler*>;
+  } else {
+    ASSERT(!active_samplers_->Contains(sampler));
+  }
+  active_samplers_->Add(sampler);
+}
+
+
+void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
+  ASSERT(sampler->IsActive());
+  ScopedLock lock(mutex_);
+  ASSERT(active_samplers_ != NULL);
+  bool removed = active_samplers_->RemoveElement(sampler);
+  ASSERT(removed);
+  USE(removed);
+}
+
 } }  // namespace v8::internal
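
SamplerRegistry above replaces per-logger sampler bookkeeping with a single process-wide list guarded by a mutex: samplers add and remove themselves, and clients query the aggregate state via GetState() or walk the list with IterateActiveSamplers(). A hedged, self-contained sketch of the same iterate-under-lock pattern using standard library types (V8 itself uses its own Mutex and List classes, and the names below are illustrative only):

#include <cassert>
#include <mutex>
#include <vector>

struct FakeSampler { bool profiling = false; };  // illustrative stand-in

class Registry {
 public:
  typedef void (*Visitor)(FakeSampler*, void*);

  static void Add(FakeSampler* s) {
    std::lock_guard<std::mutex> lock(mutex_);
    samplers_.push_back(s);
  }

  // Visit every registered sampler while holding the lock and report whether
  // any samplers exist, mirroring SamplerRegistry::IterateActiveSamplers.
  static bool Iterate(Visitor visit, void* param) {
    std::lock_guard<std::mutex> lock(mutex_);
    for (FakeSampler* s : samplers_) visit(s, param);
    return !samplers_.empty();
  }

 private:
  static std::mutex mutex_;
  static std::vector<FakeSampler*> samplers_;
};

std::mutex Registry::mutex_;
std::vector<FakeSampler*> Registry::samplers_;

// Visitor in the style of ComputeCpuProfiling: OR together the profiling bits.
static void AnyProfiling(FakeSampler* s, void* flag_ptr) {
  *static_cast<bool*>(flag_ptr) |= s->profiling;
}

int main() {
  FakeSampler sampler;
  Registry::Add(&sampler);
  bool any_profiling = false;
  bool has_samplers = Registry::Iterate(&AnyProfiling, &any_profiling);
  assert(has_samplers && !any_profiling);  // registered, but not profiling yet
  return 0;
}
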
index 6e1736d5621e08b6b9e8e2a1b66e44c905d686b4..4fb0e230f0f59d458d910707bebe405736677550 100644 (file)
--- a/src/log.h
+++ b/src/log.h
@@ -77,13 +77,15 @@ class LogMessageBuilder;
 
 #undef LOG
 #ifdef ENABLE_LOGGING_AND_PROFILING
-#define LOG(Call)                           \
-  do {                                      \
-    if (v8::internal::Logger::is_logging()) \
-      v8::internal::Logger::Call;           \
+#define LOG(isolate, Call)                          \
+  do {                                              \
+    v8::internal::Logger* logger =                  \
+        (isolate)->logger();                        \
+    if (logger->is_logging())                       \
+      logger->Call;                                 \
   } while (false)
 #else
-#define LOG(Call) ((void) 0)
+#define LOG(isolate, Call) ((void) 0)
 #endif
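
The reworked LOG macro above threads the isolate through explicitly: it fetches that isolate's Logger instance and only dispatches the member call when logging is enabled, replacing the old static Logger::Call path. A minimal, self-contained C++ analogue of this dispatch pattern (MyIsolate and MyLogger are illustrative stand-ins, not V8 types):

#include <cstdio>

// Illustrative stand-ins for the per-isolate logger wiring; not V8 types.
struct MyLogger {
  bool logging = false;
  void StringEvent(const char* name, const char* value) {
    std::printf("%s,%s\n", name, value);
  }
};

struct MyIsolate {
  MyLogger logger_;
  MyLogger* logger() { return &logger_; }
};

// Same shape as the new LOG(isolate, Call): look up the logger on the given
// isolate, check the cheap is-logging flag, then invoke the member call.
#define MY_LOG(isolate, Call)                \
  do {                                       \
    MyLogger* logger = (isolate)->logger();  \
    if (logger->logging) logger->Call;       \
  } while (false)

int main() {
  MyIsolate isolate;
  isolate.logger()->logging = true;
  MY_LOG(&isolate, StringEvent("profiler", "resume"));  // prints "profiler,resume"
  return 0;
}
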
 
 #define LOG_EVENTS_AND_TAGS_LIST(V) \
@@ -133,6 +135,9 @@ class LogMessageBuilder;
 // original tags when writing to the log.
 
 
+class Sampler;
+
+
 class Logger {
  public:
 #define DECLARE_ENUM(enum_item, ignore) enum_item,
@@ -143,142 +148,147 @@ class Logger {
 #undef DECLARE_ENUM
 
   // Acquires resources for logging if the right flags are set.
-  static bool Setup();
+  bool Setup();
 
-  static void EnsureTickerStarted();
-  static void EnsureTickerStopped();
+  void EnsureTickerStarted();
+  void EnsureTickerStopped();
+
+  Sampler* sampler();
 
   // Frees resources acquired in Setup.
-  static void TearDown();
+  void TearDown();
 
   // Enable the computation of a sliding window of states.
-  static void EnableSlidingStateWindow();
+  void EnableSlidingStateWindow();
 
   // Emits an event with a string value -> (name, value).
-  static void StringEvent(const char* name, const char* value);
+  void StringEvent(const char* name, const char* value);
 
   // Emits an event with an int value -> (name, value).
-  static void IntEvent(const char* name, int value);
-  static void IntPtrTEvent(const char* name, intptr_t value);
+  void IntEvent(const char* name, int value);
+  void IntPtrTEvent(const char* name, intptr_t value);
 
   // Emits an event with a handle value -> (name, location).
-  static void HandleEvent(const char* name, Object** location);
+  void HandleEvent(const char* name, Object** location);
 
   // Emits memory management events for C allocated structures.
-  static void NewEvent(const char* name, void* object, size_t size);
-  static void DeleteEvent(const char* name, void* object);
+  void NewEvent(const char* name, void* object, size_t size);
+  void DeleteEvent(const char* name, void* object);
+
+  // Static versions of the above, operating on the current isolate's logger.
+  // Used in TRACK_MEMORY(TypeName) defined in globals.h
+  static void NewEventStatic(const char* name, void* object, size_t size);
+  static void DeleteEventStatic(const char* name, void* object);
 
   // Emits an event with a tag, and some resource usage information.
   // -> (name, tag, <rusage information>).
   // Currently, the resource usage information is a process time stamp
   // and a real time timestamp.
-  static void ResourceEvent(const char* name, const char* tag);
+  void ResourceEvent(const char* name, const char* tag);
 
   // Emits an event that an undefined property was read from an
   // object.
-  static void SuspectReadEvent(String* name, Object* obj);
+  void SuspectReadEvent(String* name, Object* obj);
 
   // Emits an event when a message is put on or read from a debugging queue.
   // DebugTag lets us put a call-site specific label on the event.
-  static void DebugTag(const char* call_site_tag);
-  static void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
+  void DebugTag(const char* call_site_tag);
+  void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
 
 
   // ==== Events logged by --log-api. ====
-  static void ApiNamedSecurityCheck(Object* key);
-  static void ApiIndexedSecurityCheck(uint32_t index);
-  static void ApiNamedPropertyAccess(const char* tag,
-                                     JSObject* holder,
-                                     Object* name);
-  static void ApiIndexedPropertyAccess(const char* tag,
-                                       JSObject* holder,
-                                       uint32_t index);
-  static void ApiObjectAccess(const char* tag, JSObject* obj);
-  static void ApiEntryCall(const char* name);
+  void ApiNamedSecurityCheck(Object* key);
+  void ApiIndexedSecurityCheck(uint32_t index);
+  void ApiNamedPropertyAccess(const char* tag, JSObject* holder, Object* name);
+  void ApiIndexedPropertyAccess(const char* tag,
+                                JSObject* holder,
+                                uint32_t index);
+  void ApiObjectAccess(const char* tag, JSObject* obj);
+  void ApiEntryCall(const char* name);
 
 
   // ==== Events logged by --log-code. ====
   // Emits a code event for a callback function.
-  static void CallbackEvent(String* name, Address entry_point);
-  static void GetterCallbackEvent(String* name, Address entry_point);
-  static void SetterCallbackEvent(String* name, Address entry_point);
+  void CallbackEvent(String* name, Address entry_point);
+  void GetterCallbackEvent(String* name, Address entry_point);
+  void SetterCallbackEvent(String* name, Address entry_point);
   // Emits a code create event.
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code, const char* source);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code, String* name);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code,
-                              SharedFunctionInfo* shared,
-                              String* name);
-  static void CodeCreateEvent(LogEventsAndTags tag,
-                              Code* code,
-                              SharedFunctionInfo* shared,
-                              String* source, int line);
-  static void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
-  static void CodeMovingGCEvent();
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code, const char* source);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code, String* name);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code,
+                       SharedFunctionInfo* shared,
+                       String* name);
+  void CodeCreateEvent(LogEventsAndTags tag,
+                       Code* code,
+                       SharedFunctionInfo* shared,
+                       String* source, int line);
+  void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
+  void CodeMovingGCEvent();
   // Emits a code create event for a RegExp.
-  static void RegExpCodeCreateEvent(Code* code, String* source);
+  void RegExpCodeCreateEvent(Code* code, String* source);
   // Emits a code move event.
-  static void CodeMoveEvent(Address from, Address to);
+  void CodeMoveEvent(Address from, Address to);
   // Emits a code delete event.
-  static void CodeDeleteEvent(Address from);
+  void CodeDeleteEvent(Address from);
 
-  static void SharedFunctionInfoMoveEvent(Address from, Address to);
+  void SharedFunctionInfoMoveEvent(Address from, Address to);
 
-  static void SnapshotPositionEvent(Address addr, int pos);
+  void SnapshotPositionEvent(Address addr, int pos);
 
   // ==== Events logged by --log-gc. ====
   // Heap sampling events: start, end, and individual types.
-  static void HeapSampleBeginEvent(const char* space, const char* kind);
-  static void HeapSampleEndEvent(const char* space, const char* kind);
-  static void HeapSampleItemEvent(const char* type, int number, int bytes);
-  static void HeapSampleJSConstructorEvent(const char* constructor,
-                                           int number, int bytes);
-  static void HeapSampleJSRetainersEvent(const char* constructor,
+  void HeapSampleBeginEvent(const char* space, const char* kind);
+  void HeapSampleEndEvent(const char* space, const char* kind);
+  void HeapSampleItemEvent(const char* type, int number, int bytes);
+  void HeapSampleJSConstructorEvent(const char* constructor,
+                                    int number, int bytes);
+  void HeapSampleJSRetainersEvent(const char* constructor,
                                          const char* event);
-  static void HeapSampleJSProducerEvent(const char* constructor,
-                                        Address* stack);
-  static void HeapSampleStats(const char* space, const char* kind,
-                              intptr_t capacity, intptr_t used);
-
-  static void SharedLibraryEvent(const char* library_path,
-                                 uintptr_t start,
-                                 uintptr_t end);
-  static void SharedLibraryEvent(const wchar_t* library_path,
-                                 uintptr_t start,
-                                 uintptr_t end);
+  void HeapSampleJSProducerEvent(const char* constructor,
+                                 Address* stack);
+  void HeapSampleStats(const char* space, const char* kind,
+                       intptr_t capacity, intptr_t used);
+
+  void SharedLibraryEvent(const char* library_path,
+                          uintptr_t start,
+                          uintptr_t end);
+  void SharedLibraryEvent(const wchar_t* library_path,
+                          uintptr_t start,
+                          uintptr_t end);
 
   // ==== Events logged by --log-regexp ====
   // Regexp compilation and execution events.
 
-  static void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
+  void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
 
   // Log an event reported from generated code
-  static void LogRuntime(Vector<const char> format, JSArray* args);
+  void LogRuntime(Vector<const char> format, JSArray* args);
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  static bool is_logging() {
+  bool is_logging() {
     return logging_nesting_ > 0;
   }
 
   // Pause/Resume collection of profiling data.
   // When data collection is paused, CPU Tick events are discarded until
   // data collection is Resumed.
-  static void PauseProfiler(int flags, int tag);
-  static void ResumeProfiler(int flags, int tag);
-  static int GetActiveProfilerModules();
+  void PauseProfiler(int flags, int tag);
+  void ResumeProfiler(int flags, int tag);
+  int GetActiveProfilerModules();
 
   // If logging is performed into a memory buffer, allows retrieving
   // previously written messages. See v8.h.
-  static int GetLogLines(int from_pos, char* dest_buf, int max_size);
+  int GetLogLines(int from_pos, char* dest_buf, int max_size);
 
   // Logs all compiled functions found in the heap.
-  static void LogCompiledFunctions();
+  void LogCompiledFunctions();
   // Logs all accessor callbacks found in the heap.
-  static void LogAccessorCallbacks();
+  void LogAccessorCallbacks();
   // Used for logging stubs found in the snapshot.
-  static void LogCodeObjects();
+  void LogCodeObjects();
 
   // Converts tag to a corresponding NATIVE_... if the script is native.
   INLINE(static LogEventsAndTags ToNativeByScript(LogEventsAndTags, Script*));
@@ -286,70 +296,74 @@ class Logger {
   // Profiler's sampling interval (in milliseconds).
   static const int kSamplingIntervalMs = 1;
 
+  // Callback from Log; stops profiling in case of insufficient resources.
+  void LogFailure();
+
  private:
+  Logger();
+  ~Logger();
 
   // Emits the profiler's first message.
-  static void ProfilerBeginEvent();
+  void ProfilerBeginEvent();
 
   // Emits callback event messages.
-  static void CallbackEventInternal(const char* prefix,
-                                    const char* name,
-                                    Address entry_point);
+  void CallbackEventInternal(const char* prefix,
+                             const char* name,
+                             Address entry_point);
 
   // Internal configurable move event.
-  static void MoveEventInternal(LogEventsAndTags event,
-                                Address from,
-                                Address to);
+  void MoveEventInternal(LogEventsAndTags event, Address from, Address to);
 
   // Internal configurable delete event.
-  static void DeleteEventInternal(LogEventsAndTags event,
-                                  Address from);
+  void DeleteEventInternal(LogEventsAndTags event, Address from);
 
   // Emits the source code of a regexp. Used by regexp events.
-  static void LogRegExpSource(Handle<JSRegExp> regexp);
+  void LogRegExpSource(Handle<JSRegExp> regexp);
 
   // Used for logging stubs found in the snapshot.
-  static void LogCodeObject(Object* code_object);
+  void LogCodeObject(Object* code_object);
 
   // Emits general information about generated code.
-  static void LogCodeInfo();
+  void LogCodeInfo();
 
   // Handles code creation when low-level profiling is active.
-  static void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
+  void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
 
   // Emits a profiler tick event. Used by the profiler thread.
-  static void TickEvent(TickSample* sample, bool overflow);
+  void TickEvent(TickSample* sample, bool overflow);
 
-  static void ApiEvent(const char* name, ...);
+  void ApiEvent(const char* name, ...);
 
   // Logs a StringEvent regardless of whether FLAG_log is true.
-  static void UncheckedStringEvent(const char* name, const char* value);
+  void UncheckedStringEvent(const char* name, const char* value);
 
   // Logs an IntEvent regardless of whether FLAG_log is true.
-  static void UncheckedIntEvent(const char* name, int value);
-  static void UncheckedIntPtrTEvent(const char* name, intptr_t value);
-
-  // Stops logging and profiling in case of insufficient resources.
-  static void StopLoggingAndProfiling();
+  void UncheckedIntEvent(const char* name, int value);
+  void UncheckedIntPtrTEvent(const char* name, intptr_t value);
 
   // Returns whether profiler's sampler is active.
-  static bool IsProfilerSamplerActive();
+  bool IsProfilerSamplerActive();
 
   // The sampler used by the profiler and the sliding state window.
-  static Ticker* ticker_;
+  Ticker* ticker_;
 
   // When the statistical profile is active, profiler_
   // points to a Profiler that handles collection
   // of samples.
-  static Profiler* profiler_;
+  Profiler* profiler_;
 
   // SlidingStateWindow instance keeping a sliding window of the most
   // recent VM states.
-  static SlidingStateWindow* sliding_state_window_;
+  SlidingStateWindow* sliding_state_window_;
+
+  // An array of log event names.
+  const char* const* log_events_;
 
   // Internal implementation classes with access to
   // private members.
   friend class EventLog;
+  friend class Isolate;
+  friend class LogMessageBuilder;
   friend class TimeLog;
   friend class Profiler;
   friend class SlidingStateWindow;
@@ -358,21 +372,72 @@ class Logger {
 
   friend class LoggerTestHelper;
 
-  static int logging_nesting_;
-  static int cpu_profiler_nesting_;
-  static int heap_profiler_nesting_;
+
+  int logging_nesting_;
+  int cpu_profiler_nesting_;
+  int heap_profiler_nesting_;
+
+  Log* log_;
+
+  // Guards against multiple calls to TearDown() that can happen in some tests.
+  // 'true' between Setup() and TearDown().
+  bool is_initialized_;
+
+  // Support for 'incremental addresses' in compressed logs:
+  //  LogMessageBuilder::AppendAddress(Address addr)
+  Address last_address_;
+  //  Logger::TickEvent(...)
+  Address prev_sp_;
+  Address prev_function_;
+  //  Logger::MoveEventInternal(...)
+  Address prev_to_;
+  //  Logger::FunctionCreateEvent(...)
+  Address prev_code_;
 
   friend class CpuProfiler;
 #else
-  static bool is_logging() { return false; }
+  bool is_logging() { return false; }
 #endif
 };
 
 
+// Process-wide registry of samplers.
+class SamplerRegistry : public AllStatic {
+ public:
+  enum State {
+    HAS_NO_SAMPLERS,
+    HAS_SAMPLERS,
+    HAS_CPU_PROFILING_SAMPLERS
+  };
+
+  typedef void (*VisitSampler)(Sampler*, void*);
+
+  static State GetState();
+
+  // Iterates over all active samplers while holding the internal lock.
+  // Returns whether there are any active samplers.
+  static bool IterateActiveSamplers(VisitSampler func, void* param);
+
+  // Adds/Removes an active sampler.
+  static void AddActiveSampler(Sampler* sampler);
+  static void RemoveActiveSampler(Sampler* sampler);
+
+ private:
+  static bool ActiveSamplersExist() {
+    return active_samplers_ != NULL && !active_samplers_->is_empty();
+  }
+
+  static Mutex* mutex_;  // Protects the state below.
+  static List<Sampler*>* active_samplers_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(SamplerRegistry);
+};
+
+
 // Class that extracts stack trace, used for profiling.
 class StackTracer : public AllStatic {
  public:
-  static void Trace(TickSample* sample);
+  static void Trace(Isolate* isolate, TickSample* sample);
 };
 
 } }  // namespace v8::internal
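
With Logger now a per-isolate instance (note the private constructor and the Isolate friend declaration above), former static call sites reach it through the owning isolate, usually via the LOG and PROFILE macros. The shape of the call-site change, as it appears throughout this patch:

// Before (static Logger):
//   LOG(UncheckedStringEvent("profiler", "resume"));
//   PROFILE(CodeCreateEvent(tag, code_object, description));
// After (per-isolate Logger):
//   LOG(ISOLATE, UncheckedStringEvent("profiler", "resume"));
//   PROFILE(ISOLATE, CodeCreateEvent(tag, code_object, description));
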
index b91f7bc512862795014d4fc041c6ac7a220839a6..022636d70b83da1131610131efa6b502c7cc921b 100644 (file)
@@ -44,28 +44,27 @@ namespace internal {
 // -------------------------------------------------------------------------
 // MarkCompactCollector
 
-bool MarkCompactCollector::force_compaction_ = false;
-bool MarkCompactCollector::compacting_collection_ = false;
-bool MarkCompactCollector::compact_on_next_gc_ = false;
-
-int MarkCompactCollector::previous_marked_count_ = 0;
-GCTracer* MarkCompactCollector::tracer_ = NULL;
-
-
+MarkCompactCollector::MarkCompactCollector() :  // NOLINT
+#ifdef DEBUG
+      state_(IDLE),
+#endif
+      force_compaction_(false),
+      compacting_collection_(false),
+      compact_on_next_gc_(false),
+      previous_marked_count_(0),
+      tracer_(NULL),
 #ifdef DEBUG
-MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
-
-// Counters used for debugging the marking phase of mark-compact or mark-sweep
-// collection.
-int MarkCompactCollector::live_bytes_ = 0;
-int MarkCompactCollector::live_young_objects_size_ = 0;
-int MarkCompactCollector::live_old_data_objects_size_ = 0;
-int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
-int MarkCompactCollector::live_code_objects_size_ = 0;
-int MarkCompactCollector::live_map_objects_size_ = 0;
-int MarkCompactCollector::live_cell_objects_size_ = 0;
-int MarkCompactCollector::live_lo_objects_size_ = 0;
+      live_young_objects_size_(0),
+      live_old_pointer_objects_size_(0),
+      live_old_data_objects_size_(0),
+      live_code_objects_size_(0),
+      live_map_objects_size_(0),
+      live_cell_objects_size_(0),
+      live_lo_objects_size_(0),
+      live_bytes_(0),
 #endif
+      heap_(NULL),
+      code_flusher_(NULL) { }
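
The collector's former file-scope statics now live as instance fields initialized in this constructor, and call sites reach the per-heap collector and its isolate instead of class-level statics, for example (taken from the hunks below):

//   Before: PcToCodeCache::FlushPcToCodeCache();
//   After:  heap_->isolate()->pc_to_code_cache()->Flush();
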
 
 
 void MarkCompactCollector::CollectGarbage() {
@@ -87,15 +86,15 @@ void MarkCompactCollector::CollectGarbage() {
     GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
     EncodeForwardingAddresses();
 
-    Heap::MarkMapPointersAsEncoded(true);
+    heap_->MarkMapPointersAsEncoded(true);
     UpdatePointers();
-    Heap::MarkMapPointersAsEncoded(false);
-    PcToCodeCache::FlushPcToCodeCache();
+    heap_->MarkMapPointersAsEncoded(false);
+    heap_->isolate()->pc_to_code_cache()->Flush();
 
     RelocateObjects();
   } else {
     SweepSpaces();
-    PcToCodeCache::FlushPcToCodeCache();
+    heap_->isolate()->pc_to_code_cache()->Flush();
   }
 
   Finish();
@@ -124,7 +123,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
   compact_on_next_gc_ = false;
 
   if (FLAG_never_compact) compacting_collection_ = false;
-  if (!Heap::map_space()->MapPointersEncodable())
+  if (!HEAP->map_space()->MapPointersEncodable())
       compacting_collection_ = false;
   if (FLAG_collect_maps) CreateBackPointers();
 #ifdef ENABLE_GDB_JIT_INTERFACE
@@ -162,9 +161,9 @@ void MarkCompactCollector::Finish() {
   // force lazy re-initialization of it. This must be done after the
   // GC, because it relies on the new address of certain old space
   // objects (empty string, illegal builtin).
-  StubCache::Clear();
+  Isolate::Current()->stub_cache()->Clear();
 
-  ExternalStringTable::CleanUp();
+  heap_->external_string_table_.CleanUp();
 
   // If we've just compacted old space there's no reason to check the
   // fragmentation limit. Just return.
@@ -221,17 +220,19 @@ void MarkCompactCollector::Finish() {
 // and continue with marking.  This process repeats until all reachable
 // objects have been marked.
 
-static MarkingStack marking_stack;
-
-class FlushCode : public AllStatic {
+class CodeFlusher {
  public:
-  static void AddCandidate(SharedFunctionInfo* shared_info) {
+  explicit CodeFlusher(Isolate* isolate)
+      : isolate_(isolate),
+        jsfunction_candidates_head_(NULL),
+        shared_function_info_candidates_head_(NULL) {}
+
+  void AddCandidate(SharedFunctionInfo* shared_info) {
     SetNextCandidate(shared_info, shared_function_info_candidates_head_);
     shared_function_info_candidates_head_ = shared_info;
   }
 
-
-  static void AddCandidate(JSFunction* function) {
+  void AddCandidate(JSFunction* function) {
     ASSERT(function->unchecked_code() ==
            function->unchecked_shared()->unchecked_code());
 
@@ -239,15 +240,14 @@ class FlushCode : public AllStatic {
     jsfunction_candidates_head_ = function;
   }
 
-
-  static void ProcessCandidates() {
+  void ProcessCandidates() {
     ProcessSharedFunctionInfoCandidates();
     ProcessJSFunctionCandidates();
   }
 
  private:
-  static void ProcessJSFunctionCandidates() {
-    Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
+  void ProcessJSFunctionCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile);
 
     JSFunction* candidate = jsfunction_candidates_head_;
     JSFunction* next_candidate;
@@ -271,8 +271,8 @@ class FlushCode : public AllStatic {
   }
 
 
-  static void ProcessSharedFunctionInfoCandidates() {
-    Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
+  void ProcessSharedFunctionInfoCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile);
 
     SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
     SharedFunctionInfo* next_candidate;
@@ -291,27 +291,22 @@ class FlushCode : public AllStatic {
     shared_function_info_candidates_head_ = NULL;
   }
 
-
   static JSFunction** GetNextCandidateField(JSFunction* candidate) {
     return reinterpret_cast<JSFunction**>(
         candidate->address() + JSFunction::kCodeEntryOffset);
   }
 
-
   static JSFunction* GetNextCandidate(JSFunction* candidate) {
     return *GetNextCandidateField(candidate);
   }
 
-
   static void SetNextCandidate(JSFunction* candidate,
                                JSFunction* next_candidate) {
     *GetNextCandidateField(candidate) = next_candidate;
   }
 
-
   STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
 
-
   static SharedFunctionInfo** GetNextCandidateField(
       SharedFunctionInfo* candidate) {
     Code* code = candidate->unchecked_code();
@@ -319,29 +314,34 @@ class FlushCode : public AllStatic {
         code->address() + Code::kHeaderPaddingStart);
   }
 
-
   static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
     return *GetNextCandidateField(candidate);
   }
 
-
   static void SetNextCandidate(SharedFunctionInfo* candidate,
                                SharedFunctionInfo* next_candidate) {
     *GetNextCandidateField(candidate) = next_candidate;
   }
 
-  static JSFunction* jsfunction_candidates_head_;
+  Isolate* isolate_;
+  JSFunction* jsfunction_candidates_head_;
+  SharedFunctionInfo* shared_function_info_candidates_head_;
 
-  static SharedFunctionInfo* shared_function_info_candidates_head_;
+  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
 };
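
CodeFlusher (replacing the all-static FlushCode, whose static list heads are deleted just below) keeps its candidate lists intrusively: the "next" pointer of a JSFunction candidate is stored in the function's code-entry slot, and that of a SharedFunctionInfo in the header padding of its code object, so building the lists needs no extra allocation during GC. A small, self-contained sketch of that intrusive threading (Candidate and its reused slot are illustrative, not V8 types):

#include <cassert>
#include <cstddef>

// Illustrative candidate object with one pointer-sized slot that is reused as
// the intrusive "next" link while the object sits on the flushing list.
struct Candidate {
  void* slot;
};

static Candidate** NextField(Candidate* c) {
  return reinterpret_cast<Candidate**>(&c->slot);
}

struct CandidateList {
  Candidate* head;

  CandidateList() : head(NULL) {}

  void Add(Candidate* c) {  // mirrors AddCandidate: push at the head
    *NextField(c) = head;
    head = c;
  }

  int Process() {  // mirrors ProcessCandidates: walk the list and unlink
    int n = 0;
    for (Candidate* c = head; c != NULL; ++n) {
      Candidate* next = *NextField(c);
      *NextField(c) = NULL;
      c = next;
    }
    head = NULL;
    return n;
  }
};

int main() {
  Candidate a = { NULL };
  Candidate b = { NULL };
  CandidateList list;
  list.Add(&a);
  list.Add(&b);
  assert(list.Process() == 2);
  return 0;
}
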
 
-JSFunction* FlushCode::jsfunction_candidates_head_ = NULL;
 
-SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL;
+MarkCompactCollector::~MarkCompactCollector() {
+  if (code_flusher_ != NULL) {
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
 
 static inline HeapObject* ShortCircuitConsString(Object** p) {
   // Optimization: If the heap object pointed to by p is a non-symbol
-  // cons string whose right substring is Heap::empty_string, update
+  // cons string whose right substring is HEAP->empty_string, update
   // it in place to its left substring.  Return the updated value.
   //
   // Here we assume that if we change *p, we replace it with a heap object
@@ -349,7 +349,7 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
   //
   // The check performed is:
   //   object->IsConsString() && !object->IsSymbol() &&
-  //   (ConsString::cast(object)->second() == Heap::empty_string())
+  //   (ConsString::cast(object)->second() == HEAP->empty_string())
   // except the maps for the object and its possible substrings might be
   // marked.
   HeapObject* object = HeapObject::cast(*p);
@@ -359,7 +359,8 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
   if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
 
   Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
-  if (second != Heap::raw_unchecked_empty_string()) {
+  Heap* heap = map_word.ToMap()->heap();
+  if (second != heap->raw_unchecked_empty_string()) {
     return object;
   }
 
@@ -367,7 +368,7 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
   // page dirty marks. Therefore, we only replace the string with its left
   // substring when page dirty marks do not change.
   Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
-  if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;
+  if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
 
   *p = first;
   return HeapObject::cast(first);
@@ -380,19 +381,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     table_.GetVisitor(map)(map, obj);
   }
 
-  static void EnableCodeFlushing(bool enabled) {
-    if (enabled) {
-      table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
-      table_.Register(kVisitSharedFunctionInfo,
-                      &VisitSharedFunctionInfoAndFlushCode);
-
-    } else {
-      table_.Register(kVisitJSFunction, &VisitJSFunction);
-      table_.Register(kVisitSharedFunctionInfo,
-                      &VisitSharedFunctionInfoGeneric);
-    }
-  }
-
   static void Initialize() {
     table_.Register(kVisitShortcutCandidate,
                     &FixedBodyVisitor<StaticMarkingVisitor,
@@ -454,18 +442,18 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                                    kVisitStructGeneric>();
   }
 
-  INLINE(static void VisitPointer(Object** p)) {
-    MarkObjectByPointer(p);
+  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
+    MarkObjectByPointer(heap, p);
   }
 
-  INLINE(static void VisitPointers(Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
     // Mark all objects pointed to in [start, end).
     const int kMinRangeForMarkingRecursion = 64;
     if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(start, end)) return;
+      if (VisitUnmarkedObjects(heap, start, end)) return;
       // We are close to a stack overflow, so just mark the objects.
     }
-    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
+    for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
   }
 
   static inline void VisitCodeTarget(RelocInfo* rinfo) {
@@ -474,9 +462,9 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
       IC::Clear(rinfo->pc());
       // Please note targets for cleared inline caches do not have to be
-      // marked since they are contained in Heap::non_monomorphic_cache().
+      // marked since they are contained in HEAP->non_monomorphic_cache().
     } else {
-      MarkCompactCollector::MarkObject(code);
+      HEAP->mark_compact_collector()->MarkObject(code);
     }
   }
 
@@ -484,7 +472,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
     Object* cell = rinfo->target_cell();
     Object* old_cell = cell;
-    VisitPointer(&cell);
+    VisitPointer(HEAP, &cell);
     if (cell != old_cell) {
       rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
     }
@@ -496,34 +484,38 @@ class StaticMarkingVisitor : public StaticVisitorBase {
            (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
             rinfo->IsPatchedDebugBreakSlotSequence()));
     HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
-    MarkCompactCollector::MarkObject(code);
+    HEAP->mark_compact_collector()->MarkObject(code);
   }
 
   // Mark object pointed to by p.
-  INLINE(static void MarkObjectByPointer(Object** p)) {
+  INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
     if (!(*p)->IsHeapObject()) return;
     HeapObject* object = ShortCircuitConsString(p);
-    MarkCompactCollector::MarkObject(object);
+    heap->mark_compact_collector()->MarkObject(object);
   }
 
+
   // Visit an unmarked object.
   static inline void VisitUnmarkedObject(HeapObject* obj) {
 #ifdef DEBUG
-    ASSERT(Heap::Contains(obj));
+    ASSERT(HEAP->Contains(obj));
     ASSERT(!obj->IsMarked());
 #endif
     Map* map = obj->map();
-    MarkCompactCollector::SetMark(obj);
+    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    collector->SetMark(obj);
     // Mark the map pointer and the body.
-    MarkCompactCollector::MarkObject(map);
+    collector->MarkObject(map);
     IterateBody(map, obj);
   }
 
   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
+  static inline bool VisitUnmarkedObjects(Heap* heap,
+                                          Object** start,
+                                          Object** end) {
     // Return false if we are close to the stack limit.
-    StackLimitCheck check;
+    StackLimitCheck check(heap->isolate());
     if (check.HasOverflowed()) return false;
 
     // Visit the unmarked objects.
@@ -559,7 +551,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                               void> StructObjectVisitor;
 
   static void VisitCode(Map* map, HeapObject* object) {
-    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
+    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
+        map->heap());
   }
 
   // Code flushing support.
@@ -569,21 +562,20 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   static const int kCodeAgeThreshold = 5;
 
   inline static bool HasSourceCode(SharedFunctionInfo* info) {
-    Object* undefined = Heap::raw_unchecked_undefined_value();
+    Object* undefined = HEAP->raw_unchecked_undefined_value();
     return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
   }
 
 
   inline static bool IsCompiled(JSFunction* function) {
-    return
-        function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
+    return function->unchecked_code() !=
+        Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
   }
 
-
   inline static bool IsCompiled(SharedFunctionInfo* function) {
-    return
-        function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
+    return function->unchecked_code() !=
+        Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
   }
 
   inline static bool IsFlushable(JSFunction* function) {
@@ -645,7 +637,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   }
 
 
-  static bool FlushCodeForFunction(JSFunction* function) {
+  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
     if (!IsFlushable(function)) return false;
 
     // This function's code looks flushable. But we have to postpone the
@@ -653,7 +645,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     // SharedFunctionInfo because some of them might be optimized.
     // That would make the nonoptimized version of the code nonflushable,
     // because it is required for bailing out from optimized code.
-    FlushCode::AddCandidate(function);
+    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
     return true;
   }
 
@@ -676,9 +668,9 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     if (!ctx->IsHeapObject()) return false;
 
     Map* map = SafeMap(ctx);
-    if (!(map == Heap::raw_unchecked_context_map() ||
-          map == Heap::raw_unchecked_catch_context_map() ||
-          map == Heap::raw_unchecked_global_context_map())) {
+    if (!(map == HEAP->raw_unchecked_context_map() ||
+          map == HEAP->raw_unchecked_catch_context_map() ||
+          map == HEAP->raw_unchecked_global_context_map())) {
       return false;
     }
 
@@ -705,29 +697,37 @@ class StaticMarkingVisitor : public StaticVisitorBase {
 
   static void VisitSharedFunctionInfoAndFlushCode(Map* map,
                                                   HeapObject* object) {
+    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitSharedFunctionInfoGeneric(map, object);
+      return;
+    }
     VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
   }
 
 
   static void VisitSharedFunctionInfoAndFlushCodeGeneric(
       Map* map, HeapObject* object, bool known_flush_code_candidate) {
+    Heap* heap = map->heap();
     SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
 
     if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
 
     if (!known_flush_code_candidate) {
       known_flush_code_candidate = IsFlushable(shared);
-      if (known_flush_code_candidate) FlushCode::AddCandidate(shared);
+      if (known_flush_code_candidate) {
+        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
+      }
     }
 
-    VisitSharedFunctionInfoFields(object, known_flush_code_candidate);
+    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
   }
 
 
-  static void VisitCodeEntry(Address entry_address) {
+  static void VisitCodeEntry(Heap* heap, Address entry_address) {
     Object* code = Code::GetObjectFromEntryAddress(entry_address);
     Object* old_code = code;
-    VisitPointer(&code);
+    VisitPointer(heap, &code);
     if (code != old_code) {
       Memory::Address_at(entry_address) =
           reinterpret_cast<Code*>(code)->entry();
@@ -736,16 +736,22 @@ class StaticMarkingVisitor : public StaticVisitorBase {
 
 
   static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+    Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitJSFunction(map, object);
+      return;
+    }
+
     JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
     // The function must have a valid context and not be a builtin.
     bool flush_code_candidate = false;
     if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
-      flush_code_candidate = FlushCodeForFunction(jsfunction);
+      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
     }
 
     if (!flush_code_candidate) {
-      MarkCompactCollector::MarkObject(
-          jsfunction->unchecked_shared()->unchecked_code());
+      collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());
 
       if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
         // For optimized functions we should retain both non-optimized version
@@ -761,8 +767,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
              i < count;
              i++) {
           JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
-          MarkCompactCollector::MarkObject(
-              inlined->unchecked_shared()->unchecked_code());
+          collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
         }
       }
     }
@@ -787,11 +792,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   static inline void VisitJSFunctionFields(Map* map,
                                            JSFunction* object,
                                            bool flush_code_candidate) {
-    VisitPointers(SLOT_ADDR(object, JSFunction::kPropertiesOffset),
+    Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+
+    VisitPointers(heap,
+                  SLOT_ADDR(object, JSFunction::kPropertiesOffset),
                   SLOT_ADDR(object, JSFunction::kCodeEntryOffset));
 
     if (!flush_code_candidate) {
-      VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset);
+      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
     } else {
       // Don't visit code object.
 
@@ -800,15 +809,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
       SharedFunctionInfo* shared_info = object->unchecked_shared();
       if (!shared_info->IsMarked()) {
         Map* shared_info_map = shared_info->map();
-        MarkCompactCollector::SetMark(shared_info);
-        MarkCompactCollector::MarkObject(shared_info_map);
+        collector->SetMark(shared_info);
+        collector->MarkObject(shared_info_map);
         VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
                                                    shared_info,
                                                    true);
       }
     }
 
-    VisitPointers(SLOT_ADDR(object,
+    VisitPointers(heap,
+                  SLOT_ADDR(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
                   SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));
 
@@ -816,15 +826,17 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   }
 
 
-  static void VisitSharedFunctionInfoFields(HeapObject* object,
+  static void VisitSharedFunctionInfoFields(Heap* heap,
+                                            HeapObject* object,
                                             bool flush_code_candidate) {
-    VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
+    VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
 
     if (!flush_code_candidate) {
-      VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
+      VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
     }
 
-    VisitPointers(SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
+    VisitPointers(heap,
+                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
                   SLOT_ADDR(object, SharedFunctionInfo::kSize));
   }
 
@@ -842,12 +854,14 @@ VisitorDispatchTable<StaticMarkingVisitor::Callback>
 
 class MarkingVisitor : public ObjectVisitor {
  public:
+  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
+
   void VisitPointer(Object** p) {
-    StaticMarkingVisitor::VisitPointer(p);
+    StaticMarkingVisitor::VisitPointer(heap_, p);
   }
 
   void VisitPointers(Object** start, Object** end) {
-    StaticMarkingVisitor::VisitPointers(start, end);
+    StaticMarkingVisitor::VisitPointers(heap_, start, end);
   }
 
   void VisitCodeTarget(RelocInfo* rinfo) {
@@ -861,21 +875,33 @@ class MarkingVisitor : public ObjectVisitor {
   void VisitDebugTarget(RelocInfo* rinfo) {
     StaticMarkingVisitor::VisitDebugTarget(rinfo);
   }
+
+ private:
+  Heap* heap_;
 };
 
 
 class CodeMarkingVisitor : public ThreadVisitor {
  public:
+  explicit CodeMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitThread(ThreadLocalTop* top) {
     for (StackFrameIterator it(top); !it.done(); it.Advance()) {
-      MarkCompactCollector::MarkObject(it.frame()->unchecked_code());
+      collector_->MarkObject(it.frame()->unchecked_code());
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };
 
 
 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
  public:
+  explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) VisitPointer(p);
   }
@@ -884,44 +910,52 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
     Object* obj = *slot;
     if (obj->IsSharedFunctionInfo()) {
       SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
-      MarkCompactCollector::MarkObject(shared->unchecked_code());
-      MarkCompactCollector::MarkObject(shared);
+      collector_->MarkObject(shared->unchecked_code());
+      collector_->MarkObject(shared);
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };
 
 
 void MarkCompactCollector::PrepareForCodeFlushing() {
+  ASSERT(heap_ == Isolate::Current()->heap());
+
   if (!FLAG_flush_code) {
-    StaticMarkingVisitor::EnableCodeFlushing(false);
+    EnableCodeFlushing(false);
     return;
   }
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (Debug::IsLoaded() || Debug::has_break_points()) {
-    StaticMarkingVisitor::EnableCodeFlushing(false);
+  if (heap_->isolate()->debug()->IsLoaded() ||
+      heap_->isolate()->debug()->has_break_points()) {
+    EnableCodeFlushing(false);
     return;
   }
 #endif
-  StaticMarkingVisitor::EnableCodeFlushing(true);
+  EnableCodeFlushing(true);
 
   // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
   // relies on it being marked before any other descriptor array.
-  MarkObject(Heap::raw_unchecked_empty_descriptor_array());
+  MarkObject(heap_->raw_unchecked_empty_descriptor_array());
 
   // Make sure we are not referencing the code from the stack.
+  ASSERT(this == heap_->mark_compact_collector());
   for (StackFrameIterator it; !it.done(); it.Advance()) {
     MarkObject(it.frame()->unchecked_code());
   }
 
   // Iterate the archived stacks in all threads to check if
   // the code is referenced.
-  CodeMarkingVisitor code_marking_visitor;
-  ThreadManager::IterateArchivedThreads(&code_marking_visitor);
+  CodeMarkingVisitor code_marking_visitor(this);
+  heap_->isolate()->thread_manager()->IterateArchivedThreads(
+      &code_marking_visitor);
 
-  SharedFunctionInfoMarkingVisitor visitor;
-  CompilationCache::IterateFunctions(&visitor);
-  HandleScopeImplementer::Iterate(&visitor);
+  SharedFunctionInfoMarkingVisitor visitor(this);
+  heap_->isolate()->compilation_cache()->IterateFunctions(&visitor);
+  heap_->isolate()->handle_scope_implementer()->Iterate(&visitor);
 
   ProcessMarkingStack();
 }
@@ -930,6 +964,9 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
 // Visitor class for marking heap roots.
 class RootMarkingVisitor : public ObjectVisitor {
  public:
+  explicit RootMarkingVisitor(Heap* heap)
+    : collector_(heap->mark_compact_collector()) { }
+
   void VisitPointer(Object** p) {
     MarkObjectByPointer(p);
   }
@@ -948,16 +985,18 @@ class RootMarkingVisitor : public ObjectVisitor {
 
     Map* map = object->map();
     // Mark the object.
-    MarkCompactCollector::SetMark(object);
+    collector_->SetMark(object);
 
     // Mark the map pointer and body, and push them on the marking stack.
-    MarkCompactCollector::MarkObject(map);
+    collector_->MarkObject(map);
     StaticMarkingVisitor::IterateBody(map, object);
 
     // Mark all the objects reachable from the map and body.  May leave
     // overflowed objects in the heap.
-    MarkCompactCollector::EmptyMarkingStack();
+    collector_->EmptyMarkingStack();
   }
+
+  MarkCompactCollector* collector_;
 };
 
 
@@ -976,10 +1015,10 @@ class SymbolTableCleaner : public ObjectVisitor {
         // Since no objects have yet been moved we can safely access the map of
         // the object.
         if ((*p)->IsExternalString()) {
-          Heap::FinalizeExternalString(String::cast(*p));
+          HEAP->FinalizeExternalString(String::cast(*p));
         }
         // Set the entry to null_value (as deleted).
-        *p = Heap::raw_unchecked_null_value();
+        *p = HEAP->raw_unchecked_null_value();
         pointers_removed_++;
       }
     }
@@ -1010,11 +1049,11 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
 
 void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
   ASSERT(!object->IsMarked());
-  ASSERT(Heap::Contains(object));
+  ASSERT(HEAP->Contains(object));
   if (object->IsMap()) {
     Map* map = Map::cast(object);
     if (FLAG_cleanup_caches_in_maps_at_gc) {
-      map->ClearCodeCache();
+      map->ClearCodeCache(heap_);
     }
     SetMark(map);
     if (FLAG_collect_maps &&
@@ -1022,11 +1061,11 @@ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
         map->instance_type() <= JS_FUNCTION_TYPE) {
       MarkMapContents(map);
     } else {
-      marking_stack.Push(map);
+      marking_stack_.Push(map);
     }
   } else {
     SetMark(object);
-    marking_stack.Push(object);
+    marking_stack_.Push(object);
   }
 }
 
@@ -1043,7 +1082,7 @@ void MarkCompactCollector::MarkMapContents(Map* map) {
 
   Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
 
-  StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
+  StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
 }
 
 
@@ -1051,7 +1090,7 @@ void MarkCompactCollector::MarkDescriptorArray(
     DescriptorArray* descriptors) {
   if (descriptors->IsMarked()) return;
   // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
+  ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
   SetMark(descriptors);
 
   FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -1074,18 +1113,18 @@ void MarkCompactCollector::MarkDescriptorArray(
       HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
       if (object->IsHeapObject() && !object->IsMarked()) {
         SetMark(object);
-        marking_stack.Push(object);
+        marking_stack_.Push(object);
       }
     }
   }
   // The DescriptorArray descriptors contains a pointer to its contents array,
   // but the contents array is already marked.
-  marking_stack.Push(descriptors);
+  marking_stack_.Push(descriptors);
 }
 
 
 void MarkCompactCollector::CreateBackPointers() {
-  HeapObjectIterator iterator(Heap::map_space());
+  HeapObjectIterator iterator(HEAP->map_space());
   for (HeapObject* next_object = iterator.next();
        next_object != NULL; next_object = iterator.next()) {
     if (next_object->IsMap()) {  // Could also be ByteArray on free list.
@@ -1094,7 +1133,7 @@ void MarkCompactCollector::CreateBackPointers() {
           map->instance_type() <= JS_FUNCTION_TYPE) {
         map->CreateBackPointers();
       } else {
-        ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array());
+        ASSERT(map->instance_descriptors() == HEAP->empty_descriptor_array());
       }
     }
   }
@@ -1111,25 +1150,29 @@ static int OverflowObjectSize(HeapObject* obj) {
 }
 
 
-// Fill the marking stack with overflowed objects returned by the given
-// iterator.  Stop when the marking stack is filled or the end of the space
-// is reached, whichever comes first.
-template<class T>
-static void ScanOverflowedObjects(T* it) {
-  // The caller should ensure that the marking stack is initially not full,
-  // so that we don't waste effort pointlessly scanning for objects.
-  ASSERT(!marking_stack.is_full());
-
-  for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
-    if (object->IsOverflowed()) {
-      object->ClearOverflow();
-      ASSERT(object->IsMarked());
-      ASSERT(Heap::Contains(object));
-      marking_stack.Push(object);
-      if (marking_stack.is_full()) return;
+class OverflowedObjectsScanner : public AllStatic {
+ public:
+  // Fill the marking stack with overflowed objects returned by the given
+  // iterator.  Stop when the marking stack is filled or the end of the space
+  // is reached, whichever comes first.
+  template<class T>
+  static inline void ScanOverflowedObjects(MarkCompactCollector* collector,
+                                           T* it) {
+    // The caller should ensure that the marking stack is initially not full,
+    // so that we don't waste effort pointlessly scanning for objects.
+    ASSERT(!collector->marking_stack_.is_full());
+
+    for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
+      if (object->IsOverflowed()) {
+        object->ClearOverflow();
+        ASSERT(object->IsMarked());
+        ASSERT(HEAP->Contains(object));
+        collector->marking_stack_.Push(object);
+        if (collector->marking_stack_.is_full()) return;
+      }
     }
   }
-}
+};
 
 
 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
@@ -1138,11 +1181,11 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
 
 
 void MarkCompactCollector::MarkSymbolTable() {
-  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
+  SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table();
   // Mark the symbol table itself.
   SetMark(symbol_table);
   // Explicitly mark the prefix.
-  MarkingVisitor marker;
+  MarkingVisitor marker(heap_);
   symbol_table->IteratePrefix(&marker);
   ProcessMarkingStack();
 }
@@ -1151,13 +1194,13 @@ void MarkCompactCollector::MarkSymbolTable() {
 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Mark the heap roots including global variables, stack variables,
   // etc., and all objects reachable from them.
-  Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
+  HEAP->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
 
   // Handle the symbol table specially.
   MarkSymbolTable();
 
   // There may be overflowed objects in the heap.  Visit them now.
-  while (marking_stack.overflowed()) {
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }
@@ -1165,7 +1208,8 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 
 
 void MarkCompactCollector::MarkObjectGroups() {
-  List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();
+  List<ObjectGroup*>* object_groups =
+      heap_->isolate()->global_handles()->object_groups();
 
   for (int i = 0; i < object_groups->length(); i++) {
     ObjectGroup* entry = object_groups->at(i);
@@ -1200,7 +1244,8 @@ void MarkCompactCollector::MarkObjectGroups() {
 
 
 void MarkCompactCollector::MarkImplicitRefGroups() {
-  List<ImplicitRefGroup*>* ref_groups = GlobalHandles::ImplicitRefGroups();
+  List<ImplicitRefGroup*>* ref_groups =
+      heap_->isolate()->global_handles()->implicit_ref_groups();
 
   for (int i = 0; i < ref_groups->length(); i++) {
     ImplicitRefGroup* entry = ref_groups->at(i);
@@ -1230,10 +1275,10 @@ void MarkCompactCollector::MarkImplicitRefGroups() {
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingStack() {
-  while (!marking_stack.is_empty()) {
-    HeapObject* object = marking_stack.Pop();
+  while (!marking_stack_.is_empty()) {
+    HeapObject* object = marking_stack_.Pop();
     ASSERT(object->IsHeapObject());
-    ASSERT(Heap::Contains(object));
+    ASSERT(heap_->Contains(object));
     ASSERT(object->IsMarked());
     ASSERT(!object->IsOverflowed());
 
@@ -1255,38 +1300,38 @@ void MarkCompactCollector::EmptyMarkingStack() {
 // overflowed objects in the heap so the overflow flag on the marking stack
 // is cleared.
 void MarkCompactCollector::RefillMarkingStack() {
-  ASSERT(marking_stack.overflowed());
+  ASSERT(marking_stack_.overflowed());
 
-  SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&new_it);
-  if (marking_stack.is_full()) return;
+  SemiSpaceIterator new_it(HEAP->new_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it);
+  if (marking_stack_.is_full()) return;
 
-  HeapObjectIterator old_pointer_it(Heap::old_pointer_space(),
+  HeapObjectIterator old_pointer_it(HEAP->old_pointer_space(),
                                     &OverflowObjectSize);
-  ScanOverflowedObjects(&old_pointer_it);
-  if (marking_stack.is_full()) return;
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it);
+  if (marking_stack_.is_full()) return;
 
-  HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&old_data_it);
-  if (marking_stack.is_full()) return;
+  HeapObjectIterator old_data_it(HEAP->old_data_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it);
+  if (marking_stack_.is_full()) return;
 
-  HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&code_it);
-  if (marking_stack.is_full()) return;
+  HeapObjectIterator code_it(HEAP->code_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it);
+  if (marking_stack_.is_full()) return;
 
-  HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&map_it);
-  if (marking_stack.is_full()) return;
+  HeapObjectIterator map_it(HEAP->map_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it);
+  if (marking_stack_.is_full()) return;
 
-  HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&cell_it);
-  if (marking_stack.is_full()) return;
+  HeapObjectIterator cell_it(HEAP->cell_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it);
+  if (marking_stack_.is_full()) return;
 
-  LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize);
-  ScanOverflowedObjects(&lo_it);
-  if (marking_stack.is_full()) return;
+  LargeObjectIterator lo_it(HEAP->lo_space(), &OverflowObjectSize);
+  OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it);
+  if (marking_stack_.is_full()) return;
 
-  marking_stack.clear_overflowed();
+  marking_stack_.clear_overflowed();
 }
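
RefillMarkingStack above rescans every space for objects whose overflow bit was set when the fixed-size marking stack ran out of room, stopping early whenever the stack fills up again. A minimal sketch of that overflow-and-rescan strategy, with a hypothetical Obj type and a vector standing in for a heap space:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Hypothetical object: just a mark bit and an overflow bit.
    struct Obj { bool marked = false; bool overflowed = false; };

    class MarkingStack {
     public:
      explicit MarkingStack(std::size_t capacity) : capacity_(capacity) {}
      bool is_full() const { return stack_.size() >= capacity_; }
      bool is_empty() const { return stack_.empty(); }
      bool overflowed() const { return overflowed_; }
      void clear_overflowed() { overflowed_ = false; }
      void Push(Obj* o) {
        if (is_full()) { o->overflowed = true; overflowed_ = true; return; }
        stack_.push_back(o);
      }
      Obj* Pop() { Obj* o = stack_.back(); stack_.pop_back(); return o; }
     private:
      std::size_t capacity_;
      bool overflowed_ = false;
      std::vector<Obj*> stack_;
    };

    // Rescan the "heap" for overflowed objects and push them again, stopping
    // early if the stack fills up; the overflow flag is cleared only when a
    // full pass completes without running out of room.
    void RefillMarkingStack(std::vector<Obj>* heap, MarkingStack* stack) {
      for (Obj& o : *heap) {
        if (o.overflowed) {
          o.overflowed = false;
          stack->Push(&o);
          if (stack->is_full()) return;
        }
      }
      stack->clear_overflowed();
    }

    int main() {
      std::vector<Obj> heap(8);
      MarkingStack stack(2);
      for (Obj& o : heap) { o.marked = true; stack.Push(&o); }  // overflows
      while (!stack.is_empty()) stack.Pop();                    // initial drain
      while (stack.overflowed()) {
        RefillMarkingStack(&heap, &stack);
        while (!stack.is_empty()) stack.Pop();                  // drain again
      }
      std::printf("all %d objects were pushed despite the tiny stack\n",
                  static_cast<int>(heap.size()));
      return 0;
    }

Every pass either empties the stack or clears more overflow bits, mirroring how the loops above alternate RefillMarkingStack and EmptyMarkingStack until the overflow flag clears.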
 
 
@@ -1296,7 +1341,7 @@ void MarkCompactCollector::RefillMarkingStack() {
 // objects in the heap.
 void MarkCompactCollector::ProcessMarkingStack() {
   EmptyMarkingStack();
-  while (marking_stack.overflowed()) {
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }
@@ -1305,11 +1350,11 @@ void MarkCompactCollector::ProcessMarkingStack() {
 
 void MarkCompactCollector::ProcessExternalMarking() {
   bool work_to_do = true;
-  ASSERT(marking_stack.is_empty());
+  ASSERT(marking_stack_.is_empty());
   while (work_to_do) {
     MarkObjectGroups();
     MarkImplicitRefGroups();
-    work_to_do = !marking_stack.is_empty();
+    work_to_do = !marking_stack_.is_empty();
     ProcessMarkingStack();
   }
 }
@@ -1320,7 +1365,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // The recursive GC marker detects when it is nearing stack overflow,
   // and switches to a different marking system.  JS interrupts interfere
   // with the C stack limit check.
-  PostponeInterruptsScope postpone;
+  PostponeInterruptsScope postpone(heap_->isolate());
 
 #ifdef DEBUG
   ASSERT(state_ == PREPARE_GC);
@@ -1328,14 +1373,14 @@ void MarkCompactCollector::MarkLiveObjects() {
 #endif
   // The to space contains live objects, the from space is used as a marking
   // stack.
-  marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
-                           Heap::new_space()->FromSpaceHigh());
+  marking_stack_.Initialize(heap_->new_space()->FromSpaceLow(),
+                            heap_->new_space()->FromSpaceHigh());
 
-  ASSERT(!marking_stack.overflowed());
+  ASSERT(!marking_stack_.overflowed());
 
   PrepareForCodeFlushing();
 
-  RootMarkingVisitor root_visitor;
+  RootMarkingVisitor root_visitor(heap_);
   MarkRoots(&root_visitor);
 
   // The objects reachable from the roots are marked, yet unreachable
@@ -1349,10 +1394,11 @@ void MarkCompactCollector::MarkLiveObjects() {
   //
   // First we identify nonlive weak handles and mark them as pending
   // destruction.
-  GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
+  heap_->isolate()->global_handles()->IdentifyWeakHandles(
+      &IsUnmarkedHeapObject);
   // Then we mark the objects and process the transitive closure.
-  GlobalHandles::IterateWeakRoots(&root_visitor);
-  while (marking_stack.overflowed()) {
+  heap_->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
+  while (marking_stack_.overflowed()) {
     RefillMarkingStack();
     EmptyMarkingStack();
   }
@@ -1364,47 +1410,49 @@ void MarkCompactCollector::MarkLiveObjects() {
   // Prune the symbol table removing all symbols only pointed to by the
   // symbol table.  Cannot use symbol_table() here because the symbol
   // table is marked.
-  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
+  SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table();
   SymbolTableCleaner v;
   symbol_table->IterateElements(&v);
   symbol_table->ElementsRemoved(v.PointersRemoved());
-  ExternalStringTable::Iterate(&v);
-  ExternalStringTable::CleanUp();
+  heap_->external_string_table_.Iterate(&v);
+  heap_->external_string_table_.CleanUp();
 
   // Process the weak references.
   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
-  Heap::ProcessWeakReferences(&mark_compact_object_retainer);
+  heap_->ProcessWeakReferences(&mark_compact_object_retainer);
 
   // Remove object groups after marking phase.
-  GlobalHandles::RemoveObjectGroups();
-  GlobalHandles::RemoveImplicitRefGroups();
+  heap_->isolate()->global_handles()->RemoveObjectGroups();
+  heap_->isolate()->global_handles()->RemoveImplicitRefGroups();
 
   // Flush code from collected candidates.
-  FlushCode::ProcessCandidates();
+  if (is_code_flushing_enabled()) {
+    code_flusher_->ProcessCandidates();
+  }
 
   // Clean up dead objects from the runtime profiler.
-  RuntimeProfiler::RemoveDeadSamples();
+  heap_->isolate()->runtime_profiler()->RemoveDeadSamples();
 }
 
 
 #ifdef DEBUG
 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
   live_bytes_ += obj->Size();
-  if (Heap::new_space()->Contains(obj)) {
+  if (HEAP->new_space()->Contains(obj)) {
     live_young_objects_size_ += obj->Size();
-  } else if (Heap::map_space()->Contains(obj)) {
+  } else if (HEAP->map_space()->Contains(obj)) {
     ASSERT(obj->IsMap());
     live_map_objects_size_ += obj->Size();
-  } else if (Heap::cell_space()->Contains(obj)) {
+  } else if (HEAP->cell_space()->Contains(obj)) {
     ASSERT(obj->IsJSGlobalPropertyCell());
     live_cell_objects_size_ += obj->Size();
-  } else if (Heap::old_pointer_space()->Contains(obj)) {
+  } else if (HEAP->old_pointer_space()->Contains(obj)) {
     live_old_pointer_objects_size_ += obj->Size();
-  } else if (Heap::old_data_space()->Contains(obj)) {
+  } else if (HEAP->old_data_space()->Contains(obj)) {
     live_old_data_objects_size_ += obj->Size();
-  } else if (Heap::code_space()->Contains(obj)) {
+  } else if (HEAP->code_space()->Contains(obj)) {
     live_code_objects_size_ += obj->Size();
-  } else if (Heap::lo_space()->Contains(obj)) {
+  } else if (HEAP->lo_space()->Contains(obj)) {
     live_lo_objects_size_ += obj->Size();
   } else {
     UNREACHABLE();
@@ -1420,7 +1468,7 @@ void MarkCompactCollector::SweepLargeObjectSpace() {
       compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
 #endif
   // Deallocate unmarked objects and clear marked bits for marked objects.
-  Heap::lo_space()->FreeUnmarkedObjects();
+  HEAP->lo_space()->FreeUnmarkedObjects();
 }
 
 
@@ -1433,7 +1481,7 @@ bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
 
 
 void MarkCompactCollector::ClearNonLiveTransitions() {
-  HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject);
+  HeapObjectIterator map_iterator(HEAP->map_space(), &SizeOfMarkedObject);
   // Iterate over the map space, setting map transitions that go from
   // a marked map to an unmarked map to null transitions.  At the same time,
   // set all the prototype fields of maps back to their original value,
@@ -1483,7 +1531,7 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
       // This test will always be false on the first iteration.
       if (on_dead_path && current->IsMarked()) {
         on_dead_path = false;
-        current->ClearNonLiveTransitions(real_prototype);
+        current->ClearNonLiveTransitions(heap_, real_prototype);
       }
       *HeapObject::RawField(current, Map::kPrototypeOffset) =
           real_prototype;
@@ -1545,20 +1593,21 @@ void EncodeFreeRegion(Address free_start, int free_size) {
 // Try to promote all objects in new space.  Heap numbers and sequential
 // strings are promoted to the old data space, large objects to large object space,
 // and all others to the old space.
-inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object,
+inline MaybeObject* MCAllocateFromNewSpace(Heap* heap,
+                                           HeapObject* object,
                                            int object_size) {
   MaybeObject* forwarded;
-  if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
+  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
     forwarded = Failure::Exception();
   } else {
-    OldSpace* target_space = Heap::TargetSpace(object);
-    ASSERT(target_space == Heap::old_pointer_space() ||
-           target_space == Heap::old_data_space());
+    OldSpace* target_space = heap->TargetSpace(object);
+    ASSERT(target_space == heap->old_pointer_space() ||
+           target_space == heap->old_data_space());
     forwarded = target_space->MCAllocateRaw(object_size);
   }
   Object* result;
   if (!forwarded->ToObject(&result)) {
-    result = Heap::new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
+    result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
   }
   return result;
 }
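
MCAllocateFromNewSpace above first tries the object's promotion target space and, if that allocation fails, falls back to reallocating in new space, which cannot fail because the semispaces are equally sized. A minimal sketch of that try-then-fall-back shape, with a hypothetical Space class instead of V8's MaybeObject machinery:

    #include <cstdio>

    // Hypothetical bump allocator over a fixed budget; returns an offset, or
    // -1 when the space is out of room.
    class Space {
     public:
      Space(const char* name, int capacity) : name_(name), free_(capacity) {}
      int Allocate(int size) {
        if (size > free_) return -1;
        free_ -= size;
        return free_;
      }
      const char* name() const { return name_; }
     private:
      const char* name_;
      int free_;
    };

    // Try the promotion target first; fall back to a space that is known to
    // be large enough.
    int AllocateWithFallback(Space* target, Space* fallback, int size) {
      int result = target->Allocate(size);
      if (result >= 0) {
        std::printf("allocated %d bytes in %s\n", size, target->name());
        return result;
      }
      std::printf("falling back to %s for %d bytes\n", fallback->name(), size);
      return fallback->Allocate(size);
    }

    int main() {
      Space old_space("old space", 64);
      Space new_space("new space", 1024);
      AllocateWithFallback(&old_space, &new_space, 32);   // fits
      AllocateWithFallback(&old_space, &new_space, 128);  // falls back
      return 0;
    }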
@@ -1566,48 +1615,53 @@ inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object,
 
 // Allocation functions for the paged spaces call the space's MCAllocateRaw.
 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return Heap::old_pointer_space()->MCAllocateRaw(object_size);
+  return heap->old_pointer_space()->MCAllocateRaw(object_size);
 }
 
 
 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return Heap::old_data_space()->MCAllocateRaw(object_size);
+  return heap->old_data_space()->MCAllocateRaw(object_size);
 }
 
 
 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return Heap::code_space()->MCAllocateRaw(object_size);
+  return heap->code_space()->MCAllocateRaw(object_size);
 }
 
 
 MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
+    Heap* heap,
     HeapObject* ignore,
     int object_size) {
-  return Heap::map_space()->MCAllocateRaw(object_size);
+  return heap->map_space()->MCAllocateRaw(object_size);
 }
 
 
-MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(HeapObject* ignore,
-                                                            int object_size) {
-  return Heap::cell_space()->MCAllocateRaw(object_size);
+MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(
+    Heap* heap, HeapObject* ignore, int object_size) {
+  return heap->cell_space()->MCAllocateRaw(object_size);
 }
 
 
 // The forwarding address is encoded at the same offset as the current
 // to-space object, but in from space.
-inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
+inline void EncodeForwardingAddressInNewSpace(Heap* heap,
+                                              HeapObject* old_object,
                                               int object_size,
                                               Object* new_object,
                                               int* ignored) {
   int offset =
-      Heap::new_space()->ToSpaceOffsetForAddress(old_object->address());
-  Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) =
+      heap->new_space()->ToSpaceOffsetForAddress(old_object->address());
+  Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) =
       HeapObject::cast(new_object)->address();
 }
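
EncodeForwardingAddressInNewSpace above records a forwarding address without any side table: the destination address is written into from-space at the same offset the object occupies in to-space. A minimal sketch of that mirrored-offset encoding, using plain byte arrays as hypothetical semispaces:

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    int main() {
      const int kSemiSpaceSize = 1024;
      unsigned char to_space[kSemiSpaceSize];     // holds live objects
      unsigned char from_space[kSemiSpaceSize];   // free, reused for forwarding

      unsigned char* old_object = to_space + 128;   // object sits at offset 128
      unsigned char* new_location = to_space + 16;  // where compaction puts it

      // Encode: write the forwarding address into from-space at the offset
      // the object occupies in to-space.
      std::ptrdiff_t offset = old_object - to_space;
      std::memcpy(from_space + offset, &new_location, sizeof(new_location));

      // Decode: a stale pointer is translated through the mirrored slot.
      unsigned char* forwarded;
      std::memcpy(&forwarded, from_space + (old_object - to_space),
                  sizeof(forwarded));
      std::printf("object moved from offset %td to offset %td\n",
                  offset, forwarded - to_space);
      return 0;
    }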
 
@@ -1615,7 +1669,8 @@ inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
 // The forwarding address is encoded in the map pointer of the object as an
 // offset (in terms of live bytes) from the address of the first live object
 // in the page.
-inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,
+inline void EncodeForwardingAddressInPagedSpace(Heap* heap,
+                                                HeapObject* old_object,
                                                 int object_size,
                                                 Object* new_object,
                                                 int* offset) {
@@ -1648,7 +1703,8 @@ inline void IgnoreNonLiveObject(HeapObject* object) {}
 template<MarkCompactCollector::AllocationFunction Alloc,
          MarkCompactCollector::EncodingFunction Encode,
          MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
-inline void EncodeForwardingAddressesInRange(Address start,
+inline void EncodeForwardingAddressesInRange(MarkCompactCollector* collector,
+                                             Address start,
                                              Address end,
                                              int* offset) {
   // The start address of the current free region while sweeping the space.
@@ -1668,12 +1724,12 @@ inline void EncodeForwardingAddressesInRange(Address start,
     HeapObject* object = HeapObject::FromAddress(current);
     if (object->IsMarked()) {
       object->ClearMark();
-      MarkCompactCollector::tracer()->decrement_marked_count();
+      collector->tracer()->decrement_marked_count();
       object_size = object->Size();
 
-      // Allocation cannot fail, because we are compacting the space.
-      Object* forwarded = Alloc(object, object_size)->ToObjectUnchecked();
-      Encode(object, object_size, forwarded, offset);
+      Object* forwarded =
+          Alloc(collector->heap(), object, object_size)->ToObjectUnchecked();
+      Encode(collector->heap(), object, object_size, forwarded, offset);
 
 #ifdef DEBUG
       if (FLAG_gc_verbose) {
@@ -1709,8 +1765,9 @@ void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
   EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
                                    EncodeForwardingAddressInNewSpace,
                                    IgnoreNonLiveObject>(
-      Heap::new_space()->bottom(),
-      Heap::new_space()->top(),
+      this,
+      heap_->new_space()->bottom(),
+      heap_->new_space()->top(),
       &ignored);
 }
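
EncodeForwardingAddressesInNewSpace instantiates the EncodeForwardingAddressesInRange template with its allocation and encoding routines as non-type template parameters, so each space gets its own specialized sweep loop. A minimal sketch of that technique, with hypothetical BumpAllocate and PrintEncode functions:

    #include <cstdio>

    typedef int (*AllocationFunction)(int size);
    typedef void (*EncodingFunction)(int index, int new_location);

    int BumpAllocate(int size) {
      static int top = 0;
      int result = top;
      top += size;
      return result;
    }

    void PrintEncode(int index, int new_location) {
      std::printf("object %d forwards to offset %d\n", index, new_location);
    }

    // Each instantiation bakes the allocation and encoding policies into the
    // loop body.
    template <AllocationFunction Alloc, EncodingFunction Encode>
    void EncodeForwardingAddressesInRange(int start, int end) {
      for (int i = start; i < end; i++) {
        int new_location = Alloc(8);  // pretend every object is 8 bytes
        Encode(i, new_location);
      }
    }

    int main() {
      EncodeForwardingAddressesInRange<BumpAllocate, PrintEncode>(0, 4);
      return 0;
    }

Because the function pointers are compile-time template arguments, the compiler is free to call or inline them directly in each instantiation.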
 
@@ -1729,6 +1786,7 @@ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
     EncodeForwardingAddressesInRange<Alloc,
                                      EncodeForwardingAddressInPagedSpace,
                                      ProcessNonLive>(
+        this,
         p->ObjectAreaStart(),
         p->AllocationTop(),
         &offset);
@@ -1746,14 +1804,15 @@ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
 // to encounter pointers to dead objects during traversal of dirty regions we
 // should clear them to avoid encountering them during the next dirty regions
 // iteration.
-static void MigrateObject(Address dst,
+static void MigrateObject(Heap* heap,
+                          Address dst,
                           Address src,
                           int size,
                           bool to_old_space) {
   if (to_old_space) {
-    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
+    heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
   } else {
-    Heap::CopyBlock(dst, src, size);
+    heap->CopyBlock(dst, src, size);
   }
 
   Memory::Address_at(src) = dst;
@@ -1763,14 +1822,14 @@ static void MigrateObject(Address dst,
 class StaticPointersToNewGenUpdatingVisitor : public
   StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
  public:
-  static inline void VisitPointer(Object** p) {
+  static inline void VisitPointer(Heap* heap, Object** p) {
     if (!(*p)->IsHeapObject()) return;
 
     HeapObject* obj = HeapObject::cast(*p);
     Address old_addr = obj->address();
 
-    if (Heap::new_space()->Contains(obj)) {
-      ASSERT(Heap::InFromSpace(*p));
+    if (heap->new_space()->Contains(obj)) {
+      ASSERT(heap->InFromSpace(*p));
       *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
     }
   }
@@ -1781,13 +1840,15 @@ class StaticPointersToNewGenUpdatingVisitor : public
 // It does not expect to encounter pointers to dead objects.
 class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
  public:
+  explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }
+
   void VisitPointer(Object** p) {
-    StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+    StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
   }
 
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) {
-      StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+      StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
     }
   }
 
@@ -1807,6 +1868,8 @@ class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
     VisitPointer(&target);
     rinfo->set_call_address(Code::cast(target)->instruction_start());
   }
+ private:
+  Heap* heap_;
 };
 
 
@@ -1817,7 +1880,7 @@ static void UpdatePointerToNewGen(HeapObject** p) {
   if (!(*p)->IsHeapObject()) return;
 
   Address old_addr = (*p)->address();
-  ASSERT(Heap::InFromSpace(*p));
+  ASSERT(HEAP->InFromSpace(*p));
 
   Address new_addr = Memory::Address_at(old_addr);
 
@@ -1831,39 +1894,42 @@ static void UpdatePointerToNewGen(HeapObject** p) {
 }
 
 
-static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) {
+static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+                                                                 Object** p) {
   Address old_addr = HeapObject::cast(*p)->address();
   Address new_addr = Memory::Address_at(old_addr);
   return String::cast(HeapObject::FromAddress(new_addr));
 }
 
 
-static bool TryPromoteObject(HeapObject* object, int object_size) {
+static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) {
   Object* result;
 
-  if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
+  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
     MaybeObject* maybe_result =
-        Heap::lo_space()->AllocateRawFixedArray(object_size);
+        heap->lo_space()->AllocateRawFixedArray(object_size);
     if (maybe_result->ToObject(&result)) {
       HeapObject* target = HeapObject::cast(result);
-      MigrateObject(target->address(), object->address(), object_size, true);
-      MarkCompactCollector::tracer()->
+      MigrateObject(heap, target->address(), object->address(), object_size,
+                    true);
+      heap->mark_compact_collector()->tracer()->
           increment_promoted_objects_size(object_size);
       return true;
     }
   } else {
-    OldSpace* target_space = Heap::TargetSpace(object);
+    OldSpace* target_space = heap->TargetSpace(object);
 
-    ASSERT(target_space == Heap::old_pointer_space() ||
-           target_space == Heap::old_data_space());
+    ASSERT(target_space == heap->old_pointer_space() ||
+           target_space == heap->old_data_space());
     MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
     if (maybe_result->ToObject(&result)) {
       HeapObject* target = HeapObject::cast(result);
-      MigrateObject(target->address(),
+      MigrateObject(heap,
+                    target->address(),
                     object->address(),
                     object_size,
-                    target_space == Heap::old_pointer_space());
-      MarkCompactCollector::tracer()->
+                    target_space == heap->old_pointer_space());
+      heap->mark_compact_collector()->tracer()->
           increment_promoted_objects_size(object_size);
       return true;
     }
@@ -1873,8 +1939,8 @@ static bool TryPromoteObject(HeapObject* object, int object_size) {
 }
 
 
-static void SweepNewSpace(NewSpace* space) {
-  Heap::CheckNewSpaceExpansionCriteria();
+static void SweepNewSpace(Heap* heap, NewSpace* space) {
+  heap->CheckNewSpaceExpansionCriteria();
 
   Address from_bottom = space->bottom();
   Address from_top = space->top();
@@ -1894,13 +1960,13 @@ static void SweepNewSpace(NewSpace* space) {
 
     if (object->IsMarked()) {
       object->ClearMark();
-      MarkCompactCollector::tracer()->decrement_marked_count();
+      heap->mark_compact_collector()->tracer()->decrement_marked_count();
 
       size = object->Size();
       survivors_size += size;
 
       // Aggressively promote young survivors to the old space.
-      if (TryPromoteObject(object, size)) {
+      if (TryPromoteObject(heap, object, size)) {
         continue;
       }
 
@@ -1908,7 +1974,8 @@ static void SweepNewSpace(NewSpace* space) {
       // Allocation cannot fail at this point: semispaces are of equal size.
       Object* target = space->AllocateRaw(size)->ToObjectUnchecked();
 
-      MigrateObject(HeapObject::cast(target)->address(),
+      MigrateObject(heap,
+                    HeapObject::cast(target)->address(),
                     current,
                     size,
                     false);
@@ -1922,7 +1989,7 @@ static void SweepNewSpace(NewSpace* space) {
   }
 
   // Second pass: find pointers to new space and update them.
-  PointersToNewGenUpdatingVisitor updating_visitor;
+  PointersToNewGenUpdatingVisitor updating_visitor(heap);
 
   // Update pointers in to space.
   Address current = space->bottom();
@@ -1934,19 +2001,19 @@ static void SweepNewSpace(NewSpace* space) {
   }
 
   // Update roots.
-  Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
+  heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
   LiveObjectList::IterateElements(&updating_visitor);
 
   // Update pointers in old spaces.
-  Heap::IterateDirtyRegions(Heap::old_pointer_space(),
+  heap->IterateDirtyRegions(heap->old_pointer_space(),
                             &Heap::IteratePointersInDirtyRegion,
                             &UpdatePointerToNewGen,
-                            Heap::WATERMARK_SHOULD_BE_VALID);
+                            heap->WATERMARK_SHOULD_BE_VALID);
 
-  Heap::lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
+  heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
 
   // Update pointers from cells.
-  HeapObjectIterator cell_iterator(Heap::cell_space());
+  HeapObjectIterator cell_iterator(heap->cell_space());
   for (HeapObject* cell = cell_iterator.next();
        cell != NULL;
        cell = cell_iterator.next()) {
@@ -1959,22 +2026,22 @@ static void SweepNewSpace(NewSpace* space) {
   }
 
   // Update pointer from the global contexts list.
-  updating_visitor.VisitPointer(Heap::global_contexts_list_address());
+  updating_visitor.VisitPointer(heap->global_contexts_list_address());
 
   // Update pointers from external string table.
-  Heap::UpdateNewSpaceReferencesInExternalStringTable(
+  heap->UpdateNewSpaceReferencesInExternalStringTable(
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
 
   // All pointers were updated. Update auxiliary allocation info.
-  Heap::IncrementYoungSurvivorsCounter(survivors_size);
+  heap->IncrementYoungSurvivorsCounter(survivors_size);
   space->set_age_mark(space->top());
 
   // Update JSFunction pointers from the runtime profiler.
-  RuntimeProfiler::UpdateSamplesAfterScavenge();
+  heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
 }
 
 
-static void SweepSpace(PagedSpace* space) {
+static void SweepSpace(Heap* heap, PagedSpace* space) {
   PageIterator it(space, PageIterator::PAGES_IN_USE);
 
   // During sweeping of paged space we are trying to find longest sequences
@@ -2012,7 +2079,7 @@ static void SweepSpace(PagedSpace* space) {
       object = HeapObject::FromAddress(current);
       if (object->IsMarked()) {
         object->ClearMark();
-        MarkCompactCollector::tracer()->decrement_marked_count();
+        heap->mark_compact_collector()->tracer()->decrement_marked_count();
 
         if (!is_previous_alive) {  // Transition from free to live.
           space->DeallocateBlock(free_start,
@@ -2021,7 +2088,7 @@ static void SweepSpace(PagedSpace* space) {
           is_previous_alive = true;
         }
       } else {
-        MarkCompactCollector::ReportDeleteIfNeeded(object);
+        heap->mark_compact_collector()->ReportDeleteIfNeeded(object);
         if (is_previous_alive) {  // Transition from live to free.
           free_start = current;
           is_previous_alive = false;
@@ -2121,24 +2188,24 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
   // Objects in the active semispace of the young generation may be
   // relocated to the inactive semispace (if not promoted).  Set the
   // relocation info to the beginning of the inactive semispace.
-  Heap::new_space()->MCResetRelocationInfo();
+  heap_->new_space()->MCResetRelocationInfo();
 
   // Compute the forwarding pointers in each space.
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
                                         ReportDeleteIfNeeded>(
-      Heap::old_pointer_space());
+      heap_->old_pointer_space());
 
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
                                         IgnoreNonLiveObject>(
-      Heap::old_data_space());
+      heap_->old_data_space());
 
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
                                         ReportDeleteIfNeeded>(
-      Heap::code_space());
+      heap_->code_space());
 
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
                                         IgnoreNonLiveObject>(
-      Heap::cell_space());
+      heap_->cell_space());
 
 
   // Compute new space next to last after the old and code spaces have been
@@ -2150,25 +2217,25 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
   // non-live map pointers to get the sizes of non-live objects.
   EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
                                         IgnoreNonLiveObject>(
-      Heap::map_space());
+      heap_->map_space());
 
   // Write relocation info to the top page, so we can use it later.  This is
   // done after promoting objects from the new space so we get the correct
   // allocation top.
-  Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
-  Heap::old_data_space()->MCWriteRelocationInfoToPage();
-  Heap::code_space()->MCWriteRelocationInfoToPage();
-  Heap::map_space()->MCWriteRelocationInfoToPage();
-  Heap::cell_space()->MCWriteRelocationInfoToPage();
+  heap_->old_pointer_space()->MCWriteRelocationInfoToPage();
+  heap_->old_data_space()->MCWriteRelocationInfoToPage();
+  heap_->code_space()->MCWriteRelocationInfoToPage();
+  heap_->map_space()->MCWriteRelocationInfoToPage();
+  heap_->cell_space()->MCWriteRelocationInfoToPage();
 }
 
 
 class MapIterator : public HeapObjectIterator {
  public:
-  MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { }
+  MapIterator() : HeapObjectIterator(HEAP->map_space(), &SizeCallback) { }
 
   explicit MapIterator(Address start)
-      : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { }
+      : HeapObjectIterator(HEAP->map_space(), start, &SizeCallback) { }
 
  private:
   static int SizeCallback(HeapObject* unused) {
@@ -2180,9 +2247,10 @@ class MapIterator : public HeapObjectIterator {
 
 class MapCompact {
  public:
-  explicit MapCompact(int live_maps)
-    : live_maps_(live_maps),
-      to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)),
+  explicit MapCompact(Heap* heap, int live_maps)
+    : heap_(heap),
+      live_maps_(live_maps),
+      to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
       map_to_evacuate_it_(to_evacuate_start_),
       first_map_to_evacuate_(
           reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
@@ -2203,37 +2271,39 @@ class MapCompact {
   }
 
   void UpdateMapPointersInRoots() {
-    Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG);
-    GlobalHandles::IterateWeakRoots(&map_updating_visitor_);
-    LiveObjectList::IterateElements(&map_updating_visitor_);
+    MapUpdatingVisitor map_updating_visitor;
+    heap_->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
+    heap_->isolate()->global_handles()->IterateWeakRoots(&map_updating_visitor);
+    LiveObjectList::IterateElements(&map_updating_visitor);
   }
 
   void UpdateMapPointersInPagedSpace(PagedSpace* space) {
-    ASSERT(space != Heap::map_space());
+    ASSERT(space != heap_->map_space());
 
     PageIterator it(space, PageIterator::PAGES_IN_USE);
     while (it.has_next()) {
       Page* p = it.next();
-      UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop());
+      UpdateMapPointersInRange(heap_, p->ObjectAreaStart(), p->AllocationTop());
     }
   }
 
   void UpdateMapPointersInNewSpace() {
-    NewSpace* space = Heap::new_space();
-    UpdateMapPointersInRange(space->bottom(), space->top());
+    NewSpace* space = heap_->new_space();
+    UpdateMapPointersInRange(heap_, space->bottom(), space->top());
   }
 
   void UpdateMapPointersInLargeObjectSpace() {
-    LargeObjectIterator it(Heap::lo_space());
+    LargeObjectIterator it(heap_->lo_space());
     for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
-      UpdateMapPointersInObject(obj);
+      UpdateMapPointersInObject(heap_, obj);
   }
 
   void Finish() {
-    Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
+    heap_->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
   }
 
  private:
+  Heap* heap_;
   int live_maps_;
   Address to_evacuate_start_;
   MapIterator vacant_map_it_;
@@ -2243,6 +2313,8 @@ class MapCompact {
   // Helper class for updating map pointers in HeapObjects.
   class MapUpdatingVisitor: public ObjectVisitor {
   public:
+    MapUpdatingVisitor() {}
+
     void VisitPointer(Object** p) {
       UpdateMapPointer(p);
     }
@@ -2265,8 +2337,6 @@ class MapCompact {
     }
   };
 
-  static MapUpdatingVisitor map_updating_visitor_;
-
   static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
     while (true) {
       HeapObject* next = it->next();
@@ -2300,9 +2370,8 @@ class MapCompact {
 
     ASSERT(Map::kSize % 4 == 0);
 
-    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(vacant_map->address(),
-                                                  map_to_evacuate->address(),
-                                                  Map::kSize);
+    map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(
+        vacant_map->address(), map_to_evacuate->address(), Map::kSize);
 
     ASSERT(vacant_map->IsMap());  // Due to memcpy above.
 
@@ -2322,15 +2391,15 @@ class MapCompact {
     return new_map;
   }
 
-  static int UpdateMapPointersInObject(HeapObject* obj) {
+  static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) {
     ASSERT(!obj->IsMarked());
     Map* map = obj->map();
-    ASSERT(Heap::map_space()->Contains(map));
+    ASSERT(heap->map_space()->Contains(map));
     MapWord map_word = map->map_word();
     ASSERT(!map_word.IsMarked());
     if (map_word.IsOverflowed()) {
       Map* new_map = GetForwardedMap(map_word);
-      ASSERT(Heap::map_space()->Contains(new_map));
+      ASSERT(heap->map_space()->Contains(new_map));
       obj->set_map(new_map);
 
 #ifdef DEBUG
@@ -2344,16 +2413,17 @@ class MapCompact {
     }
 
     int size = obj->SizeFromMap(map);
-    obj->IterateBody(map->instance_type(), size, &map_updating_visitor_);
+    MapUpdatingVisitor map_updating_visitor;
+    obj->IterateBody(map->instance_type(), size, &map_updating_visitor);
     return size;
   }
 
-  static void UpdateMapPointersInRange(Address start, Address end) {
+  static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) {
     HeapObject* object;
     int size;
     for (Address current = start; current < end; current += size) {
       object = HeapObject::FromAddress(current);
-      size = UpdateMapPointersInObject(object);
+      size = UpdateMapPointersInObject(heap, object);
       ASSERT(size > 0);
     }
   }
@@ -2370,8 +2440,6 @@ class MapCompact {
 #endif
 };
 
-MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_;
-
 
 void MarkCompactCollector::SweepSpaces() {
   GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
@@ -2383,26 +2451,26 @@ void MarkCompactCollector::SweepSpaces() {
   // the map space last because freeing non-live maps overwrites them and
   // the other spaces rely on possibly non-live maps to get the sizes for
   // non-live objects.
-  SweepSpace(Heap::old_pointer_space());
-  SweepSpace(Heap::old_data_space());
-  SweepSpace(Heap::code_space());
-  SweepSpace(Heap::cell_space());
+  SweepSpace(heap_, heap_->old_pointer_space());
+  SweepSpace(heap_, heap_->old_data_space());
+  SweepSpace(heap_, heap_->code_space());
+  SweepSpace(heap_, heap_->cell_space());
   { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
-    SweepNewSpace(Heap::new_space());
+    SweepNewSpace(heap_, heap_->new_space());
   }
-  SweepSpace(Heap::map_space());
+  SweepSpace(heap_, heap_->map_space());
 
-  Heap::IterateDirtyRegions(Heap::map_space(),
-                            &Heap::IteratePointersInDirtyMapsRegion,
-                            &UpdatePointerToNewGen,
-                            Heap::WATERMARK_SHOULD_BE_VALID);
+  heap_->IterateDirtyRegions(heap_->map_space(),
+                             &heap_->IteratePointersInDirtyMapsRegion,
+                             &UpdatePointerToNewGen,
+                             heap_->WATERMARK_SHOULD_BE_VALID);
 
-  intptr_t live_maps_size = Heap::map_space()->Size();
+  intptr_t live_maps_size = heap_->map_space()->Size();
   int live_maps = static_cast<int>(live_maps_size / Map::kSize);
   ASSERT(live_map_objects_size_ == live_maps_size);
 
-  if (Heap::map_space()->NeedsCompaction(live_maps)) {
-    MapCompact map_compact(live_maps);
+  if (heap_->map_space()->NeedsCompaction(live_maps)) {
+    MapCompact map_compact(heap_, live_maps);
 
     map_compact.CompactMaps();
     map_compact.UpdateMapPointersInRoots();
@@ -2410,7 +2478,7 @@ void MarkCompactCollector::SweepSpaces() {
     PagedSpaces spaces;
     for (PagedSpace* space = spaces.next();
          space != NULL; space = spaces.next()) {
-      if (space == Heap::map_space()) continue;
+      if (space == heap_->map_space()) continue;
       map_compact.UpdateMapPointersInPagedSpace(space);
     }
     map_compact.UpdateMapPointersInNewSpace();
@@ -2429,7 +2497,7 @@ void MarkCompactCollector::SweepSpaces() {
 int MarkCompactCollector::IterateLiveObjectsInRange(
     Address start,
     Address end,
-    HeapObjectCallback size_func) {
+    LiveObjectCallback size_func) {
   int live_objects_size = 0;
   Address current = start;
   while (current < end) {
@@ -2439,7 +2507,7 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
     } else if (encoded_map == kMultiFreeEncoding) {
       current += Memory::int_at(current + kIntSize);
     } else {
-      int size = size_func(HeapObject::FromAddress(current));
+      int size = (this->*size_func)(HeapObject::FromAddress(current));
       current += size;
       live_objects_size += size;
     }
@@ -2448,15 +2516,15 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
 }
 
 
-int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
-                                             HeapObjectCallback size_f) {
+int MarkCompactCollector::IterateLiveObjects(
+    NewSpace* space, LiveObjectCallback size_f) {
   ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
   return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
 }
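
The IterateLiveObjects* functions above switch from a free-function HeapObjectCallback to LiveObjectCallback, a pointer to a MarkCompactCollector member, and the call site invokes it as (this->*size_func)(...) so the callback runs against the collector's own state. A minimal sketch of that pointer-to-member-function pattern, with a hypothetical Collector class:

    #include <cstdio>

    class Collector {
     public:
      // Pointer-to-member type: the callback is a Collector method.
      typedef int (Collector::*LiveObjectCallback)(int obj);

      int Report(int obj) { std::printf("live object %d\n", obj); return obj; }
      int Relocate(int obj) { return obj + offset_; }

      int IterateLiveObjects(int count, LiveObjectCallback callback) {
        int total = 0;
        for (int obj = 0; obj < count; obj++) {
          total += (this->*callback)(obj);  // member-pointer call syntax
        }
        return total;
      }

     private:
      int offset_ = 100;  // per-collector state the callback can use
    };

    int main() {
      Collector collector;
      collector.IterateLiveObjects(3, &Collector::Report);
      std::printf("relocated total: %d\n",
                  collector.IterateLiveObjects(3, &Collector::Relocate));
      return 0;
    }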
 
 
-int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
-                                             HeapObjectCallback size_f) {
+int MarkCompactCollector::IterateLiveObjects(
+    PagedSpace* space, LiveObjectCallback size_f) {
   ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
   int total = 0;
   PageIterator it(space, PageIterator::PAGES_IN_USE);
@@ -2476,6 +2544,8 @@ int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
 // Helper class for updating pointers in HeapObjects.
 class UpdatingVisitor: public ObjectVisitor {
  public:
+  explicit UpdatingVisitor(Heap* heap) : heap_(heap) {}
+
   void VisitPointer(Object** p) {
     UpdatePointer(p);
   }
@@ -2511,27 +2581,27 @@ class UpdatingVisitor: public ObjectVisitor {
     HeapObject* obj = HeapObject::cast(*p);
     Address old_addr = obj->address();
     Address new_addr;
-    ASSERT(!Heap::InFromSpace(obj));
+    ASSERT(!heap_->InFromSpace(obj));
 
-    if (Heap::new_space()->Contains(obj)) {
+    if (heap_->new_space()->Contains(obj)) {
       Address forwarding_pointer_addr =
-          Heap::new_space()->FromSpaceLow() +
-          Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
+          heap_->new_space()->FromSpaceLow() +
+          heap_->new_space()->ToSpaceOffsetForAddress(old_addr);
       new_addr = Memory::Address_at(forwarding_pointer_addr);
 
 #ifdef DEBUG
-      ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
-             Heap::old_data_space()->Contains(new_addr) ||
-             Heap::new_space()->FromSpaceContains(new_addr) ||
-             Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));
-
-      if (Heap::new_space()->FromSpaceContains(new_addr)) {
-        ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
-               Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
+      ASSERT(heap_->old_pointer_space()->Contains(new_addr) ||
+             heap_->old_data_space()->Contains(new_addr) ||
+             heap_->new_space()->FromSpaceContains(new_addr) ||
+             heap_->lo_space()->Contains(HeapObject::FromAddress(new_addr)));
+
+      if (heap_->new_space()->FromSpaceContains(new_addr)) {
+        ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <=
+               heap_->new_space()->ToSpaceOffsetForAddress(old_addr));
       }
 #endif
 
-    } else if (Heap::lo_space()->Contains(obj)) {
+    } else if (heap_->lo_space()->Contains(obj)) {
       // Don't move objects in the large object space.
       return;
 
@@ -2560,6 +2630,8 @@ class UpdatingVisitor: public ObjectVisitor {
     }
 #endif
   }
+
+  Heap* heap_;
 };
 
 
@@ -2568,31 +2640,34 @@ void MarkCompactCollector::UpdatePointers() {
   ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
   state_ = UPDATE_POINTERS;
 #endif
-  UpdatingVisitor updating_visitor;
-  RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor);
-  Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
-  GlobalHandles::IterateWeakRoots(&updating_visitor);
+  UpdatingVisitor updating_visitor(heap_);
+  heap_->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+      &updating_visitor);
+  heap_->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
+  heap_->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);
 
   // Update the pointer to the head of the weak list of global contexts.
-  updating_visitor.VisitPointer(&Heap::global_contexts_list_);
+  updating_visitor.VisitPointer(&heap_->global_contexts_list_);
 
   LiveObjectList::IterateElements(&updating_visitor);
 
-  int live_maps_size = IterateLiveObjects(Heap::map_space(),
-                                          &UpdatePointersInOldObject);
-  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
-                                                  &UpdatePointersInOldObject);
-  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
-                                               &UpdatePointersInOldObject);
-  int live_codes_size = IterateLiveObjects(Heap::code_space(),
-                                           &UpdatePointersInOldObject);
-  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
-                                           &UpdatePointersInOldObject);
-  int live_news_size = IterateLiveObjects(Heap::new_space(),
-                                          &UpdatePointersInNewObject);
+  int live_maps_size = IterateLiveObjects(
+      heap_->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_pointer_olds_size = IterateLiveObjects(
+      heap_->old_pointer_space(),
+      &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_data_olds_size = IterateLiveObjects(
+      heap_->old_data_space(),
+      &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_codes_size = IterateLiveObjects(
+      heap_->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_cells_size = IterateLiveObjects(
+      heap_->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
+  int live_news_size = IterateLiveObjects(
+      heap_->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);
 
   // Large objects do not move, the map word can be updated directly.
-  LargeObjectIterator it(Heap::lo_space());
+  LargeObjectIterator it(heap_->lo_space());
   for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
     UpdatePointersInNewObject(obj);
   }
@@ -2619,8 +2694,8 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
 
   Address forwarded = GetForwardingAddressInOldSpace(old_map);
 
-  ASSERT(Heap::map_space()->Contains(old_map));
-  ASSERT(Heap::map_space()->Contains(forwarded));
+  ASSERT(heap_->map_space()->Contains(old_map));
+  ASSERT(heap_->map_space()->Contains(forwarded));
 #ifdef DEBUG
   if (FLAG_gc_verbose) {
     PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
@@ -2635,7 +2710,7 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
   int obj_size = obj->SizeFromMap(old_map);
 
   // Update pointers in the object body.
-  UpdatingVisitor updating_visitor;
+  UpdatingVisitor updating_visitor(heap_);
   obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
   return obj_size;
 }
@@ -2644,8 +2719,8 @@ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
   // Decode the map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap_->map_space());
+  ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // At this point, the first word of map_addr is also encoded, cannot
   // cast it to Map* using Map::cast.
@@ -2666,7 +2741,7 @@ int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
 #endif
 
   // Update pointers in the object body.
-  UpdatingVisitor updating_visitor;
+  UpdatingVisitor updating_visitor(heap_);
   obj->IterateBody(type, obj_size, &updating_visitor);
   return obj_size;
 }
@@ -2722,18 +2797,19 @@ void MarkCompactCollector::RelocateObjects() {
 #endif
   // Relocate objects, always relocating map objects first. Relocating
   // objects in other spaces relies on map objects to get object size.
-  int live_maps_size = IterateLiveObjects(Heap::map_space(),
-                                          &RelocateMapObject);
-  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
-                                                  &RelocateOldPointerObject);
-  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
-                                               &RelocateOldDataObject);
-  int live_codes_size = IterateLiveObjects(Heap::code_space(),
-                                           &RelocateCodeObject);
-  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
-                                           &RelocateCellObject);
-  int live_news_size = IterateLiveObjects(Heap::new_space(),
-                                          &RelocateNewObject);
+  int live_maps_size = IterateLiveObjects(
+      heap_->map_space(), &MarkCompactCollector::RelocateMapObject);
+  int live_pointer_olds_size = IterateLiveObjects(
+      heap_->old_pointer_space(),
+      &MarkCompactCollector::RelocateOldPointerObject);
+  int live_data_olds_size = IterateLiveObjects(
+      heap_->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
+  int live_codes_size = IterateLiveObjects(
+      heap_->code_space(), &MarkCompactCollector::RelocateCodeObject);
+  int live_cells_size = IterateLiveObjects(
+      heap_->cell_space(), &MarkCompactCollector::RelocateCellObject);
+  int live_news_size = IterateLiveObjects(
+      heap_->new_space(), &MarkCompactCollector::RelocateNewObject);
 
   USE(live_maps_size);
   USE(live_pointer_olds_size);
@@ -2749,28 +2825,28 @@ void MarkCompactCollector::RelocateObjects() {
   ASSERT(live_news_size == live_young_objects_size_);
 
   // Flip from and to spaces
-  Heap::new_space()->Flip();
+  heap_->new_space()->Flip();
 
-  Heap::new_space()->MCCommitRelocationInfo();
+  heap_->new_space()->MCCommitRelocationInfo();
 
   // Set age_mark to bottom in to space
-  Address mark = Heap::new_space()->bottom();
-  Heap::new_space()->set_age_mark(mark);
+  Address mark = heap_->new_space()->bottom();
+  heap_->new_space()->set_age_mark(mark);
 
   PagedSpaces spaces;
   for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
     space->MCCommitRelocationInfo();
 
-  Heap::CheckNewSpaceExpansionCriteria();
-  Heap::IncrementYoungSurvivorsCounter(live_news_size);
+  heap_->CheckNewSpaceExpansionCriteria();
+  heap_->IncrementYoungSurvivorsCounter(live_news_size);
 }
 
 
 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap_->map_space());
+  ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // Get forwarding address before resetting map pointer
   Address new_addr = GetForwardingAddressInOldSpace(obj);
@@ -2783,9 +2859,9 @@ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
 
   if (new_addr != old_addr) {
     // Move contents.
-    Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                  old_addr,
-                                                  Map::kSize);
+    heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                   old_addr,
+                                                   Map::kSize);
   }
 
 #ifdef DEBUG
@@ -2829,8 +2905,8 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
                                                    PagedSpace* space) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(map_addr));
+  Address map_addr = encoding.DecodeMapAddress(heap_->map_space());
+  ASSERT(heap_->map_space()->Contains(map_addr));
 
   // Get forwarding address before resetting map pointer.
   Address new_addr = GetForwardingAddressInOldSpace(obj);
@@ -2842,12 +2918,12 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
 
   if (new_addr != old_addr) {
     // Move contents.
-    if (space == Heap::old_data_space()) {
-      Heap::MoveBlock(new_addr, old_addr, obj_size);
+    if (space == heap_->old_data_space()) {
+      heap_->MoveBlock(new_addr, old_addr, obj_size);
     } else {
-      Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                    old_addr,
-                                                    obj_size);
+      heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                     old_addr,
+                                                     obj_size);
     }
   }
 
@@ -2855,46 +2931,47 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
   if (copied_to->IsSharedFunctionInfo()) {
-    PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr));
+    PROFILE(heap_->isolate(),
+            SharedFunctionInfoMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
 
 
 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
+  return RelocateOldNonCodeObject(obj, heap_->old_pointer_space());
 }
 
 
 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::old_data_space());
+  return RelocateOldNonCodeObject(obj, heap_->old_data_space());
 }
 
 
 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
-  return RelocateOldNonCodeObject(obj, Heap::cell_space());
+  return RelocateOldNonCodeObject(obj, heap_->cell_space());
 }
 
 
 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
   // Recover map pointer.
   MapWord encoding = obj->map_word();
-  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
+  Address map_addr = encoding.DecodeMapAddress(heap_->map_space());
+  ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr)));
 
   // Get forwarding address before resetting map pointer
   Address new_addr = GetForwardingAddressInOldSpace(obj);
 
   // Reset the map pointer.
-  int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
+  int obj_size = RestoreMap(obj, heap_->code_space(), new_addr, map_addr);
 
   Address old_addr = obj->address();
 
   if (new_addr != old_addr) {
     // Move contents.
-    Heap::MoveBlock(new_addr, old_addr, obj_size);
+    heap_->MoveBlock(new_addr, old_addr, obj_size);
   }
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
@@ -2902,9 +2979,9 @@ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
     // May also update inline cache target.
     Code::cast(copied_to)->Relocate(new_addr - old_addr);
     // Notify the logger that compiled code has moved.
-    PROFILE(CodeMoveEvent(old_addr, new_addr));
+    PROFILE(heap_->isolate(), CodeMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
@@ -2915,28 +2992,28 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
 
   // Get forwarding address
   Address old_addr = obj->address();
-  int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
+  int offset = heap_->new_space()->ToSpaceOffsetForAddress(old_addr);
 
   Address new_addr =
-    Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);
+    Memory::Address_at(heap_->new_space()->FromSpaceLow() + offset);
 
 #ifdef DEBUG
-  if (Heap::new_space()->FromSpaceContains(new_addr)) {
-    ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
-           Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
+  if (heap_->new_space()->FromSpaceContains(new_addr)) {
+    ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <=
+           heap_->new_space()->ToSpaceOffsetForAddress(old_addr));
   } else {
-    ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
-           Heap::TargetSpace(obj) == Heap::old_data_space());
+    ASSERT(heap_->TargetSpace(obj) == heap_->old_pointer_space() ||
+           heap_->TargetSpace(obj) == heap_->old_data_space());
   }
 #endif
 
   // New and old addresses cannot overlap.
-  if (Heap::InNewSpace(HeapObject::FromAddress(new_addr))) {
-    Heap::CopyBlock(new_addr, old_addr, obj_size);
+  if (heap_->InNewSpace(HeapObject::FromAddress(new_addr))) {
+    heap_->CopyBlock(new_addr, old_addr, obj_size);
   } else {
-    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-                                                  old_addr,
-                                                  obj_size);
+    heap_->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
+                                                   old_addr,
+                                                   obj_size);
   }
 
 #ifdef DEBUG
@@ -2947,14 +3024,27 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
 
   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
   if (copied_to->IsSharedFunctionInfo()) {
-    PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr));
+    PROFILE(heap_->isolate(),
+            SharedFunctionInfoMoveEvent(old_addr, new_addr));
   }
-  HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
+  HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr));
 
   return obj_size;
 }
 
 
+void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+  if (enable) {
+    if (code_flusher_ != NULL) return;
+    code_flusher_ = new CodeFlusher(heap_->isolate());
+  } else {
+    if (code_flusher_ == NULL) return;
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
+
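EnableCodeFlushing above is idempotent in both directions: enabling twice allocates a single CodeFlusher and disabling twice is a no-op. A usage sketch, assuming the collector is reached through a Heap accessor such as mark_compact_collector() (that accessor is not part of this hunk):

    MarkCompactCollector* collector = heap->mark_compact_collector();  // assumed accessor
    collector->EnableCodeFlushing(true);    // allocates a CodeFlusher on first use
    ASSERT(collector->is_code_flushing_enabled());
    collector->EnableCodeFlushing(true);    // no-op, the flusher already exists
    collector->EnableCodeFlushing(false);   // deletes the flusher and disables flushing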
 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (obj->IsCode()) {
@@ -2963,7 +3053,7 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (obj->IsCode()) {
-    PROFILE(CodeDeleteEvent(obj->address()));
+    PROFILE(ISOLATE, CodeDeleteEvent(obj->address()));
   }
 #endif
 }
index 9cda31e71dd6fbc946aac1763dce404ef385cf42..3c9d28ba5352c98b3807ff7e7f04a856a83203cd 100644
@@ -28,6 +28,8 @@
 #ifndef V8_MARK_COMPACT_H_
 #define V8_MARK_COMPACT_H_
 
+#include "spaces.h"
+
 namespace v8 {
 namespace internal {
 
@@ -37,8 +39,61 @@ namespace internal {
 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);
 
 // Forward declarations.
-class RootMarkingVisitor;
+class CodeFlusher;
+class GCTracer;
 class MarkingVisitor;
+class RootMarkingVisitor;
+
+
+// ----------------------------------------------------------------------------
+// Marking stack for tracing live objects.
+
+class MarkingStack {
+ public:
+  MarkingStack() : low_(NULL), top_(NULL), high_(NULL), overflowed_(false) { }
+
+  void Initialize(Address low, Address high) {
+    top_ = low_ = reinterpret_cast<HeapObject**>(low);
+    high_ = reinterpret_cast<HeapObject**>(high);
+    overflowed_ = false;
+  }
+
+  bool is_full() const { return top_ >= high_; }
+
+  bool is_empty() const { return top_ <= low_; }
+
+  bool overflowed() const { return overflowed_; }
+
+  void clear_overflowed() { overflowed_ = false; }
+
+  // Push the (marked) object on the marking stack if there is room,
+  // otherwise mark the object as overflowed and wait for a rescan of the
+  // heap.
+  void Push(HeapObject* object) {
+    CHECK(object->IsHeapObject());
+    if (is_full()) {
+      object->SetOverflow();
+      overflowed_ = true;
+    } else {
+      *(top_++) = object;
+    }
+  }
+
+  HeapObject* Pop() {
+    ASSERT(!is_empty());
+    HeapObject* object = *(--top_);
+    CHECK(object->IsHeapObject());
+    return object;
+  }
+
+ private:
+  HeapObject** low_;
+  HeapObject** top_;
+  HeapObject** high_;
+  bool overflowed_;
+
+  DISALLOW_COPY_AND_ASSIGN(MarkingStack);
+};
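A small usage sketch for the marking stack above; low, high, and some_object are placeholders for values supplied by the heap. The overflow path is what RefillMarkingStack, declared further down in this header, exists to recover from:

    MarkingStack marking_stack;
    marking_stack.Initialize(low, high);     // low/high delimit a caller-owned buffer
    marking_stack.Push(some_object);         // flags the object and the stack if full
    while (!marking_stack.is_empty()) {
      HeapObject* object = marking_stack.Pop();
      // ... visit the object's fields, pushing newly marked objects ...
    }
    if (marking_stack.overflowed()) {
      // Some objects were flagged via SetOverflow() instead of being pushed;
      // the heap must be rescanned to refill the stack before marking is done.
      marking_stack.clear_overflowed();
    }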
 
 
 // -------------------------------------------------------------------------
@@ -46,14 +101,17 @@ class MarkingVisitor;
 //
 // One collector instance exists per heap (per isolate).
 
-class MarkCompactCollector: public AllStatic {
+class OverflowedObjectsScanner;
+
+class MarkCompactCollector {
  public:
   // Type of functions to compute forwarding addresses of objects in
   // compacted spaces.  Given an object and its size, return a (non-failure)
   // Object* that will be the object after forwarding.  There is a separate
   // allocation function for each (compactable) space based on the location
   // of the object before compaction.
-  typedef MaybeObject* (*AllocationFunction)(HeapObject* object,
+  typedef MaybeObject* (*AllocationFunction)(Heap* heap,
+                                             HeapObject* object,
                                              int object_size);
 
   // Type of functions to encode the forwarding address for an object.
@@ -64,7 +122,8 @@ class MarkCompactCollector: public AllStatic {
   // page as input, and is updated to contain the offset to be used for the
   // next live object in the same page.  For spaces using a different
   // encoding (ie, contiguous spaces), the offset parameter is ignored.
-  typedef void (*EncodingFunction)(HeapObject* old_object,
+  typedef void (*EncodingFunction)(Heap* heap,
+                                   HeapObject* old_object,
                                    int object_size,
                                    Object* new_object,
                                    int* offset);
@@ -72,9 +131,12 @@ class MarkCompactCollector: public AllStatic {
   // Type of functions to process non-live objects.
   typedef void (*ProcessNonLiveFunction)(HeapObject* object);
 
+  // Pointer to member function, used in IterateLiveObjects.
+  typedef int (MarkCompactCollector::*LiveObjectCallback)(HeapObject* obj);
+
   // Set the global force_compaction flag, it must be called before Prepare
   // to take effect.
-  static void SetForceCompaction(bool value) {
+  void SetForceCompaction(bool value) {
     force_compaction_ = value;
   }
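Note that the new LiveObjectCallback typedef above is a pointer to member function, so IterateLiveObjects needs a collector instance to invoke it, unlike the old free-function HeapObjectCallback. A sketch of the call syntax, as it would appear inside a MarkCompactCollector member function (object is assumed to be in scope):

    // Selecting and invoking a live-object callback through 'this'.
    LiveObjectCallback size_f = &MarkCompactCollector::RelocateNewObject;
    int size = (this->*size_f)(object);   // member call; a free function would need no instance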
 
@@ -83,16 +145,16 @@ class MarkCompactCollector: public AllStatic {
 
   // Prepares for GC by resetting relocation info in old and map spaces and
   // choosing spaces to compact.
-  static void Prepare(GCTracer* tracer);
+  void Prepare(GCTracer* tracer);
 
   // Performs a global garbage collection.
-  static void CollectGarbage();
+  void CollectGarbage();
 
   // True if the last full GC performed heap compaction.
-  static bool HasCompacted() { return compacting_collection_; }
+  bool HasCompacted() { return compacting_collection_; }
 
   // True after the Prepare phase if the compaction is taking place.
-  static bool IsCompacting() {
+  bool IsCompacting() {
 #ifdef DEBUG
     // For the purposes of asserts we don't want this to keep returning true
     // after the collection is completed.
@@ -104,16 +166,16 @@ class MarkCompactCollector: public AllStatic {
 
   // The count of the number of objects left marked at the end of the last
   // completed full GC (expected to be zero).
-  static int previous_marked_count() { return previous_marked_count_; }
+  int previous_marked_count() { return previous_marked_count_; }
 
   // During a full GC, there is a stack-allocated GCTracer that is used for
   // bookkeeping information.  Return a pointer to that tracer.
-  static GCTracer* tracer() { return tracer_; }
+  GCTracer* tracer() { return tracer_; }
 
 #ifdef DEBUG
   // Checks whether performing mark-compact collection.
-  static bool in_use() { return state_ > PREPARE_GC; }
-  static bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
+  bool in_use() { return state_ > PREPARE_GC; }
+  bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
 #endif
 
   // Determine type of object and emit deletion log event.
@@ -127,7 +189,16 @@ class MarkCompactCollector: public AllStatic {
   static const uint32_t kSingleFreeEncoding = 0;
   static const uint32_t kMultiFreeEncoding = 1;
 
+  inline Heap* heap() const { return heap_; }
+
+  CodeFlusher* code_flusher() { return code_flusher_; }
+  inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
+  void EnableCodeFlushing(bool enable);
+
  private:
+  MarkCompactCollector();
+  ~MarkCompactCollector();
+
 #ifdef DEBUG
   enum CollectorState {
     IDLE,
@@ -140,28 +211,28 @@ class MarkCompactCollector: public AllStatic {
   };
 
   // The current stage of the collector.
-  static CollectorState state_;
+  CollectorState state_;
 #endif
 
   // Global flag that forces a compaction.
-  static bool force_compaction_;
+  bool force_compaction_;
 
   // Global flag indicating whether spaces were compacted on the last GC.
-  static bool compacting_collection_;
+  bool compacting_collection_;
 
   // Global flag indicating whether spaces will be compacted on the next GC.
-  static bool compact_on_next_gc_;
+  bool compact_on_next_gc_;
 
   // The number of objects left marked at the end of the last completed full
   // GC (expected to be zero).
-  static int previous_marked_count_;
+  int previous_marked_count_;
 
   // A pointer to the current stack-allocated GC tracer object during a full
   // collection (NULL before and after).
-  static GCTracer* tracer_;
+  GCTracer* tracer_;
 
   // Finishes GC, performs heap verification if enabled.
-  static void Finish();
+  void Finish();
 
   // -----------------------------------------------------------------------
   // Phase 1: Marking live objects.
@@ -179,88 +250,82 @@ class MarkCompactCollector: public AllStatic {
   friend class CodeMarkingVisitor;
   friend class SharedFunctionInfoMarkingVisitor;
 
-  static void PrepareForCodeFlushing();
+  void PrepareForCodeFlushing();
 
   // Marking operations for objects reachable from roots.
-  static void MarkLiveObjects();
+  void MarkLiveObjects();
 
-  static void MarkUnmarkedObject(HeapObject* obj);
+  void MarkUnmarkedObject(HeapObject* obj);
 
-  static inline void MarkObject(HeapObject* obj) {
+  inline void MarkObject(HeapObject* obj) {
     if (!obj->IsMarked()) MarkUnmarkedObject(obj);
   }
 
-  static inline void SetMark(HeapObject* obj) {
-    tracer_->increment_marked_count();
-#ifdef DEBUG
-    UpdateLiveObjectCount(obj);
-#endif
-    obj->SetMark();
-  }
+  inline void SetMark(HeapObject* obj);
 
   // Creates back pointers for all map transitions, stores them in
   // the prototype field.  The original prototype pointers are restored
   // in ClearNonLiveTransitions().  All JSObject maps
   // connected by map transitions have the same prototype object, which
   // is why we can use this field temporarily for back pointers.
-  static void CreateBackPointers();
+  void CreateBackPointers();
 
   // Mark a Map and its DescriptorArray together, skipping transitions.
-  static void MarkMapContents(Map* map);
-  static void MarkDescriptorArray(DescriptorArray* descriptors);
+  void MarkMapContents(Map* map);
+  void MarkDescriptorArray(DescriptorArray* descriptors);
 
   // Mark the heap roots and all objects reachable from them.
-  static void MarkRoots(RootMarkingVisitor* visitor);
+  void MarkRoots(RootMarkingVisitor* visitor);
 
   // Mark the symbol table specially.  References to symbols from the
   // symbol table are weak.
-  static void MarkSymbolTable();
+  void MarkSymbolTable();
 
   // Mark objects in object groups that have at least one object in the
   // group marked.
-  static void MarkObjectGroups();
+  void MarkObjectGroups();
 
   // Mark objects in implicit references groups if their parent object
   // is marked.
-  static void MarkImplicitRefGroups();
+  void MarkImplicitRefGroups();
 
   // Mark all objects which are reachable due to host application
   // logic like object groups or implicit references' groups.
-  static void ProcessExternalMarking();
+  void ProcessExternalMarking();
 
   // Mark objects reachable (transitively) from objects in the marking stack
   // or overflowed in the heap.
-  static void ProcessMarkingStack();
+  void ProcessMarkingStack();
 
   // Mark objects reachable (transitively) from objects in the marking
   // stack.  This function empties the marking stack, but may leave
   // overflowed objects in the heap, in which case the marking stack's
   // overflow flag will be set.
-  static void EmptyMarkingStack();
+  void EmptyMarkingStack();
 
   // Refill the marking stack with overflowed objects from the heap.  This
   // function either leaves the marking stack full or clears the overflow
   // flag on the marking stack.
-  static void RefillMarkingStack();
+  void RefillMarkingStack();
 
   // Callback function for telling whether the object *p is an unmarked
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);
 
 #ifdef DEBUG
-  static void UpdateLiveObjectCount(HeapObject* obj);
+  void UpdateLiveObjectCount(HeapObject* obj);
 #endif
 
   // We sweep the large object space in the same way whether we are
   // compacting or not, because the large object space is never compacted.
-  static void SweepLargeObjectSpace();
+  void SweepLargeObjectSpace();
 
   // Test whether a (possibly marked) object is a Map.
   static inline bool SafeIsMap(HeapObject* object);
 
   // Map transitions from a live map to a dead map must be killed.
   // We replace them with a null descriptor, with the same key.
-  static void ClearNonLiveTransitions();
+  void ClearNonLiveTransitions();
 
   // -----------------------------------------------------------------------
   // Phase 2: Sweeping to clear mark bits and free non-live objects for
@@ -305,32 +370,32 @@ class MarkCompactCollector: public AllStatic {
 
   // Encodes forwarding addresses of objects in compactable parts of the
   // heap.
-  static void EncodeForwardingAddresses();
+  void EncodeForwardingAddresses();
 
   // Encodes the forwarding addresses of objects in new space.
-  static void EncodeForwardingAddressesInNewSpace();
+  void EncodeForwardingAddressesInNewSpace();
 
   // Function template to encode the forwarding addresses of objects in
   // paged spaces, parameterized by allocation and non-live processing
   // functions.
   template<AllocationFunction Alloc, ProcessNonLiveFunction ProcessNonLive>
-  static void EncodeForwardingAddressesInPagedSpace(PagedSpace* space);
+  void EncodeForwardingAddressesInPagedSpace(PagedSpace* space);
 
   // Iterates live objects in a space, passes live objects
   // to a callback function which returns the heap size of the object.
   // Returns the number of live objects iterated.
-  static int IterateLiveObjects(NewSpace* space, HeapObjectCallback size_f);
-  static int IterateLiveObjects(PagedSpace* space, HeapObjectCallback size_f);
+  int IterateLiveObjects(NewSpace* space, LiveObjectCallback size_f);
+  int IterateLiveObjects(PagedSpace* space, LiveObjectCallback size_f);
 
   // Iterates the live objects between a range of addresses, returning the
   // number of live objects.
-  static int IterateLiveObjectsInRange(Address start, Address end,
-                                       HeapObjectCallback size_func);
+  int IterateLiveObjectsInRange(Address start, Address end,
+                                LiveObjectCallback size_func);
 
   // If we are not compacting the heap, we simply sweep the spaces except
   // for the large object space, clearing mark bits and adding unmarked
   // regions to each space's free list.
-  static void SweepSpaces();
+  void SweepSpaces();
 
   // -----------------------------------------------------------------------
   // Phase 3: Updating pointers in live objects.
@@ -344,15 +409,15 @@ class MarkCompactCollector: public AllStatic {
   friend class UpdatingVisitor;  // helper for updating visited objects
 
   // Updates pointers in all spaces.
-  static void UpdatePointers();
+  void UpdatePointers();
 
   // Updates pointers in an object in new space.
   // Returns the heap size of the object.
-  static int UpdatePointersInNewObject(HeapObject* obj);
+  int UpdatePointersInNewObject(HeapObject* obj);
 
   // Updates pointers in an object in old spaces.
   // Returns the heap size of the object.
-  static int UpdatePointersInOldObject(HeapObject* obj);
+  int UpdatePointersInOldObject(HeapObject* obj);
 
   // Calculates the forwarding address of an object in an old space.
   static Address GetForwardingAddressInOldSpace(HeapObject* obj);
@@ -366,31 +431,31 @@ class MarkCompactCollector: public AllStatic {
   //   After: Objects have been moved to their new addresses.
 
   // Relocates objects in all spaces.
-  static void RelocateObjects();
+  void RelocateObjects();
 
   // Converts a code object's inline target to addresses, convention from
   // address to target happens in the marking phase.
-  static int ConvertCodeICTargetToAddress(HeapObject* obj);
+  int ConvertCodeICTargetToAddress(HeapObject* obj);
 
   // Relocate a map object.
-  static int RelocateMapObject(HeapObject* obj);
+  int RelocateMapObject(HeapObject* obj);
 
   // Relocates an old object.
-  static int RelocateOldPointerObject(HeapObject* obj);
-  static int RelocateOldDataObject(HeapObject* obj);
+  int RelocateOldPointerObject(HeapObject* obj);
+  int RelocateOldDataObject(HeapObject* obj);
 
   // Relocate a property cell object.
-  static int RelocateCellObject(HeapObject* obj);
+  int RelocateCellObject(HeapObject* obj);
 
   // Helper function.
-  static inline int RelocateOldNonCodeObject(HeapObject* obj,
-                                             PagedSpace* space);
+  inline int RelocateOldNonCodeObject(HeapObject* obj,
+                                      PagedSpace* space);
 
   // Relocates an object in the code space.
-  static int RelocateCodeObject(HeapObject* obj);
+  int RelocateCodeObject(HeapObject* obj);
 
   // Copy a new object.
-  static int RelocateNewObject(HeapObject* obj);
+  int RelocateNewObject(HeapObject* obj);
 
 #ifdef DEBUG
   // -----------------------------------------------------------------------
@@ -399,28 +464,28 @@ class MarkCompactCollector: public AllStatic {
   // mark-sweep collection.
 
   // Size of live objects in Heap::to_space_.
-  static int live_young_objects_size_;
+  int live_young_objects_size_;
 
   // Size of live objects in Heap::old_pointer_space_.
-  static int live_old_pointer_objects_size_;
+  int live_old_pointer_objects_size_;
 
   // Size of live objects in Heap::old_data_space_.
-  static int live_old_data_objects_size_;
+  int live_old_data_objects_size_;
 
   // Size of live objects in Heap::code_space_.
-  static int live_code_objects_size_;
+  int live_code_objects_size_;
 
   // Size of live objects in Heap::map_space_.
-  static int live_map_objects_size_;
+  int live_map_objects_size_;
 
   // Size of live objects in Heap::cell_space_.
-  static int live_cell_objects_size_;
+  int live_cell_objects_size_;
 
   // Size of live objects in Heap::lo_space_.
-  static int live_lo_objects_size_;
+  int live_lo_objects_size_;
 
   // Number of live bytes in this collection.
-  static int live_bytes_;
+  int live_bytes_;
 
   friend class MarkObjectVisitor;
   static void VisitObject(HeapObject* obj);
@@ -428,6 +493,13 @@ class MarkCompactCollector: public AllStatic {
   friend class UnmarkObjectVisitor;
   static void UnmarkObject(HeapObject* obj);
 #endif
+
+  Heap* heap_;
+  MarkingStack marking_stack_;
+  CodeFlusher* code_flusher_;
+
+  friend class Heap;
+  friend class OverflowedObjectsScanner;
 };
 
 
index 990000a32ea84aa4d605562912afb3440c871354..cab982cecc5d9f82632218278ed032264809de1f 100644
@@ -32,7 +32,6 @@
 #include "execution.h"
 #include "messages.h"
 #include "spaces-inl.h"
-#include "top.h"
 
 namespace v8 {
 namespace internal {
@@ -68,18 +67,18 @@ Handle<JSMessageObject> MessageHandler::MakeMessageObject(
     Vector< Handle<Object> > args,
     Handle<String> stack_trace,
     Handle<JSArray> stack_frames) {
-  Handle<String> type_handle = Factory::LookupAsciiSymbol(type);
+  Handle<String> type_handle = FACTORY->LookupAsciiSymbol(type);
   Handle<FixedArray> arguments_elements =
-      Factory::NewFixedArray(args.length());
+      FACTORY->NewFixedArray(args.length());
   for (int i = 0; i < args.length(); i++) {
     arguments_elements->set(i, *args[i]);
   }
   Handle<JSArray> arguments_handle =
-      Factory::NewJSArrayWithElements(arguments_elements);
+      FACTORY->NewJSArrayWithElements(arguments_elements);
 
   int start = 0;
   int end = 0;
-  Handle<Object> script_handle = Factory::undefined_value();
+  Handle<Object> script_handle = FACTORY->undefined_value();
   if (loc) {
     start = loc->start_pos();
     end = loc->end_pos();
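The change applied throughout this file is mechanical: static Factory:: calls become calls through the isolate-local factory. A hedged sketch of the pattern, assuming the FACTORY macro resolves to the current isolate's factory (the macro is defined elsewhere in this patch):

    // Before: one process-wide factory.
    //   Handle<String> s = Factory::LookupAsciiSymbol("FormatMessage");
    // After: the factory belongs to the current isolate.
    Handle<String> s = FACTORY->LookupAsciiSymbol("FormatMessage");
    Handle<FixedArray> elements = FACTORY->NewFixedArray(3);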
@@ -87,15 +86,15 @@ Handle<JSMessageObject> MessageHandler::MakeMessageObject(
   }
 
   Handle<Object> stack_trace_handle = stack_trace.is_null()
-      ? Factory::undefined_value()
+      ? FACTORY->undefined_value()
       : Handle<Object>::cast(stack_trace);
 
   Handle<Object> stack_frames_handle = stack_frames.is_null()
-      ? Factory::undefined_value()
+      ? FACTORY->undefined_value()
       : Handle<Object>::cast(stack_frames);
 
   Handle<JSMessageObject> message =
-      Factory::NewJSMessageObject(type_handle,
+      FACTORY->NewJSMessageObject(type_handle,
                                   arguments_handle,
                                   start,
                                   end,
@@ -111,7 +110,7 @@ void MessageHandler::ReportMessage(MessageLocation* loc,
                                    Handle<Object> message) {
   v8::Local<v8::Message> api_message_obj = v8::Utils::MessageToLocal(message);
 
-  v8::NeanderArray global_listeners(Factory::message_listeners());
+  v8::NeanderArray global_listeners(FACTORY->message_listeners());
   int global_length = global_listeners.length();
   if (global_length == 0) {
     DefaultMessageReport(loc, message);
@@ -131,18 +130,21 @@ void MessageHandler::ReportMessage(MessageLocation* loc,
 
 
 Handle<String> MessageHandler::GetMessage(Handle<Object> data) {
-  Handle<String> fmt_str = Factory::LookupAsciiSymbol("FormatMessage");
+  Handle<String> fmt_str = FACTORY->LookupAsciiSymbol("FormatMessage");
   Handle<JSFunction> fun =
-      Handle<JSFunction>(JSFunction::cast(
-          Top::builtins()->GetPropertyNoExceptionThrown(*fmt_str)));
+      Handle<JSFunction>(
+          JSFunction::cast(
+              Isolate::Current()->js_builtins_object()->
+              GetPropertyNoExceptionThrown(*fmt_str)));
   Object** argv[1] = { data.location() };
 
   bool caught_exception;
   Handle<Object> result =
-      Execution::TryCall(fun, Top::builtins(), 1, argv, &caught_exception);
+      Execution::TryCall(fun,
+          Isolate::Current()->js_builtins_object(), 1, argv, &caught_exception);
 
   if (caught_exception || !result->IsString()) {
-    return Factory::LookupAsciiSymbol("<error>");
+    return FACTORY->LookupAsciiSymbol("<error>");
   }
   Handle<String> result_string = Handle<String>::cast(result);
   // A string that has been obtained from JS code in this way is
index 45de71c9285454e88fd24cb406c5d00110a84e5e..2ba12fd9f9a0f5355fb0f1c7ecd6e42e72af0dbd 100644
@@ -1085,5 +1085,5 @@ function errorToString() {
 InstallFunctions($Error.prototype, DONT_ENUM, ['toString', errorToString]);
 
 // Boilerplate for exceptions for stack overflows. Used from
-// Top::StackOverflow().
+// Isolate::StackOverflow().
 const kStackOverflowBoilerplate = MakeRangeError('stack_overflow', []);
index 79801f07becbd30506897511854a47f95285458d..b5226163f179e043691c6d3201fbe3bad477e82e 100644
@@ -201,7 +201,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
     if (!scope()->HasIllegalRedeclaration()) {
       Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-      bool is_builtin = Bootstrapper::IsActive();
+      bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
       bool should_trace =
           is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
       if (should_trace) {
@@ -1178,14 +1178,14 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
   __ lw(a3, MemOperand(t0));
   __ LoadExternalReference(t0,
-      ExternalReference(Top::k_pending_exception_address));
+      ExternalReference(Isolate::k_pending_exception_address));
   __ lw(v0, MemOperand(t0));
   __ sw(a3, MemOperand(t0));
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
   __ Branch(eq, throw_termination_exception,
-            v0, Operand(Factory::termination_exception()));
+            v0, Operand(FACTORY->termination_exception()));
 
   // Handle normal exception.
   __ b(throw_normal_exception);
@@ -1275,7 +1275,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   __ li(t2, Operand(Smi::FromInt(marker)));
   __ li(t1, Operand(Smi::FromInt(marker)));
-  __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+  __ LoadExternalReference(t0,
+      ExternalReference(Isolate::k_c_entry_fp_address));
   __ lw(t0, MemOperand(t0));
   __ MultiPush(t0.bit() | t1.bit() | t2.bit() | t3.bit());
 
@@ -1311,7 +1312,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Coming in here the fp will be invalid because the PushTryHandler below
   // sets it to 0 to signal the existence of the JSEntry frame.
   __ LoadExternalReference(t0,
-      ExternalReference(Top::k_pending_exception_address));
+      ExternalReference(Isolate::k_pending_exception_address));
   __ sw(v0, MemOperand(t0));  // We come back from 'invoke'. result is in v0.
   __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
   __ b(&exit);
@@ -1329,7 +1330,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
   __ lw(t1, MemOperand(t0));
   __ LoadExternalReference(t0,
-      ExternalReference(Top::k_pending_exception_address));
+      ExternalReference(Isolate::k_pending_exception_address));
   __ sw(t1, MemOperand(t0));
 
   // Invoke the function by calling through JS entry trampoline builtin.
@@ -1368,7 +1369,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // displacement since the current stack pointer (sp) points directly
   // to the stack handler.
   __ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
-  __ LoadExternalReference(t0, ExternalReference(Top::k_handler_address));
+  __ LoadExternalReference(t0, ExternalReference(Isolate::k_handler_address));
   __ sw(t1, MemOperand(t0));
 
   // This restores sp to its position before PushTryHandler.
@@ -1377,7 +1378,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ bind(&exit);  // v0 holds result
   // Restore the top frame descriptors from the stack.
   __ Pop(t1);
-  __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+  __ LoadExternalReference(t0,
+                           ExternalReference(Isolate::k_c_entry_fp_address));
   __ sw(t1, MemOperand(t0));
 
   // Reset the stack to the callee saved registers.
index 959a4a22068aa782a296db625f55f5ec2cb16198..7cb52a61a2f7e69a36a15231309f475bc72d3e35 100644
@@ -711,9 +711,8 @@ namespace v8i = v8::internal;
 
 
 const char* NameConverter::NameOfAddress(byte_* addr) const {
-  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
-  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
-  return tmp_buffer.start();
+  v8::internal::OS::SNPrintF(tmp_buffer_, "%p", addr);
+  return tmp_buffer_.start();
 }
 
 
diff --git a/src/mips/fast-codegen-mips.cc b/src/mips/fast-codegen-mips.cc
deleted file mode 100644
index 186f9fa..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#if defined(V8_TARGET_ARCH_MIPS)
-
-#include "codegen-inl.h"
-#include "fast-codegen.h"
-
-namespace v8 {
-namespace internal {
-
-#define __ ACCESS_MASM(masm_)
-
-Register FastCodeGenerator::accumulator0() { return no_reg; }
-Register FastCodeGenerator::accumulator1() { return no_reg; }
-Register FastCodeGenerator::scratch0() { return no_reg; }
-Register FastCodeGenerator::scratch1() { return no_reg; }
-Register FastCodeGenerator::receiver_reg() { return no_reg; }
-Register FastCodeGenerator::context_reg() { return no_reg; }
-
-
-void FastCodeGenerator::Generate(CompilationInfo* info) {
-  UNIMPLEMENTED_MIPS();
-}
-
-
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
-  UNIMPLEMENTED_MIPS();
-}
-
-
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> name) {
-  UNIMPLEMENTED_MIPS();
-}
-
-
-void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
-  UNIMPLEMENTED_MIPS();
-}
-
-
-void FastCodeGenerator::EmitBitOr() {
-  UNIMPLEMENTED_MIPS();
-}
-
-#undef __
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_MIPS
index e096028e3832a25a5c5585ae183eab5c671973f0..02ece8c56ca8954ee8b9771975e21cf9b70b971e 100644
@@ -822,7 +822,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
            && StackHandlerConstants::kPCOffset == 3 * kPointerSize
            && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
     // Save the current handler as the next handler.
-    LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
+    LoadExternalReference(t2, ExternalReference(Isolate::k_handler_address));
     lw(t1, MemOperand(t2));
 
     addiu(sp, sp, -StackHandlerConstants::kSize);
@@ -848,7 +848,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
     li(t0, Operand(StackHandler::ENTRY));
 
     // Save the current handler as the next handler.
-    LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
+    LoadExternalReference(t2, ExternalReference(Isolate::k_handler_address));
     lw(t1, MemOperand(t2));
 
     addiu(sp, sp, -StackHandlerConstants::kSize);
@@ -1270,9 +1270,9 @@ void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode,
   }
 
   // Save the frame pointer and the context in top.
-  LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+  LoadExternalReference(t0, ExternalReference(Isolate::k_c_entry_fp_address));
   sw(fp, MemOperand(t0));
-  LoadExternalReference(t0, ExternalReference(Top::k_context_address));
+  LoadExternalReference(t0, ExternalReference(Isolate::k_context_address));
   sw(cp, MemOperand(t0));
 
   // Setup argc and the builtin function in callee-saved registers.
@@ -1283,11 +1283,11 @@ void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode,
 
 void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
   // Clear top frame.
-  LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+  LoadExternalReference(t0, ExternalReference(Isolate::k_c_entry_fp_address));
   sw(zero_reg, MemOperand(t0));
 
   // Restore current context from top and clear it in debug mode.
-  LoadExternalReference(t0, ExternalReference(Top::k_context_address));
+  LoadExternalReference(t0, ExternalReference(Isolate::k_context_address));
   lw(cp, MemOperand(t0));
 #ifdef DEBUG
   sw(a3, MemOperand(t0));
index a30b45079ebd9b1449685319471141e5259077ec..6ecbc8c55557f1385bfcc685a713cb59758ef858 100644
@@ -223,12 +223,12 @@ int main(int argc, char** argv) {
   // Make sure all builtin scripts are cached.
   { HandleScope scope;
     for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
-      i::Bootstrapper::NativesSourceLookup(i);
+      i::Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
     }
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of the context.
-  i::Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
   i::Object* raw_context = *(v8::Utils::OpenHandle(*context));
   context.Dispose();
   CppByteSink sink(argv[1]);
index 71d2b0760c445bf2a77a433221f72c985f2d6502..dd606dcd0220f12bd69b958e08273979cab2dd03 100644
@@ -178,7 +178,7 @@ void HeapObject::HeapObjectVerify() {
 
 void HeapObject::VerifyHeapPointer(Object* p) {
   ASSERT(p->IsHeapObject());
-  ASSERT(Heap::Contains(HeapObject::cast(p)));
+  ASSERT(HEAP->Contains(HeapObject::cast(p)));
 }
 
 
@@ -241,18 +241,18 @@ void JSObject::JSObjectVerify() {
               map()->NextFreePropertyIndex()));
   }
   ASSERT(map()->has_fast_elements() ==
-         (elements()->map() == Heap::fixed_array_map() ||
-          elements()->map() == Heap::fixed_cow_array_map()));
+         (elements()->map() == GetHeap()->fixed_array_map() ||
+          elements()->map() == GetHeap()->fixed_cow_array_map()));
   ASSERT(map()->has_fast_elements() == HasFastElements());
 }
 
 
 void Map::MapVerify() {
-  ASSERT(!Heap::InNewSpace(this));
+  ASSERT(!HEAP->InNewSpace(this));
   ASSERT(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
   ASSERT(instance_size() == kVariableSizeSentinel ||
          (kPointerSize <= instance_size() &&
-          instance_size() < Heap::Capacity()));
+          instance_size() < HEAP->Capacity()));
   VerifyHeapPointer(prototype());
   VerifyHeapPointer(instance_descriptors());
 }
@@ -261,7 +261,7 @@ void Map::MapVerify() {
 void Map::SharedMapVerify() {
   MapVerify();
   ASSERT(is_shared());
-  ASSERT_EQ(Heap::empty_descriptor_array(), instance_descriptors());
+  ASSERT_EQ(GetHeap()->empty_descriptor_array(), instance_descriptors());
   ASSERT_EQ(0, pre_allocated_property_fields());
   ASSERT_EQ(0, unused_property_fields());
   ASSERT_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
@@ -315,7 +315,7 @@ void String::StringVerify() {
   CHECK(IsString());
   CHECK(length() >= 0 && length() <= Smi::kMaxValue);
   if (IsSymbol()) {
-    CHECK(!Heap::InNewSpace(this));
+    CHECK(!HEAP->InNewSpace(this));
   }
 }
 
@@ -379,7 +379,7 @@ void Oddball::OddballVerify() {
   VerifyHeapPointer(to_string());
   Object* number = to_number();
   if (number->IsHeapObject()) {
-    ASSERT(number == Heap::nan_value());
+    ASSERT(number == HEAP->nan_value());
   } else {
     ASSERT(number->IsSmi());
     int value = Smi::cast(number)->value();
@@ -590,8 +590,9 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
       int holes = 0;
       FixedArray* e = FixedArray::cast(elements());
       int len = e->length();
+      Heap* heap = HEAP;
       for (int i = 0; i < len; i++) {
-        if (e->get(i) == Heap::the_hole_value()) holes++;
+        if (e->get(i) == heap->the_hole_value()) holes++;
       }
       info->number_of_fast_used_elements_   += len - holes;
       info->number_of_fast_unused_elements_ += holes;
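One detail worth noting in the hunk above: HEAP is a thread-local lookup, so it is read once into a local before the element loop instead of on every iteration. The same hoisting pattern, sketched (e, len, and holes come from the surrounding function):

    Heap* heap = HEAP;                                     // single thread-local read
    for (int i = 0; i < len; i++) {
      if (e->get(i) == heap->the_hole_value()) holes++;    // no per-iteration lookup
    }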
index 128b7b995360e941b940955eebb08613e8012af0..e39474dc8f4b7a47f9858b0d60f4831ded617536 100644
@@ -40,6 +40,7 @@
 #include "conversions-inl.h"
 #include "heap.h"
 #include "memory.h"
+#include "isolate.h"
 #include "property.h"
 #include "spaces.h"
 
@@ -78,7 +79,16 @@ PropertyDetails PropertyDetails::AsDeleted() {
   type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
   void holder::set_##name(type* value, WriteBarrierMode mode) {         \
     WRITE_FIELD(this, offset, value);                                   \
-    CONDITIONAL_WRITE_BARRIER(this, offset, mode);                      \
+    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);           \
+  }
+
+
+// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
+#define ACCESSORS_GCSAFE(holder, name, type, offset)                    \
+  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
+  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
+    WRITE_FIELD(this, offset, value);                                   \
+    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode);                \
   }
 
 
@@ -419,7 +429,7 @@ bool MaybeObject::IsException() {
 
 
 bool MaybeObject::IsTheHole() {
-  return this == Heap::the_hole_value();
+  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
 }
 
 
@@ -487,22 +497,27 @@ bool Object::IsDeoptimizationOutputData() {
 
 
 bool Object::IsContext() {
-  return Object::IsHeapObject()
-    && (HeapObject::cast(this)->map() == Heap::context_map() ||
-        HeapObject::cast(this)->map() == Heap::catch_context_map() ||
-        HeapObject::cast(this)->map() == Heap::global_context_map());
+  if (Object::IsHeapObject()) {
+    Heap* heap = HeapObject::cast(this)->GetHeap();
+    return (HeapObject::cast(this)->map() == heap->context_map() ||
+            HeapObject::cast(this)->map() == heap->catch_context_map() ||
+            HeapObject::cast(this)->map() == heap->global_context_map());
+  }
+  return false;
 }
 
 
 bool Object::IsCatchContext() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map() == Heap::catch_context_map();
+  return Object::IsHeapObject() &&
+      HeapObject::cast(this)->map() ==
+      HeapObject::cast(this)->GetHeap()->catch_context_map();
 }
 
 
 bool Object::IsGlobalContext() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map() == Heap::global_context_map();
+  return Object::IsHeapObject() &&
+      HeapObject::cast(this)->map() ==
+      HeapObject::cast(this)->GetHeap()->global_context_map();
 }
 
 
@@ -524,6 +539,7 @@ bool Object::IsCode() {
 
 
 bool Object::IsOddball() {
+  ASSERT(HEAP->is_safe_to_read_maps());
   return Object::IsHeapObject()
     && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
 }
@@ -568,7 +584,8 @@ bool Object::IsProxy() {
 
 
 bool Object::IsBoolean() {
-  return IsTrue() || IsFalse();
+  return IsOddball() &&
+      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
 }
 
 
@@ -590,18 +607,21 @@ template <> inline bool Is<JSArray>(Object* obj) {
 
 
 bool Object::IsHashTable() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map() == Heap::hash_table_map();
+  return Object::IsHeapObject() &&
+      HeapObject::cast(this)->map() ==
+      HeapObject::cast(this)->GetHeap()->hash_table_map();
 }
 
 
 bool Object::IsDictionary() {
-  return IsHashTable() && this != Heap::symbol_table();
+  return IsHashTable() && this !=
+         HeapObject::cast(this)->GetHeap()->symbol_table();
 }
 
 
 bool Object::IsSymbolTable() {
-  return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
+  return IsHashTable() && this ==
+         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
 }
 
 
@@ -718,27 +738,32 @@ bool Object::IsStruct() {
 
 
 bool Object::IsUndefined() {
-  return this == Heap::undefined_value();
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
 }
 
 
 bool Object::IsNull() {
-  return this == Heap::null_value();
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
+}
+
+
+bool Object::IsTheHole() {
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
 }
 
 
 bool Object::IsTrue() {
-  return this == Heap::true_value();
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
 }
 
 
 bool Object::IsFalse() {
-  return this == Heap::false_value();
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
 }
 
 
 bool Object::IsArgumentsMarker() {
-  return this == Heap::arguments_marker();
+  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
 }
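With the kind byte added to Oddball (see the kind()/set_kind() accessors later in this file), the singleton checks above become type checks rather than pointer comparisons against one global heap's roots, so they hold for objects from any isolate. The IsBoolean change earlier in this file relies on the same byte; a sketch, assuming kNotBooleanMask is defined so that only kTrue and kFalse clear it:

    // Equivalent formulation of Object::IsBoolean() above.
    bool IsBooleanSketch(Object* obj) {
      if (!obj->IsOddball()) return false;
      byte kind = Oddball::cast(obj)->kind();
      return (kind & Oddball::kNotBooleanMask) == 0;  // only true/false pass
    }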
 
 
@@ -750,7 +775,6 @@ double Object::Number() {
 }
 
 
-
 MaybeObject* Object::ToSmi() {
   if (IsSmi()) return this;
   if (IsHeapNumber()) {
@@ -773,7 +797,7 @@ MaybeObject* Object::GetElement(uint32_t index) {
   // GetElement can trigger a getter which can cause allocation.
   // This was not always the case. This ASSERT is here to catch
   // leftover incorrect uses.
-  ASSERT(Heap::IsAllocationAllowed());
+  ASSERT(HEAP->IsAllocationAllowed());
   return GetElementWithReceiver(this, index);
 }
 
@@ -807,19 +831,19 @@ MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
 #define WRITE_FIELD(p, offset, value) \
   (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
 
-
+// TODO(isolates): Pass heap in to these macros.
 #define WRITE_BARRIER(object, offset) \
-  Heap::RecordWrite(object->address(), offset);
+  object->GetHeap()->RecordWrite(object->address(), offset);
 
 // CONDITIONAL_WRITE_BARRIER must be issued after the actual
 // write due to the assert validating the written value.
-#define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
+#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
   if (mode == UPDATE_WRITE_BARRIER) { \
-    Heap::RecordWrite(object->address(), offset); \
+    heap->RecordWrite(object->address(), offset); \
   } else { \
     ASSERT(mode == SKIP_WRITE_BARRIER); \
-    ASSERT(Heap::InNewSpace(object) || \
-           !Heap::InNewSpace(READ_FIELD(object, offset)) || \
+    ASSERT(heap->InNewSpace(object) || \
+           !heap->InNewSpace(READ_FIELD(object, offset)) || \
            Page::FromAddress(object->address())->           \
                IsRegionDirty(object->address() + offset));  \
   }
@@ -1099,6 +1123,23 @@ void HeapObject::VerifySmiField(int offset) {
 #endif
 
 
+Heap* HeapObject::GetHeap() {
+  // During GC, the map pointer in HeapObject is used in various ways that
+  // prevent us from retrieving Heap from the map.
+  // Assert that we are not in GC, implement GC code in a way that it doesn't
+  // pull heap from the map.
+  ASSERT(HEAP->is_safe_to_read_maps());
+  return map()->heap();
+}
+
+
+Isolate* HeapObject::GetIsolate() {
+  Isolate* i = GetHeap()->isolate();
+  ASSERT(i == Isolate::Current());
+  return i;
+}
+
+
 Map* HeapObject::map() {
   return map_word().ToMap();
 }
@@ -1223,25 +1264,25 @@ HeapObject* JSObject::elements() {
 
 void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
   ASSERT(map()->has_fast_elements() ==
-         (value->map() == Heap::fixed_array_map() ||
-          value->map() == Heap::fixed_cow_array_map()));
+         (value->map() == GetHeap()->fixed_array_map() ||
+          value->map() == GetHeap()->fixed_cow_array_map()));
   // In the assert below Dictionary is covered under FixedArray.
   ASSERT(value->IsFixedArray() || value->IsExternalArray());
   WRITE_FIELD(this, kElementsOffset, value);
-  CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
 }
 
 
 void JSObject::initialize_properties() {
-  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
-  WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array());
+  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
+  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
 }
 
 
 void JSObject::initialize_elements() {
   ASSERT(map()->has_fast_elements());
-  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
-  WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
+  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
+  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
 }
 
 
@@ -1260,6 +1301,16 @@ ACCESSORS(Oddball, to_string, String, kToStringOffset)
 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
 
 
+byte Oddball::kind() {
+  return READ_BYTE_FIELD(this, kKindOffset);
+}
+
+
+void Oddball::set_kind(byte value) {
+  WRITE_BYTE_FIELD(this, kKindOffset, value);
+}
+
+
 Object* JSGlobalPropertyCell::value() {
   return READ_FIELD(this, kValueOffset);
 }
@@ -1395,14 +1446,14 @@ Object* JSObject::InObjectPropertyAtPut(int index,
   ASSERT(index < 0);
   int offset = map()->instance_size() + (index * kPointerSize);
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
   return value;
 }
 
 
 
 void JSObject::InitializeBody(int object_size, Object* value) {
-  ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value));
+  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
   for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
     WRITE_FIELD(this, offset, value);
   }
@@ -1425,7 +1476,7 @@ int JSObject::MaxFastProperties() {
 
 
 void Struct::InitializeBody(int object_size) {
-  Object* value = Heap::undefined_value();
+  Object* value = GetHeap()->undefined_value();
   for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
     WRITE_FIELD(this, offset, value);
   }
@@ -1471,7 +1522,7 @@ Object* FixedArray::get(int index) {
 
 
 void FixedArray::set(int index, Smi* value) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
@@ -1479,7 +1530,7 @@ void FixedArray::set(int index, Smi* value) {
 
 
 void FixedArray::set(int index, Object* value) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
@@ -1488,7 +1539,7 @@ void FixedArray::set(int index, Object* value) {
 
 
 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
-  if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
+  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
   return UPDATE_WRITE_BARRIER;
 }
 
@@ -1496,44 +1547,55 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
 void FixedArray::set(int index,
                      Object* value,
                      WriteBarrierMode mode) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
 }
 
 
 void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
-  ASSERT(array->map() != Heap::raw_unchecked_fixed_cow_array_map());
+  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
   ASSERT(index >= 0 && index < array->length());
-  ASSERT(!Heap::InNewSpace(value));
+  ASSERT(!HEAP->InNewSpace(value));
   WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
 }
 
 
 void FixedArray::set_undefined(int index) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  ASSERT(map() != HEAP->fixed_cow_array_map());
+  set_undefined(GetHeap(), index);
+}
+
+
+void FixedArray::set_undefined(Heap* heap, int index) {
   ASSERT(index >= 0 && index < this->length());
-  ASSERT(!Heap::InNewSpace(Heap::undefined_value()));
+  ASSERT(!heap->InNewSpace(heap->undefined_value()));
   WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
-              Heap::undefined_value());
+              heap->undefined_value());
 }
 
 
 void FixedArray::set_null(int index) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  set_null(GetHeap(), index);
+}
+
+
+void FixedArray::set_null(Heap* heap, int index) {
   ASSERT(index >= 0 && index < this->length());
-  ASSERT(!Heap::InNewSpace(Heap::null_value()));
-  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
+  ASSERT(!heap->InNewSpace(heap->null_value()));
+  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
 }
 
 
 void FixedArray::set_the_hole(int index) {
-  ASSERT(map() != Heap::fixed_cow_array_map());
+  ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
-  ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
-  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value());
+  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
+  WRITE_FIELD(this,
+              kHeaderSize + index * kPointerSize,
+              GetHeap()->the_hole_value());
 }
 
 
@@ -1544,19 +1606,20 @@ void FixedArray::set_unchecked(int index, Smi* value) {
 }
 
 
-void FixedArray::set_unchecked(int index,
+void FixedArray::set_unchecked(Heap* heap,
+                               int index,
                                Object* value,
                                WriteBarrierMode mode) {
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
 }
 
 
-void FixedArray::set_null_unchecked(int index) {
+void FixedArray::set_null_unchecked(Heap* heap, int index) {
   ASSERT(index >= 0 && index < this->length());
-  ASSERT(!Heap::InNewSpace(Heap::null_value()));
-  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
+  ASSERT(!HEAP->InNewSpace(heap->null_value()));
+  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
 }
 
 
@@ -1566,9 +1629,9 @@ Object** FixedArray::data_start() {
 
 
 bool DescriptorArray::IsEmpty() {
-  ASSERT(this == Heap::empty_descriptor_array() ||
-         this->length() > 2);
-  return this == Heap::empty_descriptor_array();
+  ASSERT(this->length() > kFirstIndex ||
+         this == HEAP->empty_descriptor_array());
+  return length() <= kFirstIndex;
 }
 
 
@@ -1598,10 +1661,10 @@ int DescriptorArray::Search(String* name) {
 
 
 int DescriptorArray::SearchWithCache(String* name) {
-  int number = DescriptorLookupCache::Lookup(this, name);
+  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
   if (number == DescriptorLookupCache::kAbsent) {
     number = Search(name);
-    DescriptorLookupCache::Update(this, name, number);
+    GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
   }
   return number;
 }
@@ -1687,8 +1750,8 @@ void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
   ASSERT(descriptor_number < number_of_descriptors());
 
   // Make sure none of the elements in desc are in new space.
-  ASSERT(!Heap::InNewSpace(desc->GetKey()));
-  ASSERT(!Heap::InNewSpace(desc->GetValue()));
+  ASSERT(!HEAP->InNewSpace(desc->GetKey()));
+  ASSERT(!HEAP->InNewSpace(desc->GetValue()));
 
   fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
   FixedArray* content_array = GetContentArray();
@@ -1713,6 +1776,30 @@ void DescriptorArray::Swap(int first, int second) {
 }
 
 
+template<typename Shape, typename Key>
+int HashTable<Shape, Key>::FindEntry(Key key) {
+  return FindEntry(GetIsolate(), key);
+}
+
+
+// Find entry for key otherwise return kNotFound.
+template<typename Shape, typename Key>
+int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
+  uint32_t capacity = Capacity();
+  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
+  uint32_t count = 1;
+  // EnsureCapacity will guarantee the hash table is never full.
+  while (true) {
+    Object* element = KeyAt(entry);
+    if (element == isolate->heap()->undefined_value()) break;  // Empty entry.
+    if (element != isolate->heap()->null_value() &&
+        Shape::IsMatch(key, element)) return entry;
+    entry = NextProbe(entry, count++, capacity);
+  }
+  return kNotFound;
+}
+
+
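FindEntry above is a standard open-addressing lookup: undefined_value marks an empty slot (stop probing), null_value marks a deleted slot (skip it), and anything else is compared via Shape::IsMatch. The probing helpers are defined elsewhere in this header; a sketch of their assumed shape for power-of-two capacities (illustrative only, not the actual definitions):

    static inline uint32_t FirstProbeSketch(uint32_t hash, uint32_t size) {
      return hash & (size - 1);                    // size is a power of two
    }
    static inline uint32_t NextProbeSketch(uint32_t last, uint32_t number,
                                           uint32_t size) {
      return (last + number) & (size - 1);         // triangular probe sequence
    }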
 bool NumberDictionary::requires_slow_elements() {
   Object* max_index_object = get(kMaxNumberKeyIndex);
   if (!max_index_object->IsSmi()) return false;
@@ -1959,7 +2046,7 @@ Object* ConsString::unchecked_first() {
 
 void ConsString::set_first(String* value, WriteBarrierMode mode) {
   WRITE_FIELD(this, kFirstOffset, value);
-  CONDITIONAL_WRITE_BARRIER(this, kFirstOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
 }
 
 
@@ -1975,7 +2062,7 @@ Object* ConsString::unchecked_second() {
 
 void ConsString::set_second(String* value, WriteBarrierMode mode) {
   WRITE_FIELD(this, kSecondOffset, value);
-  CONDITIONAL_WRITE_BARRIER(this, kSecondOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
 }
 
 
@@ -2011,7 +2098,7 @@ void JSFunctionResultCache::Clear() {
   int cache_size = size();
   Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
   MemsetPointer(entries_start,
-                Heap::the_hole_value(),
+                GetHeap()->the_hole_value(),
                 cache_size - kEntriesIndex);
   MakeZeroSize();
 }
@@ -2728,6 +2815,15 @@ Code* Code::GetCodeFromTargetAddress(Address address) {
 }
 
 
+Heap* Map::heap() {
+  // NOTE: address() helper is not used to save one instruction.
+  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
+  ASSERT(heap != NULL);
+  ASSERT(heap->isolate() == Isolate::Current());
+  return heap;
+}
+
+
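Map::heap() above is the anchor of the GetHeap()/GetIsolate() chain earlier in this file: a heap object reaches its heap through its map, and a map reaches the heap through the page it lives on. A condensed sketch of that chain, mirroring the code above rather than introducing a separate mechanism (heap_ is assumed to be set when the page is created):

    Heap* HeapOfSketch(HeapObject* object) {
      Map* map = object->map();                              // requires readable maps
      Page* page = Page::FromAddress(reinterpret_cast<Address>(map));
      return page->heap_;                                    // same value as map->heap()
    }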
 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
   return HeapObject::
       FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
@@ -2742,7 +2838,7 @@ Object* Map::prototype() {
 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
   ASSERT(value->IsNull() || value->IsJSObject());
   WRITE_FIELD(this, kPrototypeOffset, value);
-  CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
 }
 
 
@@ -2754,7 +2850,7 @@ MaybeObject* Map::GetFastElementsMap() {
   }
   Map* new_map = Map::cast(obj);
   new_map->set_has_fast_elements(true);
-  Counters::map_slow_to_fast_elements.Increment();
+  COUNTERS->map_slow_to_fast_elements()->Increment();
   return new_map;
 }
 
@@ -2767,7 +2863,7 @@ MaybeObject* Map::GetSlowElementsMap() {
   }
   Map* new_map = Map::cast(obj);
   new_map->set_has_fast_elements(false);
-  Counters::map_fast_to_slow_elements.Increment();
+  COUNTERS->map_fast_to_slow_elements()->Increment();
   return new_map;
 }
 
@@ -2782,7 +2878,7 @@ MaybeObject* Map::NewExternalArrayElementsMap() {
   Map* new_map = Map::cast(obj);
   new_map->set_has_fast_elements(false);
   new_map->set_has_external_array_elements(true);
-  Counters::map_to_external_array_elements.Increment();
+  COUNTERS->map_to_external_array_elements()->Increment();
   return new_map;
 }
 
@@ -2794,7 +2890,8 @@ ACCESSORS(Map, constructor, Object, kConstructorOffset)
 
 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
-ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
+ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
+                 kNextFunctionLinkOffset)
 
 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
@@ -2883,8 +2980,8 @@ ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
 #endif
 
 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
-ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
-ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
+ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
+ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
           kInstanceClassNameOffset)
 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
@@ -3012,7 +3109,7 @@ void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
 
 
 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
-  return initial_map() != Heap::undefined_value();
+  return initial_map() != HEAP->undefined_value();
 }
 
 
@@ -3091,7 +3188,7 @@ Code* SharedFunctionInfo::unchecked_code() {
 
 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
   WRITE_FIELD(this, kCodeOffset, value);
-  CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode);
+  ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
 }
 
 
@@ -3104,7 +3201,7 @@ SerializedScopeInfo* SharedFunctionInfo::scope_info() {
 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
                                         WriteBarrierMode mode) {
   WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
-  CONDITIONAL_WRITE_BARRIER(this, kScopeInfoOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
 }
 
 
@@ -3119,7 +3216,8 @@ void SharedFunctionInfo::set_deopt_counter(Smi* value) {
 
 
 bool SharedFunctionInfo::is_compiled() {
-  return code() != Builtins::builtin(Builtins::LazyCompile);
+  return code() !=
+      Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
 }
 
 
@@ -3179,7 +3277,7 @@ bool JSFunction::IsOptimized() {
 
 
 bool JSFunction::IsMarkedForLazyRecompilation() {
-  return code() == Builtins::builtin(Builtins::LazyRecompile);
+  return code() == GetIsolate()->builtins()->builtin(Builtins::LazyRecompile);
 }
 
 
@@ -3196,7 +3294,7 @@ Code* JSFunction::unchecked_code() {
 
 void JSFunction::set_code(Code* value) {
   // Skip the write barrier because code is never in new space.
-  ASSERT(!Heap::InNewSpace(value));
+  ASSERT(!HEAP->InNewSpace(value));
   Address entry = value->entry();
   WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
 }
@@ -3236,7 +3334,7 @@ SharedFunctionInfo* JSFunction::unchecked_shared() {
 
 
 void JSFunction::set_context(Object* value) {
-  ASSERT(value == Heap::undefined_value() || value->IsContext());
+  ASSERT(value->IsUndefined() || value->IsContext());
   WRITE_FIELD(this, kContextOffset, value);
   WRITE_BARRIER(this, kContextOffset);
 }
@@ -3293,7 +3391,7 @@ bool JSFunction::should_have_prototype() {
 
 
 bool JSFunction::is_compiled() {
-  return code() != Builtins::builtin(Builtins::LazyCompile);
+  return code() != GetIsolate()->builtins()->builtin(Builtins::LazyCompile);
 }
 
 
@@ -3326,7 +3424,7 @@ void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                    Code* value) {
   ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
   WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
-  ASSERT(!Heap::InNewSpace(value));
+  ASSERT(!HEAP->InNewSpace(value));
 }
 
 
@@ -3476,8 +3574,8 @@ void JSRegExp::SetDataAt(int index, Object* value) {
 
 JSObject::ElementsKind JSObject::GetElementsKind() {
   if (map()->has_fast_elements()) {
-    ASSERT(elements()->map() == Heap::fixed_array_map() ||
-           elements()->map() == Heap::fixed_cow_array_map());
+    ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
+           elements()->map() == GetHeap()->fixed_cow_array_map());
     return FAST_ELEMENTS;
   }
   HeapObject* array = elements();
@@ -3487,6 +3585,7 @@ JSObject::ElementsKind JSObject::GetElementsKind() {
     ASSERT(array->IsDictionary());
     return DICTIONARY_ELEMENTS;
   }
+  ASSERT(!map()->has_fast_elements());
   if (array->IsExternalArray()) {
     switch (array->map()->instance_type()) {
       case EXTERNAL_BYTE_ARRAY_TYPE:
@@ -3572,16 +3671,17 @@ bool JSObject::AllowsSetElementsLength() {
 MaybeObject* JSObject::EnsureWritableFastElements() {
   ASSERT(HasFastElements());
   FixedArray* elems = FixedArray::cast(elements());
-  if (elems->map() != Heap::fixed_cow_array_map()) return elems;
+  Isolate* isolate = GetIsolate();
+  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
   Object* writable_elems;
-  { MaybeObject* maybe_writable_elems =
-        Heap::CopyFixedArrayWithMap(elems, Heap::fixed_array_map());
+  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
+      elems, isolate->heap()->fixed_array_map());
     if (!maybe_writable_elems->ToObject(&writable_elems)) {
       return maybe_writable_elems;
     }
   }
   set_elements(FixedArray::cast(writable_elems));
-  Counters::cow_arrays_converted.Increment();
+  isolate->counters()->cow_arrays_converted()->Increment();
   return writable_elems;
 }
 
@@ -3721,7 +3821,7 @@ PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
 Object* JSObject::BypassGlobalProxy() {
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
-    if (proto->IsNull()) return Heap::undefined_value();
+    if (proto->IsNull()) return GetHeap()->undefined_value();
     ASSERT(proto->IsJSGlobalObject());
     return proto;
   }
@@ -3732,7 +3832,7 @@ Object* JSObject::BypassGlobalProxy() {
 bool JSObject::HasHiddenPropertiesObject() {
   ASSERT(!IsJSGlobalProxy());
   return GetPropertyAttributePostInterceptor(this,
-                                             Heap::hidden_symbol(),
+                                             GetHeap()->hidden_symbol(),
                                              false) != ABSENT;
 }
 
@@ -3745,7 +3845,7 @@ Object* JSObject::GetHiddenPropertiesObject() {
   // object.
   Object* result =
       GetLocalPropertyPostInterceptor(this,
-                                      Heap::hidden_symbol(),
+                                      GetHeap()->hidden_symbol(),
                                       &attributes)->ToObjectUnchecked();
   return result;
 }
@@ -3753,7 +3853,7 @@ Object* JSObject::GetHiddenPropertiesObject() {
 
 MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
   ASSERT(!IsJSGlobalProxy());
-  return SetPropertyPostInterceptor(Heap::hidden_symbol(),
+  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                     hidden_obj,
                                     DONT_ENUM,
                                     kNonStrictMode);
@@ -3821,12 +3921,57 @@ void Dictionary<Shape, Key>::SetEntry(int entry,
 }
 
 
-void Map::ClearCodeCache() {
+bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
+  ASSERT(other->IsNumber());
+  return key == static_cast<uint32_t>(other->Number());
+}
+
+
+uint32_t NumberDictionaryShape::Hash(uint32_t key) {
+  return ComputeIntegerHash(key);
+}
+
+
+uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
+  ASSERT(other->IsNumber());
+  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
+}
+
+
+MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
+  return Isolate::Current()->heap()->NumberFromUint32(key);
+}
+
+
+bool StringDictionaryShape::IsMatch(String* key, Object* other) {
+  // We know that all entries in a hash table had their hash keys created.
+  // Use that knowledge to have fast failure.
+  if (key->Hash() != String::cast(other)->Hash()) return false;
+  return key->Equals(String::cast(other));
+}
+
+
+uint32_t StringDictionaryShape::Hash(String* key) {
+  return key->Hash();
+}
+
+
+uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
+  return String::cast(other)->Hash();
+}
+
+
+MaybeObject* StringDictionaryShape::AsObject(String* key) {
+  return key;
+}
+
+
+void Map::ClearCodeCache(Heap* heap) {
   // No write barrier is needed since empty_fixed_array is not in new space.
   // Please note this function is used during marking:
   //  - MarkCompactCollector::MarkUnmarkedObject
-  ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
-  WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
+  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
+  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
 }
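NumberDictionaryShape and StringDictionaryShape above are policy classes: they tell the templated hash tables how to hash a key and how to compare a key against a stored entry. A rough, self-contained sketch of how such a shape drives a FindEntry-style lookup follows; the table layout and the linear probe sequence are simplifying assumptions, not the real HashTable implementation.

    #include <cstdint>
    #include <vector>

    // Simplified stand-in for a shape policy, mirroring the Hash/IsMatch
    // pair that NumberDictionaryShape provides above.
    struct IntShape {
      static uint32_t Hash(uint32_t key) { return key * 2654435761u; }  // assumed mixer
      static bool IsMatch(uint32_t key, uint32_t stored) { return key == stored; }
    };

    // Minimal open-addressing lookup parameterized by a shape. Returns the
    // slot index holding the key, or -1 when the key is absent.
    template <typename Shape, typename Key>
    int FindEntry(const std::vector<Key>& table,
                  const std::vector<bool>& used,
                  Key key) {
      uint32_t capacity = static_cast<uint32_t>(table.size());  // power of two assumed
      uint32_t start = Shape::Hash(key) & (capacity - 1);
      for (uint32_t i = 0; i < capacity; ++i) {
        uint32_t probe = (start + i) & (capacity - 1);  // linear probing, simplified
        if (!used[probe]) return -1;                    // empty slot: key is absent
        if (Shape::IsMatch(key, table[probe])) return static_cast<int>(probe);
      }
      return -1;
    }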
 
 
@@ -3839,7 +3984,7 @@ void JSArray::EnsureSize(int required_size) {
     // constantly growing.
     Expand(required_size + (required_size >> 3));
     // It's a performance benefit to keep a frequently used array in new-space.
-  } else if (!Heap::new_space()->Contains(elts) &&
+  } else if (!GetHeap()->new_space()->Contains(elts) &&
              required_size < kArraySizeThatFitsComfortablyInNewSpace) {
     // Expand will allocate a new backing store in new space even if the size
     // we asked for isn't larger than what we had before.
@@ -3861,7 +4006,22 @@ void JSArray::SetContent(FixedArray* storage) {
 
 MaybeObject* FixedArray::Copy() {
   if (length() == 0) return this;
-  return Heap::CopyFixedArray(this);
+  return GetHeap()->CopyFixedArray(this);
+}
+
+
+Relocatable::Relocatable(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  isolate_ = isolate;
+  prev_ = isolate->relocatable_top();
+  isolate->set_relocatable_top(this);
+}
+
+
+Relocatable::~Relocatable() {
+  ASSERT(isolate_ == Isolate::Current());
+  ASSERT_EQ(isolate_->relocatable_top(), this);
+  isolate_->set_relocatable_top(prev_);
 }
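Relocatable's constructor and destructor above maintain a strictly LIFO, per-isolate chain of stack-allocated objects via relocatable_top()/set_relocatable_top(). A small stand-alone sketch of that push/pop discipline follows; Tracker is a hypothetical stand-in for Isolate, and everything apart from the chain maintenance is illustrative.

    #include <cassert>

    class Scoped;

    struct Tracker {              // stands in for Isolate
      Scoped* top = nullptr;      // analogous to relocatable_top()
    };

    class Scoped {                // stands in for Relocatable
     public:
      explicit Scoped(Tracker* tracker) : tracker_(tracker), prev_(tracker->top) {
        tracker->top = this;      // push on construction
      }
      ~Scoped() {
        assert(tracker_->top == this);  // destruction must be strictly LIFO
        tracker_->top = prev_;          // pop on destruction
      }
     private:
      Tracker* tracker_;
      Scoped* prev_;
    };

Walking the chain from Tracker::top therefore reaches every live Scoped object on the C++ stack in reverse construction order.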
 
 
index ea6d7954edcc14dbf6585b291731bc25d269c435..42f90608c35032452fc83b909ef13cd7d9000f61 100644
@@ -186,14 +186,15 @@ class VisitorDispatchTable {
 template<typename StaticVisitor>
 class BodyVisitorBase : public AllStatic {
  public:
-  INLINE(static void IteratePointers(HeapObject* object,
+  INLINE(static void IteratePointers(Heap* heap,
+                                     HeapObject* object,
                                      int start_offset,
                                      int end_offset)) {
     Object** start_slot = reinterpret_cast<Object**>(object->address() +
                                                      start_offset);
     Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                    end_offset);
-    StaticVisitor::VisitPointers(start_slot, end_slot);
+    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
   }
 };
 
@@ -204,7 +205,10 @@ class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
   static inline ReturnType Visit(Map* map, HeapObject* object) {
     int object_size = BodyDescriptor::SizeOf(map, object);
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        object, BodyDescriptor::kStartOffset, object_size);
+        map->heap(),
+        object,
+        BodyDescriptor::kStartOffset,
+        object_size);
     return static_cast<ReturnType>(object_size);
   }
 
@@ -212,7 +216,10 @@ class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
   static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
     ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        object, BodyDescriptor::kStartOffset, object_size);
+        map->heap(),
+        object,
+        BodyDescriptor::kStartOffset,
+        object_size);
     return static_cast<ReturnType>(object_size);
   }
 };
@@ -223,7 +230,10 @@ class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
  public:
   static inline ReturnType Visit(Map* map, HeapObject* object) {
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        object, BodyDescriptor::kStartOffset, BodyDescriptor::kEndOffset);
+        map->heap(),
+        object,
+        BodyDescriptor::kStartOffset,
+        BodyDescriptor::kEndOffset);
     return static_cast<ReturnType>(BodyDescriptor::kSize);
   }
 };
@@ -299,8 +309,8 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
     return table_.GetVisitor(map)(map, obj);
   }
 
-  static inline void VisitPointers(Object** start, Object** end) {
-    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
+  static inline void VisitPointers(Heap* heap, Object** start, Object** end) {
+    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
   }
 
  private:
@@ -372,7 +382,7 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
 
 
 template<typename StaticVisitor>
-void Code::CodeIterateBody() {
+void Code::CodeIterateBody(Heap* heap) {
   int mode_mask = RelocInfo::kCodeTargetMask |
                   RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                   RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
@@ -386,12 +396,14 @@ void Code::CodeIterateBody() {
   RelocIterator it(this, mode_mask);
 
   StaticVisitor::VisitPointer(
+      heap,
       reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
   StaticVisitor::VisitPointer(
+      heap,
       reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
 
   for (; !it.done(); it.next()) {
-    it.rinfo()->template Visit<StaticVisitor>();
+    it.rinfo()->template Visit<StaticVisitor>(heap);
   }
 }
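The objects-visiting changes above thread an explicit Heap* through the static visitor callbacks instead of relying on process-global state: IteratePointers, VisitPointers and rinfo()->Visit all gain a Heap argument. A compact sketch of the resulting calling convention follows; MyVisitor and its body are illustrative, and only the VisitPointers(Heap*, Object**, Object**) shape mirrors the patch.

    struct Heap;    // stand-ins, not the real v8::internal types
    struct Object;

    struct MyVisitor {
      static void VisitPointer(Heap* heap, Object** p) {
        // A real visitor would, e.g., record *p in marking state owned by
        // 'heap'; this body is a placeholder.
        (void)heap;
        (void)p;
      }
      static void VisitPointers(Heap* heap, Object** start, Object** end) {
        for (Object** p = start; p < end; p++) VisitPointer(heap, p);
      }
    };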
 
index a4e07d289245fc7f261bc3f65edc0af664f4b8e1..da751477338fefbb40ac95ab22a8e658fd21a687 100644
@@ -51,7 +51,6 @@
 #include "disassembler.h"
 #endif
 
-
 namespace v8 {
 namespace internal {
 
@@ -64,7 +63,8 @@ const int kSetterIndex = 1;
 MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
                                                   Object* value) {
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateJSObject(constructor);
+  { MaybeObject* maybe_result =
+        constructor->GetHeap()->AllocateJSObject(constructor);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSValue::cast(result)->set_value(value);
@@ -86,14 +86,19 @@ MaybeObject* Object::ToObject(Context* global_context) {
 
 
 MaybeObject* Object::ToObject() {
-  Context* global_context = Top::context()->global_context();
   if (IsJSObject()) {
     return this;
   } else if (IsNumber()) {
+    Isolate* isolate = Isolate::Current();
+    Context* global_context = isolate->context()->global_context();
     return CreateJSValue(global_context->number_function(), this);
   } else if (IsBoolean()) {
+    Isolate* isolate = HeapObject::cast(this)->GetIsolate();
+    Context* global_context = isolate->context()->global_context();
     return CreateJSValue(global_context->boolean_function(), this);
   } else if (IsString()) {
+    Isolate* isolate = HeapObject::cast(this)->GetIsolate();
+    Context* global_context = isolate->context()->global_context();
     return CreateJSValue(global_context->string_function(), this);
   }
 
@@ -103,35 +108,43 @@ MaybeObject* Object::ToObject() {
 
 
 Object* Object::ToBoolean() {
-  if (IsTrue()) return Heap::true_value();
-  if (IsFalse()) return Heap::false_value();
+  if (IsTrue()) return this;
+  if (IsFalse()) return this;
   if (IsSmi()) {
-    return Heap::ToBoolean(Smi::cast(this)->value() != 0);
+    return Isolate::Current()->heap()->ToBoolean(Smi::cast(this)->value() != 0);
+  }
+  if (IsUndefined() || IsNull()) {
+    return HeapObject::cast(this)->GetHeap()->false_value();
   }
-  if (IsUndefined() || IsNull()) return Heap::false_value();
   // Undetectable object is false
   if (IsUndetectableObject()) {
-    return Heap::false_value();
+    return HeapObject::cast(this)->GetHeap()->false_value();
   }
   if (IsString()) {
-    return Heap::ToBoolean(String::cast(this)->length() != 0);
+    return HeapObject::cast(this)->GetHeap()->ToBoolean(
+        String::cast(this)->length() != 0);
   }
   if (IsHeapNumber()) {
     return HeapNumber::cast(this)->HeapNumberToBoolean();
   }
-  return Heap::true_value();
+  return Isolate::Current()->heap()->true_value();
 }
 
 
 void Object::Lookup(String* name, LookupResult* result) {
   if (IsJSObject()) return JSObject::cast(this)->Lookup(name, result);
   Object* holder = NULL;
-  Context* global_context = Top::context()->global_context();
   if (IsString()) {
+    Heap* heap = HeapObject::cast(this)->GetHeap();
+    Context* global_context = heap->isolate()->context()->global_context();
     holder = global_context->string_function()->instance_prototype();
   } else if (IsNumber()) {
+    Heap* heap = Isolate::Current()->heap();
+    Context* global_context = heap->isolate()->context()->global_context();
     holder = global_context->number_function()->instance_prototype();
   } else if (IsBoolean()) {
+    Heap* heap = HeapObject::cast(this)->GetHeap();
+    Context* global_context = heap->isolate()->context()->global_context();
     holder = global_context->boolean_function()->instance_prototype();
   }
   ASSERT(holder != NULL);  // Cannot handle null or undefined.
@@ -154,6 +167,7 @@ MaybeObject* Object::GetPropertyWithCallback(Object* receiver,
                                              Object* structure,
                                              String* name,
                                              Object* holder) {
+  Isolate* isolate = name->GetIsolate();
   // To accommodate both the old and the new api we switch on the
   // data structure used to store the callbacks.  Eventually proxy
   // callbacks should be phased out.
@@ -161,7 +175,7 @@ MaybeObject* Object::GetPropertyWithCallback(Object* receiver,
     AccessorDescriptor* callback =
         reinterpret_cast<AccessorDescriptor*>(Proxy::cast(structure)->proxy());
     MaybeObject* value = (callback->getter)(receiver, callback->data);
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     return value;
   }
 
@@ -174,17 +188,19 @@ MaybeObject* Object::GetPropertyWithCallback(Object* receiver,
     JSObject* self = JSObject::cast(receiver);
     JSObject* holder_handle = JSObject::cast(holder);
     Handle<String> key(name);
-    LOG(ApiNamedPropertyAccess("load", self, name));
-    CustomArguments args(data->data(), self, holder_handle);
+    LOG(isolate, ApiNamedPropertyAccess("load", self, name));
+    CustomArguments args(isolate, data->data(), self, holder_handle);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = call_fun(v8::Utils::ToLocal(key), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
-    if (result.IsEmpty()) return Heap::undefined_value();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
+    if (result.IsEmpty()) {
+      return isolate->heap()->undefined_value();
+    }
     return *v8::Utils::OpenHandle(*result);
   }
 
@@ -196,7 +212,7 @@ MaybeObject* Object::GetPropertyWithCallback(Object* receiver,
                                                   JSFunction::cast(getter));
     }
     // Getter is not a function.
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   UNREACHABLE();
@@ -210,9 +226,10 @@ MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
   Handle<JSFunction> fun(JSFunction::cast(getter));
   Handle<Object> self(receiver);
 #ifdef ENABLE_DEBUGGER_SUPPORT
+  Debug* debug = fun->GetHeap()->isolate()->debug();
   // Handle stepping into a getter if step into is active.
-  if (Debug::StepInActive()) {
-    Debug::HandleStepIn(fun, Handle<Object>::null(), 0, false);
+  if (debug->StepInActive()) {
+    debug->HandleStepIn(fun, Handle<Object>::null(), 0, false);
   }
 #endif
   bool has_pending_exception;
@@ -230,6 +247,7 @@ MaybeObject* JSObject::GetPropertyWithFailedAccessCheck(
     LookupResult* result,
     String* name,
     PropertyAttributes* attributes) {
+  Heap* heap = name->GetHeap();
   if (result->IsProperty()) {
     switch (result->type()) {
       case CALLBACKS: {
@@ -281,8 +299,8 @@ MaybeObject* JSObject::GetPropertyWithFailedAccessCheck(
 
   // No accessible property found.
   *attributes = ABSENT;
-  Top::ReportFailedAccessCheck(this, v8::ACCESS_GET);
-  return Heap::undefined_value();
+  heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_GET);
+  return heap->undefined_value();
 }
 
 
@@ -291,6 +309,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
     LookupResult* result,
     String* name,
     bool continue_search) {
+  Heap* heap = name->GetHeap();
   if (result->IsProperty()) {
     switch (result->type()) {
       case CALLBACKS: {
@@ -344,7 +363,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
     }
   }
 
-  Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+  heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
   return ABSENT;
 }
 
@@ -378,16 +397,14 @@ MaybeObject* JSObject::SetNormalizedProperty(String* name,
                                              Object* value,
                                              PropertyDetails details) {
   ASSERT(!HasFastProperties());
+  Heap* heap = name->GetHeap();
   int entry = property_dictionary()->FindEntry(name);
   if (entry == StringDictionary::kNotFound) {
     Object* store_value = value;
     if (IsGlobalObject()) {
-      { MaybeObject* maybe_store_value =
-            Heap::AllocateJSGlobalPropertyCell(value);
-        if (!maybe_store_value->ToObject(&store_value)) {
-          return maybe_store_value;
-        }
-      }
+      MaybeObject* maybe_store_value =
+          heap->AllocateJSGlobalPropertyCell(value);
+      if (!maybe_store_value->ToObject(&store_value)) return maybe_store_value;
     }
     Object* dict;
     { MaybeObject* maybe_dict =
@@ -416,6 +433,7 @@ MaybeObject* JSObject::SetNormalizedProperty(String* name,
 
 MaybeObject* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
   ASSERT(!HasFastProperties());
+  Heap* heap = GetHeap();
   StringDictionary* dictionary = property_dictionary();
   int entry = dictionary->FindEntry(name);
   if (entry != StringDictionary::kNotFound) {
@@ -423,7 +441,7 @@ MaybeObject* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
     if (IsGlobalObject()) {
       PropertyDetails details = dictionary->DetailsAt(entry);
       if (details.IsDontDelete()) {
-        if (mode != FORCE_DELETION) return Heap::false_value();
+        if (mode != FORCE_DELETION) return heap->false_value();
         // When forced to delete global properties, we have to make a
         // map change to invalidate any ICs that think they can load
         // from the DontDelete cell without checking if it contains
@@ -436,13 +454,13 @@ MaybeObject* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
       }
       JSGlobalPropertyCell* cell =
           JSGlobalPropertyCell::cast(dictionary->ValueAt(entry));
-      cell->set_value(Heap::the_hole_value());
+      cell->set_value(heap->the_hole_value());
       dictionary->DetailsAtPut(entry, details.AsDeleted());
     } else {
       return dictionary->DeleteProperty(entry, mode);
     }
   }
-  return Heap::true_value();
+  return heap->true_value();
 }
 
 
@@ -468,6 +486,7 @@ MaybeObject* Object::GetProperty(Object* receiver,
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
+  Heap* heap = name->GetHeap();
 
   // Traverse the prototype chain from the current object (this) to
   // the holder and check for access rights. This avoid traversing the
@@ -475,7 +494,7 @@ MaybeObject* Object::GetProperty(Object* receiver,
   // holder will always be the interceptor holder and the search may
   // only continue with a current object just after the interceptor
   // holder in the prototype chain.
-  Object* last = result->IsProperty() ? result->holder() : Heap::null_value();
+  Object* last = result->IsProperty() ? result->holder() : heap->null_value();
   for (Object* current = this; true; current = current->GetPrototype()) {
     if (current->IsAccessCheckNeeded()) {
       // Check if we're allowed to read from the current object. Note
@@ -483,7 +502,7 @@ MaybeObject* Object::GetProperty(Object* receiver,
       // property from the current object, we still check that we have
       // access to it.
       JSObject* checked = JSObject::cast(current);
-      if (!Top::MayNamedAccess(checked, name, v8::ACCESS_GET)) {
+      if (!heap->isolate()->MayNamedAccess(checked, name, v8::ACCESS_GET)) {
         return checked->GetPropertyWithFailedAccessCheck(receiver,
                                                          result,
                                                          name,
@@ -498,7 +517,7 @@ MaybeObject* Object::GetProperty(Object* receiver,
 
   if (!result->IsProperty()) {
     *attributes = ABSENT;
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
   *attributes = result->GetAttributes();
   Object* value;
@@ -507,11 +526,11 @@ MaybeObject* Object::GetProperty(Object* receiver,
     case NORMAL:
       value = holder->GetNormalizedProperty(result);
       ASSERT(!value->IsTheHole() || result->IsReadOnly());
-      return value->IsTheHole() ? Heap::undefined_value() : value;
+      return value->IsTheHole() ? heap->undefined_value() : value;
     case FIELD:
       value = holder->FastPropertyAt(result->GetFieldIndex());
       ASSERT(!value->IsTheHole() || result->IsReadOnly());
-      return value->IsTheHole() ? Heap::undefined_value() : value;
+      return value->IsTheHole() ? heap->undefined_value() : value;
     case CONSTANT_FUNCTION:
       return result->GetConstantFunction();
     case CALLBACKS:
@@ -536,7 +555,7 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
   }
 
   Object* holder = NULL;
-  Context* global_context = Top::context()->global_context();
+  Context* global_context = Isolate::Current()->context()->global_context();
   if (IsString()) {
     holder = global_context->string_function()->instance_prototype();
   } else if (IsNumber()) {
@@ -546,7 +565,7 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
   } else {
     // Undefined and null have no indexed properties.
     ASSERT(IsUndefined() || IsNull());
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   return JSObject::cast(holder)->GetElementWithReceiver(receiver, index);
@@ -556,14 +575,15 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
 Object* Object::GetPrototype() {
   // The object is either a number, a string, a boolean, or a real JS object.
   if (IsJSObject()) return JSObject::cast(this)->map()->prototype();
-  Context* context = Top::context()->global_context();
+  Heap* heap = Isolate::Current()->heap();
+  Context* context = heap->isolate()->context()->global_context();
 
   if (IsNumber()) return context->number_function()->instance_prototype();
   if (IsString()) return context->string_function()->instance_prototype();
   if (IsBoolean()) {
     return context->boolean_function()->instance_prototype();
   } else {
-    return Heap::null_value();
+    return heap->null_value();
   }
 }
 
@@ -637,9 +657,10 @@ MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
   // allowed.  This is to avoid an assertion failure when allocating.
   // Flattening strings is the only case where we always allow
   // allocation because no GC is performed if the allocation fails.
-  if (!Heap::IsAllocationAllowed()) return this;
+  if (!HEAP->IsAllocationAllowed()) return this;
 #endif
 
+  Heap* heap = GetHeap();
   switch (StringShape(this).representation_tag()) {
     case kConsStringTag: {
       ConsString* cs = ConsString::cast(this);
@@ -649,12 +670,12 @@ MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
       // There's little point in putting the flat string in new space if the
       // cons string is in old space.  It can never get GCed until there is
       // an old space GC.
-      PretenureFlag tenure = Heap::InNewSpace(this) ? pretenure : TENURED;
+      PretenureFlag tenure = heap->InNewSpace(this) ? pretenure : TENURED;
       int len = length();
       Object* object;
       String* result;
       if (IsAsciiRepresentation()) {
-        { MaybeObject* maybe_object = Heap::AllocateRawAsciiString(len, tenure);
+        { MaybeObject* maybe_object = heap->AllocateRawAsciiString(len, tenure);
           if (!maybe_object->ToObject(&object)) return maybe_object;
         }
         result = String::cast(object);
@@ -669,7 +690,7 @@ MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
                     len - first_length);
       } else {
         { MaybeObject* maybe_object =
-              Heap::AllocateRawTwoByteString(len, tenure);
+              heap->AllocateRawTwoByteString(len, tenure);
           if (!maybe_object->ToObject(&object)) return maybe_object;
         }
         result = String::cast(object);
@@ -684,7 +705,7 @@ MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
                     len - first_length);
       }
       cs->set_first(result);
-      cs->set_second(Heap::empty_string());
+      cs->set_second(heap->empty_string());
       return result;
     }
     default:
@@ -708,7 +729,7 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
                   resource->length() * sizeof(smart_chars[0])) == 0);
   }
 #endif  // DEBUG
-
+  Heap* heap = GetHeap();
   int size = this->Size();  // Byte size of the original string.
   if (size < ExternalString::kSize) {
     // The string is too small to fit an external String in its place. This can
@@ -724,8 +745,8 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
   // Morph the object to an external string by adjusting the map and
   // reinitializing the fields.
   this->set_map(is_ascii ?
-                Heap::external_string_with_ascii_data_map() :
-                Heap::external_string_map());
+                heap->external_string_with_ascii_data_map() :
+                heap->external_string_map());
   ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
   self->set_length(length);
   self->set_hash_field(hash_field);
@@ -736,13 +757,13 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
     self->Hash();  // Force regeneration of the hash value.
     // Now morph this external string into an external symbol.
     this->set_map(is_ascii ?
-                  Heap::external_symbol_with_ascii_data_map() :
-                  Heap::external_symbol_map());
+                  heap->external_symbol_with_ascii_data_map() :
+                  heap->external_symbol_map());
   }
 
   // Fill the remainder of the string with dead wood.
   int new_size = this->Size();  // Byte size of the external String object.
-  Heap::CreateFillerObjectAt(this->address() + new_size, size - new_size);
+  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
   return true;
 }
 
@@ -759,7 +780,7 @@ bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
                   resource->length() * sizeof(smart_chars[0])) == 0);
   }
 #endif  // DEBUG
-
+  Heap* heap = GetHeap();
   int size = this->Size();  // Byte size of the original string.
   if (size < ExternalString::kSize) {
     // The string is too small to fit an external String in its place. This can
@@ -773,7 +794,7 @@ bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
 
   // Morph the object to an external string by adjusting the map and
   // reinitializing the fields.
-  this->set_map(Heap::external_ascii_string_map());
+  this->set_map(heap->external_ascii_string_map());
   ExternalAsciiString* self = ExternalAsciiString::cast(this);
   self->set_length(length);
   self->set_hash_field(hash_field);
@@ -783,12 +804,12 @@ bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
   if (is_symbol) {
     self->Hash();  // Force regeneration of the hash value.
     // Now morph this external string into an external symbol.
-    this->set_map(Heap::external_ascii_symbol_map());
+    this->set_map(heap->external_ascii_symbol_map());
   }
 
   // Fill the remainder of the string with dead wood.
   int new_size = this->Size();  // Byte size of the external String object.
-  Heap::CreateFillerObjectAt(this->address() + new_size, size - new_size);
+  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
   return true;
 }
 
@@ -887,15 +908,16 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) {
     // All other JSObjects are rather similar to each other (JSObject,
     // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
     default: {
+      Heap* heap = GetHeap();
       Object* constructor = map()->constructor();
       bool printed = false;
       if (constructor->IsHeapObject() &&
-          !Heap::Contains(HeapObject::cast(constructor))) {
+          !heap->Contains(HeapObject::cast(constructor))) {
         accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
       } else {
         bool global_object = IsJSGlobalProxy();
         if (constructor->IsJSFunction()) {
-          if (!Heap::Contains(JSFunction::cast(constructor)->shared())) {
+          if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
             accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
           } else {
             Object* constructor_name =
@@ -930,12 +952,13 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) {
 
 
 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
-  // if (!Heap::InNewSpace(this)) PrintF("*", this);
-  if (!Heap::Contains(this)) {
+  // if (!HEAP->InNewSpace(this)) PrintF("*", this);
+  Heap* heap = GetHeap();
+  if (!heap->Contains(this)) {
     accumulator->Add("!!!INVALID POINTER!!!");
     return;
   }
-  if (!Heap::Contains(map())) {
+  if (!heap->Contains(map())) {
     accumulator->Add("!!!INVALID MAP!!!");
     return;
   }
@@ -1150,14 +1173,14 @@ Object* HeapNumber::HeapNumberToBoolean() {
   if (u.bits.exp == 2047) {
     // Detect NaN for IEEE double precision floating point.
     if ((u.bits.man_low | u.bits.man_high) != 0)
-      return Heap::false_value();
+      return GetHeap()->false_value();
   }
   if (u.bits.exp == 0) {
     // Detect +0, and -0 for IEEE double precision floating point.
     if ((u.bits.man_low | u.bits.man_high) == 0)
-      return Heap::false_value();
+      return GetHeap()->false_value();
   }
-  return Heap::true_value();
+  return GetHeap()->true_value();
 }
 
 
@@ -1181,14 +1204,14 @@ void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
 
 String* JSObject::class_name() {
   if (IsJSFunction()) {
-    return Heap::function_class_symbol();
+    return GetHeap()->function_class_symbol();
   }
   if (map()->constructor()->IsJSFunction()) {
     JSFunction* constructor = JSFunction::cast(map()->constructor());
     return String::cast(constructor->shared()->instance_class_name());
   }
   // If the constructor is not present, return "Object".
-  return Heap::Object_symbol();
+  return GetHeap()->Object_symbol();
 }
 
 
@@ -1203,7 +1226,7 @@ String* JSObject::constructor_name() {
     if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
   }
   // If the constructor is not present, return "Object".
-  return Heap::Object_symbol();
+  return GetHeap()->Object_symbol();
 }
 
 
@@ -1233,9 +1256,10 @@ MaybeObject* JSObject::AddFastProperty(String* name,
 
   // Normalize the object if the name is an actual string (not the
   // hidden symbols) and is not a real identifier.
+  Isolate* isolate = GetHeap()->isolate();
   StringInputBuffer buffer(name);
-  if (!ScannerConstants::IsIdentifier(&buffer)
-      && name != Heap::hidden_symbol()) {
+  if (!isolate->scanner_constants()->IsIdentifier(&buffer)
+      && name != HEAP->hidden_symbol()) {
     Object* obj;
     { MaybeObject* maybe_obj =
           NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
@@ -1262,7 +1286,8 @@ MaybeObject* JSObject::AddFastProperty(String* name,
   // global object_function's map and there is not a transition for name.
   bool allow_map_transition =
         !old_descriptors->Contains(name) &&
-        (Top::context()->global_context()->object_function()->map() != map());
+        (isolate->context()->global_context()->object_function()->
+            map() != map());
 
   ASSERT(index < map()->inobject_properties() ||
          (index - map()->inobject_properties()) < properties()->length() ||
@@ -1316,7 +1341,8 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
     String* name,
     JSFunction* function,
     PropertyAttributes attributes) {
-  ASSERT(!Heap::InNewSpace(function));
+  Heap* heap = GetHeap();
+  ASSERT(!heap->InNewSpace(function));
 
   // Allocate new instance descriptors with (name, function) added
   ConstantFunctionDescriptor d(name, function, attributes);
@@ -1341,7 +1367,8 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
 
   // If the old map is the global object map (from new Object()),
   // then transitions are not added to it, so we are done.
-  if (old_map == Top::context()->global_context()->object_function()->map()) {
+  if (old_map == heap->isolate()->context()->global_context()->
+      object_function()->map()) {
     return function;
   }
 
@@ -1376,6 +1403,7 @@ MaybeObject* JSObject::AddSlowProperty(String* name,
                                        Object* value,
                                        PropertyAttributes attributes) {
   ASSERT(!HasFastProperties());
+  Heap* heap = GetHeap();
   StringDictionary* dict = property_dictionary();
   Object* store_value = value;
   if (IsGlobalObject()) {
@@ -1393,7 +1421,7 @@ MaybeObject* JSObject::AddSlowProperty(String* name,
       return value;
     }
     { MaybeObject* maybe_store_value =
-          Heap::AllocateJSGlobalPropertyCell(value);
+          heap->AllocateJSGlobalPropertyCell(value);
       if (!maybe_store_value->ToObject(&store_value)) return maybe_store_value;
     }
     JSGlobalPropertyCell::cast(store_value)->set_value(value);
@@ -1412,16 +1440,17 @@ MaybeObject* JSObject::AddProperty(String* name,
                                    Object* value,
                                    PropertyAttributes attributes) {
   ASSERT(!IsJSGlobalProxy());
+  Heap* heap = GetHeap();
   if (!map()->is_extensible()) {
     Handle<Object> args[1] = {Handle<String>(name)};
-    return Top::Throw(*Factory::NewTypeError("object_not_extensible",
-                                             HandleVector(args, 1)));
+    return heap->isolate()->Throw(
+        *FACTORY->NewTypeError("object_not_extensible", HandleVector(args, 1)));
   }
   if (HasFastProperties()) {
     // Ensure the descriptor array does not get too big.
     if (map()->instance_descriptors()->number_of_descriptors() <
         DescriptorArray::kMaxNumberOfDescriptors) {
-      if (value->IsJSFunction() && !Heap::InNewSpace(value)) {
+      if (value->IsJSFunction() && !heap->InNewSpace(value)) {
         return AddConstantFunctionProperty(name,
                                            JSFunction::cast(value),
                                            attributes);
@@ -1493,7 +1522,8 @@ MaybeObject* JSObject::ConvertDescriptorToFieldAndMapTransition(
     return result;
   }
   // Do not add transitions to the map of "new Object()".
-  if (map() == Top::context()->global_context()->object_function()->map()) {
+  if (map() == GetHeap()->isolate()->context()->global_context()->
+      object_function()->map()) {
     return result;
   }
 
@@ -1580,29 +1610,31 @@ MaybeObject* JSObject::SetPropertyWithInterceptor(
     Object* value,
     PropertyAttributes attributes,
     StrictModeFlag strict_mode) {
-  HandleScope scope;
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
   Handle<JSObject> this_handle(this);
   Handle<String> name_handle(name);
-  Handle<Object> value_handle(value);
+  Handle<Object> value_handle(value, isolate);
   Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
   if (!interceptor->setter()->IsUndefined()) {
-    LOG(ApiNamedPropertyAccess("interceptor-named-set", this, name));
-    CustomArguments args(interceptor->data(), this, this);
+    LOG(isolate, ApiNamedPropertyAccess("interceptor-named-set", this, name));
+    CustomArguments args(isolate, interceptor->data(), this, this);
     v8::AccessorInfo info(args.end());
     v8::NamedPropertySetter setter =
         v8::ToCData<v8::NamedPropertySetter>(interceptor->setter());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       Handle<Object> value_unhole(value->IsTheHole() ?
-                                  Heap::undefined_value() :
-                                  value);
+                                  isolate->heap()->undefined_value() :
+                                  value,
+                                  isolate);
       result = setter(v8::Utils::ToLocal(name_handle),
                       v8::Utils::ToLocal(value_unhole),
                       info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!result.IsEmpty()) return *value_handle;
   }
   MaybeObject* raw_result =
@@ -1610,7 +1642,7 @@ MaybeObject* JSObject::SetPropertyWithInterceptor(
                                               *value_handle,
                                               attributes,
                                               strict_mode);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return raw_result;
 }
 
@@ -1629,12 +1661,13 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
                                                String* name,
                                                Object* value,
                                                JSObject* holder) {
-  HandleScope scope;
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
 
   // We should never get here to initialize a const with the hole
   // value since a const declaration would conflict with the setter.
   ASSERT(!value->IsTheHole());
-  Handle<Object> value_handle(value);
+  Handle<Object> value_handle(value, isolate);
 
   // To accommodate both the old and the new api we switch on the
   // data structure used to store the callbacks.  Eventually proxy
@@ -1643,7 +1676,7 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
     AccessorDescriptor* callback =
         reinterpret_cast<AccessorDescriptor*>(Proxy::cast(structure)->proxy());
     MaybeObject* obj = (callback->setter)(this,  value, callback->data);
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (obj->IsFailure()) return obj;
     return *value_handle;
   }
@@ -1655,17 +1688,17 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
     v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
     if (call_fun == NULL) return value;
     Handle<String> key(name);
-    LOG(ApiNamedPropertyAccess("store", this, name));
-    CustomArguments args(data->data(), this, JSObject::cast(holder));
+    LOG(isolate, ApiNamedPropertyAccess("store", this, name));
+    CustomArguments args(isolate, data->data(), this, JSObject::cast(holder));
     v8::AccessorInfo info(args.end());
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       call_fun(v8::Utils::ToLocal(key),
                v8::Utils::ToLocal(value_handle),
                info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     return *value_handle;
   }
 
@@ -1675,10 +1708,11 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
      return SetPropertyWithDefinedSetter(JSFunction::cast(setter), value);
     } else {
       Handle<String> key(name);
-      Handle<Object> holder_handle(holder);
+      Handle<Object> holder_handle(holder, isolate);
       Handle<Object> args[2] = { key, holder_handle };
-      return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
-                                               HandleVector(args, 2)));
+      return isolate->Throw(
+          *isolate->factory()->NewTypeError("no_setter_in_callback",
+                                            HandleVector(args, 2)));
     }
   }
 
@@ -1689,13 +1723,15 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
 
 MaybeObject* JSObject::SetPropertyWithDefinedSetter(JSFunction* setter,
                                                     Object* value) {
-  Handle<Object> value_handle(value);
-  Handle<JSFunction> fun(JSFunction::cast(setter));
-  Handle<JSObject> self(this);
+  Isolate* isolate = GetIsolate();
+  Handle<Object> value_handle(value, isolate);
+  Handle<JSFunction> fun(JSFunction::cast(setter), isolate);
+  Handle<JSObject> self(this, isolate);
 #ifdef ENABLE_DEBUGGER_SUPPORT
+  Debug* debug = isolate->debug();
   // Handle stepping into a setter if step into is active.
-  if (Debug::StepInActive()) {
-    Debug::HandleStepIn(fun, Handle<Object>::null(), 0, false);
+  if (debug->StepInActive()) {
+    debug->HandleStepIn(fun, Handle<Object>::null(), 0, false);
   }
 #endif
   bool has_pending_exception;
@@ -1709,8 +1745,9 @@ MaybeObject* JSObject::SetPropertyWithDefinedSetter(JSFunction* setter,
 
 void JSObject::LookupCallbackSetterInPrototypes(String* name,
                                                 LookupResult* result) {
+  Heap* heap = GetHeap();
   for (Object* pt = GetPrototype();
-       pt != Heap::null_value();
+       pt != heap->null_value();
        pt = pt->GetPrototype()) {
     JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
     if (result->IsProperty()) {
@@ -1730,8 +1767,9 @@ void JSObject::LookupCallbackSetterInPrototypes(String* name,
 MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(uint32_t index,
                                                                 Object* value,
                                                                 bool* found) {
+  Heap* heap = GetHeap();
   for (Object* pt = GetPrototype();
-       pt != Heap::null_value();
+       pt != heap->null_value();
        pt = pt->GetPrototype()) {
     if (!JSObject::cast(pt)->HasDictionaryElements()) {
         continue;
@@ -1748,7 +1786,7 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(uint32_t index,
     }
   }
   *found = false;
-  return Heap::the_hole_value();
+  return heap->the_hole_value();
 }
 
 
@@ -1767,10 +1805,11 @@ void Map::LookupInDescriptors(JSObject* holder,
                               String* name,
                               LookupResult* result) {
   DescriptorArray* descriptors = instance_descriptors();
-  int number = DescriptorLookupCache::Lookup(descriptors, name);
+  DescriptorLookupCache* cache = heap()->isolate()->descriptor_lookup_cache();
+  int number = cache->Lookup(descriptors, name);
   if (number == DescriptorLookupCache::kAbsent) {
     number = descriptors->Search(name);
-    DescriptorLookupCache::Update(descriptors, name, number);
+    cache->Update(descriptors, name, number);
   }
   if (number != DescriptorArray::kNotFound) {
     result->DescriptorResult(holder, descriptors->GetDetails(number), number);
@@ -1838,8 +1877,9 @@ void JSObject::LookupRealNamedProperty(String* name, LookupResult* result) {
 
 void JSObject::LookupRealNamedPropertyInPrototypes(String* name,
                                                    LookupResult* result) {
+  Heap* heap = GetHeap();
   for (Object* pt = GetPrototype();
-       pt != Heap::null_value();
+       pt != heap->null_value();
        pt = JSObject::cast(pt)->GetPrototype()) {
     JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
     if (result->IsProperty() && (result->type() != INTERCEPTOR)) return;
@@ -1853,6 +1893,7 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(LookupResult* result,
                                                         String* name,
                                                         Object* value,
                                                         bool check_prototype) {
+  Heap* heap = GetHeap();
   if (check_prototype && !result->IsProperty()) {
     LookupCallbackSetterInPrototypes(name, result);
   }
@@ -1893,7 +1934,7 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(LookupResult* result,
 
   HandleScope scope;
   Handle<Object> value_handle(value);
-  Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
+  heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET);
   return *value_handle;
 }
 
@@ -1903,6 +1944,7 @@ MaybeObject* JSObject::SetProperty(LookupResult* result,
                                    Object* value,
                                    PropertyAttributes attributes,
                                    StrictModeFlag strict_mode) {
+  Heap* heap = GetHeap();
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
@@ -1912,7 +1954,7 @@ MaybeObject* JSObject::SetProperty(LookupResult* result,
   // reallocating them.
   if (!name->IsSymbol() && name->length() <= 2) {
     Object* symbol_version;
-    { MaybeObject* maybe_symbol_version = Heap::LookupSymbol(name);
+    { MaybeObject* maybe_symbol_version = heap->LookupSymbol(name);
       if (maybe_symbol_version->ToObject(&symbol_version)) {
         name = String::cast(symbol_version);
       }
@@ -1921,7 +1963,7 @@ MaybeObject* JSObject::SetProperty(LookupResult* result,
 
   // Check access rights if needed.
   if (IsAccessCheckNeeded()
-      && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
+      && !heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_SET)) {
     return SetPropertyWithFailedAccessCheck(result, name, value, true);
   }
 
@@ -1955,8 +1997,8 @@ MaybeObject* JSObject::SetProperty(LookupResult* result,
       Handle<String> key(name);
       Handle<Object> holder(this);
       Handle<Object> args[2] = { key, holder };
-      return Top::Throw(*Factory::NewTypeError("strict_read_only_property",
-                                                HandleVector(args, 2)));
+      return heap->isolate()->Throw(*heap->isolate()->factory()->NewTypeError(
+          "strict_read_only_property", HandleVector(args, 2)));
     } else {
       return value;
     }
@@ -1999,7 +2041,7 @@ MaybeObject* JSObject::SetProperty(LookupResult* result,
       ASSERT(target_descriptors->GetType(number) == CONSTANT_FUNCTION);
       JSFunction* function =
           JSFunction::cast(target_descriptors->GetValue(number));
-      ASSERT(!Heap::InNewSpace(function));
+      ASSERT(!HEAP->InNewSpace(function));
       if (value == function) {
         set_map(target_map);
         return value;
@@ -2028,6 +2070,8 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
     String* name,
     Object* value,
     PropertyAttributes attributes) {
+  Heap* heap = GetHeap();
+
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
@@ -2035,7 +2079,7 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
   LocalLookup(name, &result);
   // Check access rights if needed.
   if (IsAccessCheckNeeded()
-      && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
+      && !heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_SET)) {
     return SetPropertyWithFailedAccessCheck(&result, name, value, false);
   }
 
@@ -2107,7 +2151,7 @@ PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
   if (continue_search) {
     // Continue searching via the prototype chain.
     Object* pt = GetPrototype();
-    if (pt != Heap::null_value()) {
+    if (!pt->IsNull()) {
       return JSObject::cast(pt)->
         GetPropertyAttributeWithReceiver(receiver, name);
     }
@@ -2120,25 +2164,28 @@ PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
       JSObject* receiver,
       String* name,
       bool continue_search) {
+  Isolate* isolate = GetIsolate();
+
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
   Handle<JSObject> receiver_handle(receiver);
   Handle<JSObject> holder_handle(this);
   Handle<String> name_handle(name);
-  CustomArguments args(interceptor->data(), receiver, this);
+  CustomArguments args(isolate, interceptor->data(), receiver, this);
   v8::AccessorInfo info(args.end());
   if (!interceptor->query()->IsUndefined()) {
     v8::NamedPropertyQuery query =
         v8::ToCData<v8::NamedPropertyQuery>(interceptor->query());
-    LOG(ApiNamedPropertyAccess("interceptor-named-has", *holder_handle, name));
+    LOG(isolate,
+        ApiNamedPropertyAccess("interceptor-named-has", *holder_handle, name));
     v8::Handle<v8::Integer> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = query(v8::Utils::ToLocal(name_handle), info);
     }
     if (!result.IsEmpty()) {
@@ -2148,11 +2195,12 @@ PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
   } else if (!interceptor->getter()->IsUndefined()) {
     v8::NamedPropertyGetter getter =
         v8::ToCData<v8::NamedPropertyGetter>(interceptor->getter());
-    LOG(ApiNamedPropertyAccess("interceptor-named-get-has", this, name));
+    LOG(isolate,
+        ApiNamedPropertyAccess("interceptor-named-get-has", this, name));
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = getter(v8::Utils::ToLocal(name_handle), info);
     }
     if (!result.IsEmpty()) return DONT_ENUM;
@@ -2182,9 +2230,10 @@ PropertyAttributes JSObject::GetPropertyAttribute(JSObject* receiver,
                                                   LookupResult* result,
                                                   String* name,
                                                   bool continue_search) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, name, v8::ACCESS_HAS)) {
+      !heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_HAS)) {
     return GetPropertyAttributeWithFailedAccessCheck(receiver,
                                                      result,
                                                      name,
@@ -2250,7 +2299,7 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   set(index, result);
-  Counters::normalized_maps.Increment();
+  COUNTERS->normalized_maps()->Increment();
 
   return result;
 }
@@ -2310,7 +2359,7 @@ MaybeObject* JSObject::UpdateMapCodeCache(String* name, Code* code) {
                                                      UNIQUE_NORMALIZED_MAP);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
-    Counters::normalized_maps.Increment();
+    COUNTERS->normalized_maps()->Increment();
 
     set_map(Map::cast(obj));
   }
@@ -2324,10 +2373,11 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
 
   // The global object is always normalized.
   ASSERT(!IsGlobalObject());
-
   // JSGlobalProxy must never be normalized
   ASSERT(!IsJSGlobalProxy());
 
+  Heap* heap = GetHeap();
+
   // Allocate new content.
   int property_count = map()->NumberOfDescribedProperties();
   if (expected_additional_properties > 0) {
@@ -2396,8 +2446,8 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
   int index = map()->instance_descriptors()->NextEnumerationIndex();
   dictionary->SetNextEnumerationIndex(index);
 
-  { MaybeObject* maybe_obj = Top::context()->global_context()->
-                normalized_map_cache()->Get(this, mode);
+  { MaybeObject* maybe_obj = heap->isolate()->context()->global_context()->
+        normalized_map_cache()->Get(this, mode);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   Map* new_map = Map::cast(obj);
@@ -2409,14 +2459,15 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
   int new_instance_size = new_map->instance_size();
   int instance_size_delta = map()->instance_size() - new_instance_size;
   ASSERT(instance_size_delta >= 0);
-  Heap::CreateFillerObjectAt(this->address() + new_instance_size,
+  heap->CreateFillerObjectAt(this->address() + new_instance_size,
                              instance_size_delta);
 
   set_map(new_map);
+  map()->set_instance_descriptors(heap->empty_descriptor_array());
 
   set_properties(dictionary);
 
-  Counters::props_to_dictionary.Increment();
+  heap->isolate()->counters()->props_to_dictionary()->Increment();
 
 #ifdef DEBUG
   if (FLAG_trace_normalization) {
@@ -2477,7 +2528,8 @@ MaybeObject* JSObject::NormalizeElements() {
   set_map(new_map);
   set_elements(dictionary);
 
-  Counters::elements_to_dictionary.Increment();
+  new_map->GetHeap()->isolate()->counters()->elements_to_dictionary()->
+      Increment();
 
 #ifdef DEBUG
   if (FLAG_trace_normalization) {
@@ -2493,9 +2545,10 @@ MaybeObject* JSObject::NormalizeElements() {
 MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
                                                      DeleteMode mode) {
   // Check local property, ignore interceptor.
+  Heap* heap = GetHeap();
   LookupResult result;
   LocalLookupRealNamedProperty(name, &result);
-  if (!result.IsProperty()) return Heap::true_value();
+  if (!result.IsProperty()) return heap->true_value();
 
   // Normalize object if needed.
   Object* obj;
@@ -2508,23 +2561,25 @@ MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
 
 
 MaybeObject* JSObject::DeletePropertyWithInterceptor(String* name) {
-  HandleScope scope;
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
   Handle<String> name_handle(name);
   Handle<JSObject> this_handle(this);
   if (!interceptor->deleter()->IsUndefined()) {
     v8::NamedPropertyDeleter deleter =
         v8::ToCData<v8::NamedPropertyDeleter>(interceptor->deleter());
-    LOG(ApiNamedPropertyAccess("interceptor-named-delete", *this_handle, name));
-    CustomArguments args(interceptor->data(), this, this);
+    LOG(isolate,
+        ApiNamedPropertyAccess("interceptor-named-delete", *this_handle, name));
+    CustomArguments args(isolate, interceptor->data(), this, this);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Boolean> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = deleter(v8::Utils::ToLocal(name_handle), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!result.IsEmpty()) {
       ASSERT(result->IsBoolean());
       return *v8::Utils::OpenHandle(*result);
@@ -2532,13 +2587,14 @@ MaybeObject* JSObject::DeletePropertyWithInterceptor(String* name) {
   }
   MaybeObject* raw_result =
       this_handle->DeletePropertyPostInterceptor(*name_handle, NORMAL_DELETION);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return raw_result;
 }
 
 
 MaybeObject* JSObject::DeleteElementPostInterceptor(uint32_t index,
                                                     DeleteMode mode) {
+  Heap* heap = GetHeap();
   ASSERT(!HasExternalArrayElements());
   switch (GetElementsKind()) {
     case FAST_ELEMENTS: {
@@ -2566,52 +2622,56 @@ MaybeObject* JSObject::DeleteElementPostInterceptor(uint32_t index,
       UNREACHABLE();
       break;
   }
-  return Heap::true_value();
+  return heap->true_value();
 }
 
 
 MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) {
+  Isolate* isolate = GetIsolate();
+  Heap* heap = isolate->heap();
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
-  if (interceptor->deleter()->IsUndefined()) return Heap::false_value();
+  if (interceptor->deleter()->IsUndefined()) return heap->false_value();
   v8::IndexedPropertyDeleter deleter =
       v8::ToCData<v8::IndexedPropertyDeleter>(interceptor->deleter());
   Handle<JSObject> this_handle(this);
-  LOG(ApiIndexedPropertyAccess("interceptor-indexed-delete", this, index));
-  CustomArguments args(interceptor->data(), this, this);
+  LOG(isolate,
+      ApiIndexedPropertyAccess("interceptor-indexed-delete", this, index));
+  CustomArguments args(isolate, interceptor->data(), this, this);
   v8::AccessorInfo info(args.end());
   v8::Handle<v8::Boolean> result;
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
+    VMState state(isolate, EXTERNAL);
     result = deleter(index, info);
   }
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   if (!result.IsEmpty()) {
     ASSERT(result->IsBoolean());
     return *v8::Utils::OpenHandle(*result);
   }
   MaybeObject* raw_result =
       this_handle->DeleteElementPostInterceptor(index, NORMAL_DELETION);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return raw_result;
 }
 
 
 MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
+  Isolate* isolate = GetIsolate();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_DELETE)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
-    return Heap::false_value();
+      !isolate->MayIndexedAccess(this, index, v8::ACCESS_DELETE)) {
+    isolate->ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
+    return isolate->heap()->false_value();
   }
 
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
-    if (proto->IsNull()) return Heap::false_value();
+    if (proto->IsNull()) return isolate->heap()->false_value();
     ASSERT(proto->IsJSGlobalObject());
     return JSGlobalObject::cast(proto)->DeleteElement(index, mode);
   }
@@ -2654,15 +2714,16 @@ MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
       int entry = dictionary->FindEntry(index);
       if (entry != NumberDictionary::kNotFound) {
         Object* result = dictionary->DeleteProperty(entry, mode);
-        if (mode == STRICT_DELETION && result == Heap::false_value()) {
+        if (mode == STRICT_DELETION && result ==
+            isolate->heap()->false_value()) {
           // In strict mode, deleting a non-configurable property throws an
           // exception. dictionary->DeleteProperty will return false_value()
           // if a non-configurable property is being deleted.
           HandleScope scope;
-          Handle<Object> i = Factory::NewNumberFromUint(index);
+          Handle<Object> i = isolate->factory()->NewNumberFromUint(index);
           Handle<Object> args[2] = { i, Handle<Object>(this) };
-          return Top::Throw(*Factory::NewTypeError("strict_delete_property",
-                                                   HandleVector(args, 2)));
+          return isolate->Throw(*isolate->factory()->NewTypeError(
+              "strict_delete_property", HandleVector(args, 2)));
         }
       }
       break;
@@ -2671,24 +2732,25 @@ MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
       UNREACHABLE();
       break;
   }
-  return Heap::true_value();
+  return isolate->heap()->true_value();
 }
 
 
 MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
+  Isolate* isolate = GetIsolate();
   // ECMA-262, 3rd, 8.6.2.5
   ASSERT(name->IsString());
 
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, name, v8::ACCESS_DELETE)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
-    return Heap::false_value();
+      !isolate->MayNamedAccess(this, name, v8::ACCESS_DELETE)) {
+    isolate->ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
+    return isolate->heap()->false_value();
   }
 
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
-    if (proto->IsNull()) return Heap::false_value();
+    if (proto->IsNull()) return isolate->heap()->false_value();
     ASSERT(proto->IsJSGlobalObject());
     return JSGlobalObject::cast(proto)->DeleteProperty(name, mode);
   }
@@ -2699,17 +2761,17 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
   } else {
     LookupResult result;
     LocalLookup(name, &result);
-    if (!result.IsProperty()) return Heap::true_value();
+    if (!result.IsProperty()) return isolate->heap()->true_value();
     // Ignore attributes if forcing a deletion.
     if (result.IsDontDelete() && mode != FORCE_DELETION) {
       if (mode == STRICT_DELETION) {
         // Deleting a non-configurable property in strict mode.
-        HandleScope scope;
+        HandleScope scope(isolate);
         Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
-        return Top::Throw(*Factory::NewTypeError("strict_delete_property",
-                                                 HandleVector(args, 2)));
+        return isolate->Throw(*isolate->factory()->NewTypeError(
+            "strict_delete_property", HandleVector(args, 2)));
       }
-      return Heap::false_value();
+      return isolate->heap()->false_value();
     }
     // Check for interceptor.
     if (result.type() == INTERCEPTOR) {
@@ -2733,6 +2795,7 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
 
 // Check whether this object references another object.
 bool JSObject::ReferencesObject(Object* obj) {
+  Heap* heap = GetHeap();
   AssertNoAllocation no_alloc;
 
   // Is the object the constructor for this object?
@@ -2747,7 +2810,7 @@ bool JSObject::ReferencesObject(Object* obj) {
 
   // Check if the object is among the named properties.
   Object* key = SlowReverseLookup(obj);
-  if (key != Heap::undefined_value()) {
+  if (!key->IsUndefined()) {
     return true;
   }
 
@@ -2778,7 +2841,7 @@ bool JSObject::ReferencesObject(Object* obj) {
     }
     case DICTIONARY_ELEMENTS: {
       key = element_dictionary()->SlowReverseLookup(obj);
-      if (key != Heap::undefined_value()) {
+      if (!key->IsUndefined()) {
         return true;
       }
       break;
@@ -2792,7 +2855,8 @@ bool JSObject::ReferencesObject(Object* obj) {
   if (IsJSFunction()) {
     // Get the constructor function for arguments array.
     JSObject* arguments_boilerplate =
-        Top::context()->global_context()->arguments_boilerplate();
+        heap->isolate()->context()->global_context()->
+            arguments_boilerplate();
     JSFunction* arguments_function =
         JSFunction::cast(arguments_boilerplate->map()->constructor());
 
@@ -2831,10 +2895,13 @@ bool JSObject::ReferencesObject(Object* obj) {
 
 
 MaybeObject* JSObject::PreventExtensions() {
+  Isolate* isolate = GetIsolate();
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, Heap::undefined_value(), v8::ACCESS_KEYS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_KEYS);
-    return Heap::false_value();
+      !isolate->MayNamedAccess(this,
+                               isolate->heap()->undefined_value(),
+                               v8::ACCESS_KEYS)) {
+    isolate->ReportFailedAccessCheck(this, v8::ACCESS_KEYS);
+    return isolate->heap()->false_value();
   }
 
   if (IsJSGlobalProxy()) {
@@ -2873,8 +2940,9 @@ MaybeObject* JSObject::PreventExtensions() {
 // - This object has no elements.
 // - No prototype has enumerable properties/elements.
 bool JSObject::IsSimpleEnum() {
+  Heap* heap = GetHeap();
   for (Object* o = this;
-       o != Heap::null_value();
+       o != heap->null_value();
        o = JSObject::cast(o)->GetPrototype()) {
     JSObject* curr = JSObject::cast(o);
     if (!curr->map()->instance_descriptors()->HasEnumCache()) return false;
@@ -2940,6 +3008,8 @@ AccessorDescriptor* Map::FindAccessor(String* name) {
 void JSObject::LocalLookup(String* name, LookupResult* result) {
   ASSERT(name->IsString());
 
+  Heap* heap = GetHeap();
+
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
     if (proto->IsNull()) return result->NotFound();
@@ -2954,13 +3024,14 @@ void JSObject::LocalLookup(String* name, LookupResult* result) {
   }
 
   // Check __proto__ before interceptor.
-  if (name->Equals(Heap::Proto_symbol()) && !IsJSContextExtensionObject()) {
+  if (name->Equals(heap->Proto_symbol()) &&
+      !IsJSContextExtensionObject()) {
     result->ConstantResult(this);
     return;
   }
 
   // Check for lookup interceptor except when bootstrapping.
-  if (HasNamedInterceptor() && !Bootstrapper::IsActive()) {
+  if (HasNamedInterceptor() && !heap->isolate()->bootstrapper()->IsActive()) {
     result->InterceptorResult(this);
     return;
   }
@@ -2971,8 +3042,9 @@ void JSObject::LocalLookup(String* name, LookupResult* result) {
 
 void JSObject::Lookup(String* name, LookupResult* result) {
   // Ecma-262 3rd 8.6.2.4
+  Heap* heap = GetHeap();
   for (Object* current = this;
-       current != Heap::null_value();
+       current != heap->null_value();
        current = JSObject::cast(current)->GetPrototype()) {
     JSObject::cast(current)->LocalLookup(name, result);
     if (result->IsProperty()) return;
@@ -2983,8 +3055,9 @@ void JSObject::Lookup(String* name, LookupResult* result) {
 
 // Search the object and its prototype chain for callback properties.
 void JSObject::LookupCallback(String* name, LookupResult* result) {
+  Heap* heap = GetHeap();
   for (Object* current = this;
-       current != Heap::null_value();
+       current != heap->null_value();
        current = JSObject::cast(current)->GetPrototype()) {
     JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
     if (result->IsProperty() && result->type() == CALLBACKS) return;
@@ -2995,6 +3068,7 @@ void JSObject::LookupCallback(String* name, LookupResult* result) {
 
 MaybeObject* JSObject::DefineGetterSetter(String* name,
                                           PropertyAttributes attributes) {
+  Heap* heap = GetHeap();
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
@@ -3003,7 +3077,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
   name->TryFlatten();
 
   if (!CanSetCallback(name)) {
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
 
   uint32_t index = 0;
@@ -3023,7 +3097,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
       case EXTERNAL_FLOAT_ELEMENTS:
         // Ignore getters and setters on pixel and external array
         // elements.
-        return Heap::undefined_value();
+        return heap->undefined_value();
       case DICTIONARY_ELEMENTS: {
         // Lookup the index.
         NumberDictionary* dictionary = element_dictionary();
@@ -3031,7 +3105,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
         if (entry != NumberDictionary::kNotFound) {
           Object* result = dictionary->ValueAt(entry);
           PropertyDetails details = dictionary->DetailsAt(entry);
-          if (details.IsReadOnly()) return Heap::undefined_value();
+          if (details.IsReadOnly()) return heap->undefined_value();
           if (details.type() == CALLBACKS) {
             if (result->IsFixedArray()) {
               return result;
@@ -3050,7 +3124,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
     LookupResult result;
     LocalLookup(name, &result);
     if (result.IsProperty()) {
-      if (result.IsReadOnly()) return Heap::undefined_value();
+      if (result.IsReadOnly()) return heap->undefined_value();
       if (result.type() == CALLBACKS) {
         Object* obj = result.GetCallbackObject();
         // Need to preserve old getters/setters.
@@ -3064,7 +3138,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
 
   // Allocate the fixed array to hold getter and setter.
   Object* structure;
-  { MaybeObject* maybe_structure = Heap::AllocateFixedArray(2, TENURED);
+  { MaybeObject* maybe_structure = heap->AllocateFixedArray(2, TENURED);
     if (!maybe_structure->ToObject(&structure)) return maybe_structure;
   }
 
@@ -3078,7 +3152,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
 
 bool JSObject::CanSetCallback(String* name) {
   ASSERT(!IsAccessCheckNeeded()
-         || Top::MayNamedAccess(this, name, v8::ACCESS_SET));
+         || Isolate::Current()->MayNamedAccess(this, name, v8::ACCESS_SET));
 
   // Check if there is an API defined callback object which prohibits
   // callback overwriting in this object or its prototype chain.
@@ -3175,11 +3249,12 @@ MaybeObject* JSObject::DefineAccessor(String* name,
                                       Object* fun,
                                       PropertyAttributes attributes) {
   ASSERT(fun->IsJSFunction() || fun->IsUndefined());
+  Isolate* isolate = GetIsolate();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
-    return Heap::undefined_value();
+      !isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+    isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
+    return isolate->heap()->undefined_value();
   }
 
   if (IsJSGlobalProxy()) {
@@ -3201,12 +3276,13 @@ MaybeObject* JSObject::DefineAccessor(String* name,
 
 
 MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
+  Isolate* isolate = GetIsolate();
   String* name = String::cast(info->name());
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
-    return Heap::undefined_value();
+      !isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+    isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
+    return isolate->heap()->undefined_value();
   }
 
   if (IsJSGlobalProxy()) {
@@ -3224,14 +3300,14 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
   name->TryFlatten();
 
   if (!CanSetCallback(name)) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   uint32_t index = 0;
   bool is_element = name->AsArrayIndex(&index);
 
   if (is_element) {
-    if (IsJSArray()) return Heap::undefined_value();
+    if (IsJSArray()) return isolate->heap()->undefined_value();
 
     // Accessors overwrite previous callbacks (cf. with getters/setters).
     switch (GetElementsKind()) {
@@ -3247,7 +3323,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
       case EXTERNAL_FLOAT_ELEMENTS:
         // Ignore getters and setters on pixel and external array
         // elements.
-        return Heap::undefined_value();
+        return isolate->heap()->undefined_value();
       case DICTIONARY_ELEMENTS:
         break;
       default:
@@ -3267,7 +3343,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
     // ES5 forbids turning a property into an accessor if it's not
     // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
     if (result.IsProperty() && (result.IsReadOnly() || result.IsDontDelete())) {
-      return Heap::undefined_value();
+      return isolate->heap()->undefined_value();
     }
     Object* ok;
     { MaybeObject* maybe_ok =
@@ -3281,15 +3357,17 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
 
 
 Object* JSObject::LookupAccessor(String* name, bool is_getter) {
+  Heap* heap = GetHeap();
+
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
 
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, name, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
-    return Heap::undefined_value();
+      !heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+    return heap->undefined_value();
   }
 
   // Make the lookup and include prototypes.
@@ -3297,7 +3375,7 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
   uint32_t index = 0;
   if (name->AsArrayIndex(&index)) {
     for (Object* obj = this;
-         obj != Heap::null_value();
+         obj != heap->null_value();
          obj = JSObject::cast(obj)->GetPrototype()) {
       JSObject* js_object = JSObject::cast(obj);
       if (js_object->HasDictionaryElements()) {
@@ -3316,12 +3394,12 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
     }
   } else {
     for (Object* obj = this;
-         obj != Heap::null_value();
+         obj != heap->null_value();
          obj = JSObject::cast(obj)->GetPrototype()) {
       LookupResult result;
       JSObject::cast(obj)->LocalLookup(name, &result);
       if (result.IsProperty()) {
-        if (result.IsReadOnly()) return Heap::undefined_value();
+        if (result.IsReadOnly()) return heap->undefined_value();
         if (result.type() == CALLBACKS) {
           Object* obj = result.GetCallbackObject();
           if (obj->IsFixedArray()) {
@@ -3331,11 +3409,12 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
       }
     }
   }
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
 Object* JSObject::SlowReverseLookup(Object* value) {
+  Heap* heap = GetHeap();
   if (HasFastProperties()) {
     DescriptorArray* descs = map()->instance_descriptors();
     for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -3349,7 +3428,7 @@ Object* JSObject::SlowReverseLookup(Object* value) {
         }
       }
     }
-    return Heap::undefined_value();
+    return heap->undefined_value();
   } else {
     return property_dictionary()->SlowReverseLookup(value);
   }
@@ -3357,9 +3436,10 @@ Object* JSObject::SlowReverseLookup(Object* value) {
 
 
 MaybeObject* Map::CopyDropDescriptors() {
+  Heap* heap = GetHeap();
   Object* result;
   { MaybeObject* maybe_result =
-        Heap::AllocateMap(instance_type(), instance_size());
+        heap->AllocateMap(instance_type(), instance_size());
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Map::cast(result)->set_prototype(prototype());
@@ -3369,7 +3449,8 @@ MaybeObject* Map::CopyDropDescriptors() {
   // pointing to the same transition which is bad because the garbage
   // collector relies on being able to reverse pointers from transitions
   // to maps.  If properties need to be retained use CopyDropTransitions.
-  Map::cast(result)->set_instance_descriptors(Heap::empty_descriptor_array());
+  Map::cast(result)->set_instance_descriptors(
+      heap->empty_descriptor_array());
   // Please note instance_type and instance_size are set when allocated.
   Map::cast(result)->set_inobject_properties(inobject_properties());
   Map::cast(result)->set_unused_property_fields(unused_property_fields());
@@ -3392,7 +3473,7 @@ MaybeObject* Map::CopyDropDescriptors() {
   Map::cast(result)->set_bit_field(bit_field());
   Map::cast(result)->set_bit_field2(bit_field2());
   Map::cast(result)->set_is_shared(false);
-  Map::cast(result)->ClearCodeCache();
+  Map::cast(result)->ClearCodeCache(heap);
   return result;
 }
 
@@ -3406,7 +3487,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
 
   Object* result;
   { MaybeObject* maybe_result =
-        Heap::AllocateMap(instance_type(), new_instance_size);
+        GetHeap()->AllocateMap(instance_type(), new_instance_size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
@@ -3451,7 +3532,7 @@ MaybeObject* Map::UpdateCodeCache(String* name, Code* code) {
   // Allocate the code cache if not present.
   if (code_cache()->IsFixedArray()) {
     Object* result;
-    { MaybeObject* maybe_result = Heap::AllocateCodeCache();
+    { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache();
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     set_code_cache(result);
@@ -3467,7 +3548,7 @@ Object* Map::FindInCodeCache(String* name, Code::Flags flags) {
   if (!code_cache()->IsFixedArray()) {
     return CodeCache::cast(code_cache())->Lookup(name, flags);
   } else {
-    return Heap::undefined_value();
+    return GetHeap()->undefined_value();
   }
 }
 
@@ -3491,12 +3572,13 @@ void Map::RemoveFromCodeCache(String* name, Code* code, int index) {
 
 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
   Map* current = this;
-  while (current != Heap::meta_map()) {
+  Map* meta_map = heap()->meta_map();
+  while (current != meta_map) {
     DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
         *RawField(current, Map::kInstanceDescriptorsOffset));
-    if (d == Heap::empty_descriptor_array()) {
+    if (d == heap()->empty_descriptor_array()) {
       Map* prev = current->map();
-      current->set_map(Heap::meta_map());
+      current->set_map(meta_map);
       callback(current, data);
       current = prev;
       continue;
@@ -3521,9 +3603,9 @@ void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
       }
     }
     if (!map_done) continue;
-    *map_or_index_field = Heap::fixed_array_map();
+    *map_or_index_field = heap()->fixed_array_map();
     Map* prev = current->map();
-    current->set_map(Heap::meta_map());
+    current->set_map(meta_map);
     callback(current, data);
     current = prev;
   }
@@ -3636,6 +3718,7 @@ Object* CodeCache::Lookup(String* name, Code::Flags flags) {
 
 
 Object* CodeCache::LookupDefaultCache(String* name, Code::Flags flags) {
+  Heap* heap = GetHeap();
   FixedArray* cache = default_cache();
   int length = cache->length();
   for (int i = 0; i < length; i += kCodeCacheEntrySize) {
@@ -3650,7 +3733,7 @@ Object* CodeCache::LookupDefaultCache(String* name, Code::Flags flags) {
       }
     }
   }
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
@@ -3659,7 +3742,7 @@ Object* CodeCache::LookupNormalTypeCache(String* name, Code::Flags flags) {
     CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
     return cache->Lookup(name, flags);
   } else {
-    return Heap::undefined_value();
+    return GetHeap()->undefined_value();
   }
 }
 
@@ -3741,7 +3824,7 @@ class CodeCacheHashTableKey : public HashTableKey {
   MUST_USE_RESULT MaybeObject* AsObject() {
     ASSERT(code_ != NULL);
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateFixedArray(2);
+    { MaybeObject* maybe_obj = code_->GetHeap()->AllocateFixedArray(2);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     FixedArray* pair = FixedArray::cast(obj);
@@ -3760,7 +3843,7 @@ class CodeCacheHashTableKey : public HashTableKey {
 Object* CodeCacheHashTable::Lookup(String* name, Code::Flags flags) {
   CodeCacheHashTableKey key(name, flags);
   int entry = FindEntry(&key);
-  if (entry == kNotFound) return Heap::undefined_value();
+  if (entry == kNotFound) return GetHeap()->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
@@ -3797,8 +3880,9 @@ int CodeCacheHashTable::GetIndex(String* name, Code::Flags flags) {
 
 void CodeCacheHashTable::RemoveByIndex(int index) {
   ASSERT(index >= 0);
-  set(EntryToIndex(index), Heap::null_value());
-  set(EntryToIndex(index) + 1, Heap::null_value());
+  Heap* heap = GetHeap();
+  set(EntryToIndex(index), heap->null_value());
+  set(EntryToIndex(index) + 1, heap->null_value());
   ElementRemoved();
 }
 
@@ -3818,6 +3902,7 @@ static bool HasKey(FixedArray* array, Object* key) {
 
 
 MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
+  Heap* heap = GetHeap();
   ASSERT(!array->HasExternalArrayElements());
   switch (array->GetElementsKind()) {
     case JSObject::FAST_ELEMENTS:
@@ -3828,7 +3913,7 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
 
       // Allocate a temporary fixed array.
       Object* object;
-      { MaybeObject* maybe_object = Heap::AllocateFixedArray(size);
+      { MaybeObject* maybe_object = heap->AllocateFixedArray(size);
         if (!maybe_object->ToObject(&object)) return maybe_object;
       }
       FixedArray* key_array = FixedArray::cast(object);
@@ -3848,11 +3933,12 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
       UNREACHABLE();
   }
   UNREACHABLE();
-  return Heap::null_value();  // Failure case needs to "return" a value.
+  return heap->null_value();  // Failure case needs to "return" a value.
 }
 
 
 MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
+  Heap* heap = GetHeap();
   int len0 = length();
 #ifdef DEBUG
   if (FLAG_enable_slow_asserts) {
@@ -3878,7 +3964,7 @@ MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
 
   // Allocate the result
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArray(len0 + extra);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArray(len0 + extra);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   // Fill in the content
@@ -3907,9 +3993,10 @@ MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
 
 
 MaybeObject* FixedArray::CopySize(int new_length) {
-  if (new_length == 0) return Heap::empty_fixed_array();
+  Heap* heap = GetHeap();
+  if (new_length == 0) return heap->empty_fixed_array();
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArray(new_length);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray* result = FixedArray::cast(obj);
@@ -3947,13 +4034,14 @@ bool FixedArray::IsEqualTo(FixedArray* other) {
 
 
 MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
+  Heap* heap = Isolate::Current()->heap();
   if (number_of_descriptors == 0) {
-    return Heap::empty_descriptor_array();
+    return heap->empty_descriptor_array();
   }
   // Allocate the array of keys.
   Object* array;
   { MaybeObject* maybe_array =
-        Heap::AllocateFixedArray(ToKeyIndex(number_of_descriptors));
+        heap->AllocateFixedArray(ToKeyIndex(number_of_descriptors));
     if (!maybe_array->ToObject(&array)) return maybe_array;
   }
   // Do not use DescriptorArray::cast on incomplete object.
@@ -3961,7 +4049,7 @@ MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
 
   // Allocate the content array and set it in the descriptor array.
   { MaybeObject* maybe_array =
-        Heap::AllocateFixedArray(number_of_descriptors << 1);
+        heap->AllocateFixedArray(number_of_descriptors << 1);
     if (!maybe_array->ToObject(&array)) return maybe_array;
   }
   result->set(kContentArrayIndex, array);
@@ -4230,15 +4318,15 @@ int DescriptorArray::LinearSearch(String* name, int len) {
 MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count,
                                                PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
-  return Heap::AllocateFixedArray(LengthFor(deopt_entry_count),
+  return HEAP->AllocateFixedArray(LengthFor(deopt_entry_count),
                                   pretenure);
 }
 
 
 MaybeObject* DeoptimizationOutputData::Allocate(int number_of_deopt_points,
                                                 PretenureFlag pretenure) {
-  if (number_of_deopt_points == 0) return Heap::empty_fixed_array();
-  return Heap::AllocateFixedArray(LengthOfFixedArray(number_of_deopt_points),
+  if (number_of_deopt_points == 0) return HEAP->empty_fixed_array();
+  return HEAP->AllocateFixedArray(LengthOfFixedArray(number_of_deopt_points),
                                   pretenure);
 }
 
@@ -4256,11 +4344,8 @@ bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
 #endif
 
 
-static StaticResource<StringInputBuffer> string_input_buffer;
-
-
 bool String::LooksValid() {
-  if (!Heap::Contains(this)) return false;
+  if (!Isolate::Current()->heap()->Contains(this)) return false;
   return true;
 }
 
@@ -4271,8 +4356,10 @@ int String::Utf8Length() {
   // doesn't make Utf8Length faster, but it is very likely that
   // the string will be accessed later (for example by WriteUtf8)
   // so it's still a good idea.
+  Heap* heap = GetHeap();
   TryFlatten();
-  Access<StringInputBuffer> buffer(&string_input_buffer);
+  Access<StringInputBuffer> buffer(
+      heap->isolate()->objects_string_input_buffer());
   buffer->Reset(0, this);
   int result = 0;
   while (buffer->has_more())
@@ -4342,12 +4429,14 @@ SmartPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
   if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
     return SmartPointer<char>(NULL);
   }
+  Heap* heap = GetHeap();
 
   // Negative length means to the end of the string.
   if (length < 0) length = kMaxInt - offset;
 
   // Compute the size of the UTF-8 string. Start at the specified offset.
-  Access<StringInputBuffer> buffer(&string_input_buffer);
+  Access<StringInputBuffer> buffer(
+      heap->isolate()->objects_string_input_buffer());
   buffer->Reset(offset, this);
   int character_position = offset;
   int utf8_bytes = 0;
@@ -4417,12 +4506,13 @@ const uc16* String::GetTwoByteData(unsigned start) {
 
 SmartPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
   ASSERT(NativeAllocationChecker::allocation_allowed());
-
   if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
     return SmartPointer<uc16>();
   }
+  Heap* heap = GetHeap();
 
-  Access<StringInputBuffer> buffer(&string_input_buffer);
+  Access<StringInputBuffer> buffer(
+      heap->isolate()->objects_string_input_buffer());
   buffer->Reset(this);
 
   uc16* result = NewArray<uc16>(length() + 1);
@@ -4705,11 +4795,9 @@ const unibrow::byte* String::ReadBlock(String* input,
 }
 
 
-Relocatable* Relocatable::top_ = NULL;
-
-
 void Relocatable::PostGarbageCollectionProcessing() {
-  Relocatable* current = top_;
+  Isolate* isolate = Isolate::Current();
+  Relocatable* current = isolate->relocatable_top();
   while (current != NULL) {
     current->PostGarbageCollection();
     current = current->prev_;
@@ -4719,21 +4807,23 @@ void Relocatable::PostGarbageCollectionProcessing() {
 
 // Reserve space for statics needing saving and restoring.
 int Relocatable::ArchiveSpacePerThread() {
-  return sizeof(top_);
+  return sizeof(Isolate::Current()->relocatable_top());
 }
 
 
 // Archive statics that are thread local.
 char* Relocatable::ArchiveState(char* to) {
-  *reinterpret_cast<Relocatable**>(to) = top_;
-  top_ = NULL;
+  Isolate* isolate = Isolate::Current();
+  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
+  isolate->set_relocatable_top(NULL);
   return to + ArchiveSpacePerThread();
 }
 
 
 // Restore statics that are thread local.
 char* Relocatable::RestoreState(char* from) {
-  top_ = *reinterpret_cast<Relocatable**>(from);
+  Isolate* isolate = Isolate::Current();
+  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
   return from + ArchiveSpacePerThread();
 }
 
@@ -4746,7 +4836,8 @@ char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
 
 
 void Relocatable::Iterate(ObjectVisitor* v) {
-  Iterate(v, top_);
+  Isolate* isolate = Isolate::Current();
+  Iterate(v, isolate->relocatable_top());
 }
 
 
@@ -4759,15 +4850,17 @@ void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
 }
 
 
-FlatStringReader::FlatStringReader(Handle<String> str)
-    : str_(str.location()),
+FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
+    : Relocatable(isolate),
+      str_(str.location()),
       length_(str->length()) {
   PostGarbageCollection();
 }
 
 
-FlatStringReader::FlatStringReader(Vector<const char> input)
-    : str_(0),
+FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
+    : Relocatable(isolate),
+      str_(0),
       is_ascii_(true),
       length_(input.length()),
       start_(input.start()) { }
@@ -5097,11 +5190,10 @@ static inline bool CompareRawStringContents(Vector<Char> a, Vector<Char> b) {
 }
 
 
-static StringInputBuffer string_compare_buffer_b;
-
-
 template <typename IteratorA>
-static inline bool CompareStringContentsPartial(IteratorA* ia, String* b) {
+static inline bool CompareStringContentsPartial(Isolate* isolate,
+                                                IteratorA* ia,
+                                                String* b) {
   if (b->IsFlat()) {
     if (b->IsAsciiRepresentation()) {
       VectorIterator<char> ib(b->ToAsciiVector());
@@ -5111,16 +5203,16 @@ static inline bool CompareStringContentsPartial(IteratorA* ia, String* b) {
       return CompareStringContents(ia, &ib);
     }
   } else {
-    string_compare_buffer_b.Reset(0, b);
-    return CompareStringContents(ia, &string_compare_buffer_b);
+    isolate->objects_string_compare_buffer_b()->Reset(0, b);
+    return CompareStringContents(ia,
+                                 isolate->objects_string_compare_buffer_b());
   }
 }
 
 
-static StringInputBuffer string_compare_buffer_a;
-
-
 bool String::SlowEquals(String* other) {
+  Heap* heap = GetHeap();
+
   // Fast check: negative check with lengths.
   int len = length();
   if (len != other->length()) return false;
@@ -5147,6 +5239,7 @@ bool String::SlowEquals(String* other) {
                                     Vector<const char>(str2, len));
   }
 
+  Isolate* isolate = heap->isolate();
   if (lhs->IsFlat()) {
     if (lhs->IsAsciiRepresentation()) {
       Vector<const char> vec1 = lhs->ToAsciiVector();
@@ -5161,8 +5254,9 @@ bool String::SlowEquals(String* other) {
         }
       } else {
         VectorIterator<char> buf1(vec1);
-        string_compare_buffer_b.Reset(0, rhs);
-        return CompareStringContents(&buf1, &string_compare_buffer_b);
+        isolate->objects_string_compare_buffer_b()->Reset(0, rhs);
+        return CompareStringContents(&buf1,
+            isolate->objects_string_compare_buffer_b());
       }
     } else {
       Vector<const uc16> vec1 = lhs->ToUC16Vector();
@@ -5177,13 +5271,15 @@ bool String::SlowEquals(String* other) {
         }
       } else {
         VectorIterator<uc16> buf1(vec1);
-        string_compare_buffer_b.Reset(0, rhs);
-        return CompareStringContents(&buf1, &string_compare_buffer_b);
+        isolate->objects_string_compare_buffer_b()->Reset(0, rhs);
+        return CompareStringContents(&buf1,
+            isolate->objects_string_compare_buffer_b());
       }
     }
   } else {
-    string_compare_buffer_a.Reset(0, lhs);
-    return CompareStringContentsPartial(&string_compare_buffer_a, rhs);
+    isolate->objects_string_compare_buffer_a()->Reset(0, lhs);
+    return CompareStringContentsPartial(isolate,
+        isolate->objects_string_compare_buffer_a(), rhs);
   }
 }
 
@@ -5192,11 +5288,12 @@ bool String::MarkAsUndetectable() {
   if (StringShape(this).IsSymbol()) return false;
 
   Map* map = this->map();
-  if (map == Heap::string_map()) {
-    this->set_map(Heap::undetectable_string_map());
+  Heap* heap = map->GetHeap();
+  if (map == heap->string_map()) {
+    this->set_map(heap->undetectable_string_map());
     return true;
-  } else if (map == Heap::ascii_string_map()) {
-    this->set_map(Heap::undetectable_ascii_string_map());
+  } else if (map == heap->ascii_string_map()) {
+    this->set_map(heap->undetectable_ascii_string_map());
     return true;
   }
   // Rest cannot be marked as undetectable
@@ -5205,9 +5302,10 @@ bool String::MarkAsUndetectable() {
 
 
 bool String::IsEqualTo(Vector<const char> str) {
+  Isolate* isolate = GetIsolate();
   int slen = length();
   Access<ScannerConstants::Utf8Decoder>
-      decoder(ScannerConstants::utf8_decoder());
+      decoder(isolate->scanner_constants()->utf8_decoder());
   decoder->Reset(str.start(), str.length());
   int i;
   for (i = 0; i < slen && decoder->has_more(); i++) {
@@ -5369,8 +5467,9 @@ uint32_t String::ComputeHashField(unibrow::CharacterStream* buffer,
 
 
 MaybeObject* String::SubString(int start, int end, PretenureFlag pretenure) {
+  Heap* heap = GetHeap();
   if (start == 0 && end == length()) return this;
-  MaybeObject* result = Heap::AllocateSubString(this, start, end, pretenure);
+  MaybeObject* result = heap->AllocateSubString(this, start, end, pretenure);
   return result;
 }
 
@@ -5410,12 +5509,12 @@ void Map::CreateBackPointers() {
 }
 
 
-void Map::ClearNonLiveTransitions(Object* real_prototype) {
+void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
   // Live DescriptorArray objects will be marked, so we must use
   // low-level accessors to get and modify their data.
   DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
       *RawField(this, Map::kInstanceDescriptorsOffset));
-  if (d == Heap::raw_unchecked_empty_descriptor_array()) return;
+  if (d == heap->raw_unchecked_empty_descriptor_array()) return;
   Smi* NullDescriptorDetails =
     PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
   FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -5435,7 +5534,7 @@ void Map::ClearNonLiveTransitions(Object* real_prototype) {
       if (!target->IsMarked()) {
         ASSERT(target->IsMap());
         contents->set_unchecked(i + 1, NullDescriptorDetails);
-        contents->set_null_unchecked(i);
+        contents->set_null_unchecked(heap, i);
         ASSERT(target->prototype() == this ||
                target->prototype() == real_prototype);
         // Getter prototype() is read-only, set_prototype() has side effects.
@@ -5459,7 +5558,8 @@ void JSFunction::MarkForLazyRecompilation() {
   ASSERT(is_compiled() && !IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() ||
          code()->optimizable());
-  ReplaceCode(Builtins::builtin(Builtins::LazyRecompile));
+  Builtins* builtins = GetIsolate()->builtins();
+  ReplaceCode(builtins->builtin(Builtins::LazyRecompile));
 }
 
 
@@ -5492,7 +5592,7 @@ bool JSFunction::IsInlineable() {
 
 Object* JSFunction::SetInstancePrototype(Object* value) {
   ASSERT(value->IsJSObject());
-
+  Heap* heap = GetHeap();
   if (has_initial_map()) {
     initial_map()->set_prototype(value);
   } else {
@@ -5501,7 +5601,7 @@ Object* JSFunction::SetInstancePrototype(Object* value) {
     // prototype is put into the initial map where it belongs.
     set_prototype_or_initial_map(value);
   }
-  Heap::ClearInstanceofCache();
+  heap->ClearInstanceofCache();
   return value;
 }
 
@@ -5515,6 +5615,7 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
   // used for constructing objects to the original object prototype.
   // See ECMA-262 13.2.2.
   if (!value->IsJSObject()) {
+    Heap* heap = GetHeap();
     // Copy the map so this does not affect unrelated functions.
     // Remove map transitions because they point to maps with a
     // different prototype.
@@ -5526,7 +5627,8 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
     map()->set_constructor(value);
     map()->set_non_instance_prototype(true);
     construct_prototype =
-        Top::context()->global_context()->initial_object_prototype();
+        heap->isolate()->context()->global_context()->
+            initial_object_prototype();
   } else {
     map()->set_non_instance_prototype(false);
   }
@@ -5551,7 +5653,7 @@ Object* JSFunction::RemovePrototype() {
   ASSERT(shared()->strict_mode() || map() == global_context->function_map());
 
   set_map(no_prototype_map);
-  set_prototype_or_initial_map(Heap::the_hole_value());
+  set_prototype_or_initial_map(GetHeap()->the_hole_value());
   return this;
 }
 
@@ -5573,13 +5675,17 @@ Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
 }
 
 
-MaybeObject* Oddball::Initialize(const char* to_string, Object* to_number) {
+MaybeObject* Oddball::Initialize(const char* to_string,
+                                 Object* to_number,
+                                 byte kind) {
   Object* symbol;
-  { MaybeObject* maybe_symbol = Heap::LookupAsciiSymbol(to_string);
+  { MaybeObject* maybe_symbol =
+        Isolate::Current()->heap()->LookupAsciiSymbol(to_string);
     if (!maybe_symbol->ToObject(&symbol)) return maybe_symbol;
   }
   set_to_string(String::cast(symbol));
   set_to_number(to_number);
+  set_kind(kind);
   return this;
 }
 
@@ -5598,10 +5704,11 @@ bool SharedFunctionInfo::HasSourceCode() {
 
 
 Object* SharedFunctionInfo::GetSourceCode() {
-  if (!HasSourceCode()) return Heap::undefined_value();
-  HandleScope scope;
+  Isolate* isolate = GetIsolate();
+  if (!HasSourceCode()) return isolate->heap()->undefined_value();
+  HandleScope scope(isolate);
   Object* source = Script::cast(script())->source();
-  return *SubString(Handle<String>(String::cast(source)),
+  return *SubString(Handle<String>(String::cast(source), isolate),
                     start_position(), end_position());
 }
 
@@ -5628,6 +5735,8 @@ int SharedFunctionInfo::CalculateInObjectProperties() {
 
 
 bool SharedFunctionInfo::CanGenerateInlineConstructor(Object* prototype) {
+  Heap* heap = GetHeap();
+
   // Check the basic conditions for generating inline constructor code.
   if (!FLAG_inline_new
       || !has_only_simple_this_property_assignments()
@@ -5644,7 +5753,7 @@ bool SharedFunctionInfo::CanGenerateInlineConstructor(Object* prototype) {
   // Traverse the proposed prototype chain looking for setters for properties of
   // the same names as are set by the inline constructor.
   for (Object* obj = prototype;
-       obj != Heap::null_value();
+       obj != heap->null_value();
        obj = obj->GetPrototype()) {
     JSObject* js_object = JSObject::cast(obj);
     for (int i = 0; i < this_property_assignments_count(); i++) {
@@ -5680,10 +5789,11 @@ void SharedFunctionInfo::SetThisPropertyAssignmentsInfo(
 
 
 void SharedFunctionInfo::ClearThisPropertyAssignmentsInfo() {
+  Heap* heap = GetHeap();
   set_compiler_hints(BooleanBit::set(compiler_hints(),
                                      kHasOnlySimpleThisPropertyAssignments,
                                      false));
-  set_this_property_assignments(Heap::undefined_value());
+  set_this_property_assignments(heap->undefined_value());
   set_this_property_assignments_count(0);
 }
 
@@ -5828,9 +5938,10 @@ void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
     set_construction_count(kGenerousAllocationCount);
   }
   set_initial_map(map);
-  ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubGeneric),
+  Builtins* builtins = map->heap()->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubGeneric),
             construct_stub());
-  set_construct_stub(Builtins::builtin(Builtins::JSConstructStubCountdown));
+  set_construct_stub(builtins->builtin(Builtins::JSConstructStubCountdown));
 }
 
 
@@ -5847,10 +5958,11 @@ void SharedFunctionInfo::DetachInitialMap() {
   // then StartInobjectTracking will be called again the next time the
   // constructor is called. The countdown will continue and (possibly after
   // several more GCs) CompleteInobjectSlackTracking will eventually be called.
-  set_initial_map(Heap::raw_unchecked_undefined_value());
-  ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubCountdown),
+  set_initial_map(map->heap()->raw_unchecked_undefined_value());
+  Builtins* builtins = map->heap()->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubCountdown),
             *RawField(this, kConstructStubOffset));
-  set_construct_stub(Builtins::builtin(Builtins::JSConstructStubGeneric));
+  set_construct_stub(builtins->builtin(Builtins::JSConstructStubGeneric));
   // It is safe to clear the flag: it will be set again if the map is live.
   set_live_objects_may_exist(false);
 }
@@ -5863,9 +5975,10 @@ void SharedFunctionInfo::AttachInitialMap(Map* map) {
 
   // Resume inobject slack tracking.
   set_initial_map(map);
-  ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubGeneric),
+  Builtins* builtins = map->heap()->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubGeneric),
             *RawField(this, kConstructStubOffset));
-  set_construct_stub(Builtins::builtin(Builtins::JSConstructStubCountdown));
+  set_construct_stub(builtins->builtin(Builtins::JSConstructStubCountdown));
   // The map survived the gc, so there may be objects referencing it.
   set_live_objects_may_exist(true);
 }
@@ -5894,10 +6007,12 @@ void SharedFunctionInfo::CompleteInobjectSlackTracking() {
   ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
   Map* map = Map::cast(initial_map());
 
-  set_initial_map(Heap::undefined_value());
-  ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubCountdown),
+  Heap* heap = map->heap();
+  set_initial_map(heap->undefined_value());
+  Builtins* builtins = heap->isolate()->builtins();
+  ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubCountdown),
             construct_stub());
-  set_construct_stub(Builtins::builtin(Builtins::JSConstructStubGeneric));
+  set_construct_stub(builtins->builtin(Builtins::JSConstructStubGeneric));
 
   int slack = map->unused_property_fields();
   map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
@@ -5954,8 +6069,7 @@ void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
 
 
 void Code::InvalidateRelocation() {
-  HandleScope scope;
-  set_relocation_info(Heap::empty_byte_array());
+  set_relocation_info(GetHeap()->empty_byte_array());
 }
 
 
@@ -6401,11 +6515,12 @@ void Code::Disassemble(const char* name, FILE* out) {
 
 MaybeObject* JSObject::SetFastElementsCapacityAndLength(int capacity,
                                                         int length) {
+  Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(!HasExternalArrayElements());
 
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArrayWithHoles(capacity);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray* elems = FixedArray::cast(obj);
@@ -6492,14 +6607,15 @@ MaybeObject* JSObject::SetSlowElements(Object* len) {
 
 
 MaybeObject* JSArray::Initialize(int capacity) {
+  Heap* heap = GetHeap();
   ASSERT(capacity >= 0);
   set_length(Smi::FromInt(0));
   FixedArray* new_elements;
   if (capacity == 0) {
-    new_elements = Heap::empty_fixed_array();
+    new_elements = heap->empty_fixed_array();
   } else {
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateFixedArrayWithHoles(capacity);
+    { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     new_elements = FixedArray::cast(obj);
@@ -6514,21 +6630,23 @@ void JSArray::Expand(int required_size) {
   Handle<FixedArray> old_backing(FixedArray::cast(elements()));
   int old_size = old_backing->length();
   int new_size = required_size > old_size ? required_size : old_size;
-  Handle<FixedArray> new_backing = Factory::NewFixedArray(new_size);
+  Handle<FixedArray> new_backing = FACTORY->NewFixedArray(new_size);
   // Can't use this any more now because we may have had a GC!
   for (int i = 0; i < old_size; i++) new_backing->set(i, old_backing->get(i));
   self->SetContent(*new_backing);
 }
 
 
-static Failure* ArrayLengthRangeError() {
+static Failure* ArrayLengthRangeError(Heap* heap) {
   HandleScope scope;
-  return Top::Throw(*Factory::NewRangeError("invalid_array_length",
-                                            HandleVector<Object>(NULL, 0)));
+  return heap->isolate()->Throw(
+      *FACTORY->NewRangeError("invalid_array_length",
+          HandleVector<Object>(NULL, 0)));
 }
 
 
 MaybeObject* JSObject::SetElementsLength(Object* len) {
+  Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(AllowsSetElementsLength());
 
@@ -6536,7 +6654,7 @@ MaybeObject* JSObject::SetElementsLength(Object* len) {
   Object* smi_length = Smi::FromInt(0);
   if (maybe_smi_length->ToObject(&smi_length) && smi_length->IsSmi()) {
     const int value = Smi::cast(smi_length)->value();
-    if (value < 0) return ArrayLengthRangeError();
+    if (value < 0) return ArrayLengthRangeError(heap);
     switch (GetElementsKind()) {
       case FAST_ELEMENTS: {
         int old_capacity = FixedArray::cast(elements())->length();
@@ -6602,14 +6720,14 @@ MaybeObject* JSObject::SetElementsLength(Object* len) {
     if (len->ToArrayIndex(&length)) {
       return SetSlowElements(len);
     } else {
-      return ArrayLengthRangeError();
+      return ArrayLengthRangeError(heap);
     }
   }
 
   // len is not a number so make the array size one and
   // set only element to len.
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArray(1);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArray(1);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray::cast(obj)->set(0, len);
@@ -6621,6 +6739,7 @@ MaybeObject* JSObject::SetElementsLength(Object* len) {
 
 MaybeObject* JSObject::SetPrototype(Object* value,
                                     bool skip_hidden_prototypes) {
+  Heap* heap = GetHeap();
   // Silently ignore the change if value is not a JSObject or null.
   // SpiderMonkey behaves this way.
   if (!value->IsJSObject() && !value->IsNull()) return value;
@@ -6629,12 +6748,12 @@ MaybeObject* JSObject::SetPrototype(Object* value,
   // prototype cycles are prevented.
   // It is sufficient to validate that the receiver is not in the new prototype
   // chain.
-  for (Object* pt = value; pt != Heap::null_value(); pt = pt->GetPrototype()) {
+  for (Object* pt = value; pt != heap->null_value(); pt = pt->GetPrototype()) {
     if (JSObject::cast(pt) == this) {
       // Cycle detected.
       HandleScope scope;
-      return Top::Throw(*Factory::NewError("cyclic_proto",
-                                           HandleVector<Object>(NULL, 0)));
+      return heap->isolate()->Throw(
+          *FACTORY->NewError("cyclic_proto", HandleVector<Object>(NULL, 0)));
     }
   }
 
@@ -6659,7 +6778,7 @@ MaybeObject* JSObject::SetPrototype(Object* value,
   Map::cast(new_map)->set_prototype(value);
   real_receiver->set_map(Map::cast(new_map));
 
-  Heap::ClearInstanceofCache();
+  heap->ClearInstanceofCache();
 
   return value;
 }
@@ -6714,29 +6833,31 @@ bool JSObject::HasElementPostInterceptor(JSObject* receiver, uint32_t index) {
   if (this->IsStringObjectWithCharacterAt(index)) return true;
 
   Object* pt = GetPrototype();
-  if (pt == Heap::null_value()) return false;
+  if (pt->IsNull()) return false;
   return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
 }
 
 
 bool JSObject::HasElementWithInterceptor(JSObject* receiver, uint32_t index) {
+  Isolate* isolate = GetIsolate();
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
   Handle<JSObject> receiver_handle(receiver);
   Handle<JSObject> holder_handle(this);
-  CustomArguments args(interceptor->data(), receiver, this);
+  CustomArguments args(isolate, interceptor->data(), receiver, this);
   v8::AccessorInfo info(args.end());
   if (!interceptor->query()->IsUndefined()) {
     v8::IndexedPropertyQuery query =
         v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
-    LOG(ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
     v8::Handle<v8::Integer> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = query(index, info);
     }
     if (!result.IsEmpty()) {
@@ -6746,11 +6867,12 @@ bool JSObject::HasElementWithInterceptor(JSObject* receiver, uint32_t index) {
   } else if (!interceptor->getter()->IsUndefined()) {
     v8::IndexedPropertyGetter getter =
         v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
-    LOG(ApiIndexedPropertyAccess("interceptor-indexed-has-get", this, index));
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-has-get", this, index));
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = getter(index, info);
     }
     if (!result.IsEmpty()) return true;
@@ -6760,10 +6882,12 @@ bool JSObject::HasElementWithInterceptor(JSObject* receiver, uint32_t index) {
 
 
 JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
+  Heap* heap = GetHeap();
+
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      !heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
     return UNDEFINED_ELEMENT;
   }
 
@@ -6830,10 +6954,12 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
 
 
 bool JSObject::HasElementWithReceiver(JSObject* receiver, uint32_t index) {
+  Heap* heap = GetHeap();
+
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      !heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
     return false;
   }
 
@@ -6888,7 +7014,7 @@ bool JSObject::HasElementWithReceiver(JSObject* receiver, uint32_t index) {
   if (this->IsStringObjectWithCharacterAt(index)) return true;
 
   Object* pt = GetPrototype();
-  if (pt == Heap::null_value()) return false;
+  if (pt->IsNull()) return false;
   return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
 }
 
@@ -6897,26 +7023,28 @@ MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
                                                  Object* value,
                                                  StrictModeFlag strict_mode,
                                                  bool check_prototype) {
+  Isolate* isolate = GetIsolate();
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
   Handle<JSObject> this_handle(this);
-  Handle<Object> value_handle(value);
+  Handle<Object> value_handle(value, isolate);
   if (!interceptor->setter()->IsUndefined()) {
     v8::IndexedPropertySetter setter =
         v8::ToCData<v8::IndexedPropertySetter>(interceptor->setter());
-    LOG(ApiIndexedPropertyAccess("interceptor-indexed-set", this, index));
-    CustomArguments args(interceptor->data(), this, this);
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-set", this, index));
+    CustomArguments args(isolate, interceptor->data(), this, this);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = setter(index, v8::Utils::ToLocal(value_handle), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!result.IsEmpty()) return *value_handle;
   }
   MaybeObject* raw_result =
@@ -6924,7 +7052,7 @@ MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
                                                 *value_handle,
                                                 strict_mode,
                                                 check_prototype);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return raw_result;
 }
 
@@ -6933,6 +7061,7 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
                                               Object* structure,
                                               uint32_t index,
                                               Object* holder) {
+  Isolate* isolate = GetIsolate();
   ASSERT(!structure->IsProxy());
 
   // api style callbacks.
@@ -6940,22 +7069,22 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
     AccessorInfo* data = AccessorInfo::cast(structure);
     Object* fun_obj = data->getter();
     v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
-    HandleScope scope;
+    HandleScope scope(isolate);
     Handle<JSObject> self(JSObject::cast(receiver));
     Handle<JSObject> holder_handle(JSObject::cast(holder));
-    Handle<Object> number = Factory::NewNumberFromUint(index);
-    Handle<String> key(Factory::NumberToString(number));
-    LOG(ApiNamedPropertyAccess("load", *self, *key));
-    CustomArguments args(data->data(), *self, *holder_handle);
+    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
+    Handle<String> key(isolate->factory()->NumberToString(number));
+    LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
+    CustomArguments args(isolate, data->data(), *self, *holder_handle);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = call_fun(v8::Utils::ToLocal(key), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
-    if (result.IsEmpty()) return Heap::undefined_value();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
+    if (result.IsEmpty()) return isolate->heap()->undefined_value();
     return *v8::Utils::OpenHandle(*result);
   }
 
@@ -6967,7 +7096,7 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
                                                   JSFunction::cast(getter));
     }
     // Getter is not a function.
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   UNREACHABLE();
@@ -6979,12 +7108,13 @@ MaybeObject* JSObject::SetElementWithCallback(Object* structure,
                                               uint32_t index,
                                               Object* value,
                                               JSObject* holder) {
-  HandleScope scope;
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
 
   // We should never get here to initialize a const with the hole
   // value since a const declaration would conflict with the setter.
   ASSERT(!value->IsTheHole());
-  Handle<Object> value_handle(value);
+  Handle<Object> value_handle(value, isolate);
 
   // To accommodate both the old and the new api we switch on the
   // data structure used to store the callbacks.  Eventually proxy
@@ -6997,19 +7127,19 @@ MaybeObject* JSObject::SetElementWithCallback(Object* structure,
     Object* call_obj = data->setter();
     v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
     if (call_fun == NULL) return value;
-    Handle<Object> number = Factory::NewNumberFromUint(index);
-    Handle<String> key(Factory::NumberToString(number));
-    LOG(ApiNamedPropertyAccess("store", this, *key));
-    CustomArguments args(data->data(), this, JSObject::cast(holder));
+    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
+    Handle<String> key(isolate->factory()->NumberToString(number));
+    LOG(isolate, ApiNamedPropertyAccess("store", this, *key));
+    CustomArguments args(isolate, data->data(), this, JSObject::cast(holder));
     v8::AccessorInfo info(args.end());
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       call_fun(v8::Utils::ToLocal(key),
                v8::Utils::ToLocal(value_handle),
                info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     return *value_handle;
   }
 
@@ -7018,11 +7148,12 @@ MaybeObject* JSObject::SetElementWithCallback(Object* structure,
     if (setter->IsJSFunction()) {
      return SetPropertyWithDefinedSetter(JSFunction::cast(setter), value);
     } else {
-      Handle<Object> holder_handle(holder);
-      Handle<Object> key(Factory::NewNumberFromUint(index));
+      Handle<Object> holder_handle(holder, isolate);
+      Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
       Handle<Object> args[2] = { key, holder_handle };
-      return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
-                                               HandleVector(args, 2)));
+      return isolate->Throw(
+          *isolate->factory()->NewTypeError("no_setter_in_callback",
+                                            HandleVector(args, 2)));
     }
   }
 
@@ -7101,12 +7232,13 @@ MaybeObject* JSObject::SetElement(uint32_t index,
                                   Object* value,
                                   StrictModeFlag strict_mode,
                                   bool check_prototype) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_SET)) {
+      !heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_SET)) {
     HandleScope scope;
     Handle<Object> value_handle(value);
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET);
     return *value_handle;
   }
 
@@ -7139,6 +7271,7 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
                                                     Object* value,
                                                     StrictModeFlag strict_mode,
                                                     bool check_prototype) {
+  Isolate* isolate = GetIsolate();
   switch (GetElementsKind()) {
     case FAST_ELEMENTS:
       // Fast case.
@@ -7194,12 +7327,12 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
           // If put fails in strict mode, throw exception.
           if (!dictionary->ValueAtPut(entry, value) &&
               strict_mode == kStrictMode) {
-            Handle<Object> number(Factory::NewNumberFromUint(index));
+            Handle<Object> number(isolate->factory()->NewNumberFromUint(index));
             Handle<Object> holder(this);
             Handle<Object> args[2] = { number, holder };
-            return Top::Throw(
-                *Factory::NewTypeError("strict_read_only_property",
-                                       HandleVector(args, 2)));
+            return isolate->Throw(
+                *isolate->factory()->NewTypeError("strict_read_only_property",
+                                                  HandleVector(args, 2)));
           }
         }
       } else {
@@ -7214,11 +7347,13 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
         // When we set the is_extensible flag to false, we always force
         // the elements into dictionary mode (and force them to stay there).
         if (!map()->is_extensible()) {
-          Handle<Object> number(Factory::NewNumberFromUint(index));
-          Handle<String> index_string(Factory::NumberToString(number));
+          Handle<Object> number(isolate->factory()->NewNumberFromUint(index));
+          Handle<String> index_string(
+              isolate->factory()->NumberToString(number));
           Handle<Object> args[1] = { index_string };
-          return Top::Throw(*Factory::NewTypeError("object_not_extensible",
-                                                   HandleVector(args, 1)));
+          return isolate->Throw(
+              *isolate->factory()->NewTypeError("object_not_extensible",
+                                                HandleVector(args, 1)));
         }
         Object* result;
         { MaybeObject* maybe_result = dictionary->AtNumberPut(index, value);
@@ -7271,7 +7406,7 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
   // All possible cases have been handled above. Add a return to avoid the
   // complaints from the compiler.
   UNREACHABLE();
-  return Heap::null_value();
+  return isolate->heap()->null_value();
 }
 
 
@@ -7284,7 +7419,7 @@ MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
   if (index >= old_len && index != 0xffffffff) {
     Object* len;
     { MaybeObject* maybe_len =
-          Heap::NumberFromDouble(static_cast<double>(index) + 1);
+          GetHeap()->NumberFromDouble(static_cast<double>(index) + 1);
       if (!maybe_len->ToObject(&len)) return maybe_len;
     }
     set_length(len);
@@ -7295,6 +7430,7 @@ MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
 
 MaybeObject* JSObject::GetElementPostInterceptor(Object* receiver,
                                                  uint32_t index) {
+  Heap* heap = GetHeap();
   // Get element works for both JSObject and JSArray since
   // JSArray::length cannot change.
   switch (GetElementsKind()) {
@@ -7343,17 +7479,18 @@ MaybeObject* JSObject::GetElementPostInterceptor(Object* receiver,
 
   // Continue searching via the prototype chain.
   Object* pt = GetPrototype();
-  if (pt == Heap::null_value()) return Heap::undefined_value();
+  if (pt->IsNull()) return heap->undefined_value();
   return pt->GetElementWithReceiver(receiver, index);
 }
 
 
 MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
                                                  uint32_t index) {
+  Isolate* isolate = GetIsolate();
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc;
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
   Handle<Object> this_handle(receiver);
   Handle<JSObject> holder_handle(this);
@@ -7361,33 +7498,35 @@ MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
   if (!interceptor->getter()->IsUndefined()) {
     v8::IndexedPropertyGetter getter =
         v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
-    LOG(ApiIndexedPropertyAccess("interceptor-indexed-get", this, index));
-    CustomArguments args(interceptor->data(), receiver, this);
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-get", this, index));
+    CustomArguments args(isolate, interceptor->data(), receiver, this);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = getter(index, info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
   }
 
   MaybeObject* raw_result =
       holder_handle->GetElementPostInterceptor(*this_handle, index);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return raw_result;
 }
 
 
 MaybeObject* JSObject::GetElementWithReceiver(Object* receiver,
                                               uint32_t index) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_GET)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_GET);
-    return Heap::undefined_value();
+      !heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_GET)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_GET);
+    return heap->undefined_value();
   }
 
   if (HasIndexedInterceptor()) {
@@ -7438,7 +7577,7 @@ MaybeObject* JSObject::GetElementWithReceiver(Object* receiver,
   }
 
   Object* pt = GetPrototype();
-  if (pt == Heap::null_value()) return Heap::undefined_value();
+  if (pt == heap->null_value()) return heap->undefined_value();
   return pt->GetElementWithReceiver(receiver, index);
 }
 
@@ -7493,7 +7632,7 @@ MaybeObject* JSObject::GetExternalElement(uint32_t index) {
       ExternalIntArray* array = ExternalIntArray::cast(elements());
       if (index < static_cast<uint32_t>(array->length())) {
         int32_t value = array->get(index);
-        return Heap::NumberFromInt32(value);
+        return GetHeap()->NumberFromInt32(value);
       }
       break;
     }
@@ -7502,7 +7641,7 @@ MaybeObject* JSObject::GetExternalElement(uint32_t index) {
           ExternalUnsignedIntArray::cast(elements());
       if (index < static_cast<uint32_t>(array->length())) {
         uint32_t value = array->get(index);
-        return Heap::NumberFromUint32(value);
+        return GetHeap()->NumberFromUint32(value);
       }
       break;
     }
@@ -7510,7 +7649,7 @@ MaybeObject* JSObject::GetExternalElement(uint32_t index) {
       ExternalFloatArray* array = ExternalFloatArray::cast(elements());
       if (index < static_cast<uint32_t>(array->length())) {
         float value = array->get(index);
-        return Heap::AllocateHeapNumber(value);
+        return GetHeap()->AllocateHeapNumber(value);
       }
       break;
     }
@@ -7519,7 +7658,7 @@ MaybeObject* JSObject::GetExternalElement(uint32_t index) {
       UNREACHABLE();
       break;
   }
-  return Heap::undefined_value();
+  return GetHeap()->undefined_value();
 }
 
 
@@ -7664,6 +7803,7 @@ MaybeObject* JSObject::GetPropertyPostInterceptor(
     JSObject* receiver,
     String* name,
     PropertyAttributes* attributes) {
+  Heap* heap = GetHeap();
   // Check local property in holder, ignore interceptor.
   LookupResult result;
   LocalLookupRealNamedProperty(name, &result);
@@ -7673,7 +7813,7 @@ MaybeObject* JSObject::GetPropertyPostInterceptor(
   // Continue searching via the prototype chain.
   Object* pt = GetPrototype();
   *attributes = ABSENT;
-  if (pt == Heap::null_value()) return Heap::undefined_value();
+  if (pt->IsNull()) return heap->undefined_value();
   return pt->GetPropertyWithReceiver(receiver, name, attributes);
 }
 
@@ -7682,13 +7822,14 @@ MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
     JSObject* receiver,
     String* name,
     PropertyAttributes* attributes) {
+  Heap* heap = GetHeap();
   // Check local property in holder, ignore interceptor.
   LookupResult result;
   LocalLookupRealNamedProperty(name, &result);
   if (result.IsProperty()) {
     return GetProperty(receiver, &result, name, attributes);
   }
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
@@ -7696,8 +7837,9 @@ MaybeObject* JSObject::GetPropertyWithInterceptor(
     JSObject* receiver,
     String* name,
     PropertyAttributes* attributes) {
+  Isolate* isolate = GetIsolate();
   InterceptorInfo* interceptor = GetNamedInterceptor();
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<JSObject> receiver_handle(receiver);
   Handle<JSObject> holder_handle(this);
   Handle<String> name_handle(name);
@@ -7705,16 +7847,17 @@ MaybeObject* JSObject::GetPropertyWithInterceptor(
   if (!interceptor->getter()->IsUndefined()) {
     v8::NamedPropertyGetter getter =
         v8::ToCData<v8::NamedPropertyGetter>(interceptor->getter());
-    LOG(ApiNamedPropertyAccess("interceptor-named-get", *holder_handle, name));
-    CustomArguments args(interceptor->data(), receiver, this);
+    LOG(isolate,
+        ApiNamedPropertyAccess("interceptor-named-get", *holder_handle, name));
+    CustomArguments args(isolate, interceptor->data(), receiver, this);
     v8::AccessorInfo info(args.end());
     v8::Handle<v8::Value> result;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       result = getter(v8::Utils::ToLocal(name_handle), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!result.IsEmpty()) {
       *attributes = NONE;
       return *v8::Utils::OpenHandle(*result);
@@ -7725,16 +7868,17 @@ MaybeObject* JSObject::GetPropertyWithInterceptor(
       *receiver_handle,
       *name_handle,
       attributes);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return result;
 }
 
 
 bool JSObject::HasRealNamedProperty(String* key) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, key, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      !heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
     return false;
   }
 
@@ -7745,10 +7889,11 @@ bool JSObject::HasRealNamedProperty(String* key) {
 
 
 bool JSObject::HasRealElementProperty(uint32_t index) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      !heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
     return false;
   }
 
@@ -7788,15 +7933,16 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
   }
   // All possibilities have been handled above already.
   UNREACHABLE();
-  return Heap::null_value();
+  return heap->null_value();
 }
 
 
 bool JSObject::HasRealNamedCallbackProperty(String* key) {
+  Heap* heap = GetHeap();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(this, key, v8::ACCESS_HAS)) {
-    Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      !heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
+    heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
     return false;
   }
 
@@ -8059,51 +8205,6 @@ int JSObject::GetEnumElementKeys(FixedArray* storage) {
 }
 
 
-bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
-  ASSERT(other->IsNumber());
-  return key == static_cast<uint32_t>(other->Number());
-}
-
-
-uint32_t NumberDictionaryShape::Hash(uint32_t key) {
-  return ComputeIntegerHash(key);
-}
-
-
-uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
-  ASSERT(other->IsNumber());
-  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
-}
-
-
-MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
-  return Heap::NumberFromUint32(key);
-}
-
-
-bool StringDictionaryShape::IsMatch(String* key, Object* other) {
-  // We know that all entries in a hash table had their hash keys created.
-  // Use that knowledge to have fast failure.
-  if (key->Hash() != String::cast(other)->Hash()) return false;
-  return key->Equals(String::cast(other));
-}
-
-
-uint32_t StringDictionaryShape::Hash(String* key) {
-  return key->Hash();
-}
-
-
-uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
-  return String::cast(other)->Hash();
-}
-
-
-MaybeObject* StringDictionaryShape::AsObject(String* key) {
-  return key;
-}
-
-
 // StringKey simply carries a string object as key.
 class StringKey : public HashTableKey {
  public:
@@ -8186,7 +8287,7 @@ class StringSharedKey : public HashTableKey {
 
   MUST_USE_RESULT MaybeObject* AsObject() {
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateFixedArray(3);
+    { MaybeObject* maybe_obj = source_->GetHeap()->AllocateFixedArray(3);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
     FixedArray* pair = FixedArray::cast(obj);
@@ -8270,7 +8371,8 @@ class Utf8SymbolKey : public HashTableKey {
 
   MaybeObject* AsObject() {
     if (hash_field_ == 0) Hash();
-    return Heap::AllocateSymbol(string_, chars_, hash_field_);
+    return Isolate::Current()->heap()->AllocateSymbol(
+        string_, chars_, hash_field_);
   }
 
   Vector<const char> string_;
@@ -8337,7 +8439,7 @@ class AsciiSymbolKey : public SequentialSymbolKey<char> {
 
   MaybeObject* AsObject() {
     if (hash_field_ == 0) Hash();
-    return Heap::AllocateAsciiSymbol(string_, hash_field_);
+    return HEAP->AllocateAsciiSymbol(string_, hash_field_);
   }
 };
 
@@ -8353,7 +8455,7 @@ class TwoByteSymbolKey : public SequentialSymbolKey<uc16> {
 
   MaybeObject* AsObject() {
     if (hash_field_ == 0) Hash();
-    return Heap::AllocateTwoByteSymbol(string_, hash_field_);
+    return HEAP->AllocateTwoByteSymbol(string_, hash_field_);
   }
 };
 
@@ -8361,7 +8463,8 @@ class TwoByteSymbolKey : public SequentialSymbolKey<uc16> {
 // SymbolKey carries a string/symbol object as key.
 class SymbolKey : public HashTableKey {
  public:
-  explicit SymbolKey(String* string) : string_(string) { }
+  explicit SymbolKey(String* string)
+      : string_(string) { }
 
   bool IsMatch(Object* string) {
     return String::cast(string)->Equals(string_);
@@ -8377,8 +8480,9 @@ class SymbolKey : public HashTableKey {
     // Attempt to flatten the string, so that symbols will most often
     // be flat strings.
     string_ = string_->TryFlattenGetString();
+    Heap* heap = string_->GetHeap();
     // Transform string to symbol if possible.
-    Map* map = Heap::SymbolMapForString(string_);
+    Map* map = heap->SymbolMapForString(string_);
     if (map != NULL) {
       string_->set_map(map);
       ASSERT(string_->IsSymbol());
@@ -8386,7 +8490,7 @@ class SymbolKey : public HashTableKey {
     }
     // Otherwise allocate a new symbol.
     StringInputBuffer buffer(string_);
-    return Heap::AllocateInternalSymbol(&buffer,
+    return heap->AllocateInternalSymbol(&buffer,
                                         string_->length(),
                                         string_->hash_field());
   }
@@ -8425,8 +8529,8 @@ MaybeObject* HashTable<Shape, Key>::Allocate(int at_least_space_for,
   }
 
   Object* obj;
-  { MaybeObject* maybe_obj =
-        Heap::AllocateHashTable(EntryToIndex(capacity), pretenure);
+  { MaybeObject* maybe_obj = Isolate::Current()->heap()->
+        AllocateHashTable(EntryToIndex(capacity), pretenure);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   HashTable::cast(obj)->SetNumberOfElements(0);
@@ -8436,23 +8540,6 @@ MaybeObject* HashTable<Shape, Key>::Allocate(int at_least_space_for,
 }
 
 
-// Find entry for key otherwise return kNotFound.
-template<typename Shape, typename Key>
-int HashTable<Shape, Key>::FindEntry(Key key) {
-  uint32_t capacity = Capacity();
-  uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
-  uint32_t count = 1;
-  // EnsureCapacity will guarantee the hash table is never full.
-  while (true) {
-    Object* element = KeyAt(entry);
-    if (element->IsUndefined()) break;  // Empty entry.
-    if (!element->IsNull() && Shape::IsMatch(key, element)) return entry;
-    entry = NextProbe(entry, count++, capacity);
-  }
-  return kNotFound;
-}
-
-
 // Find entry for key otherwise return kNotFound.
 int StringDictionary::FindEntry(String* key) {
   if (!key->IsSymbol()) {
@@ -8495,6 +8582,7 @@ int StringDictionary::FindEntry(String* key) {
 
 template<typename Shape, typename Key>
 MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
+  Heap* heap = GetHeap();
   int capacity = Capacity();
   int nof = NumberOfElements() + n;
   int nod = NumberOfDeletedElements();
@@ -8508,7 +8596,7 @@ MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
 
   const int kMinCapacityForPretenure = 256;
   bool pretenure =
-      (capacity > kMinCapacityForPretenure) && !Heap::InNewSpace(this);
+      (capacity > kMinCapacityForPretenure) && !heap->InNewSpace(this);
   Object* obj;
   { MaybeObject* maybe_obj =
         Allocate(nof * 2, pretenure ? TENURED : NOT_TENURED);
@@ -8640,6 +8728,7 @@ int HashTable<NumberDictionaryShape, uint32_t>::FindEntry(uint32_t);
 // Collates undefined and nonexistent elements below limit from position
 // zero of the elements. The object stays in Dictionary mode.
 MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
+  Heap* heap = GetHeap();
   ASSERT(HasDictionaryElements());
   // Must stay in dictionary mode, either because of requires_slow_elements,
   // or because we are not going to sort (and therefore compact) all of the
@@ -8649,7 +8738,7 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
   if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
     // Allocate space for result before we start mutating the object.
     Object* new_double;
-    { MaybeObject* maybe_new_double = Heap::AllocateHeapNumber(0.0);
+    { MaybeObject* maybe_new_double = heap->AllocateHeapNumber(0.0);
       if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
     }
     result_double = HeapNumber::cast(new_double);
@@ -8715,7 +8804,7 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
       // allocation. Bailout.
       return Smi::FromInt(-1);
     }
-    new_dict->AddNumberEntry(pos, Heap::undefined_value(), no_details)->
+    new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)->
         ToObjectUnchecked();
     pos++;
     undefs--;
@@ -8738,6 +8827,7 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
 // If the object is in dictionary mode, it is converted to fast elements
 // mode.
 MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
+  Heap* heap = GetHeap();
   ASSERT(!HasExternalArrayElements());
 
   if (HasDictionaryElements()) {
@@ -8756,10 +8846,10 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
     }
     Map* new_map = Map::cast(obj);
 
-    PretenureFlag tenure = Heap::InNewSpace(this) ? NOT_TENURED: TENURED;
+    PretenureFlag tenure = heap->InNewSpace(this) ? NOT_TENURED: TENURED;
     Object* new_array;
     { MaybeObject* maybe_new_array =
-          Heap::AllocateFixedArray(dict->NumberOfElements(), tenure);
+          heap->AllocateFixedArray(dict->NumberOfElements(), tenure);
       if (!maybe_new_array->ToObject(&new_array)) return maybe_new_array;
     }
     FixedArray* fast_elements = FixedArray::cast(new_array);
@@ -8792,7 +8882,7 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
     // Pessimistically allocate space for return value before
     // we start mutating the array.
     Object* new_double;
-    { MaybeObject* maybe_new_double = Heap::AllocateHeapNumber(0.0);
+    { MaybeObject* maybe_new_double = heap->AllocateHeapNumber(0.0);
       if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
     }
     result_double = HeapNumber::cast(new_double);
@@ -8886,7 +8976,8 @@ Object* ExternalPixelArray::SetValue(uint32_t index, Object* value) {
 
 
 template<typename ExternalArrayClass, typename ValueType>
-static MaybeObject* ExternalArrayIntSetter(ExternalArrayClass* receiver,
+static MaybeObject* ExternalArrayIntSetter(Heap* heap,
+                                           ExternalArrayClass* receiver,
                                            uint32_t index,
                                            Object* value) {
   ValueType cast_value = 0;
@@ -8904,45 +8995,46 @@ static MaybeObject* ExternalArrayIntSetter(ExternalArrayClass* receiver,
     }
     receiver->set(index, cast_value);
   }
-  return Heap::NumberFromInt32(cast_value);
+  return heap->NumberFromInt32(cast_value);
 }
 
 
 MaybeObject* ExternalByteArray::SetValue(uint32_t index, Object* value) {
   return ExternalArrayIntSetter<ExternalByteArray, int8_t>
-      (this, index, value);
+      (GetHeap(), this, index, value);
 }
 
 
 MaybeObject* ExternalUnsignedByteArray::SetValue(uint32_t index,
                                                  Object* value) {
   return ExternalArrayIntSetter<ExternalUnsignedByteArray, uint8_t>
-      (this, index, value);
+      (GetHeap(), this, index, value);
 }
 
 
 MaybeObject* ExternalShortArray::SetValue(uint32_t index,
                                           Object* value) {
   return ExternalArrayIntSetter<ExternalShortArray, int16_t>
-      (this, index, value);
+      (GetHeap(), this, index, value);
 }
 
 
 MaybeObject* ExternalUnsignedShortArray::SetValue(uint32_t index,
                                                   Object* value) {
   return ExternalArrayIntSetter<ExternalUnsignedShortArray, uint16_t>
-      (this, index, value);
+      (GetHeap(), this, index, value);
 }
 
 
 MaybeObject* ExternalIntArray::SetValue(uint32_t index, Object* value) {
   return ExternalArrayIntSetter<ExternalIntArray, int32_t>
-      (this, index, value);
+      (GetHeap(), this, index, value);
 }
 
 
 MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) {
   uint32_t cast_value = 0;
+  Heap* heap = GetHeap();
   if (index < static_cast<uint32_t>(length())) {
     if (value->IsSmi()) {
       int int_value = Smi::cast(value)->value();
@@ -8957,12 +9049,13 @@ MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) {
     }
     set(index, cast_value);
   }
-  return Heap::NumberFromUint32(cast_value);
+  return heap->NumberFromUint32(cast_value);
 }
 
 
 MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
   float cast_value = 0;
+  Heap* heap = GetHeap();
   if (index < static_cast<uint32_t>(length())) {
     if (value->IsSmi()) {
       int int_value = Smi::cast(value)->value();
@@ -8977,7 +9070,7 @@ MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
     }
     set(index, cast_value);
   }
-  return Heap::AllocateHeapNumber(cast_value);
+  return heap->AllocateHeapNumber(cast_value);
 }
 
 
@@ -8990,11 +9083,12 @@ JSGlobalPropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
 
 MaybeObject* GlobalObject::EnsurePropertyCell(String* name) {
   ASSERT(!HasFastProperties());
+  Heap* heap = GetHeap();
   int entry = property_dictionary()->FindEntry(name);
   if (entry == StringDictionary::kNotFound) {
     Object* cell;
     { MaybeObject* maybe_cell =
-          Heap::AllocateJSGlobalPropertyCell(Heap::the_hole_value());
+          heap->AllocateJSGlobalPropertyCell(heap->the_hole_value());
       if (!maybe_cell->ToObject(&cell)) return maybe_cell;
     }
     PropertyDetails details(NONE, NORMAL);
@@ -9166,9 +9260,10 @@ MaybeObject* SymbolTable::LookupKey(HashTableKey* key, Object** s) {
 
 
 Object* CompilationCacheTable::Lookup(String* src) {
+  Heap* heap = GetHeap();
   StringKey key(src);
   int entry = FindEntry(&key);
-  if (entry == kNotFound) return Heap::undefined_value();
+  if (entry == kNotFound) return heap->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
@@ -9178,16 +9273,17 @@ Object* CompilationCacheTable::LookupEval(String* src,
                                           StrictModeFlag strict_mode) {
   StringSharedKey key(src, context->closure()->shared(), strict_mode);
   int entry = FindEntry(&key);
-  if (entry == kNotFound) return Heap::undefined_value();
+  if (entry == kNotFound) return GetHeap()->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
 
 Object* CompilationCacheTable::LookupRegExp(String* src,
                                             JSRegExp::Flags flags) {
+  Heap* heap = GetHeap();
   RegExpKey key(src, flags);
   int entry = FindEntry(&key);
-  if (entry == kNotFound) return Heap::undefined_value();
+  if (entry == kNotFound) return heap->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
@@ -9258,12 +9354,13 @@ MaybeObject* CompilationCacheTable::PutRegExp(String* src,
 
 
 void CompilationCacheTable::Remove(Object* value) {
+  Object* null_value = GetHeap()->null_value();
   for (int entry = 0, size = Capacity(); entry < size; entry++) {
     int entry_index = EntryToIndex(entry);
     int value_index = entry_index + 1;
     if (get(value_index) == value) {
-      fast_set(this, entry_index, Heap::null_value());
-      fast_set(this, value_index, Heap::null_value());
+      fast_set(this, entry_index, null_value);
+      fast_set(this, value_index, null_value);
       ElementRemoved();
     }
   }
@@ -9306,9 +9403,10 @@ class SymbolsKey : public HashTableKey {
 
 
 Object* MapCache::Lookup(FixedArray* array) {
+  Heap* heap = GetHeap();
   SymbolsKey key(array);
   int entry = FindEntry(&key);
-  if (entry == kNotFound) return Heap::undefined_value();
+  if (entry == kNotFound) return heap->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
@@ -9345,11 +9443,12 @@ MaybeObject* Dictionary<Shape, Key>::Allocate(int at_least_space_for) {
 
 template<typename Shape, typename Key>
 MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
+  Heap* heap = Dictionary<Shape, Key>::GetHeap();
   int length = HashTable<Shape, Key>::NumberOfElements();
 
   // Allocate and initialize iteration order array.
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArray(length);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray* iteration_order = FixedArray::cast(obj);
@@ -9358,7 +9457,7 @@ MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
   }
 
   // Allocate array with enumeration order.
-  { MaybeObject* maybe_obj = Heap::AllocateFixedArray(length);
+  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray* enumeration_order = FixedArray::cast(obj);
@@ -9419,8 +9518,9 @@ void NumberDictionary::RemoveNumberEntries(uint32_t from, uint32_t to) {
   // Do nothing if the interval [from, to) is empty.
   if (from >= to) return;
 
+  Heap* heap = GetHeap();
   int removed_entries = 0;
-  Object* sentinel = Heap::null_value();
+  Object* sentinel = heap->null_value();
   int capacity = Capacity();
   for (int i = 0; i < capacity; i++) {
     Object* key = KeyAt(i);
@@ -9441,14 +9541,15 @@ void NumberDictionary::RemoveNumberEntries(uint32_t from, uint32_t to) {
 template<typename Shape, typename Key>
 Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
                                                JSObject::DeleteMode mode) {
+  Heap* heap = Dictionary<Shape, Key>::GetHeap();
   PropertyDetails details = DetailsAt(entry);
   // Ignore attributes if forcing a deletion.
   if (details.IsDontDelete() && mode != JSObject::FORCE_DELETION) {
-    return Heap::false_value();
+    return heap->false_value();
   }
-  SetEntry(entry, Heap::null_value(), Heap::null_value(), Smi::FromInt(0));
+  SetEntry(entry, heap->null_value(), heap->null_value(), Smi::FromInt(0));
   HashTable<Shape, Key>::ElementRemoved();
-  return Heap::true_value();
+  return heap->true_value();
 }
 
 
@@ -9661,6 +9762,7 @@ void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage) {
 // Backwards lookup (slow).
 template<typename Shape, typename Key>
 Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
+  Heap* heap = Dictionary<Shape, Key>::GetHeap();
   int capacity = HashTable<Shape, Key>::Capacity();
   for (int i = 0; i < capacity; i++) {
     Object* k =  HashTable<Shape, Key>::KeyAt(i);
@@ -9672,12 +9774,13 @@ Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
       if (e == value) return k;
     }
   }
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
 MaybeObject* StringDictionary::TransformPropertiesToFastFor(
     JSObject* obj, int unused_property_fields) {
+  Heap* heap = GetHeap();
   // Make sure we preserve dictionary representation if there are too many
   // descriptors.
   if (NumberOfElements() > DescriptorArray::kMaxNumberOfDescriptors) return obj;
@@ -9707,7 +9810,7 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
       ASSERT(type != FIELD);
       instance_descriptor_length++;
       if (type == NORMAL &&
-          (!value->IsJSFunction() || Heap::InNewSpace(value))) {
+          (!value->IsJSFunction() || heap->InNewSpace(value))) {
         number_of_fields += 1;
       }
     }
@@ -9735,7 +9838,7 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
   // Allocate the fixed array for the fields.
   Object* fields;
   { MaybeObject* maybe_fields =
-        Heap::AllocateFixedArray(number_of_allocated_fields);
+        heap->AllocateFixedArray(number_of_allocated_fields);
     if (!maybe_fields->ToObject(&fields)) return maybe_fields;
   }
 
@@ -9748,13 +9851,13 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
       Object* value = ValueAt(i);
       // Ensure the key is a symbol before writing into the instance descriptor.
       Object* key;
-      { MaybeObject* maybe_key = Heap::LookupSymbol(String::cast(k));
+      { MaybeObject* maybe_key = heap->LookupSymbol(String::cast(k));
         if (!maybe_key->ToObject(&key)) return maybe_key;
       }
       PropertyDetails details = DetailsAt(i);
       PropertyType type = details.type();
 
-      if (value->IsJSFunction() && !Heap::InNewSpace(value)) {
+      if (value->IsJSFunction() && !heap->InNewSpace(value)) {
         ConstantFunctionDescriptor d(String::cast(key),
                                      JSFunction::cast(value),
                                      details.attributes(),
@@ -9825,11 +9928,12 @@ bool DebugInfo::HasBreakPoint(int code_position) {
 
 // Get the break point info object for this code position.
 Object* DebugInfo::GetBreakPointInfo(int code_position) {
+  Heap* heap = GetHeap();
   // Find the index of the break point info object for this code position.
   int index = GetBreakPointInfoIndex(code_position);
 
   // Return the break point info object if any.
-  if (index == kNoBreakPointInfo) return Heap::undefined_value();
+  if (index == kNoBreakPointInfo) return heap->undefined_value();
   return BreakPointInfo::cast(break_points()->get(index));
 }
 
@@ -9851,6 +9955,7 @@ void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
                               int source_position,
                               int statement_position,
                               Handle<Object> break_point_object) {
+  Isolate* isolate = Isolate::Current();
   Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position));
   if (!break_point_info->IsUndefined()) {
     BreakPointInfo::SetBreakPoint(
@@ -9873,8 +9978,9 @@ void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
     Handle<FixedArray> old_break_points =
         Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
     Handle<FixedArray> new_break_points =
-        Factory::NewFixedArray(old_break_points->length() +
-                               Debug::kEstimatedNofBreakPointsInFunction);
+        isolate->factory()->NewFixedArray(
+            old_break_points->length() +
+            Debug::kEstimatedNofBreakPointsInFunction);
 
     debug_info->set_break_points(*new_break_points);
     for (int i = 0; i < old_break_points->length(); i++) {
@@ -9885,13 +9991,14 @@ void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
   ASSERT(index != kNoBreakPointInfo);
 
   // Allocate new BreakPointInfo object and set the break point.
-  Handle<BreakPointInfo> new_break_point_info =
-      Handle<BreakPointInfo>::cast(Factory::NewStruct(BREAK_POINT_INFO_TYPE));
+  Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
+      isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
   new_break_point_info->set_code_position(Smi::FromInt(code_position));
   new_break_point_info->set_source_position(Smi::FromInt(source_position));
   new_break_point_info->
       set_statement_position(Smi::FromInt(statement_position));
-  new_break_point_info->set_break_point_objects(Heap::undefined_value());
+  new_break_point_info->set_break_point_objects(
+      isolate->heap()->undefined_value());
   BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
   debug_info->break_points()->set(index, *new_break_point_info);
 }
@@ -9899,9 +10006,10 @@ void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
 
 // Get the break point objects for a code position.
 Object* DebugInfo::GetBreakPointObjects(int code_position) {
+  Heap* heap = GetHeap();
   Object* break_point_info = GetBreakPointInfo(code_position);
   if (break_point_info->IsUndefined()) {
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
   return BreakPointInfo::cast(break_point_info)->break_point_objects();
 }
@@ -9924,7 +10032,8 @@ int DebugInfo::GetBreakPointCount() {
 
 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
                                       Handle<Object> break_point_object) {
-  if (debug_info->break_points()->IsUndefined()) return Heap::undefined_value();
+  Heap* heap = Isolate::Current()->heap();
+  if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
   for (int i = 0; i < debug_info->break_points()->length(); i++) {
     if (!debug_info->break_points()->get(i)->IsUndefined()) {
       Handle<BreakPointInfo> break_point_info =
@@ -9936,7 +10045,7 @@ Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
       }
     }
   }
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
@@ -9960,12 +10069,14 @@ int DebugInfo::GetBreakPointInfoIndex(int code_position) {
 // Remove the specified break point object.
 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
                                      Handle<Object> break_point_object) {
+  Isolate* isolate = Isolate::Current();
   // If there are no break points just ignore.
   if (break_point_info->break_point_objects()->IsUndefined()) return;
   // If there is a single break point clear it if it is the same.
   if (!break_point_info->break_point_objects()->IsFixedArray()) {
     if (break_point_info->break_point_objects() == *break_point_object) {
-      break_point_info->set_break_point_objects(Heap::undefined_value());
+      break_point_info->set_break_point_objects(
+          isolate->heap()->undefined_value());
     }
     return;
   }
@@ -9975,7 +10086,7 @@ void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
       Handle<FixedArray>(
           FixedArray::cast(break_point_info->break_point_objects()));
   Handle<FixedArray> new_array =
-      Factory::NewFixedArray(old_array->length() - 1);
+      isolate->factory()->NewFixedArray(old_array->length() - 1);
   int found_count = 0;
   for (int i = 0; i < old_array->length(); i++) {
     if (old_array->get(i) == *break_point_object) {
@@ -10002,7 +10113,7 @@ void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
   if (break_point_info->break_point_objects() == *break_point_object) return;
   // If there was one break point object before replace with array.
   if (!break_point_info->break_point_objects()->IsFixedArray()) {
-    Handle<FixedArray> array = Factory::NewFixedArray(2);
+    Handle<FixedArray> array = FACTORY->NewFixedArray(2);
     array->set(0, break_point_info->break_point_objects());
     array->set(1, *break_point_object);
     break_point_info->set_break_point_objects(*array);
@@ -10013,7 +10124,7 @@ void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
       Handle<FixedArray>(
           FixedArray::cast(break_point_info->break_point_objects()));
   Handle<FixedArray> new_array =
-      Factory::NewFixedArray(old_array->length() + 1);
+      FACTORY->NewFixedArray(old_array->length() + 1);
   for (int i = 0; i < old_array->length(); i++) {
     // If the break point was there before just ignore.
     if (old_array->get(i) == *break_point_object) return;
index 06657017a8af1ffa7cfbe4e3853b547fdc3ab7ff..a5bac543c528a352df3ee737d9dfb9efd61304c2 100644 (file)
@@ -729,6 +729,7 @@ class Object : public MaybeObject {
   // Oddball testing.
   INLINE(bool IsUndefined());
   INLINE(bool IsNull());
+  INLINE(bool IsTheHole());  // Shadows MaybeObject's implementation.
   INLINE(bool IsTrue());
   INLINE(bool IsFalse());
   inline bool IsArgumentsMarker();
@@ -885,7 +886,7 @@ class Failure: public MaybeObject {
   enum Type {
     RETRY_AFTER_GC = 0,
     EXCEPTION = 1,       // Returning this marker tells that the real exception
-                         // is in Top::pending_exception.
+                         // is in Isolate::pending_exception.
     INTERNAL_ERROR = 2,
     OUT_OF_MEMORY_EXCEPTION = 3
   };
@@ -1073,6 +1074,14 @@ class HeapObject: public Object {
   inline MapWord map_word();
   inline void set_map_word(MapWord map_word);
 
+  // The Heap the object was allocated in; also used to access the Isolate.
+  // This method cannot be used during GC; it ASSERTs this.
+  inline Heap* GetHeap();
+  // Convenience method to get the current isolate. This method may only
+  // be used when its result is the same as Isolate::Current(); it
+  // ASSERTs this. See also the comment for GetHeap.
+  inline Isolate* GetIsolate();
+
   // Converts an address to a HeapObject pointer.
   static inline HeapObject* FromAddress(Address address);
 
@@ -1892,13 +1901,18 @@ class FixedArray: public HeapObject {
 
   // Setters for frequently used oddballs located in old space.
   inline void set_undefined(int index);
+  // TODO(isolates): duplicate.
+  inline void set_undefined(Heap* heap, int index);
   inline void set_null(int index);
+  // TODO(isolates): duplicate.
+  inline void set_null(Heap* heap, int index);
   inline void set_the_hole(int index);
 
   // Setters with less debug checks for the GC to use.
   inline void set_unchecked(int index, Smi* value);
-  inline void set_null_unchecked(int index);
-  inline void set_unchecked(int index, Object* value, WriteBarrierMode mode);
+  inline void set_null_unchecked(Heap* heap, int index);
+  inline void set_unchecked(Heap* heap, int index, Object* value,
+                            WriteBarrierMode mode);
 
   // Gives access to raw memory which stores the array's data.
   inline Object** data_start();
@@ -1993,7 +2007,9 @@ class DescriptorArray: public FixedArray {
 
   // Returns the number of descriptors in the array.
   int number_of_descriptors() {
-    return IsEmpty() ? 0 : length() - kFirstIndex;
+    ASSERT(length() > kFirstIndex || IsEmpty());
+    int len = length();
+    return len <= kFirstIndex ? 0 : len - kFirstIndex;
   }
 
   int NextEnumerationIndex() {
@@ -2285,7 +2301,8 @@ class HashTable: public FixedArray {
       (FixedArray::kMaxLength - kElementsStartOffset) / kEntrySize;
 
   // Find entry for key otherwise return kNotFound.
-  int FindEntry(Key key);
+  inline int FindEntry(Key key);
+  int FindEntry(Isolate* isolate, Key key);
 
  protected:
 
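
(The generic FindEntry body deleted from objects.cc earlier in this diff presumably becomes the inline, keyless overload declared above, forwarding to the isolate-taking version. A plausible sketch, assuming it resolves the isolate from the table itself; not quoted from the patch.)

  template<typename Shape, typename Key>
  int HashTable<Shape, Key>::FindEntry(Key key) {
    // GetIsolate() is available here because HashTable is a HeapObject.
    return FindEntry(GetIsolate(), key);
  }
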
@@ -2357,16 +2374,16 @@ class HashTableKey {
 
 class SymbolTableShape {
  public:
-  static bool IsMatch(HashTableKey* key, Object* value) {
+  static inline bool IsMatch(HashTableKey* key, Object* value) {
     return key->IsMatch(value);
   }
-  static uint32_t Hash(HashTableKey* key) {
+  static inline uint32_t Hash(HashTableKey* key) {
     return key->Hash();
   }
-  static uint32_t HashForObject(HashTableKey* key, Object* object) {
+  static inline uint32_t HashForObject(HashTableKey* key, Object* object) {
     return key->HashForObject(object);
   }
-  MUST_USE_RESULT static MaybeObject* AsObject(HashTableKey* key) {
+  MUST_USE_RESULT static inline MaybeObject* AsObject(HashTableKey* key) {
     return key->AsObject();
   }
 
@@ -2409,18 +2426,18 @@ class SymbolTable: public HashTable<SymbolTableShape, HashTableKey*> {
 
 class MapCacheShape {
  public:
-  static bool IsMatch(HashTableKey* key, Object* value) {
+  static inline bool IsMatch(HashTableKey* key, Object* value) {
     return key->IsMatch(value);
   }
-  static uint32_t Hash(HashTableKey* key) {
+  static inline uint32_t Hash(HashTableKey* key) {
     return key->Hash();
   }
 
-  static uint32_t HashForObject(HashTableKey* key, Object* object) {
+  static inline uint32_t HashForObject(HashTableKey* key, Object* object) {
     return key->HashForObject(object);
   }
 
-  MUST_USE_RESULT static MaybeObject* AsObject(HashTableKey* key) {
+  MUST_USE_RESULT static inline MaybeObject* AsObject(HashTableKey* key) {
     return key->AsObject();
   }
 
@@ -3430,7 +3447,7 @@ class Code: public HeapObject {
   inline void CodeIterateBody(ObjectVisitor* v);
 
   template<typename StaticVisitor>
-  inline void CodeIterateBody();
+  inline void CodeIterateBody(Heap* heap);
 #ifdef OBJECT_PRINT
   inline void CodePrint() {
     CodePrint(stdout);
@@ -3718,7 +3735,7 @@ class Map: public HeapObject {
   // Code cache operations.
 
   // Clears the code cache.
-  inline void ClearCodeCache();
+  inline void ClearCodeCache(Heap* heap);
 
   // Update code cache.
   MUST_USE_RESULT MaybeObject* UpdateCodeCache(String* name, Code* code);
@@ -3742,7 +3759,7 @@ class Map: public HeapObject {
   // Also, restore the original prototype on the targets of these
   // transitions, so that we do not process this map again while
   // following back pointers.
-  void ClearNonLiveTransitions(Object* real_prototype);
+  void ClearNonLiveTransitions(Heap* heap, Object* real_prototype);
 
   // Dispatched behavior.
 #ifdef OBJECT_PRINT
@@ -3759,6 +3776,9 @@ class Map: public HeapObject {
   inline int visitor_id();
   inline void set_visitor_id(int visitor_id);
 
+  // Returns the heap this map belongs to.
+  inline Heap* heap();
+
   typedef void (*TraverseCallback)(Map* map, void* data);
 
   void TraverseTransitionTree(TraverseCallback callback, void* data);
@@ -5804,11 +5824,8 @@ class ExternalTwoByteString: public ExternalString {
 // iterating or updating after gc.
 class Relocatable BASE_EMBEDDED {
  public:
-  inline Relocatable() : prev_(top_) { top_ = this; }
-  virtual ~Relocatable() {
-    ASSERT_EQ(top_, this);
-    top_ = prev_;
-  }
+  explicit inline Relocatable(Isolate* isolate);
+  inline virtual ~Relocatable();
   virtual void IterateInstance(ObjectVisitor* v) { }
   virtual void PostGarbageCollection() { }
 
@@ -5820,7 +5837,7 @@ class Relocatable BASE_EMBEDDED {
   static void Iterate(ObjectVisitor* v, Relocatable* top);
   static char* Iterate(ObjectVisitor* v, char* t);
  private:
-  static Relocatable* top_;
+  Isolate* isolate_;
   Relocatable* prev_;
 };
 
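
(The constructor and destructor are now declared inline and take an Isolate*, so the list head formerly kept in the static top_ presumably lives on the isolate. A rough sketch of what the inline definitions likely do; the relocatable_top()/set_relocatable_top() accessor names are assumptions, not taken from the patch.)

  Relocatable::Relocatable(Isolate* isolate)
      : isolate_(isolate),
        prev_(isolate->relocatable_top()) {   // hypothetical accessor
    isolate_->set_relocatable_top(this);      // hypothetical accessor
  }

  Relocatable::~Relocatable() {
    ASSERT_EQ(isolate_->relocatable_top(), this);
    isolate_->set_relocatable_top(prev_);
  }
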
@@ -5830,8 +5847,8 @@ class Relocatable BASE_EMBEDDED {
 // must be valid as long as the reader is being used.
 class FlatStringReader : public Relocatable {
  public:
-  explicit FlatStringReader(Handle<String> str);
-  explicit FlatStringReader(Vector<const char> input);
+  FlatStringReader(Isolate* isolate, Handle<String> str);
+  FlatStringReader(Isolate* isolate, Vector<const char> input);
   void PostGarbageCollection();
   inline uc32 Get(int index);
   int length() { return length_; }
@@ -5894,6 +5911,9 @@ class Oddball: public HeapObject {
   // [to_number]: Cached to_number computed at startup.
   DECL_ACCESSORS(to_number, Object)
 
+  inline byte kind();
+  inline void set_kind(byte kind);
+
   // Casting.
   static inline Oddball* cast(Object* obj);
 
@@ -5904,12 +5924,23 @@ class Oddball: public HeapObject {
 
   // Initialize the fields.
   MUST_USE_RESULT MaybeObject* Initialize(const char* to_string,
-                                          Object* to_number);
+                                          Object* to_number,
+                                          byte kind);
 
   // Layout description.
   static const int kToStringOffset = HeapObject::kHeaderSize;
   static const int kToNumberOffset = kToStringOffset + kPointerSize;
-  static const int kSize = kToNumberOffset + kPointerSize;
+  static const int kKindOffset = kToNumberOffset + kPointerSize;
+  static const int kSize = kKindOffset + kPointerSize;
+
+  static const byte kFalse = 0;
+  static const byte kTrue = 1;
+  static const byte kNotBooleanMask = ~1;
+  static const byte kTheHole = 2;
+  static const byte kNull = 3;
+  static const byte kArgumentMarker = 4;
+  static const byte kUndefined = 5;
+  static const byte kOther = 6;
 
   typedef FixedBodyDescriptor<kToStringOffset,
                               kToNumberOffset + kPointerSize,
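
(With kFalse == 0, kTrue == 1 and kNotBooleanMask == ~1, the new kind byte lets oddballs be classified with a single bit test; a boolean check presumably looks like the sketch below. The helper is illustrative only, not part of the patch.)

  static bool IsBooleanOddball(Oddball* oddball) {
    // Only kind values 0 (kFalse) and 1 (kTrue) clear every bit of ~1.
    return (oddball->kind() & Oddball::kNotBooleanMask) == 0;
  }
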
index 98dfa2bc447cec384abef0a1deb7d8fc0da9e90b..19dd24ef389d065f37bc587f999ba10b5fb660ab 100644 (file)
@@ -310,7 +310,8 @@ TemporaryScope::TemporaryScope(TemporaryScope** variable)
   : materialized_literal_count_(0),
     expected_property_count_(0),
     only_simple_this_property_assignments_(false),
-    this_property_assignments_(Factory::empty_fixed_array()),
+    this_property_assignments_(
+        Isolate::Current()->factory()->empty_fixed_array()),
     loop_count_(0),
     variable_(variable),
     parent_(*variable) {
@@ -331,9 +332,11 @@ Handle<String> Parser::LookupSymbol(int symbol_id) {
   if (static_cast<unsigned>(symbol_id)
       >= static_cast<unsigned>(symbol_cache_.length())) {
     if (scanner().is_literal_ascii()) {
-      return Factory::LookupAsciiSymbol(scanner().literal_ascii_string());
+      return isolate()->factory()->LookupAsciiSymbol(
+          scanner().literal_ascii_string());
     } else {
-      return Factory::LookupTwoByteSymbol(scanner().literal_uc16_string());
+      return isolate()->factory()->LookupTwoByteSymbol(
+          scanner().literal_uc16_string());
     }
   }
   return LookupCachedSymbol(symbol_id);
@@ -350,14 +353,16 @@ Handle<String> Parser::LookupCachedSymbol(int symbol_id) {
   Handle<String> result = symbol_cache_.at(symbol_id);
   if (result.is_null()) {
     if (scanner().is_literal_ascii()) {
-      result = Factory::LookupAsciiSymbol(scanner().literal_ascii_string());
+      result = isolate()->factory()->LookupAsciiSymbol(
+          scanner().literal_ascii_string());
     } else {
-      result = Factory::LookupTwoByteSymbol(scanner().literal_uc16_string());
+      result = isolate()->factory()->LookupTwoByteSymbol(
+          scanner().literal_uc16_string());
     }
     symbol_cache_.at(symbol_id) = result;
     return result;
   }
-  Counters::total_preparse_symbols_skipped.Increment();
+  COUNTERS->total_preparse_symbols_skipped()->Increment();
   return result;
 }
 
@@ -588,9 +593,10 @@ Parser::Parser(Handle<Script> script,
                bool allow_natives_syntax,
                v8::Extension* extension,
                ScriptDataImpl* pre_data)
-    : symbol_cache_(pre_data ? pre_data->symbol_count() : 0),
+    : isolate_(script->GetIsolate()),
+      symbol_cache_(pre_data ? pre_data->symbol_count() : 0),
       script_(script),
-      scanner_(),
+      scanner_(isolate_),
       top_scope_(NULL),
       with_nesting_level_(0),
       temp_scope_(NULL),
@@ -610,8 +616,8 @@ FunctionLiteral* Parser::ParseProgram(Handle<String> source,
                                       StrictModeFlag strict_mode) {
   CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
 
-  HistogramTimerScope timer(&Counters::parse);
-  Counters::total_parse_size.Increment(source->length());
+  HistogramTimerScope timer(COUNTERS->parse());
+  COUNTERS->total_parse_size()->Increment(source->length());
   fni_ = new FuncNameInferrer();
 
   // Initialize parser state.
@@ -647,7 +653,7 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
     in_global_context
       ? Scope::GLOBAL_SCOPE
       : Scope::EVAL_SCOPE;
-  Handle<String> no_name = Factory::empty_symbol();
+  Handle<String> no_name = isolate()->factory()->empty_symbol();
 
   FunctionLiteral* result = NULL;
   { Scope* scope = NewScope(top_scope_, type, inside_with());
@@ -679,7 +685,7 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
           false,
           temp_scope.ContainsLoops());
     } else if (stack_overflow_) {
-      Top::StackOverflow();
+      isolate()->StackOverflow();
     }
   }
 
@@ -694,9 +700,9 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
 
 FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
   CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
-  HistogramTimerScope timer(&Counters::parse_lazy);
+  HistogramTimerScope timer(COUNTERS->parse_lazy());
   Handle<String> source(String::cast(script_->source()));
-  Counters::total_parse_size.Increment(source->length());
+  COUNTERS->total_parse_size()->Increment(source->length());
 
   Handle<SharedFunctionInfo> shared_info = info->shared_info();
   // Initialize parser state.
@@ -736,7 +742,7 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
 
   {
     // Parse the function literal.
-    Handle<String> no_name = Factory::empty_symbol();
+    Handle<String> no_name = isolate()->factory()->empty_symbol();
     Scope* scope = NewScope(top_scope_, Scope::GLOBAL_SCOPE, inside_with());
     if (!info->closure().is_null()) {
       scope = Scope::DeserializeScopeChain(info, scope);
@@ -766,7 +772,7 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
   // not safe to do before scope has been deleted.
   if (result == NULL) {
     zone_scope->DeleteOnExit();
-    if (stack_overflow_) Top::StackOverflow();
+    if (stack_overflow_) isolate()->StackOverflow();
   } else {
     Handle<String> inferred_name(shared_info->inferred_name());
     result->set_inferred_name(inferred_name);
@@ -796,14 +802,15 @@ void Parser::ReportMessageAt(Scanner::Location source_location,
   MessageLocation location(script_,
                            source_location.beg_pos,
                            source_location.end_pos);
-  Handle<FixedArray> elements = Factory::NewFixedArray(args.length());
+  Factory* factory = isolate()->factory();
+  Handle<FixedArray> elements = factory->NewFixedArray(args.length());
   for (int i = 0; i < args.length(); i++) {
-    Handle<String> arg_string = Factory::NewStringFromUtf8(CStrVector(args[i]));
+    Handle<String> arg_string = factory->NewStringFromUtf8(CStrVector(args[i]));
     elements->set(i, *arg_string);
   }
-  Handle<JSArray> array = Factory::NewJSArrayWithElements(elements);
-  Handle<Object> result = Factory::NewSyntaxError(type, array);
-  Top::Throw(*result, &location);
+  Handle<JSArray> array = factory->NewJSArrayWithElements(elements);
+  Handle<Object> result = factory->NewSyntaxError(type, array);
+  isolate()->Throw(*result, &location);
 }
 
 
@@ -813,13 +820,14 @@ void Parser::ReportMessageAt(Scanner::Location source_location,
   MessageLocation location(script_,
                            source_location.beg_pos,
                            source_location.end_pos);
-  Handle<FixedArray> elements = Factory::NewFixedArray(args.length());
+  Factory* factory = isolate()->factory();
+  Handle<FixedArray> elements = factory->NewFixedArray(args.length());
   for (int i = 0; i < args.length(); i++) {
     elements->set(i, *args[i]);
   }
-  Handle<JSArray> array = Factory::NewJSArrayWithElements(elements);
-  Handle<Object> result = Factory::NewSyntaxError(type, array);
-  Top::Throw(*result, &location);
+  Handle<JSArray> array = factory->NewJSArrayWithElements(elements);
+  Handle<Object> result = factory->NewSyntaxError(type, array);
+  isolate()->Throw(*result, &location);
 }
 
 
@@ -976,14 +984,14 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder {
   // form this.x = y;
   Handle<FixedArray> GetThisPropertyAssignments() {
     if (names_ == NULL) {
-      return Factory::empty_fixed_array();
+      return FACTORY->empty_fixed_array();
     }
     ASSERT(names_ != NULL);
     ASSERT(assigned_arguments_ != NULL);
     ASSERT_EQ(names_->length(), assigned_arguments_->length());
     ASSERT_EQ(names_->length(), assigned_constants_->length());
     Handle<FixedArray> assignments =
-        Factory::NewFixedArray(names_->length() * 3);
+        FACTORY->NewFixedArray(names_->length() * 3);
     for (int i = 0; i < names_->length(); i++) {
       assignments->set(i * 3, *names_->at(i));
       assignments->set(i * 3 + 1, Smi::FromInt(assigned_arguments_->at(i)));
@@ -1013,7 +1021,7 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder {
     uint32_t dummy;
     if (literal != NULL &&
         literal->handle()->IsString() &&
-        !String::cast(*(literal->handle()))->Equals(Heap::Proto_symbol()) &&
+        !String::cast(*(literal->handle()))->Equals(HEAP->Proto_symbol()) &&
         !String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) {
       Handle<String> key = Handle<String>::cast(literal->handle());
 
@@ -1047,7 +1055,7 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder {
     EnsureAllocation();
     names_->Add(name);
     assigned_arguments_->Add(index);
-    assigned_constants_->Add(Factory::undefined_value());
+    assigned_constants_->Add(FACTORY->undefined_value());
   }
 
   void AssignmentFromConstant(Handle<String> name, Handle<Object> value) {
@@ -1134,9 +1142,9 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
 
         // Check "use strict" directive (ES5 14.1).
         if (!top_scope_->is_strict_mode() &&
-            directive->Equals(Heap::use_strict()) &&
+            directive->Equals(isolate()->heap()->use_strict()) &&
             token_loc.end_pos - token_loc.beg_pos ==
-              Heap::use_strict()->length() + 2) {
+              isolate()->heap()->use_strict()->length() + 2) {
           top_scope_->EnableStrictMode();
           // "use strict" is the only directive for now.
           directive_prologue = false;
@@ -1334,9 +1342,9 @@ VariableProxy* Parser::Declare(Handle<String> name,
                var->mode() == Variable::CONST);
         const char* type = (var->mode() == Variable::VAR) ? "var" : "const";
         Handle<String> type_string =
-            Factory::NewStringFromUtf8(CStrVector(type), TENURED);
+            isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED);
         Expression* expression =
-            NewThrowTypeError(Factory::redeclaration_symbol(),
+            NewThrowTypeError(isolate()->factory()->redeclaration_symbol(),
                               type_string, name);
         top_scope_->SetIllegalRedeclaration(expression);
       }
@@ -1442,7 +1450,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
   Handle<Code> code = Handle<Code>(fun->shared()->code());
   Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
   Handle<SharedFunctionInfo> shared =
-      Factory::NewSharedFunctionInfo(name, literals, code,
+      isolate()->factory()->NewSharedFunctionInfo(name, literals, code,
           Handle<SerializedScopeInfo>(fun->shared()->scope_info()));
   shared->set_construct_stub(*construct_stub);
 
@@ -1512,8 +1520,8 @@ Block* Parser::ParseVariableStatement(bool* ok) {
 }
 
 static bool IsEvalOrArguments(Handle<String> string) {
-  return string.is_identical_to(Factory::eval_symbol()) ||
-         string.is_identical_to(Factory::arguments_symbol());
+  return string.is_identical_to(FACTORY->eval_symbol()) ||
+         string.is_identical_to(FACTORY->arguments_symbol());
 }
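IsEvalOrArguments is a file-static helper with no parser instance in scope, so it reaches the factory through the FACTORY shorthand rather than isolate()->factory(). FACTORY, HEAP, COUNTERS and ISOLATE appear throughout this patch as convenience macros over the isolate currently entered on the thread; presumably something along these lines (a sketch, not the actual definitions):

    // Assumed expansions -- each resolves the current isolate from
    // thread-local storage and then picks the relevant subsystem.
    #define ISOLATE   (v8::internal::Isolate::Current())
    #define FACTORY   (ISOLATE->factory())
    #define HEAP      (ISOLATE->heap())
    #define COUNTERS  (ISOLATE->counters())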
 
 // If the variable declaration declares exactly one non-const
@@ -1671,7 +1679,7 @@ Block* Parser::ParseVariableDeclarations(bool accept_IN,
         // the number of arguments (1 or 2).
         initialize =
             new CallRuntime(
-              Factory::InitializeConstGlobal_symbol(),
+              isolate()->factory()->InitializeConstGlobal_symbol(),
               Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
               arguments);
       } else {
@@ -1695,7 +1703,7 @@ Block* Parser::ParseVariableDeclarations(bool accept_IN,
         // the number of arguments (2 or 3).
         initialize =
             new CallRuntime(
-              Factory::InitializeVarGlobal_symbol(),
+              isolate()->factory()->InitializeVarGlobal_symbol(),
               Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
               arguments);
       }
@@ -1886,7 +1894,7 @@ Statement* Parser::ParseReturnStatement(bool* ok) {
   //
   // To be consistent with KJS we report the syntax error at runtime.
   if (!top_scope_->is_function_scope()) {
-    Handle<String> type = Factory::illegal_return_symbol();
+    Handle<String> type = isolate()->factory()->illegal_return_symbol();
     Expression* throw_error = NewThrowSyntaxError(type, Handle<Object>::null());
     return new ExpressionStatement(throw_error);
   }
@@ -2101,7 +2109,8 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
     if (peek() == Token::LBRACE) {
       // Allocate a temporary for holding the finally state while
       // executing the finally block.
-      catch_var = top_scope_->NewTemporary(Factory::catch_var_symbol());
+      catch_var =
+          top_scope_->NewTemporary(isolate()->factory()->catch_var_symbol());
       Literal* name_literal = new Literal(name);
       VariableProxy* catch_var_use = new VariableProxy(catch_var);
       Expression* obj = new CatchExtensionObject(name_literal, catch_var_use);
@@ -2252,7 +2261,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
         // error here but for compatibility with JSC we choose to report
         // the error at runtime.
         if (expression == NULL || !expression->IsValidLeftHandSide()) {
-          Handle<String> type = Factory::invalid_lhs_in_for_in_symbol();
+          Handle<String> type =
+              isolate()->factory()->invalid_lhs_in_for_in_symbol();
           expression = NewThrowReferenceError(type);
         }
         ForInStatement* loop = new ForInStatement(labels);
@@ -2337,7 +2347,8 @@ Expression* Parser::ParseAssignmentExpression(bool accept_IN, bool* ok) {
   // for compatibility with JSC we choose to report the error at
   // runtime.
   if (expression == NULL || !expression->IsValidLeftHandSide()) {
-    Handle<String> type = Factory::invalid_lhs_in_assignment_symbol();
+    Handle<String> type =
+        isolate()->factory()->invalid_lhs_in_assignment_symbol();
     expression = NewThrowReferenceError(type);
   }
 
@@ -2579,7 +2590,8 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
     // error here but for compatibility with JSC we choose to report the
     // error at runtime.
     if (expression == NULL || !expression->IsValidLeftHandSide()) {
-      Handle<String> type = Factory::invalid_lhs_in_prefix_op_symbol();
+      Handle<String> type =
+          isolate()->factory()->invalid_lhs_in_prefix_op_symbol();
       expression = NewThrowReferenceError(type);
     }
 
@@ -2610,7 +2622,8 @@ Expression* Parser::ParsePostfixExpression(bool* ok) {
     // error here but for compatibility with JSC we choose to report the
     // error at runtime.
     if (expression == NULL || !expression->IsValidLeftHandSide()) {
-      Handle<String> type = Factory::invalid_lhs_in_postfix_op_symbol();
+      Handle<String> type =
+          isolate()->factory()->invalid_lhs_in_postfix_op_symbol();
       expression = NewThrowReferenceError(type);
     }
 
@@ -2667,7 +2680,8 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
         // is called without a receiver and it refers to the original eval
         // function.
         VariableProxy* callee = result->AsVariableProxy();
-        if (callee != NULL && callee->IsVariable(Factory::eval_symbol())) {
+        if (callee != NULL &&
+            callee->IsVariable(isolate()->factory()->eval_symbol())) {
           Handle<String> name = callee->name();
           Variable* var = top_scope_->Lookup(name);
           if (var == NULL) {
@@ -2868,17 +2882,17 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
 
     case Token::NULL_LITERAL:
       Consume(Token::NULL_LITERAL);
-      result = new Literal(Factory::null_value());
+      result = new Literal(isolate()->factory()->null_value());
       break;
 
     case Token::TRUE_LITERAL:
       Consume(Token::TRUE_LITERAL);
-      result = new Literal(Factory::true_value());
+      result = new Literal(isolate()->factory()->true_value());
       break;
 
     case Token::FALSE_LITERAL:
       Consume(Token::FALSE_LITERAL);
-      result = new Literal(Factory::false_value());
+      result = new Literal(isolate()->factory()->false_value());
       break;
 
     case Token::IDENTIFIER:
@@ -3003,7 +3017,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
 
   // Allocate a fixed array with all the literals.
   Handle<FixedArray> literals =
-      Factory::NewFixedArray(values->length(), TENURED);
+      isolate()->factory()->NewFixedArray(values->length(), TENURED);
 
   // Fill in the literals.
   bool is_simple = true;
@@ -3025,7 +3039,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
   // Simple and shallow arrays can be lazily copied, we transform the
   // elements array to a copy-on-write array.
   if (is_simple && depth == 1 && values->length() > 0) {
-    literals->set_map(Heap::fixed_cow_array_map());
+    literals->set_map(isolate()->heap()->fixed_cow_array_map());
   }
 
   return new ArrayLiteral(literals, values,
@@ -3060,7 +3074,7 @@ bool CompileTimeValue::ArrayLiteralElementNeedsInitialization(
 
 Handle<FixedArray> CompileTimeValue::GetValue(Expression* expression) {
   ASSERT(IsCompileTimeValue(expression));
-  Handle<FixedArray> result = Factory::NewFixedArray(2, TENURED);
+  Handle<FixedArray> result = FACTORY->NewFixedArray(2, TENURED);
   ObjectLiteral* object_literal = expression->AsObjectLiteral();
   if (object_literal != NULL) {
     ASSERT(object_literal->is_simple());
@@ -3098,7 +3112,7 @@ Handle<Object> Parser::GetBoilerplateValue(Expression* expression) {
   if (CompileTimeValue::IsCompileTimeValue(expression)) {
     return CompileTimeValue::GetValue(expression);
   }
-  return Factory::undefined_value();
+  return isolate()->factory()->undefined_value();
 }
 
 // Defined in ast.cc
@@ -3164,7 +3178,7 @@ void ObjectLiteralPropertyChecker::CheckProperty(
   if (handle->IsSymbol()) {
     Handle<String> name(String::cast(*handle));
     if (name->AsArrayIndex(&hash)) {
-      Handle<Object> key_handle = Factory::NewNumberFromUint(hash);
+      Handle<Object> key_handle = FACTORY->NewNumberFromUint(hash);
       key = key_handle.location();
       map = &elems;
     } else {
@@ -3181,7 +3195,7 @@ void ObjectLiteralPropertyChecker::CheckProperty(
     char arr[100];
     Vector<char> buffer(arr, ARRAY_SIZE(arr));
     const char* str = DoubleToCString(num, buffer);
-    Handle<String> name = Factory::NewStringFromAscii(CStrVector(str));
+    Handle<String> name = FACTORY->NewStringFromAscii(CStrVector(str));
     key = name.location();
     hash = name->Hash();
     map = &props;
@@ -3293,7 +3307,7 @@ ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter,
       next == Token::STRING || is_keyword) {
     Handle<String> name;
     if (is_keyword) {
-      name = Factory::LookupAsciiSymbol(Token::String(next));
+      name = isolate_->factory()->LookupAsciiSymbol(Token::String(next));
     } else {
       name = GetSymbol(CHECK_OK);
     }
@@ -3433,8 +3447,8 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
   // Computation of literal_index must happen before pre parse bailout.
   int literal_index = temp_scope_->NextMaterializedLiteralIndex();
 
-  Handle<FixedArray> constant_properties =
-      Factory::NewFixedArray(number_of_boilerplate_properties * 2, TENURED);
+  Handle<FixedArray> constant_properties = isolate()->factory()->NewFixedArray(
+      number_of_boilerplate_properties * 2, TENURED);
 
   bool is_simple = true;
   bool fast_elements = true;
@@ -3503,9 +3517,10 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
   // this is the actual function name, otherwise this is the name of the
   // variable declared and initialized with the function (expression). In
   // that case, we don't have a function name (it's empty).
-  Handle<String> name = is_named ? var_name : Factory::empty_symbol();
+  Handle<String> name =
+      is_named ? var_name : isolate()->factory()->empty_symbol();
   // The function name, if any.
-  Handle<String> function_name = Factory::empty_symbol();
+  Handle<String> function_name = isolate()->factory()->empty_symbol();
   if (is_named && (type == EXPRESSION || type == NESTED)) {
     function_name = name;
   }
@@ -3602,13 +3617,14 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
         // End position greater than end of stream is safe, and hard to check.
         ReportInvalidPreparseData(name, CHECK_OK);
       }
-      Counters::total_preparse_skipped.Increment(end_pos - function_block_pos);
+      COUNTERS->total_preparse_skipped()->Increment(
+          end_pos - function_block_pos);
       // Seek to position just before terminal '}'.
       scanner().SeekForward(end_pos - 1);
       materialized_literal_count = entry.literal_count();
       expected_property_count = entry.property_count();
       only_simple_this_property_assignments = false;
-      this_property_assignments = Factory::empty_fixed_array();
+      this_property_assignments = isolate()->factory()->empty_fixed_array();
       Expect(Token::RBRACE, CHECK_OK);
     } else {
       ParseSourceElements(body, Token::RBRACE, CHECK_OK);
@@ -3701,7 +3717,7 @@ Expression* Parser::ParseV8Intrinsic(bool* ok) {
     top_scope_->ForceEagerCompilation();
   }
 
-  Runtime::Function* function = Runtime::FunctionForSymbol(name);
+  const Runtime::Function* function = Runtime::FunctionForSymbol(name);
 
   // Check for built-in IS_VAR macro.
   if (function != NULL &&
@@ -3784,12 +3800,12 @@ void Parser::ExpectSemicolon(bool* ok) {
 
 
 Literal* Parser::GetLiteralUndefined() {
-  return new Literal(Factory::undefined_value());
+  return new Literal(isolate()->factory()->undefined_value());
 }
 
 
 Literal* Parser::GetLiteralTheHole() {
-  return new Literal(Factory::the_hole_value());
+  return new Literal(isolate()->factory()->the_hole_value());
 }
 
 
@@ -3937,12 +3953,12 @@ void Parser::RegisterTargetUse(BreakTarget* target, Target* stop) {
 
 
 Literal* Parser::NewNumberLiteral(double number) {
-  return new Literal(Factory::NewNumber(number, TENURED));
+  return new Literal(isolate()->factory()->NewNumber(number, TENURED));
 }
 
 
 Expression* Parser::NewThrowReferenceError(Handle<String> type) {
-  return NewThrowError(Factory::MakeReferenceError_symbol(),
+  return NewThrowError(isolate()->factory()->MakeReferenceError_symbol(),
                        type, HandleVector<Object>(NULL, 0));
 }
 
@@ -3951,7 +3967,8 @@ Expression* Parser::NewThrowSyntaxError(Handle<String> type,
                                         Handle<Object> first) {
   int argc = first.is_null() ? 0 : 1;
   Vector< Handle<Object> > arguments = HandleVector<Object>(&first, argc);
-  return NewThrowError(Factory::MakeSyntaxError_symbol(), type, arguments);
+  return NewThrowError(
+      isolate()->factory()->MakeSyntaxError_symbol(), type, arguments);
 }
 
 
@@ -3962,7 +3979,8 @@ Expression* Parser::NewThrowTypeError(Handle<String> type,
   Handle<Object> elements[] = { first, second };
   Vector< Handle<Object> > arguments =
       HandleVector<Object>(elements, ARRAY_SIZE(elements));
-  return NewThrowError(Factory::MakeTypeError_symbol(), type, arguments);
+  return NewThrowError(
+      isolate()->factory()->MakeTypeError_symbol(), type, arguments);
 }
 
 
@@ -3970,14 +3988,16 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
                                   Handle<String> type,
                                   Vector< Handle<Object> > arguments) {
   int argc = arguments.length();
-  Handle<FixedArray> elements = Factory::NewFixedArray(argc, TENURED);
+  Handle<FixedArray> elements = isolate()->factory()->NewFixedArray(argc,
+                                                                    TENURED);
   for (int i = 0; i < argc; i++) {
     Handle<Object> element = arguments[i];
     if (!element.is_null()) {
       elements->set(i, *element);
     }
   }
-  Handle<JSArray> array = Factory::NewJSArrayWithElements(elements, TENURED);
+  Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements(elements,
+                                                                       TENURED);
 
   ZoneList<Expression*>* args = new ZoneList<Expression*>(2);
   args->Add(new Literal(type));
@@ -3997,7 +4017,7 @@ Handle<Object> JsonParser::ParseJson(Handle<String> script,
   if (result.is_null() || scanner_.Next() != Token::EOS) {
     if (stack_overflow_) {
       // Scanner failed.
-      Top::StackOverflow();
+      isolate()->StackOverflow();
     } else {
       // Parse failed. Scanner's current token is the unexpected token.
       Token::Value token = scanner_.current_token();
@@ -4027,20 +4047,21 @@ Handle<Object> JsonParser::ParseJson(Handle<String> script,
       }
 
       Scanner::Location source_location = scanner_.location();
-      MessageLocation location(Factory::NewScript(script),
+      Factory* factory = isolate()->factory();
+      MessageLocation location(factory->NewScript(script),
                                source_location.beg_pos,
                                source_location.end_pos);
       Handle<JSArray> array;
       if (name_opt == NULL) {
-        array = Factory::NewJSArray(0);
+        array = factory->NewJSArray(0);
       } else {
-        Handle<String> name = Factory::NewStringFromUtf8(CStrVector(name_opt));
-        Handle<FixedArray> element = Factory::NewFixedArray(1);
+        Handle<String> name = factory->NewStringFromUtf8(CStrVector(name_opt));
+        Handle<FixedArray> element = factory->NewFixedArray(1);
         element->set(0, *name);
-        array = Factory::NewJSArrayWithElements(element);
+        array = factory->NewJSArrayWithElements(element);
       }
-      Handle<Object> result = Factory::NewSyntaxError(message, array);
-      Top::Throw(*result, &location);
+      Handle<Object> result = factory->NewSyntaxError(message, array);
+      isolate()->Throw(*result, &location);
       return Handle<Object>::null();
     }
   }
@@ -4051,12 +4072,14 @@ Handle<Object> JsonParser::ParseJson(Handle<String> script,
 Handle<String> JsonParser::GetString() {
   int literal_length = scanner_.literal_length();
   if (literal_length == 0) {
-    return Factory::empty_string();
+    return isolate()->factory()->empty_string();
   }
   if (scanner_.is_literal_ascii()) {
-    return Factory::NewStringFromAscii(scanner_.literal_ascii_string());
+    return isolate()->factory()->NewStringFromAscii(
+        scanner_.literal_ascii_string());
   } else {
-    return Factory::NewStringFromTwoByte(scanner_.literal_uc16_string());
+    return isolate()->factory()->NewStringFromTwoByte(
+        scanner_.literal_uc16_string());
   }
 }
 
@@ -4068,13 +4091,13 @@ Handle<Object> JsonParser::ParseJsonValue() {
     case Token::STRING:
       return GetString();
     case Token::NUMBER:
-      return Factory::NewNumber(scanner_.number());
+      return isolate()->factory()->NewNumber(scanner_.number());
     case Token::FALSE_LITERAL:
-      return Factory::false_value();
+      return isolate()->factory()->false_value();
     case Token::TRUE_LITERAL:
-      return Factory::true_value();
+      return isolate()->factory()->true_value();
     case Token::NULL_LITERAL:
-      return Factory::null_value();
+      return isolate()->factory()->null_value();
     case Token::LBRACE:
       return ParseJsonObject();
     case Token::LBRACK:
@@ -4088,12 +4111,13 @@ Handle<Object> JsonParser::ParseJsonValue() {
 // Parse a JSON object. Scanner must be right after '{' token.
 Handle<Object> JsonParser::ParseJsonObject() {
   Handle<JSFunction> object_constructor(
-      Top::global_context()->object_function());
-  Handle<JSObject> json_object = Factory::NewJSObject(object_constructor);
+      isolate()->global_context()->object_function());
+  Handle<JSObject> json_object =
+      isolate()->factory()->NewJSObject(object_constructor);
   if (scanner_.peek() == Token::RBRACE) {
     scanner_.Next();
   } else {
-    if (StackLimitCheck().HasOverflowed()) {
+    if (StackLimitCheck(isolate()).HasOverflowed()) {
       stack_overflow_ = true;
       return Handle<Object>::null();
     }
@@ -4110,7 +4134,7 @@ Handle<Object> JsonParser::ParseJsonObject() {
       uint32_t index;
       if (key->AsArrayIndex(&index)) {
         SetOwnElement(json_object, index, value, kNonStrictMode);
-      } else if (key->Equals(Heap::Proto_symbol())) {
+      } else if (key->Equals(isolate()->heap()->Proto_symbol())) {
         // We can't remove the __proto__ accessor since it's hardcoded
         // in several places. Instead go along and add the value as
         // the prototype of the created object if possible.
@@ -4136,7 +4160,7 @@ Handle<Object> JsonParser::ParseJsonArray() {
   if (token == Token::RBRACK) {
     scanner_.Next();
   } else {
-    if (StackLimitCheck().HasOverflowed()) {
+    if (StackLimitCheck(isolate()).HasOverflowed()) {
       stack_overflow_ = true;
       return Handle<Object>::null();
     }
@@ -4153,13 +4177,13 @@ Handle<Object> JsonParser::ParseJsonArray() {
 
   // Allocate a fixed array with all the elements.
   Handle<FixedArray> fast_elements =
-      Factory::NewFixedArray(elements.length());
+      isolate()->factory()->NewFixedArray(elements.length());
 
   for (int i = 0, n = elements.length(); i < n; i++) {
     fast_elements->set(i, *elements[i]);
   }
 
-  return Factory::NewJSArrayWithElements(fast_elements);
+  return isolate()->factory()->NewJSArrayWithElements(fast_elements);
 }
 
 // ----------------------------------------------------------------------------
@@ -4169,18 +4193,19 @@ Handle<Object> JsonParser::ParseJsonArray() {
 RegExpParser::RegExpParser(FlatStringReader* in,
                            Handle<String>* error,
                            bool multiline)
-  : error_(error),
-    captures_(NULL),
-    in_(in),
-    current_(kEndMarker),
-    next_pos_(0),
-    capture_count_(0),
-    has_more_(true),
-    multiline_(multiline),
-    simple_(false),
-    contains_anchor_(false),
-    is_scanned_for_captures_(false),
-    failed_(false) {
+    : isolate_(Isolate::Current()),
+      error_(error),
+      captures_(NULL),
+      in_(in),
+      current_(kEndMarker),
+      next_pos_(0),
+      capture_count_(0),
+      has_more_(true),
+      multiline_(multiline),
+      simple_(false),
+      contains_anchor_(false),
+      is_scanned_for_captures_(false),
+      failed_(false) {
   Advance();
 }
 
@@ -4196,10 +4221,10 @@ uc32 RegExpParser::Next() {
 
 void RegExpParser::Advance() {
   if (next_pos_ < in()->length()) {
-    StackLimitCheck check;
+    StackLimitCheck check(isolate());
     if (check.HasOverflowed()) {
-      ReportError(CStrVector(Top::kStackOverflowMessage));
-    } else if (Zone::excess_allocation()) {
+      ReportError(CStrVector(Isolate::kStackOverflowMessage));
+    } else if (isolate()->zone()->excess_allocation()) {
       ReportError(CStrVector("Regular expression too large"));
     } else {
       current_ = in()->Get(next_pos_);
@@ -4230,7 +4255,7 @@ bool RegExpParser::simple() {
 
 RegExpTree* RegExpParser::ReportError(Vector<const char> message) {
   failed_ = true;
-  *error_ = Factory::NewStringFromAscii(message, NOT_TENURED);
+  *error_ = isolate()->factory()->NewStringFromAscii(message, NOT_TENURED);
   // Zip to the end to make sure no more input is read.
   current_ = kEndMarker;
   next_pos_ = in()->length();
@@ -4584,30 +4609,6 @@ RegExpTree* RegExpParser::ParseDisjunction() {
   }
 }
 
-class SourceCharacter {
- public:
-  static bool Is(uc32 c) {
-    switch (c) {
-      // case ']': case '}':
-      // In spidermonkey and jsc these are treated as source characters
-      // so we do too.
-      case '^': case '$': case '\\': case '.': case '*': case '+':
-      case '?': case '(': case ')': case '[': case '{': case '|':
-      case RegExpParser::kEndMarker:
-        return false;
-      default:
-        return true;
-    }
-  }
-};
-
-
-static unibrow::Predicate<SourceCharacter> source_character;
-
-
-static inline bool IsSourceCharacter(uc32 c) {
-  return source_character.get(c);
-}
 
 #ifdef DEBUG
 // Currently only used in an ASSERT.
@@ -5060,14 +5061,15 @@ int ScriptDataImpl::ReadNumber(byte** source) {
 static ScriptDataImpl* DoPreParse(UC16CharacterStream* source,
                                   bool allow_lazy,
                                   ParserRecorder* recorder) {
-  V8JavaScriptScanner scanner;
+  Isolate* isolate = Isolate::Current();
+  V8JavaScriptScanner scanner(isolate);
   scanner.Initialize(source);
-  intptr_t stack_limit = StackGuard::real_climit();
+  intptr_t stack_limit = isolate->stack_guard()->real_climit();
   if (!preparser::PreParser::PreParseProgram(&scanner,
                                              recorder,
                                              allow_lazy,
                                              stack_limit)) {
-    Top::StackOverflow();
+    isolate->StackOverflow();
     return NULL;
   }
 
@@ -5146,7 +5148,7 @@ bool ParserApi::Parse(CompilationInfo* info) {
         DeleteArray(args[i]);
       }
       DeleteArray(args.start());
-      ASSERT(Top::has_pending_exception());
+      ASSERT(info->isolate()->has_pending_exception());
     } else {
       Handle<String> source = Handle<String>(String::cast(script->source()));
       result = parser.ParseProgram(source,
index bc7bc562eab6adb50730f6b2ab3f88858be6db0d..0ebc84312ab976f7b282658e8a6c52d4a88173b2 100644 (file)
@@ -388,6 +388,8 @@ class RegExpParser {
     int disjunction_capture_index_;
   };
 
+  Isolate* isolate() { return isolate_; }
+
   uc32 current() { return current_; }
   bool has_more() { return has_more_; }
   bool has_next() { return next_pos_ < in()->length(); }
@@ -395,6 +397,7 @@ class RegExpParser {
   FlatStringReader* in() { return in_; }
   void ScanForCaptures();
 
+  Isolate* isolate_;
   Handle<String>* error_;
   ZoneList<RegExpCapture*>* captures_;
   FlatStringReader* in_;
@@ -449,6 +452,8 @@ class Parser {
     PARSE_EAGERLY
   };
 
+  Isolate* isolate() { return isolate_; }
+
   // Called by ParseProgram after setting up the scanner.
   FunctionLiteral* DoParseProgram(Handle<String> source,
                                   bool in_global_context,
@@ -574,7 +579,7 @@ class Parser {
     if (stack_overflow_) {
       return Token::ILLEGAL;
     }
-    if (StackLimitCheck().HasOverflowed()) {
+    if (StackLimitCheck(isolate()).HasOverflowed()) {
       // Any further calls to Next or peek will return the illegal token.
       // The current call must return the next token, which might already
       // have been peek'ed.
@@ -592,21 +597,21 @@ class Parser {
 
   Handle<String> LiteralString(PretenureFlag tenured) {
     if (scanner().is_literal_ascii()) {
-      return Factory::NewStringFromAscii(scanner().literal_ascii_string(),
-                                         tenured);
+      return isolate_->factory()->NewStringFromAscii(
+          scanner().literal_ascii_string(), tenured);
     } else {
-      return Factory::NewStringFromTwoByte(scanner().literal_uc16_string(),
-                                           tenured);
+      return isolate_->factory()->NewStringFromTwoByte(
+            scanner().literal_uc16_string(), tenured);
     }
   }
 
   Handle<String> NextLiteralString(PretenureFlag tenured) {
     if (scanner().is_next_literal_ascii()) {
-      return Factory::NewStringFromAscii(scanner().next_literal_ascii_string(),
-                                         tenured);
+      return isolate_->factory()->NewStringFromAscii(
+          scanner().next_literal_ascii_string(), tenured);
     } else {
-      return Factory::NewStringFromTwoByte(scanner().next_literal_uc16_string(),
-                                           tenured);
+      return isolate_->factory()->NewStringFromTwoByte(
+          scanner().next_literal_uc16_string(), tenured);
     }
   }
 
@@ -686,6 +691,7 @@ class Parser {
                             Handle<String> type,
                             Vector< Handle<Object> > arguments);
 
+  Isolate* isolate_;
   ZoneList<Handle<String> > symbol_cache_;
 
   Handle<Script> script_;
@@ -765,9 +771,11 @@ class JsonParser BASE_EMBEDDED {
   }
 
  private:
-  JsonParser() { }
+  JsonParser() : isolate_(Isolate::Current()), scanner_(isolate_) { }
   ~JsonParser() { }
 
+  Isolate* isolate() { return isolate_; }
+
   // Parse a string containing a single JSON value.
   Handle<Object> ParseJson(Handle<String> script, UC16CharacterStream* source);
   // Parse a single JSON value from input (grammar production JSONValue).
@@ -794,6 +802,7 @@ class JsonParser BASE_EMBEDDED {
   // Converts the currently parsed literal to a JavaScript String.
   Handle<String> GetString();
 
+  Isolate* isolate_;
   JsonScanner scanner_;
   bool stack_overflow_;
 };
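Stack-overflow checks follow the same discipline: StackLimitCheck now takes the isolate whose stack guard it should consult instead of reading a global limit. Both Parser and JsonParser record the failure in stack_overflow_ and unwind, and the top-level entry point later raises the exception via isolate()->StackOverflow(). The recurring guard, as used in the hunks above:

    if (StackLimitCheck(isolate()).HasOverflowed()) {
      stack_overflow_ = true;           // Remember why parsing failed...
      return Handle<Object>::null();    // ...and unwind; the caller reports it.
    }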
index a7cc5256f5f0d4f4f55015560d90df898cc00199..03fb136e6d3945606c22b65ce82d63b6a018d681 100644 (file)
@@ -143,7 +143,7 @@ void* OS::Allocate(const size_t requested,
   int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
   void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(ISOLATE, StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
index c2c81dc108e5ea032e48f94481b9b6e28d6d63df..9eb88849c845eaabd1470ecf5fa0fc9f55ccf759 100644 (file)
@@ -156,7 +156,7 @@ void* OS::Allocate(const size_t requested,
   void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
 
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(ISOLATE, StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
@@ -425,12 +425,15 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
 }
 
-
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
 }
 
@@ -446,6 +449,7 @@ static void* ThreadEntry(void* arg) {
   // one) so we initialize it here too.
   thread->thread_handle_data()->thread_ = pthread_self();
   ASSERT(thread->IsValid());
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return NULL;
 }
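Threads now remember the isolate they were created for, and ThreadEntry publishes it in thread-local storage before calling Run(), so code on the new thread can find its isolate again without consulting any global. Roughly how the two ends presumably fit together (a sketch; the real key management lives in the Isolate class):

    // Producer side, as done in ThreadEntry above before Run():
    Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());

    // Consumer side, e.g. in a signal handler on the same thread:
    Isolate* isolate = reinterpret_cast<Isolate*>(
        Thread::GetThreadLocal(Isolate::isolate_key()));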
@@ -718,8 +722,9 @@ static void* SenderEntry(void* arg) {
 }
 
 
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
index 16aa7c81fdb78a8a090d1c7b28e5d88785e6d901..50b0b67d891a3f506fe7243c5d5bc2e92b147423 100644 (file)
@@ -58,7 +58,6 @@
 #include "v8.h"
 
 #include "platform.h"
-#include "top.h"
 #include "v8threads.h"
 #include "vm-state-inl.h"
 
@@ -76,6 +75,9 @@ double ceiling(double x) {
 }
 
 
+static Mutex* limit_mutex = NULL;
+
+
 void OS::Setup() {
   // Seed the random number generator.
   // Convert the current time to a 64-bit integer first, before converting it
@@ -84,6 +86,7 @@ void OS::Setup() {
   // call this setup code within the same millisecond.
   uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
   srandom(static_cast<unsigned int>(seed));
+  limit_mutex = CreateMutex();
 }
 
 
@@ -226,6 +229,9 @@ static void* highest_ever_allocated = reinterpret_cast<void*>(0);
 
 
 static void UpdateAllocatedSpaceLimits(void* address, int size) {
+  ASSERT(limit_mutex != NULL);
+  ScopedLock lock(limit_mutex);
+
   lowest_ever_allocated = Min(lowest_ever_allocated, address);
   highest_ever_allocated =
       Max(highest_ever_allocated,
@@ -251,7 +257,8 @@ void* OS::Allocate(const size_t requested,
   int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
   void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(i::Isolate::Current(),
+        StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
@@ -372,6 +379,7 @@ void OS::LogSharedLibraryAddresses() {
   const int kLibNameLen = FILENAME_MAX + 1;
   char* lib_name = reinterpret_cast<char*>(malloc(kLibNameLen));
 
+  i::Isolate* isolate = ISOLATE;
   // This loop will terminate once the scanning hits an EOF.
   while (true) {
     uintptr_t start, end;
@@ -405,7 +413,7 @@ void OS::LogSharedLibraryAddresses() {
         snprintf(lib_name, kLibNameLen,
                  "%08" V8PRIxPTR "-%08" V8PRIxPTR, start, end);
       }
-      LOG(SharedLibraryEvent(lib_name, start, end));
+      LOG(isolate, SharedLibraryEvent(lib_name, start, end));
     } else {
       // Entry not describing executable data. Skip to end of line to setup
       // reading the next entry.
@@ -565,12 +573,16 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
 }
 
@@ -589,6 +601,7 @@ static void* ThreadEntry(void* arg) {
         0, 0, 0);
   thread->thread_handle_data()->thread_ = pthread_self();
   ASSERT(thread->IsValid());
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return NULL;
 }
@@ -762,10 +775,6 @@ Semaphore* OS::CreateSemaphore(int count) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
-static Sampler* active_sampler_ = NULL;
-static int vm_tid_ = 0;
-
-
 #if !defined(__GLIBC__) && (defined(__arm__) || defined(__thumb__))
 // Android runs a fairly new Linux kernel, so signal info is there,
 // but the C library doesn't have the structs defined.
@@ -802,17 +811,22 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
 #ifndef V8_HOST_ARCH_MIPS
   USE(info);
   if (signal != SIGPROF) return;
-  if (active_sampler_ == NULL || !active_sampler_->IsActive()) return;
-  if (vm_tid_ != GetThreadID()) return;
+  Isolate* isolate = Isolate::UncheckedCurrent();
+  if (isolate == NULL || !isolate->IsInitialized() || !isolate->IsInUse()) {
+    // We require a fully initialized and entered isolate.
+    return;
+  }
+  Sampler* sampler = isolate->logger()->sampler();
+  if (sampler == NULL || !sampler->IsActive()) return;
 
   TickSample sample_obj;
-  TickSample* sample = CpuProfiler::TickSampleEvent();
+  TickSample* sample = CpuProfiler::TickSampleEvent(isolate);
   if (sample == NULL) sample = &sample_obj;
 
   // Extracting the sample from the context is extremely machine dependent.
   ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
   mcontext_t& mcontext = ucontext->uc_mcontext;
-  sample->state = Top::current_vm_state();
+  sample->state = isolate->current_vm_state();
 #if V8_HOST_ARCH_IA32
   sample->pc = reinterpret_cast<Address>(mcontext.gregs[REG_EIP]);
   sample->sp = reinterpret_cast<Address>(mcontext.gregs[REG_ESP]);
@@ -836,52 +850,130 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
   // Implement this on MIPS.
   UNIMPLEMENTED();
 #endif
-  active_sampler_->SampleStack(sample);
-  active_sampler_->Tick(sample);
+  sampler->SampleStack(sample);
+  sampler->Tick(sample);
 #endif
 }
 
 
 class Sampler::PlatformData : public Malloced {
+ public:
+  PlatformData() : vm_tid_(GetThreadID()) {}
+
+  int vm_tid() const { return vm_tid_; }
+
+ private:
+  const int vm_tid_;
+};
+
+
+class SignalSender : public Thread {
  public:
   enum SleepInterval {
-    FULL_INTERVAL,
-    HALF_INTERVAL
+    HALF_INTERVAL,
+    FULL_INTERVAL
   };
 
-  explicit PlatformData(Sampler* sampler)
-      : sampler_(sampler),
-        signal_handler_installed_(false),
-        vm_tgid_(getpid()),
-        signal_sender_launched_(false) {
+  explicit SignalSender(int interval)
+      : Thread(NULL), vm_tgid_(getpid()), interval_(interval) {}
+
+  static void AddActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::AddActiveSampler(sampler);
+    if (instance_ == NULL) {
+      // Install a signal handler.
+      struct sigaction sa;
+      sa.sa_sigaction = ProfilerSignalHandler;
+      sigemptyset(&sa.sa_mask);
+      sa.sa_flags = SA_RESTART | SA_SIGINFO;
+      signal_handler_installed_ =
+          (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
+
+      // Start a thread that sends SIGPROF signal to VM threads.
+      instance_ = new SignalSender(sampler->interval());
+      instance_->Start();
+    } else {
+      ASSERT(instance_->interval_ == sampler->interval());
+    }
+  }
+
+  static void RemoveActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::RemoveActiveSampler(sampler);
+    if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
+      RuntimeProfiler::WakeUpRuntimeProfilerThreadBeforeShutdown();
+      instance_->Join();
+      delete instance_;
+      instance_ = NULL;
+
+      // Restore the old signal handler.
+      if (signal_handler_installed_) {
+        sigaction(SIGPROF, &old_signal_handler_, 0);
+        signal_handler_installed_ = false;
+      }
+    }
   }
 
-  void SignalSender() {
-    while (sampler_->IsActive()) {
-      if (rate_limiter_.SuspendIfNecessary()) continue;
-      if (sampler_->IsProfiling() && RuntimeProfiler::IsEnabled()) {
-        SendProfilingSignal();
+  // Implement Thread::Run().
+  virtual void Run() {
+    SamplerRegistry::State state = SamplerRegistry::GetState();
+    while (state != SamplerRegistry::HAS_NO_SAMPLERS) {
+      bool cpu_profiling_enabled =
+          (state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
+      bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
+      // When CPU profiling is enabled, both JavaScript and C++ code is
+      // profiled. We must not suspend.
+      if (!cpu_profiling_enabled) {
+        if (rate_limiter_.SuspendIfNecessary()) continue;
+      }
+      if (cpu_profiling_enabled && runtime_profiler_enabled) {
+        if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile, this)) {
+          return;
+        }
         Sleep(HALF_INTERVAL);
-        RuntimeProfiler::NotifyTick();
+        if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile, NULL)) {
+          return;
+        }
         Sleep(HALF_INTERVAL);
       } else {
-        if (sampler_->IsProfiling()) SendProfilingSignal();
-        if (RuntimeProfiler::IsEnabled()) RuntimeProfiler::NotifyTick();
+        if (cpu_profiling_enabled) {
+          if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile,
+                                                      this)) {
+            return;
+          }
+        }
+        if (runtime_profiler_enabled) {
+          if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile,
+                                                      NULL)) {
+            return;
+          }
+        }
         Sleep(FULL_INTERVAL);
       }
     }
   }
 
-  void SendProfilingSignal() {
+  static void DoCpuProfile(Sampler* sampler, void* raw_sender) {
+    if (!sampler->IsProfiling()) return;
+    SignalSender* sender = reinterpret_cast<SignalSender*>(raw_sender);
+    sender->SendProfilingSignal(sampler->platform_data()->vm_tid());
+  }
+
+  static void DoRuntimeProfile(Sampler* sampler, void* ignored) {
+    if (!sampler->isolate()->IsInitialized()) return;
+    sampler->isolate()->runtime_profiler()->NotifyTick();
+  }
+
+  void SendProfilingSignal(int tid) {
     if (!signal_handler_installed_) return;
     // Glibc doesn't provide a wrapper for tgkill(2).
-    syscall(SYS_tgkill, vm_tgid_, vm_tid_, SIGPROF);
+    syscall(SYS_tgkill, vm_tgid_, tid, SIGPROF);
   }
 
   void Sleep(SleepInterval full_or_half) {
     // Convert ms to us and subtract 100 us to compensate for delays
     // occurring during signal delivery.
-    useconds_t interval = sampler_->interval_ * 1000 - 100;
+    useconds_t interval = interval_ * 1000 - 100;
     if (full_or_half == HALF_INTERVAL) interval /= 2;
     int result = usleep(interval);
 #ifdef DEBUG
@@ -896,89 +988,55 @@ class Sampler::PlatformData : public Malloced {
     USE(result);
   }
 
-  Sampler* sampler_;
-  bool signal_handler_installed_;
-  struct sigaction old_signal_handler_;
-  int vm_tgid_;
-  bool signal_sender_launched_;
-  pthread_t signal_sender_thread_;
+  const int vm_tgid_;
+  const int interval_;
   RuntimeProfilerRateLimiter rate_limiter_;
+
+  // Protects the process wide state below.
+  static Mutex* mutex_;
+  static SignalSender* instance_;
+  static bool signal_handler_installed_;
+  static struct sigaction old_signal_handler_;
+
+  DISALLOW_COPY_AND_ASSIGN(SignalSender);
 };
 
 
-static void* SenderEntry(void* arg) {
-  Sampler::PlatformData* data =
-      reinterpret_cast<Sampler::PlatformData*>(arg);
-  data->SignalSender();
-  return 0;
-}
+Mutex* SignalSender::mutex_ = OS::CreateMutex();
+SignalSender* SignalSender::instance_ = NULL;
+struct sigaction SignalSender::old_signal_handler_;
+bool SignalSender::signal_handler_installed_ = false;
 
 
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
-  data_ = new PlatformData(this);
+  data_ = new PlatformData;
 }
 
 
 Sampler::~Sampler() {
-  ASSERT(!data_->signal_sender_launched_);
+  ASSERT(!IsActive());
   delete data_;
 }
 
 
 void Sampler::Start() {
-  // There can only be one active sampler at the time on POSIX
-  // platforms.
   ASSERT(!IsActive());
-  vm_tid_ = GetThreadID();
-
-  // Request profiling signals.
-  struct sigaction sa;
-  sa.sa_sigaction = ProfilerSignalHandler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = SA_RESTART | SA_SIGINFO;
-  data_->signal_handler_installed_ =
-      sigaction(SIGPROF, &sa, &data_->old_signal_handler_) == 0;
-
-  // Start a thread that sends SIGPROF signal to VM thread.
-  // Sending the signal ourselves instead of relying on itimer provides
-  // much better accuracy.
   SetActive(true);
-  if (pthread_create(
-          &data_->signal_sender_thread_, NULL, SenderEntry, data_) == 0) {
-    data_->signal_sender_launched_ = true;
-  }
-
-  // Set this sampler as the active sampler.
-  active_sampler_ = this;
+  SignalSender::AddActiveSampler(this);
 }
 
 
 void Sampler::Stop() {
+  ASSERT(IsActive());
+  SignalSender::RemoveActiveSampler(this);
   SetActive(false);
-
-  // Wait for signal sender termination (it will exit after setting
-  // active_ to false).
-  if (data_->signal_sender_launched_) {
-    Top::WakeUpRuntimeProfilerThreadBeforeShutdown();
-    pthread_join(data_->signal_sender_thread_, NULL);
-    data_->signal_sender_launched_ = false;
-  }
-
-  // Restore old signal handler
-  if (data_->signal_handler_installed_) {
-    sigaction(SIGPROF, &data_->old_signal_handler_, 0);
-    data_->signal_handler_installed_ = false;
-  }
-
-  // This sampler is no longer the active sampler.
-  active_sampler_ = NULL;
 }
 
-
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 } }  // namespace v8::internal
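The Linux profiler no longer keeps a single active_sampler_/vm_tid_ pair. One shared SignalSender thread walks all registered samplers and signals each sampler's recorded thread id, and the first/last registration installs/removes the SIGPROF handler. The SamplerRegistry it iterates is defined elsewhere in the change; judging from the calls above, its interface is roughly (inferred, for orientation only):

    class SamplerRegistry : public AllStatic {
     public:
      enum State {
        HAS_NO_SAMPLERS,
        HAS_SAMPLERS,                  // Samplers exist, none is profiling.
        HAS_CPU_PROFILING_SAMPLERS     // At least one sampler is profiling.
      };
      typedef void (*VisitSampler)(Sampler*, void*);

      static void AddActiveSampler(Sampler* sampler);
      static void RemoveActiveSampler(Sampler* sampler);
      static State GetState();
      // Returns false once no active samplers remain.
      static bool IterateActiveSamplers(VisitSampler visitor, void* param);
    };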
index 35724c3525918b159a15164690d32052399c83a4..df2cd672efe2f6ad28953ded029d27740d070d57 100644 (file)
@@ -88,6 +88,9 @@ double ceiling(double x) {
 }
 
 
+static Mutex* limit_mutex = NULL;
+
+
 void OS::Setup() {
   // Seed the random number generator.
   // Convert the current time to a 64-bit integer first, before converting it
@@ -96,6 +99,7 @@ void OS::Setup() {
   // call this setup code within the same millisecond.
   uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
   srandom(static_cast<unsigned int>(seed));
+  limit_mutex = CreateMutex();
 }
 
 
@@ -109,6 +113,9 @@ static void* highest_ever_allocated = reinterpret_cast<void*>(0);
 
 
 static void UpdateAllocatedSpaceLimits(void* address, int size) {
+  ASSERT(limit_mutex != NULL);
+  ScopedLock lock(limit_mutex);
+
   lowest_ever_allocated = Min(lowest_ever_allocated, address);
   highest_ever_allocated =
       Max(highest_ever_allocated,
@@ -143,7 +150,7 @@ void* OS::Allocate(const size_t requested,
                      MAP_PRIVATE | MAP_ANON,
                      kMmapFd, kMmapFdOffset);
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(Isolate::Current(), StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
@@ -258,7 +265,8 @@ void OS::LogSharedLibraryAddresses() {
     if (code_ptr == NULL) continue;
     const uintptr_t slide = _dyld_get_image_vmaddr_slide(i);
     const uintptr_t start = reinterpret_cast<uintptr_t>(code_ptr) + slide;
-    LOG(SharedLibraryEvent(_dyld_get_image_name(i), start, start + size));
+    LOG(Isolate::Current(),
+        SharedLibraryEvent(_dyld_get_image_name(i), start, start + size));
   }
 #endif  // ENABLE_LOGGING_AND_PROFILING
 }
@@ -424,12 +432,16 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
 }
 
@@ -464,6 +476,7 @@ static void* ThreadEntry(void* arg) {
   thread->thread_handle_data()->thread_ = pthread_self();
   SetThreadName(thread->name());
   ASSERT(thread->IsValid());
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return NULL;
 }
@@ -595,52 +608,109 @@ Semaphore* OS::CreateSemaphore(int count) {
 
 class Sampler::PlatformData : public Malloced {
  public:
-  explicit PlatformData(Sampler* sampler)
-      : sampler_(sampler),
-        task_self_(mach_task_self()),
-        profiled_thread_(0),
-        sampler_thread_(0) {
+  PlatformData() : profiled_thread_(mach_thread_self()) {}
+
+  ~PlatformData() {
+    // Deallocate Mach port for thread.
+    mach_port_deallocate(mach_task_self(), profiled_thread_);
   }
 
-  Sampler* sampler_;
+  thread_act_t profiled_thread() { return profiled_thread_; }
+
+ private:
   // Note: for profiled_thread_ Mach primitives are used instead of PThread's
   // because the latter doesn't provide thread manipulation primitives required.
   // For details, consult "Mac OS X Internals" book, Section 7.3.
-  mach_port_t task_self_;
   thread_act_t profiled_thread_;
-  pthread_t sampler_thread_;
-  RuntimeProfilerRateLimiter rate_limiter_;
+};
 
-  // Sampler thread handler.
-  void Runner() {
-    while (sampler_->IsActive()) {
-      if (rate_limiter_.SuspendIfNecessary()) continue;
-      Sample();
-      OS::Sleep(sampler_->interval_);
+class SamplerThread : public Thread {
+ public:
+  explicit SamplerThread(int interval) : Thread(NULL), interval_(interval) {}
+
+  static void AddActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::AddActiveSampler(sampler);
+    if (instance_ == NULL) {
+      instance_ = new SamplerThread(sampler->interval());
+      instance_->Start();
+    } else {
+      ASSERT(instance_->interval_ == sampler->interval());
+    }
+  }
+
+  static void RemoveActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::RemoveActiveSampler(sampler);
+    if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
+      RuntimeProfiler::WakeUpRuntimeProfilerThreadBeforeShutdown();
+      instance_->Join();
+      delete instance_;
+      instance_ = NULL;
+    }
+  }
+
+  // Implement Thread::Run().
+  virtual void Run() {
+    SamplerRegistry::State state = SamplerRegistry::GetState();
+    while (state != SamplerRegistry::HAS_NO_SAMPLERS) {
+      bool cpu_profiling_enabled =
+          (state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
+      bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
+      // When CPU profiling is enabled, both JavaScript and C++ code is
+      // profiled. We must not suspend.
+      if (!cpu_profiling_enabled) {
+        if (rate_limiter_.SuspendIfNecessary()) continue;
+      }
+      if (cpu_profiling_enabled) {
+        if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile, this)) {
+          return;
+        }
+      }
+      if (runtime_profiler_enabled) {
+        if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile, NULL)) {
+          return;
+        }
+      }
+      OS::Sleep(interval_);
+      state = SamplerRegistry::GetState();
     }
   }
 
-  void Sample() {
-    if (sampler_->IsProfiling()) {
-      TickSample sample_obj;
-      TickSample* sample = CpuProfiler::TickSampleEvent();
-      if (sample == NULL) sample = &sample_obj;
+  static void DoCpuProfile(Sampler* sampler, void* raw_sampler_thread) {
+    if (!sampler->isolate()->IsInitialized()) return;
+    if (!sampler->IsProfiling()) return;
+    SamplerThread* sampler_thread =
+        reinterpret_cast<SamplerThread*>(raw_sampler_thread);
+    sampler_thread->SampleContext(sampler);
+  }
+
+  static void DoRuntimeProfile(Sampler* sampler, void* ignored) {
+    if (!sampler->isolate()->IsInitialized()) return;
+    sampler->isolate()->runtime_profiler()->NotifyTick();
+  }
+
+  void SampleContext(Sampler* sampler) {
+    thread_act_t profiled_thread = sampler->platform_data()->profiled_thread();
+    TickSample sample_obj;
+    TickSample* sample = CpuProfiler::TickSampleEvent(sampler->isolate());
+    if (sample == NULL) sample = &sample_obj;
 
-      if (KERN_SUCCESS != thread_suspend(profiled_thread_)) return;
+    if (KERN_SUCCESS != thread_suspend(profiled_thread)) return;
 
 #if V8_HOST_ARCH_X64
-      thread_state_flavor_t flavor = x86_THREAD_STATE64;
-      x86_thread_state64_t state;
-      mach_msg_type_number_t count = x86_THREAD_STATE64_COUNT;
+    thread_state_flavor_t flavor = x86_THREAD_STATE64;
+    x86_thread_state64_t state;
+    mach_msg_type_number_t count = x86_THREAD_STATE64_COUNT;
 #if __DARWIN_UNIX03
 #define REGISTER_FIELD(name) __r ## name
 #else
 #define REGISTER_FIELD(name) r ## name
 #endif  // __DARWIN_UNIX03
 #elif V8_HOST_ARCH_IA32
-      thread_state_flavor_t flavor = i386_THREAD_STATE;
-      i386_thread_state_t state;
-      mach_msg_type_number_t count = i386_THREAD_STATE_COUNT;
+    thread_state_flavor_t flavor = i386_THREAD_STATE;
+    i386_thread_state_t state;
+    mach_msg_type_number_t count = i386_THREAD_STATE_COUNT;
 #if __DARWIN_UNIX03
 #define REGISTER_FIELD(name) __e ## name
 #else
@@ -650,81 +720,64 @@ class Sampler::PlatformData : public Malloced {
 #error Unsupported Mac OS X host architecture.
 #endif  // V8_HOST_ARCH
 
-      if (thread_get_state(profiled_thread_,
-                           flavor,
-                           reinterpret_cast<natural_t*>(&state),
-                           &count) == KERN_SUCCESS) {
-        sample->state = Top::current_vm_state();
-        sample->pc = reinterpret_cast<Address>(state.REGISTER_FIELD(ip));
-        sample->sp = reinterpret_cast<Address>(state.REGISTER_FIELD(sp));
-        sample->fp = reinterpret_cast<Address>(state.REGISTER_FIELD(bp));
-        sampler_->SampleStack(sample);
-        sampler_->Tick(sample);
-      }
-      thread_resume(profiled_thread_);
+    if (thread_get_state(profiled_thread,
+                         flavor,
+                         reinterpret_cast<natural_t*>(&state),
+                         &count) == KERN_SUCCESS) {
+      sample->state = sampler->isolate()->current_vm_state();
+      sample->pc = reinterpret_cast<Address>(state.REGISTER_FIELD(ip));
+      sample->sp = reinterpret_cast<Address>(state.REGISTER_FIELD(sp));
+      sample->fp = reinterpret_cast<Address>(state.REGISTER_FIELD(bp));
+      sampler->SampleStack(sample);
+      sampler->Tick(sample);
     }
-    if (RuntimeProfiler::IsEnabled()) RuntimeProfiler::NotifyTick();
+    thread_resume(profiled_thread);
   }
+
+  const int interval_;
+  RuntimeProfilerRateLimiter rate_limiter_;
+
+  // Protects the process wide state below.
+  static Mutex* mutex_;
+  static SamplerThread* instance_;
+
+  DISALLOW_COPY_AND_ASSIGN(SamplerThread);
 };
 
 #undef REGISTER_FIELD
 
 
-// Entry point for sampler thread.
-static void* SamplerEntry(void* arg) {
-  Sampler::PlatformData* data =
-      reinterpret_cast<Sampler::PlatformData*>(arg);
-  data->Runner();
-  return 0;
-}
+Mutex* SamplerThread::mutex_ = OS::CreateMutex();
+SamplerThread* SamplerThread::instance_ = NULL;
 
 
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
-  data_ = new PlatformData(this);
+  data_ = new PlatformData;
 }
 
 
 Sampler::~Sampler() {
+  ASSERT(!IsActive());
   delete data_;
 }
 
 
 void Sampler::Start() {
-  // Do not start multiple threads for the same sampler.
   ASSERT(!IsActive());
-  data_->profiled_thread_ = mach_thread_self();
-
-  // Create sampler thread with high priority.
-  // According to POSIX spec, when SCHED_FIFO policy is used, a thread
-  // runs until it exits or blocks.
-  pthread_attr_t sched_attr;
-  sched_param fifo_param;
-  pthread_attr_init(&sched_attr);
-  pthread_attr_setinheritsched(&sched_attr, PTHREAD_EXPLICIT_SCHED);
-  pthread_attr_setschedpolicy(&sched_attr, SCHED_FIFO);
-  fifo_param.sched_priority = sched_get_priority_max(SCHED_FIFO);
-  pthread_attr_setschedparam(&sched_attr, &fifo_param);
-
   SetActive(true);
-  pthread_create(&data_->sampler_thread_, &sched_attr, SamplerEntry, data_);
+  SamplerThread::AddActiveSampler(this);
 }
 
 
 void Sampler::Stop() {
-  // Seting active to false triggers termination of the sampler
-  // thread.
+  ASSERT(IsActive());
+  SamplerThread::RemoveActiveSampler(this);
   SetActive(false);
-
-  // Wait for sampler thread to terminate.
-  Top::WakeUpRuntimeProfilerThreadBeforeShutdown();
-  pthread_join(data_->sampler_thread_, NULL);
-
-  // Deallocate Mach port for thread.
-  mach_port_deallocate(data_->task_self_, data_->profiled_thread_);
 }
 
 #endif  // ENABLE_LOGGING_AND_PROFILING
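
The Mac OS X hunk above (and the matching Win32 hunk further down) replaces the old one-thread-per-Sampler design with a single process-wide SamplerThread: it is lazily started when the first sampler registers, walks the SamplerRegistry on every tick, and is joined and destroyed when the last sampler unregisters. The following is a minimal standalone sketch of that singleton-thread/registry shape, using std::thread and std::mutex instead of V8's Thread/Mutex wrappers; all names in it are illustrative, not the ones introduced by this patch.

// sampler_thread_sketch.cc -- illustrative sketch only, not part of the patch.
#include <algorithm>
#include <atomic>
#include <chrono>
#include <mutex>
#include <thread>
#include <vector>

class Sampler {
 public:
  explicit Sampler(int interval_ms) : interval_ms_(interval_ms) {}
  virtual ~Sampler() = default;
  int interval_ms() const { return interval_ms_; }
  virtual void Tick() = 0;  // Invoked from the shared sampler thread.
 private:
  const int interval_ms_;
};

class SamplerThread {
 public:
  // Registers a sampler and lazily starts the single shared thread.
  static void AddActiveSampler(Sampler* sampler) {
    std::lock_guard<std::mutex> lock(mutex_);
    samplers_.push_back(sampler);
    if (instance_ == nullptr) {
      instance_ = new SamplerThread(sampler->interval_ms());
      instance_->thread_ = std::thread(&SamplerThread::Run, instance_);
    }
  }

  // Unregisters a sampler; joins and destroys the thread when none remain.
  static void RemoveActiveSampler(Sampler* sampler) {
    SamplerThread* to_join = nullptr;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      samplers_.erase(
          std::remove(samplers_.begin(), samplers_.end(), sampler),
          samplers_.end());
      if (samplers_.empty() && instance_ != nullptr) {
        instance_->running_ = false;
        to_join = instance_;
        instance_ = nullptr;
      }
    }
    if (to_join != nullptr) {
      to_join->thread_.join();
      delete to_join;
    }
  }

 private:
  explicit SamplerThread(int interval_ms) : interval_ms_(interval_ms) {}

  void Run() {
    while (running_) {
      {
        // Tick every registered sampler under the registry lock so a
        // sampler can never be destroyed while it is being sampled.
        std::lock_guard<std::mutex> lock(mutex_);
        for (Sampler* sampler : samplers_) sampler->Tick();
      }
      std::this_thread::sleep_for(std::chrono::milliseconds(interval_ms_));
    }
  }

  const int interval_ms_;
  std::atomic<bool> running_{true};
  std::thread thread_;

  static std::mutex mutex_;
  static std::vector<Sampler*> samplers_;
  static SamplerThread* instance_;
};

std::mutex SamplerThread::mutex_;
std::vector<Sampler*> SamplerThread::samplers_;
SamplerThread* SamplerThread::instance_ = nullptr;
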
index 49d3dd988d8b24a7dd83f1613ce231e8bf3ae964..3608b6b8af9d865cd329f95f93d1370047f4565b 100644 (file)
@@ -340,13 +340,17 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
   UNIMPLEMENTED();
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
   UNIMPLEMENTED();
 }
index e2796294a603faccc8bb53e3155b93fd3db0d344..568acdbae868eb508508cd80ff1e40deb8c7c7d4 100644 (file)
@@ -154,7 +154,7 @@ void* OS::Allocate(const size_t requested,
   void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
 
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(ISOLATE, StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
@@ -400,12 +400,16 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
 }
 
@@ -421,6 +425,7 @@ static void* ThreadEntry(void* arg) {
   // one) so we initialize it here too.
   thread->thread_handle_data()->thread_ = pthread_self();
   ASSERT(thread->IsValid());
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return NULL;
 }
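
The ThreadEntry change above is what ties a newly spawned Thread to its isolate: before Run() is invoked, the isolate pointer carried by the Thread object is stored into thread-local storage under Isolate::isolate_key(), so code running on that thread can later recover "its" isolate without the pointer being passed through every call. A tiny sketch of that pattern using a C++11 thread_local follows; the IsolateLike stand-in and all other names are made up for illustration.

// thread_tls_sketch.cc -- illustrative sketch only; names are made up.
#include <cassert>
#include <thread>

struct IsolateLike {};  // Stand-in for the per-VM-instance state object.

class ThreadLike {
 public:
  explicit ThreadLike(IsolateLike* isolate) : isolate_(isolate) {}
  virtual ~ThreadLike() = default;

  void Start() {
    thread_ = std::thread([this] {
      // Publish the owning isolate in TLS before user code runs, mirroring
      // the Thread::SetThreadLocal() call added to ThreadEntry.
      current_isolate_ = isolate_;
      Run();
    });
  }

  void Join() {
    if (thread_.joinable()) thread_.join();
  }

  // Any code on the spawned thread can recover the isolate it belongs to.
  static IsolateLike* Current() { return current_isolate_; }

 protected:
  virtual void Run() = 0;

 private:
  IsolateLike* isolate_;
  std::thread thread_;
  static thread_local IsolateLike* current_isolate_;
};

thread_local IsolateLike* ThreadLike::current_isolate_ = nullptr;

class Worker : public ThreadLike {
 public:
  using ThreadLike::ThreadLike;
 protected:
  void Run() override { assert(ThreadLike::Current() != nullptr); }
};

int main() {
  IsolateLike isolate;
  Worker worker(&isolate);
  worker.Start();
  worker.Join();
  return 0;
}
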
@@ -598,8 +603,9 @@ class Sampler::PlatformData : public Malloced {
 };
 
 
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
index 9118818cab21ec1aa4d363ef83658ff962ac8efc..1dd486ebd6a11259b15d5555ed0de78c28eeaa4b 100644 (file)
@@ -127,7 +127,7 @@ bool OS::Remove(const char* path) {
 }
 
 
-const char* OS::LogFileOpenMode = "w";
+const char* const OS::LogFileOpenMode = "w";
 
 
 void OS::Print(const char* format, ...) {
index 794fe07cd6b4a9ca182bf7c2f1644984ea97b43e..6e66512841b8526b0d5c21e90741a70bdb0f2c88 100644 (file)
@@ -169,7 +169,7 @@ void* OS::Allocate(const size_t requested,
   void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
 
   if (mbase == MAP_FAILED) {
-    LOG(StringEvent("OS::Allocate", "mmap failed"));
+    LOG(ISOLATE, StringEvent("OS::Allocate", "mmap failed"));
     return NULL;
   }
   *allocated = msize;
@@ -415,12 +415,16 @@ bool ThreadHandle::IsValid() const {
 }
 
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name("v8:<unknown>");
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   set_name(name);
 }
 
@@ -436,6 +440,7 @@ static void* ThreadEntry(void* arg) {
   // one) so we initialize it here too.
   thread->thread_handle_data()->thread_ = pthread_self();
   ASSERT(thread->IsValid());
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return NULL;
 }
@@ -712,8 +717,9 @@ static void* SenderEntry(void* arg) {
 }
 
 
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
index f24994b5b8058b983b72915d3ad62174ecef4aa4..31b65c1b051171f835f9260af855f3a042b22b31 100644 (file)
@@ -173,6 +173,10 @@ double ceiling(double x) {
   return ceil(x);
 }
 
+
+static Mutex* limit_mutex = NULL;
+
+
 #ifdef _WIN64
 typedef double (*ModuloFunction)(double, double);
 
@@ -540,6 +544,7 @@ void OS::Setup() {
   // call this setup code within the same millisecond.
   uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
   srand(static_cast<unsigned int>(seed));
+  limit_mutex = CreateMutex();
 }
 
 
@@ -676,7 +681,7 @@ bool OS::Remove(const char* path) {
 
 
 // Open log file in binary mode to avoid \n -> \r\n conversion.
-const char* OS::LogFileOpenMode = "wb";
+const char* const OS::LogFileOpenMode = "wb";
 
 
 // Print (debug) message to console.
@@ -765,6 +770,9 @@ static void* highest_ever_allocated = reinterpret_cast<void*>(0);
 
 
 static void UpdateAllocatedSpaceLimits(void* address, int size) {
+  ASSERT(limit_mutex != NULL);
+  ScopedLock lock(limit_mutex);
+
   lowest_ever_allocated = Min(lowest_ever_allocated, address);
   highest_ever_allocated =
       Max(highest_ever_allocated,
@@ -835,7 +843,7 @@ void* OS::Allocate(const size_t requested,
   // For executable pages try and randomize the allocation address
   if (prot == PAGE_EXECUTE_READWRITE &&
       msize >= static_cast<size_t>(Page::kPageSize)) {
-    address = (V8::RandomPrivate() << kPageSizeBits)
+    address = (V8::RandomPrivate(Isolate::Current()) << kPageSizeBits)
       | kAllocationRandomAddressMin;
     address &= kAllocationRandomAddressMax;
   }
@@ -848,7 +856,7 @@ void* OS::Allocate(const size_t requested,
     mbase = VirtualAlloc(NULL, msize, MEM_COMMIT | MEM_RESERVE, prot);
 
   if (mbase == NULL) {
-    LOG(StringEvent("OS::Allocate", "VirtualAlloc failed"));
+    LOG(ISOLATE, StringEvent("OS::Allocate", "VirtualAlloc failed"));
     return NULL;
   }
 
@@ -1191,7 +1199,8 @@ static bool LoadSymbols(HANDLE process_handle) {
       if (err != ERROR_MOD_NOT_FOUND &&
           err != ERROR_INVALID_HANDLE) return false;
     }
-    LOG(SharedLibraryEvent(
+    LOG(i::Isolate::Current(),
+        SharedLibraryEvent(
             module_entry.szExePath,
             reinterpret_cast<unsigned int>(module_entry.modBaseAddr),
             reinterpret_cast<unsigned int>(module_entry.modBaseAddr +
@@ -1450,6 +1459,7 @@ static unsigned int __stdcall ThreadEntry(void* arg) {
   // don't know which thread will run first (the original thread or the new
   // one) so we initialize it here too.
   thread->thread_handle_data()->tid_ = GetCurrentThreadId();
+  Thread::SetThreadLocal(Isolate::isolate_key(), thread->isolate());
   thread->Run();
   return 0;
 }
@@ -1493,13 +1503,17 @@ class Thread::PlatformData : public Malloced {
 // Initialize a Win32 thread object. The thread has an invalid thread
 // handle until it is started.
 
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   data_ = new PlatformData(kNoThread);
   set_name("v8:<unknown>");
 }
 
 
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
+Thread::Thread(Isolate* isolate, const char* name)
+    : ThreadHandle(ThreadHandle::INVALID),
+      isolate_(isolate) {
   data_ = new PlatformData(kNoThread);
   set_name(name);
 }
@@ -1840,135 +1854,176 @@ Socket* OS::CreateSocket() {
 
 // ----------------------------------------------------------------------------
 // Win32 profiler support.
-//
-// On win32 we use a sampler thread with high priority to sample the program
-// counter for the profiled thread.
 
 class Sampler::PlatformData : public Malloced {
  public:
-  explicit PlatformData(Sampler* sampler) {
-    sampler_ = sampler;
-    sampler_thread_ = INVALID_HANDLE_VALUE;
-    profiled_thread_ = INVALID_HANDLE_VALUE;
+  // Get a handle to the calling thread. This is the thread that we are
+  // going to profile. We need to make a copy of the handle because we are
+  // going to use it in the sampler thread. Using GetThreadHandle() will
+  // not work in this case. We're using OpenThread because DuplicateHandle
+  // for some reason doesn't work in Chrome's sandbox.
+  PlatformData() : profiled_thread_(OpenThread(THREAD_GET_CONTEXT |
+                                               THREAD_SUSPEND_RESUME |
+                                               THREAD_QUERY_INFORMATION,
+                                               false,
+                                               GetCurrentThreadId())) {}
+
+  ~PlatformData() {
+    if (profiled_thread_ != NULL) {
+      CloseHandle(profiled_thread_);
+      profiled_thread_ = NULL;
+    }
   }
 
-  Sampler* sampler_;
-  HANDLE sampler_thread_;
+  HANDLE profiled_thread() { return profiled_thread_; }
+
+ private:
   HANDLE profiled_thread_;
-  RuntimeProfilerRateLimiter rate_limiter_;
+};
+
 
-  // Sampler thread handler.
-  void Runner() {
-    while (sampler_->IsActive()) {
-      if (rate_limiter_.SuspendIfNecessary()) continue;
-      Sample();
-      Sleep(sampler_->interval_);
+class SamplerThread : public Thread {
+ public:
+  explicit SamplerThread(int interval) : Thread(NULL), interval_(interval) {}
+
+  static void AddActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::AddActiveSampler(sampler);
+    if (instance_ == NULL) {
+      instance_ = new SamplerThread(sampler->interval());
+      instance_->Start();
+    } else {
+      ASSERT(instance_->interval_ == sampler->interval());
     }
   }
 
-  void Sample() {
-    if (sampler_->IsProfiling()) {
-      // Context used for sampling the register state of the profiled thread.
-      CONTEXT context;
-      memset(&context, 0, sizeof(context));
+  static void RemoveActiveSampler(Sampler* sampler) {
+    ScopedLock lock(mutex_);
+    SamplerRegistry::RemoveActiveSampler(sampler);
+    if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
+      RuntimeProfiler::WakeUpRuntimeProfilerThreadBeforeShutdown();
+      instance_->Join();
+      delete instance_;
+      instance_ = NULL;
+    }
+  }
 
-      TickSample sample_obj;
-      TickSample* sample = CpuProfiler::TickSampleEvent();
-      if (sample == NULL) sample = &sample_obj;
+  // Implement Thread::Run().
+  virtual void Run() {
+    SamplerRegistry::State state = SamplerRegistry::GetState();
+    while (state != SamplerRegistry::HAS_NO_SAMPLERS) {
+      bool cpu_profiling_enabled =
+          (state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
+      bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
+      // When CPU profiling is enabled, both JavaScript and C++ code is
+      // profiled. We must not suspend.
+      if (!cpu_profiling_enabled) {
+        if (rate_limiter_.SuspendIfNecessary()) continue;
+      }
+      if (cpu_profiling_enabled) {
+        if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile, this)) {
+          return;
+        }
+      }
+      if (runtime_profiler_enabled) {
+        if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile, NULL)) {
+          return;
+        }
+      }
+      OS::Sleep(interval_);
+    }
+  }
 
-      static const DWORD kSuspendFailed = static_cast<DWORD>(-1);
-      if (SuspendThread(profiled_thread_) == kSuspendFailed) return;
-      sample->state = Top::current_vm_state();
+  static void DoCpuProfile(Sampler* sampler, void* raw_sampler_thread) {
+    if (!sampler->isolate()->IsInitialized()) return;
+    if (!sampler->IsProfiling()) return;
+    SamplerThread* sampler_thread =
+        reinterpret_cast<SamplerThread*>(raw_sampler_thread);
+    sampler_thread->SampleContext(sampler);
+  }
+
+  static void DoRuntimeProfile(Sampler* sampler, void* ignored) {
+    if (!sampler->isolate()->IsInitialized()) return;
+    sampler->isolate()->runtime_profiler()->NotifyTick();
+  }
+
+  void SampleContext(Sampler* sampler) {
+    HANDLE profiled_thread = sampler->platform_data()->profiled_thread();
+    if (profiled_thread == NULL) return;
+
+    // Context used for sampling the register state of the profiled thread.
+    CONTEXT context;
+    memset(&context, 0, sizeof(context));
+
+    TickSample sample_obj;
+    TickSample* sample = CpuProfiler::TickSampleEvent(sampler->isolate());
+    if (sample == NULL) sample = &sample_obj;
 
-      context.ContextFlags = CONTEXT_FULL;
-      if (GetThreadContext(profiled_thread_, &context) != 0) {
+    static const DWORD kSuspendFailed = static_cast<DWORD>(-1);
+    if (SuspendThread(profiled_thread) == kSuspendFailed) return;
+    sample->state = sampler->isolate()->current_vm_state();
+
+    context.ContextFlags = CONTEXT_FULL;
+    if (GetThreadContext(profiled_thread, &context) != 0) {
 #if V8_HOST_ARCH_X64
-        sample->pc = reinterpret_cast<Address>(context.Rip);
-        sample->sp = reinterpret_cast<Address>(context.Rsp);
-        sample->fp = reinterpret_cast<Address>(context.Rbp);
+      sample->pc = reinterpret_cast<Address>(context.Rip);
+      sample->sp = reinterpret_cast<Address>(context.Rsp);
+      sample->fp = reinterpret_cast<Address>(context.Rbp);
 #else
-        sample->pc = reinterpret_cast<Address>(context.Eip);
-        sample->sp = reinterpret_cast<Address>(context.Esp);
-        sample->fp = reinterpret_cast<Address>(context.Ebp);
+      sample->pc = reinterpret_cast<Address>(context.Eip);
+      sample->sp = reinterpret_cast<Address>(context.Esp);
+      sample->fp = reinterpret_cast<Address>(context.Ebp);
 #endif
-        sampler_->SampleStack(sample);
-        sampler_->Tick(sample);
-      }
-      ResumeThread(profiled_thread_);
+      sampler->SampleStack(sample);
+      sampler->Tick(sample);
     }
-    if (RuntimeProfiler::IsEnabled()) RuntimeProfiler::NotifyTick();
+    ResumeThread(profiled_thread);
   }
+
+  const int interval_;
+  RuntimeProfilerRateLimiter rate_limiter_;
+
+  // Protects the process wide state below.
+  static Mutex* mutex_;
+  static SamplerThread* instance_;
+
+  DISALLOW_COPY_AND_ASSIGN(SamplerThread);
 };
 
 
-// Entry point for sampler thread.
-static unsigned int __stdcall SamplerEntry(void* arg) {
-  Sampler::PlatformData* data =
-      reinterpret_cast<Sampler::PlatformData*>(arg);
-  data->Runner();
-  return 0;
-}
+Mutex* SamplerThread::mutex_ = OS::CreateMutex();
+SamplerThread* SamplerThread::instance_ = NULL;
 
 
-// Initialize a profile sampler.
-Sampler::Sampler(int interval)
-    : interval_(interval),
+Sampler::Sampler(Isolate* isolate, int interval)
+    : isolate_(isolate),
+      interval_(interval),
       profiling_(false),
       active_(false),
       samples_taken_(0) {
-  data_ = new PlatformData(this);
+  data_ = new PlatformData;
 }
 
 
 Sampler::~Sampler() {
+  ASSERT(!IsActive());
   delete data_;
 }
 
 
-// Start profiling.
 void Sampler::Start() {
-  // Do not start multiple threads for the same sampler.
   ASSERT(!IsActive());
-
-  // Get a handle to the calling thread. This is the thread that we are
-  // going to profile. We need to make a copy of the handle because we are
-  // going to use it in the sampler thread. Using GetThreadHandle() will
-  // not work in this case. We're using OpenThread because DuplicateHandle
-  // for some reason doesn't work in Chrome's sandbox.
-  data_->profiled_thread_ = OpenThread(THREAD_GET_CONTEXT |
-                                       THREAD_SUSPEND_RESUME |
-                                       THREAD_QUERY_INFORMATION,
-                                       false,
-                                       GetCurrentThreadId());
-  BOOL ok = data_->profiled_thread_ != NULL;
-  if (!ok) return;
-
-  // Start sampler thread.
-  unsigned int tid;
   SetActive(true);
-  data_->sampler_thread_ = reinterpret_cast<HANDLE>(
-      _beginthreadex(NULL, 0, SamplerEntry, data_, 0, &tid));
-  // Set thread to high priority to increase sampling accuracy.
-  SetThreadPriority(data_->sampler_thread_, THREAD_PRIORITY_TIME_CRITICAL);
+  SamplerThread::AddActiveSampler(this);
 }
 
 
-// Stop profiling.
 void Sampler::Stop() {
-  // Seting active to false triggers termination of the sampler
-  // thread.
+  ASSERT(IsActive());
+  SamplerThread::RemoveActiveSampler(this);
   SetActive(false);
-
-  // Wait for sampler thread to terminate.
-  Top::WakeUpRuntimeProfilerThreadBeforeShutdown();
-  WaitForSingleObject(data_->sampler_thread_, INFINITE);
-
-  // Release the thread handles
-  CloseHandle(data_->sampler_thread_);
-  CloseHandle(data_->profiled_thread_);
 }
 
-
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 } }  // namespace v8::internal
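
In the Win32 hunks above, acquiring the handle of the profiled thread moves from Sampler::Start() into the PlatformData constructor, with the matching CloseHandle() in its destructor, so the handle's lifetime is tied to the platform data rather than to explicit Start/Stop calls. A generic sketch of that RAII shape follows; the handle type and acquire/release functions are stand-ins (not OpenThread/CloseHandle), defined trivially so the sketch compiles anywhere.

// platform_data_raii_sketch.cc -- illustrative sketch only; fake OS calls.
#include <cstdint>

using ThreadHandle = std::intptr_t;
constexpr ThreadHandle kInvalidHandle = 0;

// Stand-ins for the real OS calls (e.g. OpenThread/CloseHandle on Win32).
ThreadHandle AcquireCurrentThreadHandle() { return 1; }  // Fake handle.
void ReleaseThreadHandle(ThreadHandle) {}

// Owns the handle for the whole lifetime of the sampler's platform data, so
// Start()/Stop() no longer open or close OS resources themselves.
class PlatformDataLike {
 public:
  PlatformDataLike() : profiled_thread_(AcquireCurrentThreadHandle()) {}

  ~PlatformDataLike() {
    if (profiled_thread_ != kInvalidHandle) {
      ReleaseThreadHandle(profiled_thread_);
      profiled_thread_ = kInvalidHandle;
    }
  }

  ThreadHandle profiled_thread() const { return profiled_thread_; }

  PlatformDataLike(const PlatformDataLike&) = delete;
  PlatformDataLike& operator=(const PlatformDataLike&) = delete;

 private:
  ThreadHandle profiled_thread_;
};

int main() {
  PlatformDataLike data;
  return data.profiled_thread() == kInvalidHandle ? 1 : 0;
}
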
index e2f50a67fa21b1d3e6855b3fbd5c6752ec1d3863..ba9e76ea1ba76a07bf3ad9dfd7df68d901156e96 100644 (file)
@@ -177,7 +177,7 @@ class OS {
   static bool Remove(const char* path);
 
   // Log file open mode is platform-dependent due to line ends issues.
-  static const char* LogFileOpenMode;
+  static const char* const LogFileOpenMode;
 
   // Print output to console. This is mostly used for debugging output.
   // On platforms that has standard terminal output, the output
@@ -388,9 +388,9 @@ class Thread: public ThreadHandle {
     LOCAL_STORAGE_KEY_MAX_VALUE = kMaxInt
   };
 
-  // Create new thread.
-  Thread();
-  explicit Thread(const char* name);
+  // Create new thread (with a value for storing in the TLS isolate field).
+  explicit Thread(Isolate* isolate);
+  Thread(Isolate* isolate, const char* name);
   virtual ~Thread();
 
   // Start new thread by calling the Run() method in the new thread.
@@ -424,6 +424,8 @@ class Thread: public ThreadHandle {
   // A hint to the scheduler to let another thread run.
   static void YieldCPU();
 
+  Isolate* isolate() const { return isolate_; }
+
   // The thread name length is limited to 16 based on Linux's implementation of
   // prctl().
   static const int kMaxThreadNameLength = 16;
@@ -432,7 +434,7 @@ class Thread: public ThreadHandle {
 
   class PlatformData;
   PlatformData* data_;
-
+  Isolate* isolate_;
   char name_[kMaxThreadNameLength];
 
   DISALLOW_COPY_AND_ASSIGN(Thread);
@@ -466,13 +468,14 @@ class Mutex {
 
 
 // ----------------------------------------------------------------------------
-// ScopedLock
+// ScopedLock/ScopedUnlock
 //
-// Stack-allocated ScopedLocks provide block-scoped locking and unlocking
-// of a mutex.
+// Stack-allocated ScopedLocks/ScopedUnlocks provide block-scoped
+// locking and unlocking of a mutex.
 class ScopedLock {
  public:
   explicit ScopedLock(Mutex* mutex): mutex_(mutex) {
+    ASSERT(mutex_ != NULL);
     mutex_->Lock();
   }
   ~ScopedLock() {
@@ -583,9 +586,11 @@ class TickSample {
 class Sampler {
  public:
   // Initialize sampler.
-  explicit Sampler(int interval);
+  Sampler(Isolate* isolate, int interval);
   virtual ~Sampler();
 
+  int interval() const { return interval_; }
+
   // Performs stack sampling.
   void SampleStack(TickSample* sample) {
     DoSampleStack(sample);
@@ -608,6 +613,8 @@ class Sampler {
   // Whether the sampler is running (that is, consumes resources).
   bool IsActive() const { return NoBarrier_Load(&active_); }
 
+  Isolate* isolate() { return isolate_; }
+
   // Used in tests to make sure that stack sampling is performed.
   int samples_taken() const { return samples_taken_; }
   void ResetSamplesTaken() { samples_taken_ = 0; }
@@ -615,6 +622,8 @@ class Sampler {
   class PlatformData;
   PlatformData* data() { return data_; }
 
+  PlatformData* platform_data() { return data_; }
+
  protected:
   virtual void DoSampleStack(TickSample* sample) = 0;
 
@@ -622,6 +631,7 @@ class Sampler {
   void SetActive(bool value) { NoBarrier_Store(&active_, value); }
   void IncSamplesTaken() { if (++samples_taken_ < 0) samples_taken_ = 0; }
 
+  Isolate* isolate_;
   const int interval_;
   Atomic32 profiling_;
   Atomic32 active_;
@@ -630,6 +640,7 @@ class Sampler {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Sampler);
 };
 
+
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 } }  // namespace v8::internal
index 7c9d8a6109dddf15593a171941550f3911c536ad..cea54efdac141aa167eb42e880740bdb0c0ba3aa 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
+/*
+TODO(isolates): I include v8.h instead of these because we need Isolate and
+some classes (NativeAllocationChecker) are moved into isolate.h.
 #include "../include/v8stdint.h"
 #include "globals.h"
 #include "checks.h"
 #include "allocation.h"
+#include "allocation-inl.h"
 #include "utils.h"
 #include "list-inl.h"
-#include "hashmap.h"
+#include "hashmap.h" 
+*/
+
 #include "preparse-data.h"
 
+
 namespace v8 {
 namespace internal {
 
index 3817935f8f8050f5327ff3a9fa964af1a2cb88cd..6ed8e36dc672de128dd7dc9e4a3f2f69126c0db4 100644 (file)
@@ -26,6 +26,9 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "../include/v8-preparser.h"
+
+#include "v8.h"
+
 #include "globals.h"
 #include "checks.h"
 #include "allocation.h"
@@ -158,6 +161,9 @@ class InputStreamUTF16Buffer : public UC16CharacterStream {
 
 class StandAloneJavaScriptScanner : public JavaScriptScanner {
  public:
+  StandAloneJavaScriptScanner()
+    : JavaScriptScanner(Isolate::Current()) { }
+
   void Initialize(UC16CharacterStream* source) {
     source_ = source;
     Init();
index 68020f4b3862e622b8faf977590dd97cd2a20276..a0a34a7defc34a6074907541a171d25b1a3c569a 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
+/*
+TODO(isolates): I include v8.h instead of these because we need Isolate and
+some classes (NativeAllocationChecker) are moved into isolate.h.
 #include "../include/v8stdint.h"
 #include "unicode.h"
 #include "globals.h"
@@ -32,6 +37,8 @@
 #include "allocation.h"
 #include "utils.h"
 #include "list.h"
+*/
+
 #include "scanner-base.h"
 #include "preparse-data.h"
 #include "preparser.h"
index dda7abbb3f6de1641abaf27a76f0c5c13e969960..42467c3b702d076134bca4b8eb5f9b03830323f4 100644 (file)
@@ -526,13 +526,13 @@ void PrettyPrinter::PrintLiteral(Handle<Object> value, bool quote) {
       Print("%c", string->Get(i));
     }
     if (quote) Print("\"");
-  } else if (object == Heap::null_value()) {
+  } else if (object->IsNull()) {
     Print("null");
-  } else if (object == Heap::true_value()) {
+  } else if (object->IsTrue()) {
     Print("true");
-  } else if (object == Heap::false_value()) {
+  } else if (object->IsFalse()) {
     Print("false");
-  } else if (object == Heap::undefined_value()) {
+  } else if (object->IsUndefined()) {
     Print("undefined");
   } else if (object->IsNumber()) {
     Print("%g", object->Number());
@@ -602,7 +602,7 @@ void PrettyPrinter::PrintCaseClause(CaseClause* clause) {
 
 class IndentedScope BASE_EMBEDDED {
  public:
-  IndentedScope() {
+  explicit IndentedScope(AstPrinter* printer) : ast_printer_(printer) {
     ast_printer_->inc_indent();
   }
 
@@ -626,30 +626,20 @@ class IndentedScope BASE_EMBEDDED {
     ast_printer_->dec_indent();
   }
 
-  static void SetAstPrinter(AstPrinter* a) { ast_printer_ = a; }
-
  private:
-  static AstPrinter* ast_printer_;
+  AstPrinter* ast_printer_;
 };
 
 
-AstPrinter* IndentedScope::ast_printer_ = NULL;
-
-
 //-----------------------------------------------------------------------------
 
-int AstPrinter::indent_ = 0;
 
-
-AstPrinter::AstPrinter() {
-  ASSERT(indent_ == 0);
-  IndentedScope::SetAstPrinter(this);
+AstPrinter::AstPrinter() : indent_(0) {
 }
 
 
 AstPrinter::~AstPrinter() {
   ASSERT(indent_ == 0);
-  IndentedScope::SetAstPrinter(NULL);
 }
 
 
@@ -1009,7 +999,7 @@ void AstPrinter::VisitVariableProxy(VariableProxy* node) {
                                node->type());
   Variable* var = node->var();
   if (var != NULL && var->rewrite() != NULL) {
-    IndentedScope indent;
+    IndentedScope indent(this);
     Visit(var->rewrite());
   }
 }
@@ -1055,7 +1045,7 @@ void AstPrinter::VisitCallNew(CallNew* node) {
 
 void AstPrinter::VisitCallRuntime(CallRuntime* node) {
   PrintLiteralIndented("CALL RUNTIME ", node->name(), false);
-  IndentedScope indent;
+  IndentedScope indent(this);
   PrintArguments(node->arguments());
 }
 
index c83de345119b5dc31eb7d04f601ecf9027f72c92..284a93f32aa10300e61d06e4c58854bf9916aafb 100644 (file)
@@ -111,7 +111,7 @@ class AstPrinter: public PrettyPrinter {
   void inc_indent() { indent_++; }
   void dec_indent() { indent_--; }
 
-  static int indent_;
+  int indent_;
 };
 
 
index b74887bc4bc6f061af3ca2aa01283f5b2de0c00a..78b35e253401912d538862144323d00996ea161f 100644 (file)
@@ -31,7 +31,6 @@
 #include "global-handles.h"
 #include "heap-profiler.h"
 #include "scopeinfo.h"
-#include "top.h"
 #include "unicode.h"
 #include "zone-inl.h"
 
@@ -48,24 +47,27 @@ TokenEnumerator::TokenEnumerator()
 
 
 TokenEnumerator::~TokenEnumerator() {
+  Isolate* isolate = Isolate::Current();
   for (int i = 0; i < token_locations_.length(); ++i) {
     if (!token_removed_[i]) {
-      GlobalHandles::ClearWeakness(token_locations_[i]);
-      GlobalHandles::Destroy(token_locations_[i]);
+      isolate->global_handles()->ClearWeakness(token_locations_[i]);
+      isolate->global_handles()->Destroy(token_locations_[i]);
     }
   }
 }
 
 
 int TokenEnumerator::GetTokenId(Object* token) {
+  Isolate* isolate = Isolate::Current();
   if (token == NULL) return TokenEnumerator::kNoSecurityToken;
   for (int i = 0; i < token_locations_.length(); ++i) {
     if (*token_locations_[i] == token && !token_removed_[i]) return i;
   }
-  Handle<Object> handle = GlobalHandles::Create(token);
+  Handle<Object> handle = isolate->global_handles()->Create(token);
   // handle.location() points to a memory cell holding a pointer
   // to a token object in the V8's heap.
-  GlobalHandles::MakeWeak(handle.location(), this, TokenRemovedCallback);
+  isolate->global_handles()->MakeWeak(handle.location(), this,
+                                      TokenRemovedCallback);
   token_locations_.Add(handle.location());
   token_removed_.Add(false);
   return token_locations_.length() - 1;
@@ -162,7 +164,7 @@ const char* StringsStorage::GetName(int index) {
 }
 
 
-const char* CodeEntry::kEmptyNamePrefix = "";
+const char* const CodeEntry::kEmptyNamePrefix = "";
 
 
 void CodeEntry::CopyData(const CodeEntry& source) {
@@ -783,10 +785,12 @@ void SampleRateCalculator::UpdateMeasurements(double current_time) {
 }
 
 
-const char* ProfileGenerator::kAnonymousFunctionName = "(anonymous function)";
-const char* ProfileGenerator::kProgramEntryName = "(program)";
-const char* ProfileGenerator::kGarbageCollectorEntryName =
-  "(garbage collector)";
+const char* const ProfileGenerator::kAnonymousFunctionName =
+    "(anonymous function)";
+const char* const ProfileGenerator::kProgramEntryName =
+    "(program)";
+const char* const ProfileGenerator::kGarbageCollectorEntryName =
+    "(garbage collector)";
 
 
 ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
@@ -1882,8 +1886,8 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
     ExtractPropertyReferences(js_obj, entry);
     ExtractElementReferences(js_obj, entry);
     ExtractInternalReferences(js_obj, entry);
-    SetPropertyReference(obj, entry,
-                         Heap::Proto_symbol(), js_obj->GetPrototype());
+    SetPropertyReference(
+        obj, entry, HEAP->Proto_symbol(), js_obj->GetPrototype());
     if (obj->IsJSFunction()) {
       JSFunction* js_fun = JSFunction::cast(js_obj);
       SetInternalReference(
@@ -1895,12 +1899,12 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
         if (!proto_or_map->IsMap()) {
           SetPropertyReference(
               obj, entry,
-              Heap::prototype_symbol(), proto_or_map,
+              HEAP->prototype_symbol(), proto_or_map,
               JSFunction::kPrototypeOrInitialMapOffset);
         } else {
           SetPropertyReference(
               obj, entry,
-              Heap::prototype_symbol(), js_fun->prototype());
+              HEAP->prototype_symbol(), js_fun->prototype());
         }
       }
     }
@@ -2069,7 +2073,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
   }
   SetRootGcRootsReference();
   RootsReferencesExtractor extractor(this);
-  Heap::IterateRoots(&extractor, VISIT_ALL);
+  HEAP->IterateRoots(&extractor, VISIT_ALL);
   filler_ = NULL;
   return progress_->ProgressReport(false);
 }
@@ -2308,9 +2312,10 @@ int NativeObjectsExplorer::EstimateObjectsCount() {
 
 void NativeObjectsExplorer::FillRetainedObjects() {
   if (embedder_queried_) return;
+  Isolate* isolate = Isolate::Current();
   // Record objects that are joined into ObjectGroups.
-  Heap::CallGlobalGCPrologueCallback();
-  List<ObjectGroup*>* groups = GlobalHandles::ObjectGroups();
+  isolate->heap()->CallGlobalGCPrologueCallback();
+  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
   for (int i = 0; i < groups->length(); ++i) {
     ObjectGroup* group = groups->at(i);
     if (group->info_ == NULL) continue;
@@ -2322,11 +2327,11 @@ void NativeObjectsExplorer::FillRetainedObjects() {
     }
     group->info_ = NULL;  // Acquire info object ownership.
   }
-  GlobalHandles::RemoveObjectGroups();
-  Heap::CallGlobalGCEpilogueCallback();
+  isolate->global_handles()->RemoveObjectGroups();
+  isolate->heap()->CallGlobalGCEpilogueCallback();
   // Record objects that are not in ObjectGroups, but have class ID.
   GlobalHandlesExtractor extractor(this);
-  GlobalHandles::IterateAllRootsWithClassIds(&extractor);
+  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
   embedder_queried_ = true;
 }
 
@@ -2404,8 +2409,9 @@ void NativeObjectsExplorer::SetRootNativesRootReference() {
 
 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
   if (in_groups_.Contains(*p)) return;
+  Isolate* isolate = Isolate::Current();
   v8::RetainedObjectInfo* info =
-      HeapProfiler::ExecuteWrapperClassCallback(class_id, p);
+      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
   if (info == NULL) return;
   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
 }
@@ -3182,7 +3188,7 @@ void HeapSnapshotJSONSerializer::SortHashMap(
 
 
 String* GetConstructorNameForHeapProfile(JSObject* object) {
-  if (object->IsJSFunction()) return Heap::closure_symbol();
+  if (object->IsJSFunction()) return HEAP->closure_symbol();
   return object->constructor_name();
 }
 
index 0cbde16fb6e93000d93d336befa8d822a8d57442..d444e75ae8e595e2130264c561226f694b55adc5 100644 (file)
@@ -114,7 +114,7 @@ class CodeEntry {
   uint32_t GetCallUid() const;
   bool IsSameAs(CodeEntry* entry) const;
 
-  static const char* kEmptyNamePrefix;
+  static const char* const kEmptyNamePrefix;
 
  private:
   Logger::LogEventsAndTags tag_;
@@ -422,9 +422,9 @@ class ProfileGenerator {
     return sample_rate_calc_.ticks_per_ms();
   }
 
-  static const char* kAnonymousFunctionName;
-  static const char* kProgramEntryName;
-  static const char* kGarbageCollectorEntryName;
+  static const char* const kAnonymousFunctionName;
+  static const char* const kProgramEntryName;
+  static const char* const kGarbageCollectorEntryName;
 
  private:
   INLINE(CodeEntry* EntryForVMState(StateTag tag));
index c39fe41e75ddd99bc6b9a1cf52aebc34599f2b9f..98d1c898063135b572c2fa74bffd28aee447a553 100644 (file)
@@ -48,7 +48,7 @@ class Descriptor BASE_EMBEDDED {
   MUST_USE_RESULT MaybeObject* KeyToSymbol() {
     if (!StringShape(key_).IsSymbol()) {
       Object* result;
-      { MaybeObject* maybe_result = Heap::LookupSymbol(key_);
+      { MaybeObject* maybe_result = HEAP->LookupSymbol(key_);
         if (!maybe_result->ToObject(&result)) return maybe_result;
       }
       key_ = String::cast(result);
@@ -305,7 +305,7 @@ class LookupResult BASE_EMBEDDED {
   Object* GetCallbackObject() {
     if (lookup_type_ == CONSTANT_TYPE) {
       // For now we only have the __proto__ as constant type.
-      return Heap::prototype_accessors();
+      return HEAP->prototype_accessors();
     }
     return GetValue();
   }
index 6fbb14adda8b1fb07bbd90c3fd7d9badcf2e7791..d41a97ca0ebf9ff22192574f0c185e9224b03d58 100644 (file)
@@ -438,7 +438,7 @@ void RegExpMacroAssemblerIrregexp::IfRegisterEqPos(int register_index,
 Handle<Object> RegExpMacroAssemblerIrregexp::GetCode(Handle<String> source) {
   Bind(&backtrack_);
   Emit(BC_POP_BT, 0);
-  Handle<ByteArray> array = Factory::NewByteArray(length());
+  Handle<ByteArray> array = FACTORY->NewByteArray(length());
   Copy(array->GetDataStartAddress());
   return array;
 }
index 51f4015f6cf35da4daf2f1a344a139cea385ab45..ea41db63e21e96dd82ef7ab48985e874ceab4527 100644 (file)
@@ -105,7 +105,8 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
     Handle<String> subject,
     int* offsets_vector,
     int offsets_vector_length,
-    int previous_index) {
+    int previous_index,
+    Isolate* isolate) {
 
   ASSERT(subject->IsFlat());
   ASSERT(previous_index >= 0);
@@ -142,7 +143,8 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
                        start_offset,
                        input_start,
                        input_end,
-                       offsets_vector);
+                       offsets_vector,
+                       isolate);
   return res;
 }
 
@@ -153,10 +155,12 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
     int start_offset,
     const byte* input_start,
     const byte* input_end,
-    int* output) {
+    int* output,
+    Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
   // Ensure that the minimum stack has been allocated.
-  RegExpStack stack;
-  Address stack_base = RegExpStack::stack_base();
+  RegExpStackScope stack_scope(isolate);
+  Address stack_base = stack_scope.stack()->stack_base();
 
   int direct_call = 0;
   int result = CALL_GENERATED_REGEXP_CODE(code->entry(),
@@ -166,23 +170,21 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
                                           input_end,
                                           output,
                                           stack_base,
-                                          direct_call);
+                                          direct_call,
+                                          isolate);
   ASSERT(result <= SUCCESS);
   ASSERT(result >= RETRY);
 
-  if (result == EXCEPTION && !Top::has_pending_exception()) {
+  if (result == EXCEPTION && !isolate->has_pending_exception()) {
     // We detected a stack overflow (on the backtrack stack) in RegExp code,
     // but haven't created the exception yet.
-    Top::StackOverflow();
+    isolate->StackOverflow();
   }
   return static_cast<Result>(result);
 }
 
 
-static unibrow::Mapping<unibrow::Ecma262Canonicalize> canonicalize;
-
-
-byte NativeRegExpMacroAssembler::word_character_map[] = {
+const byte NativeRegExpMacroAssembler::word_character_map[] = {
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
@@ -208,7 +210,11 @@ byte NativeRegExpMacroAssembler::word_character_map[] = {
 int NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16(
     Address byte_offset1,
     Address byte_offset2,
-    size_t byte_length) {
+    size_t byte_length,
+    Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>* canonicalize =
+      isolate->regexp_macro_assembler_canonicalize();
   // This function is not allowed to cause a garbage collection.
   // A GC might move the calling generated code and invalidate the
   // return address on the stack.
@@ -222,10 +228,10 @@ int NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16(
     unibrow::uchar c2 = substring2[i];
     if (c1 != c2) {
       unibrow::uchar s1[1] = { c1 };
-      canonicalize.get(c1, '\0', s1);
+      canonicalize->get(c1, '\0', s1);
       if (s1[0] != c2) {
         unibrow::uchar s2[1] = { c2 };
-        canonicalize.get(c2, '\0', s2);
+        canonicalize->get(c2, '\0', s2);
         if (s1[0] != s2[0]) {
           return 0;
         }
@@ -237,13 +243,16 @@ int NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16(
 
 
 Address NativeRegExpMacroAssembler::GrowStack(Address stack_pointer,
-                                              Address* stack_base) {
-  size_t size = RegExpStack::stack_capacity();
-  Address old_stack_base = RegExpStack::stack_base();
+                                              Address* stack_base,
+                                              Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  RegExpStack* regexp_stack = isolate->regexp_stack();
+  size_t size = regexp_stack->stack_capacity();
+  Address old_stack_base = regexp_stack->stack_base();
   ASSERT(old_stack_base == *stack_base);
   ASSERT(stack_pointer <= old_stack_base);
   ASSERT(static_cast<size_t>(old_stack_base - stack_pointer) <= size);
-  Address new_stack_base = RegExpStack::EnsureCapacity(size * 2);
+  Address new_stack_base = regexp_stack->EnsureCapacity(size * 2);
   if (new_stack_base == NULL) {
     return NULL;
   }
index ef85d27e521e80f940e1a189302df1273e12229e..0d60e796cec56ddcb14b502c82520f2bb0d2edcd 100644 (file)
@@ -190,30 +190,33 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
                       Handle<String> subject,
                       int* offsets_vector,
                       int offsets_vector_length,
-                      int previous_index);
+                      int previous_index,
+                      Isolate* isolate);
 
   // Compares two-byte strings case insensitively.
   // Called from generated RegExp code.
   static int CaseInsensitiveCompareUC16(Address byte_offset1,
                                         Address byte_offset2,
-                                        size_t byte_length);
+                                        size_t byte_length,
+                                        Isolate* isolate);
 
   // Called from RegExp if the backtrack stack limit is hit.
   // Tries to expand the stack. Returns the new stack-pointer if
   // successful, and updates the stack_top address, or returns 0 if unable
   // to grow the stack.
   // This function must not trigger a garbage collection.
-  static Address GrowStack(Address stack_pointer, Address* stack_top);
+  static Address GrowStack(Address stack_pointer, Address* stack_top,
+                           Isolate* isolate);
 
   static const byte* StringCharacterPosition(String* subject, int start_index);
 
   // Byte map of ASCII characters with a 0xff if the character is a word
   // character (digit, letter or underscore) and 0x00 otherwise.
   // Used by generated RegExp code.
-  static byte word_character_map[128];
+  static const byte word_character_map[128];
 
   static Address word_character_map_address() {
-    return &word_character_map[0];
+    return const_cast<Address>(&word_character_map[0]);
   }
 
   static Result Execute(Code* code,
@@ -221,7 +224,8 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
                         int start_offset,
                         const byte* input_start,
                         const byte* input_end,
-                        int* output);
+                        int* output,
+                        Isolate* isolate);
 };
 
 #endif  // V8_INTERPRETED_REGEXP
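
The regexp changes above thread an explicit Isolate* through the static helpers that generated code calls back into (Match, Execute, CaseInsensitiveCompareUC16, GrowStack), replacing the file-level static canonicalize mapping and the static RegExpStack with per-isolate state. As a toy illustration of that refactoring shape only (made-up names, not the V8 signatures), a re-entrant helper receives its per-isolate state as a parameter instead of touching a global:

// explicit_context_sketch.cc -- illustrative sketch only; names are made up.
#include <cctype>
#include <cstddef>

// Per-isolate case-folding state that previously lived in a file-level static.
struct CaseTable {
  unsigned char Fold(unsigned char c) const {
    return static_cast<unsigned char>(std::tolower(c));
  }
};

struct IsolateLike {
  CaseTable case_table;
};

// The helper now receives the context it needs as an argument, so two
// isolates can run regexp code concurrently without sharing mutable globals.
bool EqualIgnoreCase(const unsigned char* a,
                     const unsigned char* b,
                     std::size_t length,
                     const IsolateLike* isolate) {
  for (std::size_t i = 0; i < length; ++i) {
    if (isolate->case_table.Fold(a[i]) != isolate->case_table.Fold(b[i])) {
      return false;
    }
  }
  return true;
}
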
index 7696279a1d581dd33466754d9f1c72deb8a6beb4..ff9547f3a711ed0f44126aa624d421b734178bb8 100644 (file)
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "v8.h"
-#include "top.h"
 #include "regexp-stack.h"
 
 namespace v8 {
 namespace internal {
 
-RegExpStack::RegExpStack() {
+RegExpStackScope::RegExpStackScope(Isolate* isolate)
+    : regexp_stack_(isolate->regexp_stack()) {
   // Initialize, if not already initialized.
-  RegExpStack::EnsureCapacity(0);
+  regexp_stack_->EnsureCapacity(0);
 }
 
 
-RegExpStack::~RegExpStack() {
+RegExpStackScope::~RegExpStackScope() {
+  ASSERT(Isolate::Current() == regexp_stack_->isolate_);
   // Reset the buffer if it has grown.
-  RegExpStack::Reset();
+  regexp_stack_->Reset();
+}
+
+
+RegExpStack::RegExpStack()
+    : isolate_(NULL) {
+}
+
+
+RegExpStack::~RegExpStack() {
 }
 
 
@@ -70,9 +80,9 @@ void RegExpStack::Reset() {
 
 
 void RegExpStack::ThreadLocal::Free() {
-  if (thread_local_.memory_size_ > 0) {
-    DeleteArray(thread_local_.memory_);
-    thread_local_ = ThreadLocal();
+  if (memory_size_ > 0) {
+    DeleteArray(memory_);
+    Clear();
   }
 }
 
@@ -98,6 +108,4 @@ Address RegExpStack::EnsureCapacity(size_t size) {
 }
 
 
-RegExpStack::ThreadLocal RegExpStack::thread_local_;
-
 }}  // namespace v8::internal
index b4fa2e9204cca56e144722fd4f3bac3fa3348fcd..59432067e4375f2bdea1d622a55cce4c4dd4dba4 100644 (file)
 namespace v8 {
 namespace internal {
 
+class RegExpStack;
+
 // Maintains a per-v8thread stack area that can be used by irregexp
 // implementation for its backtracking stack.
 // Since there is only one stack area, the Irregexp implementation is not
 // re-entrant. I.e., no regular expressions may be executed in the same thread
 // during a preempted Irregexp execution.
+class RegExpStackScope {
+ public:
+  // Create and delete an instance to control the life-time of a growing stack.
+
+  // Initializes the stack memory area if necessary.
+  explicit RegExpStackScope(Isolate* isolate);
+  ~RegExpStackScope();  // Releases the stack if it has grown.
+
+  RegExpStack* stack() const { return regexp_stack_; }
+
+ private:
+  RegExpStack* regexp_stack_;
+
+  DISALLOW_COPY_AND_ASSIGN(RegExpStackScope);
+};
+
+
 class RegExpStack {
  public:
   // Number of allocated locations on the stack below the limit.
@@ -43,39 +62,37 @@ class RegExpStack {
   // check.
   static const int kStackLimitSlack = 32;
 
-  // Create and delete an instance to control the life-time of a growing stack.
-  RegExpStack();  // Initializes the stack memory area if necessary.
-  ~RegExpStack();  // Releases the stack if it has grown.
-
   // Gives the top of the memory used as stack.
-  static Address stack_base() {
+  Address stack_base() {
     ASSERT(thread_local_.memory_size_ != 0);
     return thread_local_.memory_ + thread_local_.memory_size_;
   }
 
   // The total size of the memory allocated for the stack.
-  static size_t stack_capacity() { return thread_local_.memory_size_; }
+  size_t stack_capacity() { return thread_local_.memory_size_; }
 
   // If the stack pointer gets below the limit, we should react and
   // either grow the stack or report an out-of-stack exception.
   // There is only a limited number of locations below the stack limit,
   // so users of the stack should check the stack limit during any
   // sequence of pushes longer that this.
-  static Address* limit_address() { return &(thread_local_.limit_); }
+  Address* limit_address() { return &(thread_local_.limit_); }
 
   // Ensures that there is a memory area with at least the specified size.
   // If passing zero, the default/minimum size buffer is allocated.
-  static Address EnsureCapacity(size_t size);
+  Address EnsureCapacity(size_t size);
 
   // Thread local archiving.
   static int ArchiveSpacePerThread() {
-    return static_cast<int>(sizeof(thread_local_));
+    return static_cast<int>(sizeof(ThreadLocal));
   }
-  static char* ArchiveStack(char* to);
-  static char* RestoreStack(char* from);
-  static void FreeThreadResources() { thread_local_.Free(); }
-
+  char* ArchiveStack(char* to);
+  char* RestoreStack(char* from);
+  void FreeThreadResources() { thread_local_.Free(); }
  private:
+  RegExpStack();
+  ~RegExpStack();
+
   // Artificial limit used when no memory has been allocated.
   static const uintptr_t kMemoryTop = static_cast<uintptr_t>(-1);
 
@@ -87,35 +104,42 @@ class RegExpStack {
 
   // Structure holding the allocated memory, size and limit.
   struct ThreadLocal {
-    ThreadLocal()
-        : memory_(NULL),
-          memory_size_(0),
-          limit_(reinterpret_cast<Address>(kMemoryTop)) {}
+    ThreadLocal() { Clear(); }
     // If memory_size_ > 0 then memory_ must be non-NULL.
     Address memory_;
     size_t memory_size_;
     Address limit_;
+    void Clear() {
+      memory_ = NULL;
+      memory_size_ = 0;
+      limit_ = reinterpret_cast<Address>(kMemoryTop);
+    }
     void Free();
   };
 
   // Address of allocated memory.
-  static Address memory_address() {
+  Address memory_address() {
     return reinterpret_cast<Address>(&thread_local_.memory_);
   }
 
   // Address of size of allocated memory.
-  static Address memory_size_address() {
+  Address memory_size_address() {
     return reinterpret_cast<Address>(&thread_local_.memory_size_);
   }
 
   // Resets the buffer if it has grown beyond the default/minimum size.
   // After this, the buffer is either the default size, or it is empty, so
   // you have to call EnsureCapacity before using it again.
-  static void Reset();
+  void Reset();
 
-  static ThreadLocal thread_local_;
+  ThreadLocal thread_local_;
+  Isolate* isolate_;
 
   friend class ExternalReference;
+  friend class Isolate;
+  friend class RegExpStackScope;
+
+  DISALLOW_COPY_AND_ASSIGN(RegExpStack);
 };
 
 }}  // namespace v8::internal
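
With this change RegExpStack becomes an ordinary per-isolate object, and the old trick of constructing and destructing a RegExpStack to initialize and reset the backtracking buffer is replaced by the RegExpStackScope RAII wrapper. A compact sketch of that scope pattern follows, with simplified names and a std::vector standing in for the allocated memory area; it mirrors the shape of the patch, not its exact implementation.

// regexp_stack_scope_sketch.cc -- illustrative sketch only; simplified names.
#include <cstddef>
#include <vector>

static const std::size_t kDefaultStackSize = 1024;

// A per-isolate backtracking stack that grows on demand.
class BacktrackStack {
 public:
  // Ensures at least `size` bytes (or the default minimum) are allocated.
  void EnsureCapacity(std::size_t size) {
    std::size_t wanted = size < kDefaultStackSize ? kDefaultStackSize : size;
    if (buffer_.size() < wanted) buffer_.resize(wanted);
  }

  // Shrinks back to the default size after a regexp run, so one pathological
  // expression cannot pin a huge buffer for the rest of the isolate's life.
  void Reset() {
    if (buffer_.size() > kDefaultStackSize) {
      buffer_.resize(kDefaultStackSize);
      buffer_.shrink_to_fit();
    }
  }

  std::size_t capacity() const { return buffer_.size(); }

 private:
  std::vector<unsigned char> buffer_;
};

// Scope object that borrows the per-isolate stack: the constructor makes sure
// the minimum buffer exists, the destructor trims any growth, mirroring what
// RegExpStackScope does around NativeRegExpMacroAssembler::Execute().
class BacktrackStackScope {
 public:
  explicit BacktrackStackScope(BacktrackStack* stack) : stack_(stack) {
    stack_->EnsureCapacity(0);
  }
  ~BacktrackStackScope() { stack_->Reset(); }

  BacktrackStack* stack() const { return stack_; }

 private:
  BacktrackStack* stack_;
};
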
index e0ea9e18946a02f1eed72aa39d6f5be4d05a2d9f..5a68ab0c6529a9424bd71d475a050483f28d4e78 100644 (file)
@@ -63,14 +63,14 @@ Result& Result::operator=(const Result& other) {
 
 Result::~Result() {
   if (is_register()) {
-    CodeGeneratorScope::Current()->allocator()->Unuse(reg());
+    CodeGeneratorScope::Current(Isolate::Current())->allocator()->Unuse(reg());
   }
 }
 
 
 void Result::Unuse() {
   if (is_register()) {
-    CodeGeneratorScope::Current()->allocator()->Unuse(reg());
+    CodeGeneratorScope::Current(Isolate::Current())->allocator()->Unuse(reg());
   }
   invalidate();
 }
@@ -79,7 +79,7 @@ void Result::Unuse() {
 void Result::CopyTo(Result* destination) const {
   destination->value_ = value_;
   if (is_register()) {
-    CodeGeneratorScope::Current()->allocator()->Use(reg());
+    CodeGeneratorScope::Current(Isolate::Current())->allocator()->Use(reg());
   }
 }
 
index 31d0a49fa55f7ddbf7d07290d59a656a52b9f9e9..cb5e35feb0f1a5b8eb2d1e60bd0cc23d23ef369d 100644 (file)
@@ -40,19 +40,13 @@ namespace internal {
 
 Result::Result(Register reg, TypeInfo info) {
   ASSERT(reg.is_valid() && !RegisterAllocator::IsReserved(reg));
-  CodeGeneratorScope::Current()->allocator()->Use(reg);
+  CodeGeneratorScope::Current(Isolate::Current())->allocator()->Use(reg);
   value_ = TypeField::encode(REGISTER)
       | TypeInfoField::encode(info.ToInt())
       | DataField::encode(reg.code_);
 }
 
 
-Result::ZoneObjectList* Result::ConstantList() {
-  static ZoneObjectList list(10);
-  return &list;
-}
-
-
 // -------------------------------------------------------------------------
 // RegisterAllocator implementation.
 
index a03a9d2fb845c3ba4e569ef4af8d4941db19b69f..f0ef9c324d8c4fb02b542b29e5ed4fd63aeb6e25 100644 (file)
@@ -69,12 +69,13 @@ class Result BASE_EMBEDDED {
 
   // Construct a Result whose value is a compile-time constant.
   explicit Result(Handle<Object> value) {
+    ZoneObjectList* constant_list = Isolate::Current()->result_constant_list();
     TypeInfo info = TypeInfo::TypeFromValue(value);
     value_ = TypeField::encode(CONSTANT)
         | TypeInfoField::encode(info.ToInt())
         | IsUntaggedInt32Field::encode(false)
-        | DataField::encode(ConstantList()->length());
-    ConstantList()->Add(value);
+        | DataField::encode(constant_list->length());
+    constant_list->Add(value);
   }
 
   // The copy constructor and assignment operators could each create a new
@@ -85,18 +86,6 @@ class Result BASE_EMBEDDED {
 
   inline ~Result();
 
-  // Static indirection table for handles to constants.  If a Result
-  // represents a constant, the data contains an index into this table
-  // of handles to the actual constants.
-  typedef ZoneList<Handle<Object> > ZoneObjectList;
-
-  static ZoneObjectList* ConstantList();
-
-  // Clear the constants indirection table.
-  static void ClearConstantList() {
-    ConstantList()->Clear();
-  }
-
   inline void Unuse();
 
   Type type() const { return TypeField::decode(value_); }
@@ -137,7 +126,8 @@ class Result BASE_EMBEDDED {
 
   Handle<Object> handle() const {
     ASSERT(type() == CONSTANT);
-    return ConstantList()->at(DataField::decode(value_));
+    return Isolate::Current()->result_constant_list()->
+        at(DataField::decode(value_));
   }
 
   // Move this result to an arbitrary register.  The register is not
index fd40cdc3fa0b9c734756c6c55e48fcafee58e011..780314d9c979c17cf07d6d7dceff6ed830bedcb7 100644 (file)
@@ -989,7 +989,8 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
 
   ZoneList<Statement*>* body = function->body();
   if (!body->is_empty()) {
-    Variable* result = scope->NewTemporary(Factory::result_symbol());
+    Variable* result = scope->NewTemporary(
+        info->isolate()->factory()->result_symbol());
     Processor processor(result);
     processor.Process(body);
     if (processor.HasStackOverflow()) return false;
index df6471e9d5b9a258e7dfd8ddb098f05459f95927..98dc00bd07a4d570d23a3dc97611d9b263a2e1da 100644 (file)
@@ -36,8 +36,8 @@
 #include "execution.h"
 #include "global-handles.h"
 #include "mark-compact.h"
+#include "platform.h"
 #include "scopeinfo.h"
-#include "top.h"
 
 namespace v8 {
 namespace internal {
@@ -69,16 +69,9 @@ class PendingListNode : public Malloced {
 };
 
 
-enum SamplerState {
-  IN_NON_JS_STATE = 0,
-  IN_JS_STATE = 1
-};
-
-
 // Optimization sampler constants.
 static const int kSamplerFrameCount = 2;
 static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
-static const int kSamplerWindowSize = 16;
 
 static const int kSamplerTicksBetweenThresholdAdjustment = 32;
 
@@ -92,34 +85,19 @@ static const int kSamplerThresholdSizeFactorDelta = 1;
 
 static const int kSizeLimit = 1500;
 
-static int sampler_threshold = kSamplerThresholdInit;
-static int sampler_threshold_size_factor = kSamplerThresholdSizeFactorInit;
-
-static int sampler_ticks_until_threshold_adjustment =
-    kSamplerTicksBetweenThresholdAdjustment;
-
-// The ratio of ticks spent in JS code in percent.
-static Atomic32 js_ratio;
-
-static Object* sampler_window[kSamplerWindowSize] = { NULL, };
-static int sampler_window_position = 0;
-static int sampler_window_weight[kSamplerWindowSize] = { 0, };
-
-
-// Support for pending 'optimize soon' requests.
-static PendingListNode* optimize_soon_list = NULL;
-
 
 PendingListNode::PendingListNode(JSFunction* function) : next_(NULL) {
-  function_ = GlobalHandles::Create(function);
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+  function_ = global_handles->Create(function);
   start_ = OS::Ticks();
-  GlobalHandles::MakeWeak(function_.location(), this, &WeakCallback);
+  global_handles->MakeWeak(function_.location(), this, &WeakCallback);
 }
 
 
 void PendingListNode::Destroy() {
   if (!IsValid()) return;
-  GlobalHandles::Destroy(function_.location());
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+  global_handles->Destroy(function_.location());
   function_= Handle<Object>::null();
 }
 
@@ -135,7 +113,37 @@ static bool IsOptimizable(JSFunction* function) {
 }
 
 
-static void Optimize(JSFunction* function, bool eager, int delay) {
+Atomic32 RuntimeProfiler::state_ = 0;
+// TODO(isolates): Create the semaphore lazily and clean it up when no
+// longer required.
+#ifdef ENABLE_LOGGING_AND_PROFILING
+Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
+#endif
+
+
+RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
+    : isolate_(isolate),
+      sampler_threshold_(kSamplerThresholdInit),
+      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
+      sampler_ticks_until_threshold_adjustment_(
+        kSamplerTicksBetweenThresholdAdjustment),
+      js_ratio_(0),
+      sampler_window_position_(0),
+      optimize_soon_list_(NULL),
+      state_window_position_(0) {
+  state_counts_[0] = kStateWindowSize;
+  state_counts_[1] = 0;
+  memset(state_window_, 0, sizeof(state_window_));
+  ClearSampleBuffer();
+}
+
+
+bool RuntimeProfiler::IsEnabled() {
+  return V8::UseCrankshaft() && FLAG_opt;
+}
+
+
+void RuntimeProfiler::Optimize(JSFunction* function, bool eager, int delay) {
   ASSERT(IsOptimizable(function));
   if (FLAG_trace_opt) {
     PrintF("[marking (%s) ", eager ? "eagerly" : "lazily");
@@ -152,11 +160,13 @@ static void Optimize(JSFunction* function, bool eager, int delay) {
 }
 
 
-static void AttemptOnStackReplacement(JSFunction* function) {
+void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // See AlwaysFullCompiler (in compiler.cc) comment on why we need
   // Debug::has_break_points().
   ASSERT(function->IsMarkedForLazyRecompilation());
-  if (!FLAG_use_osr || Debug::has_break_points() || function->IsBuiltin()) {
+  if (!FLAG_use_osr ||
+      isolate_->debug()->has_break_points() ||
+      function->IsBuiltin()) {
     return;
   }
 
@@ -186,7 +196,8 @@ static void AttemptOnStackReplacement(JSFunction* function) {
   Object* check_code;
   MaybeObject* maybe_check_code = check_stub.TryGetCode();
   if (maybe_check_code->ToObject(&check_code)) {
-    Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement);
+    Code* replacement_code =
+        isolate_->builtins()->builtin(Builtins::OnStackReplacement);
     Code* unoptimized_code = shared->code();
     Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                      Code::cast(check_code),
@@ -195,21 +206,19 @@ static void AttemptOnStackReplacement(JSFunction* function) {
 }
 
 
-static void ClearSampleBuffer() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    sampler_window[i] = NULL;
-    sampler_window_weight[i] = 0;
-  }
+void RuntimeProfiler::ClearSampleBuffer() {
+  memset(sampler_window_, 0, sizeof(sampler_window_));
+  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
 }
 
 
-static int LookupSample(JSFunction* function) {
+int RuntimeProfiler::LookupSample(JSFunction* function) {
   int weight = 0;
   for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* sample = sampler_window[i];
+    Object* sample = sampler_window_[i];
     if (sample != NULL) {
       if (function == sample) {
-        weight += sampler_window_weight[i];
+        weight += sampler_window_weight_[i];
       }
     }
   }
@@ -217,18 +226,18 @@ static int LookupSample(JSFunction* function) {
 }
 
 
-static void AddSample(JSFunction* function, int weight) {
+void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
   ASSERT(IsPowerOf2(kSamplerWindowSize));
-  sampler_window[sampler_window_position] = function;
-  sampler_window_weight[sampler_window_position] = weight;
-  sampler_window_position = (sampler_window_position + 1) &
+  sampler_window_[sampler_window_position_] = function;
+  sampler_window_weight_[sampler_window_position_] = weight;
+  sampler_window_position_ = (sampler_window_position_ + 1) &
       (kSamplerWindowSize - 1);
 }
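
AddSample()/LookupSample() above implement a fixed-size ring buffer keyed by
function: the write position wraps with a power-of-two mask, and a lookup sums the
weights of every slot currently holding that function. The same bookkeeping in a
standalone form (plain pointers, no GC concerns; the size mirrors the constant
above):

    static const int kWindowSize = 16;        // must stay a power of two
    static const void* window_[kWindowSize];  // sampled functions
    static int weight_[kWindowSize];
    static int position_ = 0;

    void AddSample(const void* fn, int weight) {
      window_[position_] = fn;
      weight_[position_] = weight;
      position_ = (position_ + 1) & (kWindowSize - 1);  // cheap wrap-around
    }

    int LookupSample(const void* fn) {
      int total = 0;
      for (int i = 0; i < kWindowSize; i++) {
        if (window_[i] == fn) total += weight_[i];  // sum weights of all hits
      }
      return total;
    }
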
 
 
 void RuntimeProfiler::OptimizeNow() {
-  HandleScope scope;
-  PendingListNode* current = optimize_soon_list;
+  HandleScope scope(isolate_);
+  PendingListNode* current = optimize_soon_list_;
   while (current != NULL) {
     PendingListNode* next = current->next();
     if (current->IsValid()) {
@@ -241,7 +250,7 @@ void RuntimeProfiler::OptimizeNow() {
     delete current;
     current = next;
   }
-  optimize_soon_list = NULL;
+  optimize_soon_list_ = NULL;
 
   // Run through the JavaScript frames and collect them. If we already
   // have a sample of the function, we mark it for optimizations
@@ -257,14 +266,14 @@ void RuntimeProfiler::OptimizeNow() {
 
     // Adjust threshold each time we have processed
     // a certain number of ticks.
-    if (sampler_ticks_until_threshold_adjustment > 0) {
-      sampler_ticks_until_threshold_adjustment--;
-      if (sampler_ticks_until_threshold_adjustment <= 0) {
+    if (sampler_ticks_until_threshold_adjustment_ > 0) {
+      sampler_ticks_until_threshold_adjustment_--;
+      if (sampler_ticks_until_threshold_adjustment_ <= 0) {
         // If the threshold is not already at the minimum
         // modify and reset the ticks until next adjustment.
-        if (sampler_threshold > kSamplerThresholdMin) {
-          sampler_threshold -= kSamplerThresholdDelta;
-          sampler_ticks_until_threshold_adjustment =
+        if (sampler_threshold_ > kSamplerThresholdMin) {
+          sampler_threshold_ -= kSamplerThresholdDelta;
+          sampler_ticks_until_threshold_adjustment_ =
               kSamplerTicksBetweenThresholdAdjustment;
         }
       }
@@ -284,11 +293,11 @@ void RuntimeProfiler::OptimizeNow() {
 
     int function_size = function->shared()->SourceSize();
     int threshold_size_factor = (function_size > kSizeLimit)
-        ? sampler_threshold_size_factor
+        ? sampler_threshold_size_factor_
         : 1;
 
-    int threshold = sampler_threshold * threshold_size_factor;
-    int current_js_ratio = NoBarrier_Load(&js_ratio);
+    int threshold = sampler_threshold_ * threshold_size_factor;
+    int current_js_ratio = NoBarrier_Load(&js_ratio_);
 
     // Adjust threshold depending on the ratio of time spent
     // in JS code.
@@ -304,7 +313,8 @@ void RuntimeProfiler::OptimizeNow() {
 
     if (LookupSample(function) >= threshold) {
       Optimize(function, false, 0);
-      CompilationCache::MarkForEagerOptimizing(Handle<JSFunction>(function));
+      isolate_->compilation_cache()->MarkForEagerOptimizing(
+          Handle<JSFunction>(function));
     }
   }
 
@@ -320,26 +330,21 @@ void RuntimeProfiler::OptimizeNow() {
 void RuntimeProfiler::OptimizeSoon(JSFunction* function) {
   if (!IsOptimizable(function)) return;
   PendingListNode* node = new PendingListNode(function);
-  node->set_next(optimize_soon_list);
-  optimize_soon_list = node;
+  node->set_next(optimize_soon_list_);
+  optimize_soon_list_ = node;
 }
 
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-static void UpdateStateRatio(SamplerState current_state) {
-  static const int kStateWindowSize = 128;
-  static SamplerState state_window[kStateWindowSize];
-  static int state_window_position = 0;
-  static int state_counts[2] = { kStateWindowSize, 0 };
-
-  SamplerState old_state = state_window[state_window_position];
-  state_counts[old_state]--;
-  state_window[state_window_position] = current_state;
-  state_counts[current_state]++;
+void RuntimeProfiler::UpdateStateRatio(SamplerState current_state) {
+  SamplerState old_state = state_window_[state_window_position_];
+  state_counts_[old_state]--;
+  state_window_[state_window_position_] = current_state;
+  state_counts_[current_state]++;
   ASSERT(IsPowerOf2(kStateWindowSize));
-  state_window_position = (state_window_position + 1) &
+  state_window_position_ = (state_window_position_ + 1) &
       (kStateWindowSize - 1);
-  NoBarrier_Store(&js_ratio, state_counts[IN_JS_STATE] * 100 /
+  NoBarrier_Store(&js_ratio_, state_counts_[IN_JS_STATE] * 100 /
                   kStateWindowSize);
 }
 #endif
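
UpdateStateRatio() keeps a 128-entry sliding window of per-tick sampler states plus
incrementally maintained counts, so the JS/non-JS ratio costs O(1) per tick instead
of a rescan. The same idea in isolation (the result is the percentage of the last
kWindow ticks spent in JS):

    enum State { NON_JS = 0, JS = 1 };

    static const int kWindow = 128;          // power of two, so '&' can wrap
    static State window_[kWindow];           // zero-initialized: all NON_JS
    static int counts_[2] = { kWindow, 0 };  // must agree with the window contents
    static int pos_ = 0;

    int RecordTick(State s) {
      counts_[window_[pos_]]--;              // evict the oldest sample
      window_[pos_] = s;
      counts_[s]++;
      pos_ = (pos_ + 1) & (kWindow - 1);
      return counts_[JS] * 100 / kWindow;    // what gets stored into js_ratio_
    }
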
@@ -348,11 +353,11 @@ static void UpdateStateRatio(SamplerState current_state) {
 void RuntimeProfiler::NotifyTick() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   // Record state sample.
-  SamplerState state = Top::IsInJSState()
+  SamplerState state = IsSomeIsolateInJS()
       ? IN_JS_STATE
       : IN_NON_JS_STATE;
   UpdateStateRatio(state);
-  StackGuard::RequestRuntimeProfilerTick();
+  isolate_->stack_guard()->RequestRuntimeProfilerTick();
 #endif
 }
 
@@ -361,15 +366,15 @@ void RuntimeProfiler::Setup() {
   ClearSampleBuffer();
   // If the ticker hasn't already started, make sure to do so to get
   // the ticks for the runtime profiler.
-  if (IsEnabled()) Logger::EnsureTickerStarted();
+  if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
 }
 
 
 void RuntimeProfiler::Reset() {
-  sampler_threshold = kSamplerThresholdInit;
-  sampler_ticks_until_threshold_adjustment =
+  sampler_threshold_ = kSamplerThresholdInit;
+  sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
+  sampler_ticks_until_threshold_adjustment_ =
       kSamplerTicksBetweenThresholdAdjustment;
-  sampler_threshold_size_factor = kSamplerThresholdSizeFactorInit;
 }
 
 
@@ -386,24 +391,61 @@ int RuntimeProfiler::SamplerWindowSize() {
 // Update the pointers in the sampler window after a GC.
 void RuntimeProfiler::UpdateSamplesAfterScavenge() {
   for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window[i];
-    if (function != NULL && Heap::InNewSpace(function)) {
+    Object* function = sampler_window_[i];
+    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
       MapWord map_word = HeapObject::cast(function)->map_word();
       if (map_word.IsForwardingAddress()) {
-        sampler_window[i] = map_word.ToForwardingAddress();
+        sampler_window_[i] = map_word.ToForwardingAddress();
       } else {
-        sampler_window[i] = NULL;
+        sampler_window_[i] = NULL;
       }
     }
   }
 }
 
 
+void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  // The profiler thread must still be waiting.
+  ASSERT(NoBarrier_Load(&state_) >= 0);
+  // In IsolateEnteredJS we have already incremented the counter and
+  // undid the decrement done by the profiler thread. Increment again
+  // to get the right count of active isolates.
+  NoBarrier_AtomicIncrement(&state_, 1);
+  semaphore_->Signal();
+  isolate->ResetEagerOptimizingData();
+#endif
+}
+
+
+bool RuntimeProfiler::IsSomeIsolateInJS() {
+  return NoBarrier_Load(&state_) > 0;
+}
+
+
+bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
+  ASSERT(old_state >= -1);
+  if (old_state != 0) return false;
+  semaphore_->Wait();
+#endif
+  return true;
+}
+
+
+void RuntimeProfiler::WakeUpRuntimeProfilerThreadBeforeShutdown() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  semaphore_->Signal();
+#endif
+}
+
+
 void RuntimeProfiler::RemoveDeadSamples() {
   for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window[i];
+    Object* function = sampler_window_[i];
     if (function != NULL && !HeapObject::cast(function)->IsMarked()) {
-      sampler_window[i] = NULL;
+      sampler_window_[i] = NULL;
     }
   }
 }
@@ -411,7 +453,7 @@ void RuntimeProfiler::RemoveDeadSamples() {
 
 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
   for (int i = 0; i < kSamplerWindowSize; i++) {
-    visitor->VisitPointer(&sampler_window[i]);
+    visitor->VisitPointer(&sampler_window_[i]);
   }
 }
 
@@ -419,20 +461,13 @@ void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   static const int kNonJSTicksThreshold = 100;
-  // We suspend the runtime profiler thread when not running
-  // JavaScript. If the CPU profiler is active we must not do this
-  // because it samples both JavaScript and C++ code.
-  if (RuntimeProfiler::IsEnabled() &&
-      !CpuProfiler::is_profiling() &&
-      !(FLAG_prof && FLAG_prof_auto)) {
-    if (Top::IsInJSState()) {
-      non_js_ticks_ = 0;
+  if (RuntimeProfiler::IsSomeIsolateInJS()) {
+    non_js_ticks_ = 0;
+  } else {
+    if (non_js_ticks_ < kNonJSTicksThreshold) {
+      ++non_js_ticks_;
     } else {
-      if (non_js_ticks_ < kNonJSTicksThreshold) {
-        ++non_js_ticks_;
-      } else {
-        if (Top::WaitForJSState()) return true;
-      }
+      return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
     }
   }
 #endif
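
The rewritten SuspendIfNecessary() drops the per-process CpuProfiler and flag checks
and reduces to: reset a counter whenever any isolate is running JS, otherwise count
ticks, and after kNonJSTicksThreshold consecutive non-JS ticks park the profiler
thread via WaitForSomeIsolateToEnterJS(). Distilled, with the wait call stubbed out
(the real parking is the semaphore handshake declared in the header below):

    class RateLimiter {
     public:
      // Returns true if the thread actually suspended; the caller must then
      // re-check whether profiling is still enabled.
      bool SuspendIfNecessary(bool some_isolate_in_js) {
        static const int kNonJSTicksThreshold = 100;
        if (some_isolate_in_js) {
          non_js_ticks_ = 0;                     // any JS activity resets the count
        } else if (non_js_ticks_ < kNonJSTicksThreshold) {
          ++non_js_ticks_;
        } else {
          return WaitForSomeIsolateToEnterJS();  // park until an isolate enters JS
        }
        return false;
      }
     private:
      bool WaitForSomeIsolateToEnterJS() { return true; }  // stub for this sketch
      int non_js_ticks_ = 0;
    };
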
index 02defc9b2cf18bf18fa36618b8da5fb02ff001a8..8074035a92da02d37c4c8217db7f9275c508d98a 100644 (file)
 #ifndef V8_RUNTIME_PROFILER_H_
 #define V8_RUNTIME_PROFILER_H_
 
-#include "v8.h"
 #include "allocation.h"
+#include "atomicops.h"
 
 namespace v8 {
 namespace internal {
 
-class RuntimeProfiler : public AllStatic {
+class Isolate;
+class JSFunction;
+class Object;
+class PendingListNode;
+class Semaphore;
+
+
+enum SamplerState {
+  IN_NON_JS_STATE = 0,
+  IN_JS_STATE = 1
+};
+
+
+class RuntimeProfiler {
  public:
-  static bool IsEnabled() { return V8::UseCrankshaft() && FLAG_opt; }
+  explicit RuntimeProfiler(Isolate* isolate);
+
+  static bool IsEnabled();
+
+  void OptimizeNow();
+  void OptimizeSoon(JSFunction* function);
+
+  void NotifyTick();
+
+  void Setup();
+  void Reset();
+  void TearDown();
+
+  Object** SamplerWindowAddress();
+  int SamplerWindowSize();
+
+  // Rate limiting support.
+
+  // VM thread interface.
+  //
+  // Called by isolates when their states change.
+  static inline void IsolateEnteredJS(Isolate* isolate);
+  static inline void IsolateExitedJS(Isolate* isolate);
+
+  // Profiler thread interface.
+  //
+  // IsSomeIsolateInJS():
+  // The profiler thread can query whether some isolate is currently
+  // running JavaScript code.
+  //
+  // WaitForSomeIsolateToEnterJS():
+  // When no isolates are running JavaScript code for some time the
+  // profiler thread suspends itself by calling the wait function. The
+  // wait function returns true after it waited or false immediately.
+  // While the function was waiting the profiler may have been
+  // disabled so it *must check* whether it is allowed to continue.
+  static bool IsSomeIsolateInJS();
+  static bool WaitForSomeIsolateToEnterJS();
+
+  // When shutting down we join the profiler thread. Doing so while
+  // it's waiting on a semaphore will cause a deadlock, so we have to
+  // wake it up first.
+  static void WakeUpRuntimeProfilerThreadBeforeShutdown();
+
+  void UpdateSamplesAfterScavenge();
+  void RemoveDeadSamples();
+  void UpdateSamplesAfterCompact(ObjectVisitor* visitor);
 
-  static void OptimizeNow();
-  static void OptimizeSoon(JSFunction* function);
+ private:
+  static const int kSamplerWindowSize = 16;
+  static const int kStateWindowSize = 128;
+
+  static void HandleWakeUp(Isolate* isolate);
+
+  void Optimize(JSFunction* function, bool eager, int delay);
+
+  void AttemptOnStackReplacement(JSFunction* function);
+
+  void ClearSampleBuffer();
+
+  void ClearSampleBufferNewSpaceEntries();
+
+  int LookupSample(JSFunction* function);
+
+  void AddSample(JSFunction* function, int weight);
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  void UpdateStateRatio(SamplerState current_state);
+#endif
 
-  static void NotifyTick();
+  Isolate* isolate_;
 
-  static void Setup();
-  static void Reset();
-  static void TearDown();
+  int sampler_threshold_;
+  int sampler_threshold_size_factor_;
+  int sampler_ticks_until_threshold_adjustment_;
 
-  static int SamplerWindowSize();
-  static void UpdateSamplesAfterScavenge();
-  static void RemoveDeadSamples();
-  static void UpdateSamplesAfterCompact(ObjectVisitor* visitor);
+  // The ratio of ticks spent in JS code in percent.
+  Atomic32 js_ratio_;
+
+  Object* sampler_window_[kSamplerWindowSize];
+  int sampler_window_position_;
+  int sampler_window_weight_[kSamplerWindowSize];
+
+  // Support for pending 'optimize soon' requests.
+  PendingListNode* optimize_soon_list_;
+
+  SamplerState state_window_[kStateWindowSize];
+  int state_window_position_;
+  int state_counts_[2];
+
+  // Possible state values:
+  //   -1            => the profiler thread is waiting on the semaphore
+  //   0 or positive => the number of isolates running JavaScript code.
+  static Atomic32 state_;
+  static Semaphore* semaphore_;
 };
 
 
@@ -59,9 +152,10 @@ class RuntimeProfilerRateLimiter BASE_EMBEDDED {
  public:
   RuntimeProfilerRateLimiter() : non_js_ticks_(0) { }
 
-  // Suspends the current thread when not executing JavaScript to
-  // minimize CPU usage. Returns whether this thread was suspended
-  // (and so might have to check whether profiling is still active.)
+  // Suspends the current thread (which must be the profiler thread)
+  // when not executing JavaScript to minimize CPU usage. Returns
+  // whether the thread was suspended (and so must check whether
+  // profiling is still active).
   //
   // Does nothing when runtime profiling is not enabled.
   bool SuspendIfNecessary();
@@ -72,6 +166,27 @@ class RuntimeProfilerRateLimiter BASE_EMBEDDED {
   DISALLOW_COPY_AND_ASSIGN(RuntimeProfilerRateLimiter);
 };
 
+
+// Implementation of RuntimeProfiler inline functions.
+
+void RuntimeProfiler::IsolateEnteredJS(Isolate* isolate) {
+  Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, 1);
+  if (new_state == 0) {
+    // Just incremented from -1 to 0. -1 can only be set by the
+    // profiler thread before it suspends itself and starts waiting on
+    // the semaphore.
+    HandleWakeUp(isolate);
+  }
+  ASSERT(new_state >= 0);
+}
+
+
+void RuntimeProfiler::IsolateExitedJS(Isolate* isolate) {
+  Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, -1);
+  ASSERT(new_state >= 0);
+  USE(new_state);
+}
+
 } }  // namespace v8::internal
 
 #endif  // V8_RUNTIME_PROFILER_H_
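
The new static state_/semaphore_ pair in RuntimeProfiler encodes a small rendezvous
protocol between isolates (VM threads) and the single profiler thread: state_ counts
the isolates currently in JS, the profiler thread CAS-es 0 -> -1 before blocking on
the semaphore, and the first isolate entering JS afterwards raises -1 back to 0,
notices that, and signals the semaphore (HandleWakeUp). A compilable sketch of that
handshake using C++20 std::atomic and std::counting_semaphore instead of V8's
atomicops (names are illustrative):

    #include <atomic>
    #include <semaphore>

    // -1  => the profiler thread is parked on the semaphore
    //  0+ => number of isolates currently running JavaScript
    static std::atomic<int> state{0};
    static std::counting_semaphore<> wakeup{0};

    void IsolateEnteredJS() {
      int new_state = state.fetch_add(1) + 1;
      if (new_state == 0) {     // we only undid the profiler's -1 ...
        state.fetch_add(1);     // ... so count this isolate as well
        wakeup.release();       // and wake the parked profiler thread
      }
    }

    void IsolateExitedJS() { state.fetch_sub(1); }

    // Profiler thread: returns true only if it actually slept.
    bool WaitForSomeIsolateToEnterJS() {
      int expected = 0;
      if (!state.compare_exchange_strong(expected, -1)) return false;  // JS running
      wakeup.acquire();         // park until IsolateEnteredJS() signals
      return true;
    }
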
index b6bc4d09cc77092484ad7e467f059df0558874c7..9bbca2dbdaa9a8e7e3ed2f58cb53783f4aba790c 100644 (file)
@@ -59,7 +59,7 @@ namespace internal {
 
 
 #define RUNTIME_ASSERT(value) \
-  if (!(value)) return Top::ThrowIllegalOperation();
+  if (!(value)) return isolate->ThrowIllegalOperation();
 
 // Cast the given object to a value of the specified type and store
 // it in a variable with the given name.  If the object is not of the
@@ -100,16 +100,15 @@ namespace internal {
   RUNTIME_ASSERT(obj->IsNumber());                                   \
   type name = NumberTo##Type(obj);
 
-// Non-reentrant string buffer for efficient general use in this file.
-static StaticResource<StringInputBuffer> runtime_string_input_buffer;
 
+MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
+                                                   JSObject* boilerplate) {
+  StackLimitCheck check(isolate);
+  if (check.HasOverflowed()) return isolate->StackOverflow();
 
-MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
-  StackLimitCheck check;
-  if (check.HasOverflowed()) return Top::StackOverflow();
-
+  Heap* heap = isolate->heap();
   Object* result;
-  { MaybeObject* maybe_result = Heap::CopyJSObject(boilerplate);
+  { MaybeObject* maybe_result = heap->CopyJSObject(boilerplate);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSObject* copy = JSObject::cast(result);
@@ -121,7 +120,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
       Object* value = properties->get(i);
       if (value->IsJSObject()) {
         JSObject* js_object = JSObject::cast(value);
-        { MaybeObject* maybe_result = DeepCopyBoilerplate(js_object);
+        { MaybeObject* maybe_result = DeepCopyBoilerplate(isolate, js_object);
           if (!maybe_result->ToObject(&result)) return maybe_result;
         }
         properties->set(i, result);
@@ -132,7 +131,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
       Object* value = copy->InObjectPropertyAt(i);
       if (value->IsJSObject()) {
         JSObject* js_object = JSObject::cast(value);
-        { MaybeObject* maybe_result = DeepCopyBoilerplate(js_object);
+        { MaybeObject* maybe_result = DeepCopyBoilerplate(isolate, js_object);
           if (!maybe_result->ToObject(&result)) return maybe_result;
         }
         copy->InObjectPropertyAtPut(i, result);
@@ -140,7 +139,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
     }
   } else {
     { MaybeObject* maybe_result =
-          Heap::AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
+          heap->AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     FixedArray* names = FixedArray::cast(result);
@@ -158,7 +157,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
           copy->GetProperty(key_string, &attributes)->ToObjectUnchecked();
       if (value->IsJSObject()) {
         JSObject* js_object = JSObject::cast(value);
-        { MaybeObject* maybe_result = DeepCopyBoilerplate(js_object);
+        { MaybeObject* maybe_result = DeepCopyBoilerplate(isolate, js_object);
           if (!maybe_result->ToObject(&result)) return maybe_result;
         }
         { MaybeObject* maybe_result =
@@ -176,8 +175,8 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
   switch (copy->GetElementsKind()) {
     case JSObject::FAST_ELEMENTS: {
       FixedArray* elements = FixedArray::cast(copy->elements());
-      if (elements->map() == Heap::fixed_cow_array_map()) {
-        Counters::cow_arrays_created_runtime.Increment();
+      if (elements->map() == heap->fixed_cow_array_map()) {
+        isolate->counters()->cow_arrays_created_runtime()->Increment();
 #ifdef DEBUG
         for (int i = 0; i < elements->length(); i++) {
           ASSERT(!elements->get(i)->IsJSObject());
@@ -188,7 +187,8 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
           Object* value = elements->get(i);
           if (value->IsJSObject()) {
             JSObject* js_object = JSObject::cast(value);
-            { MaybeObject* maybe_result = DeepCopyBoilerplate(js_object);
+            { MaybeObject* maybe_result = DeepCopyBoilerplate(isolate,
+                                                              js_object);
               if (!maybe_result->ToObject(&result)) return maybe_result;
             }
             elements->set(i, result);
@@ -206,7 +206,8 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
           Object* value = element_dictionary->ValueAt(i);
           if (value->IsJSObject()) {
             JSObject* js_object = JSObject::cast(value);
-            { MaybeObject* maybe_result = DeepCopyBoilerplate(js_object);
+            { MaybeObject* maybe_result = DeepCopyBoilerplate(isolate,
+                                                              js_object);
               if (!maybe_result->ToObject(&result)) return maybe_result;
             }
             element_dictionary->ValueAtPut(i, result);
@@ -223,15 +224,19 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(JSObject* boilerplate) {
 }
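
DeepCopyBoilerplate() now threads the Isolate* explicitly, guards every recursion
level with a StackLimitCheck, makes a shallow heap copy of the boilerplate, and then
recursively replaces each JSObject-valued property and element with its own deep
copy. The shape of that traversal over a plain tree instead of V8 objects (the depth
guard stands in for the stack-limit check; all names are illustrative):

    #include <map>
    #include <memory>
    #include <string>

    struct Node {
      int value = 0;
      std::map<std::string, std::unique_ptr<Node>> children;  // "object-valued" slots
    };

    // Returns null on "overflow", mirroring the early isolate->StackOverflow() exit.
    std::unique_ptr<Node> DeepCopy(const Node& src, int depth = 0) {
      if (depth > 512) return nullptr;                  // stand-in for StackLimitCheck
      std::unique_ptr<Node> copy(new Node());
      copy->value = src.value;                          // the shallow part of the copy
      for (const auto& entry : src.children) {          // recurse into nested objects
        std::unique_ptr<Node> child = DeepCopy(*entry.second, depth + 1);
        if (child == nullptr) return nullptr;           // propagate the failure
        copy->children[entry.first] = std::move(child);
      }
      return copy;
    }
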
 
 
-static MaybeObject* Runtime_CloneLiteralBoilerplate(Arguments args) {
+static MaybeObject* Runtime_CloneLiteralBoilerplate(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSObject, boilerplate, args[0]);
-  return DeepCopyBoilerplate(boilerplate);
+  return DeepCopyBoilerplate(isolate, boilerplate);
 }
 
 
-static MaybeObject* Runtime_CloneShallowLiteralBoilerplate(Arguments args) {
+static MaybeObject* Runtime_CloneShallowLiteralBoilerplate(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSObject, boilerplate, args[0]);
-  return Heap::CopyJSObject(boilerplate);
+  return isolate->heap()->CopyJSObject(boilerplate);
 }
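
From this point on, every Runtime_* entry point in the file switches from a bare
'Arguments args' parameter to the RUNTIME_CALLING_CONVENTION / RUNTIME_GET_ISOLATE
macro pair, which is how the current isolate reaches code that previously went
through static Heap::, Factory:: and Top:: members. The macro definitions are not
part of this hunk; the sketch below shows one plausible way such a pair can thread an
extra parameter through many signatures at once. It illustrates the idiom only and is
not V8's actual definition:

    struct Isolate { /* per-VM state: heap, factory, context, ... */ };
    struct Arguments { /* the runtime call's argument slice */ };
    struct MaybeObject;

    // Hypothetical expansion: pass the isolate explicitly next to the arguments.
    #define RUNTIME_CALLING_CONVENTION Arguments args, Isolate* calling_isolate
    #define RUNTIME_GET_ISOLATE Isolate* isolate = calling_isolate; (void)isolate

    static MaybeObject* Runtime_Example(RUNTIME_CALLING_CONVENTION) {
      RUNTIME_GET_ISOLATE;
      // From here on the per-isolate services (heap, factory, context, ...) are
      // reached through 'isolate', as in the rewritten bodies above and below.
      return nullptr;
    }
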
 
 
@@ -239,6 +244,7 @@ static Handle<Map> ComputeObjectLiteralMap(
     Handle<Context> context,
     Handle<FixedArray> constant_properties,
     bool* is_result_from_cache) {
+  Isolate* isolate = context->GetIsolate();
   int properties_length = constant_properties->length();
   int number_of_properties = properties_length / 2;
   if (FLAG_canonicalize_object_literal_maps) {
@@ -265,7 +271,8 @@ static Handle<Map> ComputeObjectLiteralMap(
     if ((number_of_symbol_keys == number_of_properties) &&
         (number_of_symbol_keys < kMaxKeys)) {
       // Create the fixed array with the key.
-      Handle<FixedArray> keys = Factory::NewFixedArray(number_of_symbol_keys);
+      Handle<FixedArray> keys =
+          isolate->factory()->NewFixedArray(number_of_symbol_keys);
       if (number_of_symbol_keys > 0) {
         int index = 0;
         for (int p = 0; p < properties_length; p += 2) {
@@ -277,22 +284,24 @@ static Handle<Map> ComputeObjectLiteralMap(
         ASSERT(index == number_of_symbol_keys);
       }
       *is_result_from_cache = true;
-      return Factory::ObjectLiteralMapFromCache(context, keys);
+      return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
     }
   }
   *is_result_from_cache = false;
-  return Factory::CopyMap(
+  return isolate->factory()->CopyMap(
       Handle<Map>(context->object_function()->initial_map()),
       number_of_properties);
 }
 
 
 static Handle<Object> CreateLiteralBoilerplate(
+    Isolate* isolate,
     Handle<FixedArray> literals,
     Handle<FixedArray> constant_properties);
 
 
 static Handle<Object> CreateObjectLiteralBoilerplate(
+    Isolate* isolate,
     Handle<FixedArray> literals,
     Handle<FixedArray> constant_properties,
     bool should_have_fast_elements) {
@@ -310,7 +319,7 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
                                             constant_properties,
                                             &is_result_from_cache);
 
-  Handle<JSObject> boilerplate = Factory::NewJSObjectFromMap(map);
+  Handle<JSObject> boilerplate = isolate->factory()->NewJSObjectFromMap(map);
 
   // Normalize the elements of the boilerplate to save space if needed.
   if (!should_have_fast_elements) NormalizeElements(boilerplate);
@@ -321,13 +330,13 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
                                                    length / 2,
                                                    !is_result_from_cache);
     for (int index = 0; index < length; index +=2) {
-      Handle<Object> key(constant_properties->get(index+0));
-      Handle<Object> value(constant_properties->get(index+1));
+      Handle<Object> key(constant_properties->get(index+0), isolate);
+      Handle<Object> value(constant_properties->get(index+1), isolate);
       if (value->IsFixedArray()) {
         // The value contains the constant_properties of a
         // simple object literal.
         Handle<FixedArray> array = Handle<FixedArray>::cast(value);
-        value = CreateLiteralBoilerplate(literals, array);
+        value = CreateLiteralBoilerplate(isolate, literals, array);
         if (value.is_null()) return value;
       }
       Handle<Object> result;
@@ -358,7 +367,8 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
         char arr[100];
         Vector<char> buffer(arr, ARRAY_SIZE(arr));
         const char* str = DoubleToCString(num, buffer);
-        Handle<String> name = Factory::NewStringFromAscii(CStrVector(str));
+        Handle<String> name =
+            isolate->factory()->NewStringFromAscii(CStrVector(str));
         result = SetLocalPropertyIgnoreAttributes(boilerplate, name,
                                                   value, NONE);
       }
@@ -375,16 +385,18 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
 
 
 static Handle<Object> CreateArrayLiteralBoilerplate(
+    Isolate* isolate,
     Handle<FixedArray> literals,
     Handle<FixedArray> elements) {
   // Create the JSArray.
   Handle<JSFunction> constructor(
       JSFunction::GlobalContextFromLiterals(*literals)->array_function());
-  Handle<Object> object = Factory::NewJSObject(constructor);
+  Handle<Object> object = isolate->factory()->NewJSObject(constructor);
 
-  const bool is_cow = (elements->map() == Heap::fixed_cow_array_map());
+  const bool is_cow =
+      (elements->map() == isolate->heap()->fixed_cow_array_map());
   Handle<FixedArray> copied_elements =
-      is_cow ? elements : Factory::CopyFixedArray(elements);
+      is_cow ? elements : isolate->factory()->CopyFixedArray(elements);
 
   Handle<FixedArray> content = Handle<FixedArray>::cast(copied_elements);
   if (is_cow) {
@@ -401,7 +413,7 @@ static Handle<Object> CreateArrayLiteralBoilerplate(
         // simple object literal.
         Handle<FixedArray> fa(FixedArray::cast(content->get(i)));
         Handle<Object> result =
-            CreateLiteralBoilerplate(literals, fa);
+            CreateLiteralBoilerplate(isolate, literals, fa);
         if (result.is_null()) return result;
         content->set(i, *result);
       }
@@ -415,16 +427,17 @@ static Handle<Object> CreateArrayLiteralBoilerplate(
 
 
 static Handle<Object> CreateLiteralBoilerplate(
+    Isolate* isolate,
     Handle<FixedArray> literals,
     Handle<FixedArray> array) {
   Handle<FixedArray> elements = CompileTimeValue::GetElements(array);
   switch (CompileTimeValue::GetType(array)) {
     case CompileTimeValue::OBJECT_LITERAL_FAST_ELEMENTS:
-      return CreateObjectLiteralBoilerplate(literals, elements, true);
+      return CreateObjectLiteralBoilerplate(isolate, literals, elements, true);
     case CompileTimeValue::OBJECT_LITERAL_SLOW_ELEMENTS:
-      return CreateObjectLiteralBoilerplate(literals, elements, false);
+      return CreateObjectLiteralBoilerplate(isolate, literals, elements, false);
     case CompileTimeValue::ARRAY_LITERAL:
-      return CreateArrayLiteralBoilerplate(literals, elements);
+      return CreateArrayLiteralBoilerplate(isolate, literals, elements);
     default:
       UNREACHABLE();
       return Handle<Object>::null();
@@ -432,19 +445,22 @@ static Handle<Object> CreateLiteralBoilerplate(
 }
 
 
-static MaybeObject* Runtime_CreateArrayLiteralBoilerplate(Arguments args) {
+static MaybeObject* Runtime_CreateArrayLiteralBoilerplate(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   // Takes a FixedArray of elements containing the literal elements of
   // the array literal and produces JSArray with those elements.
   // Additionally takes the literals array of the surrounding function
   // which contains the context from which to get the Array function
   // to use for creating the array literal.
-  HandleScope scope;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   CONVERT_SMI_CHECKED(literals_index, args[1]);
   CONVERT_ARG_CHECKED(FixedArray, elements, 2);
 
-  Handle<Object> object = CreateArrayLiteralBoilerplate(literals, elements);
+  Handle<Object> object =
+      CreateArrayLiteralBoilerplate(isolate, literals, elements);
   if (object.is_null()) return Failure::Exception();
 
   // Update the functions literal and return the boilerplate.
@@ -453,8 +469,9 @@ static MaybeObject* Runtime_CreateArrayLiteralBoilerplate(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_CreateObjectLiteral(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CreateObjectLiteral(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   CONVERT_SMI_CHECKED(literals_index, args[1]);
@@ -463,21 +480,24 @@ static MaybeObject* Runtime_CreateObjectLiteral(Arguments args) {
   bool should_have_fast_elements = fast_elements == 1;
 
   // Check if boilerplate exists. If not, create it first.
-  Handle<Object> boilerplate(literals->get(literals_index));
-  if (*boilerplate == Heap::undefined_value()) {
-    boilerplate = CreateObjectLiteralBoilerplate(literals,
+  Handle<Object> boilerplate(literals->get(literals_index), isolate);
+  if (*boilerplate == isolate->heap()->undefined_value()) {
+    boilerplate = CreateObjectLiteralBoilerplate(isolate,
+                                                 literals,
                                                  constant_properties,
                                                  should_have_fast_elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
     literals->set(literals_index, *boilerplate);
   }
-  return DeepCopyBoilerplate(JSObject::cast(*boilerplate));
+  return DeepCopyBoilerplate(isolate, JSObject::cast(*boilerplate));
 }
 
 
-static MaybeObject* Runtime_CreateObjectLiteralShallow(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CreateObjectLiteralShallow(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   CONVERT_SMI_CHECKED(literals_index, args[1]);
@@ -486,70 +506,77 @@ static MaybeObject* Runtime_CreateObjectLiteralShallow(Arguments args) {
   bool should_have_fast_elements = fast_elements == 1;
 
   // Check if boilerplate exists. If not, create it first.
-  Handle<Object> boilerplate(literals->get(literals_index));
-  if (*boilerplate == Heap::undefined_value()) {
-    boilerplate = CreateObjectLiteralBoilerplate(literals,
+  Handle<Object> boilerplate(literals->get(literals_index), isolate);
+  if (*boilerplate == isolate->heap()->undefined_value()) {
+    boilerplate = CreateObjectLiteralBoilerplate(isolate,
+                                                 literals,
                                                  constant_properties,
                                                  should_have_fast_elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
     literals->set(literals_index, *boilerplate);
   }
-  return Heap::CopyJSObject(JSObject::cast(*boilerplate));
+  return isolate->heap()->CopyJSObject(JSObject::cast(*boilerplate));
 }
 
 
-static MaybeObject* Runtime_CreateArrayLiteral(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CreateArrayLiteral(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   CONVERT_SMI_CHECKED(literals_index, args[1]);
   CONVERT_ARG_CHECKED(FixedArray, elements, 2);
 
   // Check if boilerplate exists. If not, create it first.
-  Handle<Object> boilerplate(literals->get(literals_index));
-  if (*boilerplate == Heap::undefined_value()) {
-    boilerplate = CreateArrayLiteralBoilerplate(literals, elements);
+  Handle<Object> boilerplate(literals->get(literals_index), isolate);
+  if (*boilerplate == isolate->heap()->undefined_value()) {
+    boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
     literals->set(literals_index, *boilerplate);
   }
-  return DeepCopyBoilerplate(JSObject::cast(*boilerplate));
+  return DeepCopyBoilerplate(isolate, JSObject::cast(*boilerplate));
 }
 
 
-static MaybeObject* Runtime_CreateArrayLiteralShallow(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CreateArrayLiteralShallow(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   CONVERT_SMI_CHECKED(literals_index, args[1]);
   CONVERT_ARG_CHECKED(FixedArray, elements, 2);
 
   // Check if boilerplate exists. If not, create it first.
-  Handle<Object> boilerplate(literals->get(literals_index));
-  if (*boilerplate == Heap::undefined_value()) {
-    boilerplate = CreateArrayLiteralBoilerplate(literals, elements);
+  Handle<Object> boilerplate(literals->get(literals_index), isolate);
+  if (*boilerplate == isolate->heap()->undefined_value()) {
+    boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
     literals->set(literals_index, *boilerplate);
   }
   if (JSObject::cast(*boilerplate)->elements()->map() ==
-      Heap::fixed_cow_array_map()) {
-    Counters::cow_arrays_created_runtime.Increment();
+      isolate->heap()->fixed_cow_array_map()) {
+    COUNTERS->cow_arrays_created_runtime()->Increment();
   }
-  return Heap::CopyJSObject(JSObject::cast(*boilerplate));
+  return isolate->heap()->CopyJSObject(JSObject::cast(*boilerplate));
 }
 
 
-static MaybeObject* Runtime_CreateCatchExtensionObject(Arguments args) {
+static MaybeObject* Runtime_CreateCatchExtensionObject(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(String, key, args[0]);
   Object* value = args[1];
   // Create a catch context extension object.
   JSFunction* constructor =
-      Top::context()->global_context()->context_extension_function();
+      isolate->context()->global_context()->
+          context_extension_function();
   Object* object;
-  { MaybeObject* maybe_object = Heap::AllocateJSObject(constructor);
+  { MaybeObject* maybe_object = isolate->heap()->AllocateJSObject(constructor);
     if (!maybe_object->ToObject(&object)) return maybe_object;
   }
   // Assign the exception value to the catch variable and make sure
@@ -564,16 +591,18 @@ static MaybeObject* Runtime_CreateCatchExtensionObject(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_ClassOf(Arguments args) {
+static MaybeObject* Runtime_ClassOf(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
   Object* obj = args[0];
-  if (!obj->IsJSObject()) return Heap::null_value();
+  if (!obj->IsJSObject()) return isolate->heap()->null_value();
   return JSObject::cast(obj)->class_name();
 }
 
 
-static MaybeObject* Runtime_IsInPrototypeChain(Arguments args) {
+static MaybeObject* Runtime_IsInPrototypeChain(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
   // See ECMA-262, section 15.3.5.3, page 88 (steps 5 - 8).
@@ -581,15 +610,16 @@ static MaybeObject* Runtime_IsInPrototypeChain(Arguments args) {
   Object* V = args[1];
   while (true) {
     Object* prototype = V->GetPrototype();
-    if (prototype->IsNull()) return Heap::false_value();
-    if (O == prototype) return Heap::true_value();
+    if (prototype->IsNull()) return isolate->heap()->false_value();
+    if (O == prototype) return isolate->heap()->true_value();
     V = prototype;
   }
 }
 
 
 // Inserts an object as the hidden prototype of another object.
-static MaybeObject* Runtime_SetHiddenPrototype(Arguments args) {
+static MaybeObject* Runtime_SetHiddenPrototype(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(JSObject, jsobject, args[0]);
@@ -627,15 +657,16 @@ static MaybeObject* Runtime_SetHiddenPrototype(Arguments args) {
   new_map->set_prototype(proto);
   jsobject->set_map(new_map);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_IsConstructCall(Arguments args) {
+static MaybeObject* Runtime_IsConstructCall(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 0);
   JavaScriptFrameIterator it;
-  return Heap::ToBoolean(it.frame()->IsConstructor());
+  return isolate->heap()->ToBoolean(it.frame()->IsConstructor());
 }
 
 
@@ -682,9 +713,10 @@ static bool CheckAccess(JSObject* obj,
 
   JSObject* holder = result->holder();
   JSObject* current = obj;
+  Isolate* isolate = obj->GetIsolate();
   while (true) {
     if (current->IsAccessCheckNeeded() &&
-        !Top::MayNamedAccess(current, name, access_type)) {
+        !isolate->MayNamedAccess(current, name, access_type)) {
       // Access check callback denied the access, but some properties
       // can have special permissions which override the callback's decision
       // (currently see v8::AccessControl).
@@ -721,7 +753,7 @@ static bool CheckAccess(JSObject* obj,
       break;
   }
 
-  Top::ReportFailedAccessCheck(current, access_type);
+  isolate->ReportFailedAccessCheck(current, access_type);
   return false;
 }
 
@@ -731,7 +763,7 @@ static bool CheckElementAccess(JSObject* obj,
                                uint32_t index,
                                v8::AccessType access_type) {
   if (obj->IsAccessCheckNeeded() &&
-      !Top::MayIndexedAccess(obj, index, access_type)) {
+      !obj->GetIsolate()->MayIndexedAccess(obj, index, access_type)) {
     return false;
   }
 
@@ -758,11 +790,13 @@ enum PropertyDescriptorIndices {
 //         [false, value, Writeable, Enumerable, Configurable]
 //  if args[1] is an accessor on args[0]
 //         [true, GetFunction, SetFunction, Enumerable, Configurable]
-static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
+static MaybeObject* Runtime_GetOwnProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
-  Handle<FixedArray> elms = Factory::NewFixedArray(DESCRIPTOR_SIZE);
-  Handle<JSArray> desc = Factory::NewJSArrayWithElements(elms);
+  Heap* heap = isolate->heap();
+  HandleScope scope(isolate);
+  Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE);
+  Handle<JSArray> desc = isolate->factory()->NewJSArrayWithElements(elms);
   LookupResult result;
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   CONVERT_ARG_CHECKED(String, name, 1);
@@ -772,7 +806,7 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
   if (name->AsArrayIndex(&index)) {
     switch (obj->HasLocalElement(index)) {
       case JSObject::UNDEFINED_ELEMENT:
-        return Heap::undefined_value();
+        return heap->undefined_value();
 
       case JSObject::STRING_CHARACTER_ELEMENT: {
         // Special handling of string objects according to ECMAScript 5
@@ -783,23 +817,23 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
         Handle<String> str(String::cast(js_value->value()));
         Handle<String> substr = SubString(str, index, index + 1, NOT_TENURED);
 
-        elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
+        elms->set(IS_ACCESSOR_INDEX, heap->false_value());
         elms->set(VALUE_INDEX, *substr);
-        elms->set(WRITABLE_INDEX, Heap::false_value());
-        elms->set(ENUMERABLE_INDEX,  Heap::false_value());
-        elms->set(CONFIGURABLE_INDEX, Heap::false_value());
+        elms->set(WRITABLE_INDEX, heap->false_value());
+        elms->set(ENUMERABLE_INDEX,  heap->false_value());
+        elms->set(CONFIGURABLE_INDEX, heap->false_value());
         return *desc;
       }
 
       case JSObject::INTERCEPTED_ELEMENT:
       case JSObject::FAST_ELEMENT: {
-        elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
+        elms->set(IS_ACCESSOR_INDEX, heap->false_value());
         Handle<Object> value = GetElement(obj, index);
-        RETURN_IF_EMPTY_HANDLE(value);
+        RETURN_IF_EMPTY_HANDLE(isolate, value);
         elms->set(VALUE_INDEX, *value);
-        elms->set(WRITABLE_INDEX, Heap::true_value());
-        elms->set(ENUMERABLE_INDEX,  Heap::true_value());
-        elms->set(CONFIGURABLE_INDEX, Heap::true_value());
+        elms->set(WRITABLE_INDEX, heap->true_value());
+        elms->set(ENUMERABLE_INDEX,  heap->true_value());
+        elms->set(CONFIGURABLE_INDEX, heap->true_value());
         return *desc;
       }
 
@@ -807,7 +841,7 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
         Handle<JSObject> holder = obj;
         if (obj->IsJSGlobalProxy()) {
           Object* proto = obj->GetPrototype();
-          if (proto->IsNull()) return Heap::undefined_value();
+          if (proto->IsNull()) return heap->undefined_value();
           ASSERT(proto->IsJSGlobalObject());
           holder = Handle<JSObject>(JSObject::cast(proto));
         }
@@ -820,7 +854,7 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
             // This is an accessor property with getter and/or setter.
             FixedArray* callbacks =
                 FixedArray::cast(dictionary->ValueAt(entry));
-            elms->set(IS_ACCESSOR_INDEX, Heap::true_value());
+            elms->set(IS_ACCESSOR_INDEX, heap->true_value());
             if (CheckElementAccess(*obj, index, v8::ACCESS_GET)) {
               elms->set(GETTER_INDEX, callbacks->get(0));
             }
@@ -831,19 +865,19 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
           }
           case NORMAL: {
             // This is a data property.
-            elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
+            elms->set(IS_ACCESSOR_INDEX, heap->false_value());
             Handle<Object> value = GetElement(obj, index);
             ASSERT(!value.is_null());
             elms->set(VALUE_INDEX, *value);
-            elms->set(WRITABLE_INDEX, Heap::ToBoolean(!details.IsReadOnly()));
+            elms->set(WRITABLE_INDEX, heap->ToBoolean(!details.IsReadOnly()));
             break;
           }
           default:
             UNREACHABLE();
             break;
         }
-        elms->set(ENUMERABLE_INDEX, Heap::ToBoolean(!details.IsDontEnum()));
-        elms->set(CONFIGURABLE_INDEX, Heap::ToBoolean(!details.IsDontDelete()));
+        elms->set(ENUMERABLE_INDEX, heap->ToBoolean(!details.IsDontEnum()));
+        elms->set(CONFIGURABLE_INDEX, heap->ToBoolean(!details.IsDontDelete()));
         return *desc;
       }
     }
@@ -853,22 +887,22 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
   GetOwnPropertyImplementation(*obj, *name, &result);
 
   if (!result.IsProperty()) {
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
 
   if (!CheckAccess(*obj, *name, &result, v8::ACCESS_HAS)) {
-    return Heap::false_value();
+    return heap->false_value();
   }
 
-  elms->set(ENUMERABLE_INDEX, Heap::ToBoolean(!result.IsDontEnum()));
-  elms->set(CONFIGURABLE_INDEX, Heap::ToBoolean(!result.IsDontDelete()));
+  elms->set(ENUMERABLE_INDEX, heap->ToBoolean(!result.IsDontEnum()));
+  elms->set(CONFIGURABLE_INDEX, heap->ToBoolean(!result.IsDontDelete()));
 
   bool is_js_accessor = (result.type() == CALLBACKS) &&
                         (result.GetCallbackObject()->IsFixedArray());
 
   if (is_js_accessor) {
     // __defineGetter__/__defineSetter__ callback.
-    elms->set(IS_ACCESSOR_INDEX, Heap::true_value());
+    elms->set(IS_ACCESSOR_INDEX, heap->true_value());
 
     FixedArray* structure = FixedArray::cast(result.GetCallbackObject());
     if (CheckAccess(*obj, *name, &result, v8::ACCESS_GET)) {
@@ -878,8 +912,8 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
       elms->set(SETTER_INDEX, structure->get(1));
     }
   } else {
-    elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
-    elms->set(WRITABLE_INDEX, Heap::ToBoolean(!result.IsReadOnly()));
+    elms->set(IS_ACCESSOR_INDEX, heap->false_value());
+    elms->set(WRITABLE_INDEX, heap->ToBoolean(!result.IsReadOnly()));
 
     PropertyAttributes attrs;
     Object* value;
@@ -894,29 +928,32 @@ static MaybeObject* Runtime_GetOwnProperty(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_PreventExtensions(Arguments args) {
+static MaybeObject* Runtime_PreventExtensions(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(JSObject, obj, args[0]);
   return obj->PreventExtensions();
 }
 
 
-static MaybeObject* Runtime_IsExtensible(Arguments args) {
+static MaybeObject* Runtime_IsExtensible(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(JSObject, obj, args[0]);
   if (obj->IsJSGlobalProxy()) {
     Object* proto = obj->GetPrototype();
-    if (proto->IsNull()) return Heap::false_value();
+    if (proto->IsNull()) return isolate->heap()->false_value();
     ASSERT(proto->IsJSGlobalObject());
     obj = JSObject::cast(proto);
   }
-  return obj->map()->is_extensible() ? Heap::true_value()
-                                     : Heap::false_value();
+  return obj->map()->is_extensible() ? isolate->heap()->true_value()
+                                     : isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_RegExpCompile(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_RegExpCompile(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(JSRegExp, re, 0);
   CONVERT_ARG_CHECKED(String, pattern, 1);
@@ -927,23 +964,26 @@ static MaybeObject* Runtime_RegExpCompile(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_CreateApiFunction(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CreateApiFunction(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(FunctionTemplateInfo, data, 0);
-  return *Factory::CreateApiFunction(data);
+  return *isolate->factory()->CreateApiFunction(data);
 }
 
 
-static MaybeObject* Runtime_IsTemplate(Arguments args) {
+static MaybeObject* Runtime_IsTemplate(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   Object* arg = args[0];
   bool result = arg->IsObjectTemplateInfo() || arg->IsFunctionTemplateInfo();
-  return Heap::ToBoolean(result);
+  return isolate->heap()->ToBoolean(result);
 }
 
 
-static MaybeObject* Runtime_GetTemplateField(Arguments args) {
+static MaybeObject* Runtime_GetTemplateField(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(HeapObject, templ, args[0]);
   CONVERT_CHECKED(Smi, field, args[1]);
@@ -962,7 +1002,8 @@ static MaybeObject* Runtime_GetTemplateField(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_DisableAccessChecks(Arguments args) {
+static MaybeObject* Runtime_DisableAccessChecks(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(HeapObject, object, args[0]);
   Map* old_map = object->map();
@@ -977,11 +1018,13 @@ static MaybeObject* Runtime_DisableAccessChecks(Arguments args) {
     Map::cast(new_map)->set_is_access_check_needed(false);
     object->set_map(Map::cast(new_map));
   }
-  return needs_access_checks ? Heap::true_value() : Heap::false_value();
+  return needs_access_checks ? isolate->heap()->true_value()
+                             : isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_EnableAccessChecks(Arguments args) {
+static MaybeObject* Runtime_EnableAccessChecks(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(HeapObject, object, args[0]);
   Map* old_map = object->map();
@@ -995,24 +1038,29 @@ static MaybeObject* Runtime_EnableAccessChecks(Arguments args) {
     Map::cast(new_map)->set_is_access_check_needed(true);
     object->set_map(Map::cast(new_map));
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static Failure* ThrowRedeclarationError(const char* type, Handle<String> name) {
-  HandleScope scope;
-  Handle<Object> type_handle = Factory::NewStringFromAscii(CStrVector(type));
+static Failure* ThrowRedeclarationError(Isolate* isolate,
+                                        const char* type,
+                                        Handle<String> name) {
+  HandleScope scope(isolate);
+  Handle<Object> type_handle =
+      isolate->factory()->NewStringFromAscii(CStrVector(type));
   Handle<Object> args[2] = { type_handle, name };
   Handle<Object> error =
-      Factory::NewTypeError("redeclaration", HandleVector(args, 2));
-  return Top::Throw(*error);
+      isolate->factory()->NewTypeError("redeclaration", HandleVector(args, 2));
+  return isolate->Throw(*error);
 }
 
 
-static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
+static MaybeObject* Runtime_DeclareGlobals(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
-  HandleScope scope;
-  Handle<GlobalObject> global = Handle<GlobalObject>(Top::context()->global());
+  HandleScope scope(isolate);
+  Handle<GlobalObject> global = Handle<GlobalObject>(
+      isolate->context()->global());
 
   Handle<Context> context = args.at<Context>(0);
   CONVERT_ARG_CHECKED(FixedArray, pairs, 1);
@@ -1030,9 +1078,9 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
   // Traverse the name/value pairs and set the properties.
   int length = pairs->length();
   for (int i = 0; i < length; i += 2) {
-    HandleScope scope;
+    HandleScope scope(isolate);
     Handle<String> name(String::cast(pairs->get(i)));
-    Handle<Object> value(pairs->get(i + 1));
+    Handle<Object> value(pairs->get(i + 1), isolate);
 
     // We have to declare a global const property. To capture we only
     // assign to it when evaluating the assignment for "const x =
@@ -1062,7 +1110,7 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
             // Check if the existing property conflicts with regards to const.
             if (is_local && (is_read_only || is_const_property)) {
               const char* type = (is_read_only) ? "const" : "var";
-              return ThrowRedeclarationError(type, name);
+              return ThrowRedeclarationError(isolate, type, name);
             };
             // The property already exists without conflicting: Go to
             // the next declaration.
@@ -1074,12 +1122,12 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
           // For const properties, we treat a callback with this name
           // even in the prototype as a conflicting declaration.
           if (is_const_property && (lookup.type() == CALLBACKS)) {
-            return ThrowRedeclarationError("const", name);
+            return ThrowRedeclarationError(isolate, "const", name);
           }
           // Otherwise, we check for locally conflicting declarations.
           if (is_local && (is_read_only || is_const_property)) {
             const char* type = (is_read_only) ? "const" : "var";
-            return ThrowRedeclarationError(type, name);
+            return ThrowRedeclarationError(isolate, type, name);
           }
           // The property already exists without conflicting: Go to
           // the next declaration.
@@ -1091,7 +1139,9 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
       Handle<SharedFunctionInfo> shared =
           Handle<SharedFunctionInfo>::cast(value);
       Handle<JSFunction> function =
-          Factory::NewFunctionFromSharedFunctionInfo(shared, context, TENURED);
+          isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
+                                                                context,
+                                                                TENURED);
       value = function;
     }
 
@@ -1113,7 +1163,7 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
         (lookup.type() != INTERCEPTOR) &&
         (lookup.IsReadOnly() || is_const_property)) {
       const char* type = (lookup.IsReadOnly()) ? "const" : "var";
-      return ThrowRedeclarationError(type, name);
+      return ThrowRedeclarationError(isolate, type, name);
     }
 
     // Safari does not allow the invocation of callback setters for
@@ -1129,12 +1179,14 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
         attributes = static_cast<PropertyAttributes>(
             attributes | (lookup.GetAttributes() & DONT_DELETE));
       }
-      RETURN_IF_EMPTY_HANDLE(SetLocalPropertyIgnoreAttributes(global,
+      RETURN_IF_EMPTY_HANDLE(isolate,
+                             SetLocalPropertyIgnoreAttributes(global,
                                                               name,
                                                               value,
                                                               attributes));
     } else {
-      RETURN_IF_EMPTY_HANDLE(SetProperty(global,
+      RETURN_IF_EMPTY_HANDLE(isolate,
+                             SetProperty(global,
                                          name,
                                          value,
                                          attributes,
@@ -1142,13 +1194,14 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
     }
   }
 
-  ASSERT(!Top::has_pending_exception());
-  return Heap::undefined_value();
+  ASSERT(!isolate->has_pending_exception());
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DeclareContextSlot(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
 
   CONVERT_ARG_CHECKED(Context, context, 0);
@@ -1156,7 +1209,7 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
   PropertyAttributes mode =
       static_cast<PropertyAttributes>(Smi::cast(args[2])->value());
   RUNTIME_ASSERT(mode == READ_ONLY || mode == NONE);
-  Handle<Object> initial_value(args[3]);
+  Handle<Object> initial_value(args[3], isolate);
 
   // Declarations are always done in the function context.
   context = Handle<Context>(context->fcontext());
@@ -1175,7 +1228,7 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
       // Functions are not read-only.
       ASSERT(mode != READ_ONLY || initial_value->IsTheHole());
       const char* type = ((attributes & READ_ONLY) != 0) ? "const" : "var";
-      return ThrowRedeclarationError(type, name);
+      return ThrowRedeclarationError(isolate, type, name);
     }
 
     // Initialize it if necessary.
@@ -1200,6 +1253,7 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
         // Slow case: The property is not in the FixedArray part of the context.
         Handle<JSObject> context_ext = Handle<JSObject>::cast(holder);
         RETURN_IF_EMPTY_HANDLE(
+            isolate,
             SetProperty(context_ext, name, initial_value,
                         mode, kNonStrictMode));
       }
@@ -1216,7 +1270,8 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
     } else {
       // The function context's extension context does not exist - allocate
       // it.
-      context_ext = Factory::NewJSObject(Top::context_extension_function());
+      context_ext = isolate->factory()->NewJSObject(
+          isolate->context_extension_function());
       // And store it in the extension slot.
       context->set_extension(*context_ext);
     }
@@ -1226,7 +1281,7 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
     // or undefined, and use the correct mode (e.g. READ_ONLY attribute for
     // constant declarations).
     ASSERT(!context_ext->HasLocalProperty(*name));
-    Handle<Object> value(Heap::undefined_value());
+    Handle<Object> value(isolate->heap()->undefined_value(), isolate);
     if (*initial_value != NULL) value = initial_value;
     // Declaring a const context slot is a conflicting declaration if
     // there is a callback with that name in a prototype. It is
@@ -1239,18 +1294,20 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
       LookupResult lookup;
       context_ext->Lookup(*name, &lookup);
       if (lookup.IsProperty() && (lookup.type() == CALLBACKS)) {
-        return ThrowRedeclarationError("const", name);
+        return ThrowRedeclarationError(isolate, "const", name);
       }
     }
-    RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, mode,
+    RETURN_IF_EMPTY_HANDLE(isolate,
+                           SetProperty(context_ext, name, value, mode,
                                        kNonStrictMode));
   }
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
+static MaybeObject* Runtime_InitializeVarGlobal(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation nha;
   // args[0] == name
   // args[1] == strict_mode
@@ -1262,7 +1319,7 @@ static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
   bool assign = args.length() == 3;
 
   CONVERT_ARG_CHECKED(String, name, 0);
-  GlobalObject* global = Top::context()->global();
+  GlobalObject* global = isolate->context()->global();
   RUNTIME_ASSERT(args[1]->IsSmi());
   StrictModeFlag strict_mode =
       static_cast<StrictModeFlag>(Smi::cast(args[1])->value());
@@ -1288,8 +1345,8 @@ static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
       if (lookup.IsReadOnly()) {
        // If we found a readonly property on one of the hidden prototypes,
         // just shadow it.
-        if (real_holder != Top::context()->global()) break;
-        return ThrowRedeclarationError("const", name);
+        if (real_holder != isolate->context()->global()) break;
+        return ThrowRedeclarationError(isolate, "const", name);
       }
 
       // Determine if this is a redeclaration of an intercepted read-only
@@ -1297,7 +1354,7 @@ static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
       bool found = true;
       PropertyType type = lookup.type();
       if (type == INTERCEPTOR) {
-        HandleScope handle_scope;
+        HandleScope handle_scope(isolate);
         Handle<JSObject> holder(real_holder);
         PropertyAttributes intercepted = holder->GetPropertyAttribute(*name);
         real_holder = *holder;
@@ -1310,19 +1367,19 @@ static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
           // overwrite it with a variable declaration we must throw a
          // re-declaration error.  However, if we found a readonly property
          // on one of the hidden prototypes, just shadow it.
-          if (real_holder != Top::context()->global()) break;
-          return ThrowRedeclarationError("const", name);
+          if (real_holder != isolate->context()->global()) break;
+          return ThrowRedeclarationError(isolate, "const", name);
         }
       }
 
       if (found && !assign) {
         // The global property is there and we're not assigning any value
         // to it. Just return.
-        return Heap::undefined_value();
+        return isolate->heap()->undefined_value();
       }
 
       // Assign the value (or undefined) to the property.
-      Object* value = (assign) ? args[2] : Heap::undefined_value();
+      Object* value = (assign) ? args[2] : isolate->heap()->undefined_value();
       return real_holder->SetProperty(
           &lookup, *name, value, attributes, strict_mode);
     }
@@ -1337,15 +1394,16 @@ static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
     real_holder = JSObject::cast(proto);
   }
 
-  global = Top::context()->global();
+  global = isolate->context()->global();
   if (assign) {
     return global->SetProperty(*name, args[2], attributes, strict_mode);
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
+static MaybeObject* Runtime_InitializeConstGlobal(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   // All constants are declared with an initial value. The name
   // of the constant is the first argument and the initial value
   // is the second.
@@ -1354,7 +1412,7 @@ static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
   Handle<Object> value = args.at<Object>(1);
 
   // Get the current global object from top.
-  GlobalObject* global = Top::context()->global();
+  GlobalObject* global = isolate->context()->global();
 
   // According to ECMA-262, section 12.2, page 62, the property must
   // not be deletable. Since it's a const, it must be READ_ONLY too.
@@ -1379,7 +1437,7 @@ static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
   // need to ask it for the property attributes.
   if (!lookup.IsReadOnly()) {
     if (lookup.type() != INTERCEPTOR) {
-      return ThrowRedeclarationError("var", name);
+      return ThrowRedeclarationError(isolate, "var", name);
     }
 
     PropertyAttributes intercepted = global->GetPropertyAttribute(*name);
@@ -1387,20 +1445,21 @@ static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
     // Throw re-declaration error if the intercepted property is present
     // but not read-only.
     if (intercepted != ABSENT && (intercepted & READ_ONLY) == 0) {
-      return ThrowRedeclarationError("var", name);
+      return ThrowRedeclarationError(isolate, "var", name);
     }
 
     // Restore global object from context (in case of GC) and continue
     // with setting the value because the property is either absent or
    // read-only. We also have to redo the lookup.
-    HandleScope handle_scope;
-    Handle<GlobalObject> global(Top::context()->global());
+    HandleScope handle_scope(isolate);
+    Handle<GlobalObject> global(isolate->context()->global());
 
     // BUG 1213575: Handle the case where we have to set a read-only
     // property through an interceptor and only do it if it's
     // uninitialized, e.g. the hole. Nirk...
     // Passing non-strict mode because the property is writable.
-    RETURN_IF_EMPTY_HANDLE(SetProperty(global,
+    RETURN_IF_EMPTY_HANDLE(isolate,
+                           SetProperty(global,
                                        name,
                                        value,
                                        attributes,
@@ -1434,11 +1493,13 @@ static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_InitializeConstContextSlot(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
 
-  Handle<Object> value(args[0]);
+  Handle<Object> value(args[0], isolate);
   ASSERT(!value->IsTheHole());
   CONVERT_ARG_CHECKED(Context, context, 1);
   Handle<String> name(String::cast(args[2]));
@@ -1479,6 +1540,7 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
       ASSERT((attributes & READ_ONLY) == 0);
       Handle<JSObject> arguments(Handle<JSObject>::cast(holder));
       RETURN_IF_EMPTY_HANDLE(
+          isolate,
           SetElement(arguments, index, value, kNonStrictMode));
     }
     return *value;
@@ -1487,9 +1549,11 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
  // The property could not be found, so we introduce it in the global
   // context.
   if (attributes == ABSENT) {
-    Handle<JSObject> global = Handle<JSObject>(Top::context()->global());
+    Handle<JSObject> global = Handle<JSObject>(
+        isolate->context()->global());
     // Strict mode not needed (const disallowed in strict mode).
     RETURN_IF_EMPTY_HANDLE(
+        isolate,
         SetProperty(global, name, value, NONE, kNonStrictMode));
     return *value;
   }
@@ -1529,6 +1593,7 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
     if ((attributes & READ_ONLY) == 0) {
       // Strict mode not needed (const disallowed in strict mode).
       RETURN_IF_EMPTY_HANDLE(
+          isolate,
           SetProperty(context_ext, name, value, attributes, kNonStrictMode));
     }
   }
@@ -1538,8 +1603,9 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
 
 
 static MaybeObject* Runtime_OptimizeObjectForAddingMultipleProperties(
-    Arguments args) {
-  HandleScope scope;
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CONVERT_ARG_CHECKED(JSObject, object, 0);
   CONVERT_SMI_CHECKED(properties, args[1]);
@@ -1550,8 +1616,9 @@ static MaybeObject* Runtime_OptimizeObjectForAddingMultipleProperties(
 }
 
 
-static MaybeObject* Runtime_RegExpExec(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_RegExpExec(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
   CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
   CONVERT_ARG_CHECKED(String, subject, 1);
@@ -1562,7 +1629,7 @@ static MaybeObject* Runtime_RegExpExec(Arguments args) {
   RUNTIME_ASSERT(last_match_info->HasFastElements());
   RUNTIME_ASSERT(index >= 0);
   RUNTIME_ASSERT(index <= subject->length());
-  Counters::regexp_entry_runtime.Increment();
+  isolate->counters()->regexp_entry_runtime()->Increment();
   Handle<Object> result = RegExpImpl::Exec(regexp,
                                            subject,
                                            index,
@@ -1572,31 +1639,31 @@ static MaybeObject* Runtime_RegExpExec(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_RegExpConstructResult(Arguments args) {
+static MaybeObject* Runtime_RegExpConstructResult(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
   CONVERT_SMI_CHECKED(elements_count, args[0]);
   if (elements_count > JSArray::kMaxFastElementsLength) {
-    return Top::ThrowIllegalOperation();
+    return isolate->ThrowIllegalOperation();
   }
   Object* new_object;
   { MaybeObject* maybe_new_object =
-        Heap::AllocateFixedArrayWithHoles(elements_count);
+        isolate->heap()->AllocateFixedArrayWithHoles(elements_count);
     if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object;
   }
   FixedArray* elements = FixedArray::cast(new_object);
-  { MaybeObject* maybe_new_object = Heap::AllocateRaw(JSRegExpResult::kSize,
-                                                      NEW_SPACE,
-                                                      OLD_POINTER_SPACE);
+  { MaybeObject* maybe_new_object = isolate->heap()->AllocateRaw(
+      JSRegExpResult::kSize, NEW_SPACE, OLD_POINTER_SPACE);
     if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object;
   }
   {
     AssertNoAllocation no_gc;
-    HandleScope scope;
+    HandleScope scope(isolate);
     reinterpret_cast<HeapObject*>(new_object)->
-        set_map(Top::global_context()->regexp_result_map());
+        set_map(isolate->global_context()->regexp_result_map());
   }
   JSArray* array = JSArray::cast(new_object);
-  array->set_properties(Heap::empty_fixed_array());
+  array->set_properties(isolate->heap()->empty_fixed_array());
   array->set_elements(elements);
   array->set_length(Smi::FromInt(elements_count));
   // Write in-object properties after the length of the array.
@@ -1606,20 +1673,21 @@ static MaybeObject* Runtime_RegExpConstructResult(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_RegExpInitializeObject(Arguments args) {
+static MaybeObject* Runtime_RegExpInitializeObject(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   AssertNoAllocation no_alloc;
   ASSERT(args.length() == 5);
   CONVERT_CHECKED(JSRegExp, regexp, args[0]);
   CONVERT_CHECKED(String, source, args[1]);
 
   Object* global = args[2];
-  if (!global->IsTrue()) global = Heap::false_value();
+  if (!global->IsTrue()) global = isolate->heap()->false_value();
 
   Object* ignoreCase = args[3];
-  if (!ignoreCase->IsTrue()) ignoreCase = Heap::false_value();
+  if (!ignoreCase->IsTrue()) ignoreCase = isolate->heap()->false_value();
 
   Object* multiline = args[4];
-  if (!multiline->IsTrue()) multiline = Heap::false_value();
+  if (!multiline->IsTrue()) multiline = isolate->heap()->false_value();
 
   Map* map = regexp->map();
   Object* constructor = map->constructor();
@@ -1638,33 +1706,32 @@ static MaybeObject* Runtime_RegExpInitializeObject(Arguments args) {
     return regexp;
   }
 
-  // Map has changed, so use generic, but slower, method.  Since these
-  // properties were all added as DONT_DELETE they must be present and
-  // normal so no failures can be expected.
+  // Map has changed, so use the generic, but slower, method.
   PropertyAttributes final =
       static_cast<PropertyAttributes>(READ_ONLY | DONT_ENUM | DONT_DELETE);
   PropertyAttributes writable =
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
+  Heap* heap = isolate->heap();
   MaybeObject* result;
-  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::source_symbol(),
+  result = regexp->SetLocalPropertyIgnoreAttributes(heap->source_symbol(),
                                                     source,
                                                     final);
   ASSERT(!result->IsFailure());
-  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::global_symbol(),
+  result = regexp->SetLocalPropertyIgnoreAttributes(heap->global_symbol(),
                                                     global,
                                                     final);
   ASSERT(!result->IsFailure());
   result =
-      regexp->SetLocalPropertyIgnoreAttributes(Heap::ignore_case_symbol(),
+      regexp->SetLocalPropertyIgnoreAttributes(heap->ignore_case_symbol(),
                                                ignoreCase,
                                                final);
   ASSERT(!result->IsFailure());
-  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::multiline_symbol(),
+  result = regexp->SetLocalPropertyIgnoreAttributes(heap->multiline_symbol(),
                                                     multiline,
                                                     final);
   ASSERT(!result->IsFailure());
   result =
-      regexp->SetLocalPropertyIgnoreAttributes(Heap::last_index_symbol(),
+      regexp->SetLocalPropertyIgnoreAttributes(heap->last_index_symbol(),
                                                Smi::FromInt(0),
                                                writable);
   ASSERT(!result->IsFailure());
@@ -1673,59 +1740,68 @@ static MaybeObject* Runtime_RegExpInitializeObject(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FinishArrayPrototypeSetup(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_FinishArrayPrototypeSetup(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSArray, prototype, 0);
   // This is necessary to enable fast checks for absence of elements
   // on Array.prototype and below.
-  prototype->set_elements(Heap::empty_fixed_array());
+  prototype->set_elements(isolate->heap()->empty_fixed_array());
   return Smi::FromInt(0);
 }
 
 
-static Handle<JSFunction> InstallBuiltin(Handle<JSObject> holder,
+static Handle<JSFunction> InstallBuiltin(Isolate* isolate,
+                                         Handle<JSObject> holder,
                                          const char* name,
                                          Builtins::Name builtin_name) {
-  Handle<String> key = Factory::LookupAsciiSymbol(name);
-  Handle<Code> code(Builtins::builtin(builtin_name));
-  Handle<JSFunction> optimized = Factory::NewFunction(key,
-                                                      JS_OBJECT_TYPE,
-                                                      JSObject::kHeaderSize,
-                                                      code,
-                                                      false);
+  Handle<String> key = isolate->factory()->LookupAsciiSymbol(name);
+  Handle<Code> code(isolate->builtins()->builtin(builtin_name));
+  Handle<JSFunction> optimized =
+      isolate->factory()->NewFunction(key,
+                                      JS_OBJECT_TYPE,
+                                      JSObject::kHeaderSize,
+                                      code,
+                                      false);
   optimized->shared()->DontAdaptArguments();
   SetProperty(holder, key, optimized, NONE, kStrictMode);
   return optimized;
 }
 
 
-static MaybeObject* Runtime_SpecialArrayFunctions(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SpecialArrayFunctions(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSObject, holder, 0);
 
-  InstallBuiltin(holder, "pop", Builtins::ArrayPop);
-  InstallBuiltin(holder, "push", Builtins::ArrayPush);
-  InstallBuiltin(holder, "shift", Builtins::ArrayShift);
-  InstallBuiltin(holder, "unshift", Builtins::ArrayUnshift);
-  InstallBuiltin(holder, "slice", Builtins::ArraySlice);
-  InstallBuiltin(holder, "splice", Builtins::ArraySplice);
-  InstallBuiltin(holder, "concat", Builtins::ArrayConcat);
+  InstallBuiltin(isolate, holder, "pop", Builtins::ArrayPop);
+  InstallBuiltin(isolate, holder, "push", Builtins::ArrayPush);
+  InstallBuiltin(isolate, holder, "shift", Builtins::ArrayShift);
+  InstallBuiltin(isolate, holder, "unshift", Builtins::ArrayUnshift);
+  InstallBuiltin(isolate, holder, "slice", Builtins::ArraySlice);
+  InstallBuiltin(isolate, holder, "splice", Builtins::ArraySplice);
+  InstallBuiltin(isolate, holder, "concat", Builtins::ArrayConcat);
 
   return *holder;
 }
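
Each InstallBuiltin call above now threads the isolate through as its first argument. Purely as an illustration (not something this patch does), the seven calls can be read as a small data-driven table; the Builtins::Array* names and the isolate-first parameter order come from the hunk, everything else below is hypothetical scaffolding so the sketch stands alone.

    #include <cstdio>

    namespace BuiltinsSketch {
      enum Name { ArrayPop, ArrayPush, ArrayShift, ArrayUnshift,
                  ArraySlice, ArraySplice, ArrayConcat };
    }
    struct IsolateSketch;
    struct JSObjectHandleSketch {};

    void InstallBuiltinSketch(IsolateSketch* isolate, JSObjectHandleSketch holder,
                              const char* name, BuiltinsSketch::Name id) {
      (void) isolate; (void) holder; (void) id;  // the real helper builds a JSFunction
      std::printf("installing Array.prototype.%s\n", name);
    }

    void InstallSpecialArrayFunctionsSketch(IsolateSketch* isolate,
                                            JSObjectHandleSketch holder) {
      struct Entry { const char* name; BuiltinsSketch::Name id; };
      static const Entry kBuiltins[] = {
        {"pop", BuiltinsSketch::ArrayPop},
        {"push", BuiltinsSketch::ArrayPush},
        {"shift", BuiltinsSketch::ArrayShift},
        {"unshift", BuiltinsSketch::ArrayUnshift},
        {"slice", BuiltinsSketch::ArraySlice},
        {"splice", BuiltinsSketch::ArraySplice},
        {"concat", BuiltinsSketch::ArrayConcat},
      };
      for (const Entry& entry : kBuiltins) {
        InstallBuiltinSketch(isolate, holder, entry.name, entry.id);
      }
    }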
 
 
-static MaybeObject* Runtime_GetGlobalReceiver(Arguments args) {
+static MaybeObject* Runtime_GetGlobalReceiver(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
  // Returns the real global receiver, not the builtins object.
-  Context* global_context = Top::context()->global()->global_context();
+  Context* global_context =
+      isolate->context()->global()->global_context();
   return global_context->global()->global_receiver();
 }
 
 
-static MaybeObject* Runtime_MaterializeRegExpLiteral(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_MaterializeRegExpLiteral(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
   CONVERT_ARG_CHECKED(FixedArray, literals, 0);
   int index = Smi::cast(args[1])->value();
@@ -1746,7 +1822,7 @@ static MaybeObject* Runtime_MaterializeRegExpLiteral(Arguments args) {
       RegExpImpl::CreateRegExpLiteral(constructor, pattern, flags,
                                       &has_pending_exception);
   if (has_pending_exception) {
-    ASSERT(Top::has_pending_exception());
+    ASSERT(isolate->has_pending_exception());
     return Failure::Exception();
   }
   literals->set(index, *regexp);
@@ -1754,7 +1830,8 @@ static MaybeObject* Runtime_MaterializeRegExpLiteral(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionGetName(Arguments args) {
+static MaybeObject* Runtime_FunctionGetName(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -1763,44 +1840,47 @@ static MaybeObject* Runtime_FunctionGetName(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionSetName(Arguments args) {
+static MaybeObject* Runtime_FunctionSetName(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(JSFunction, f, args[0]);
   CONVERT_CHECKED(String, name, args[1]);
   f->shared()->set_name(name);
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_FunctionRemovePrototype(Arguments args) {
+static MaybeObject* Runtime_FunctionRemovePrototype(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSFunction, f, args[0]);
-  Object* obj;
-  { MaybeObject* maybe_obj = f->RemovePrototype();
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
+  Object* obj = f->RemovePrototype();
+  if (obj->IsFailure()) return obj;
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_FunctionGetScript(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_FunctionGetScript(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSFunction, fun, args[0]);
-  Handle<Object> script = Handle<Object>(fun->shared()->script());
-  if (!script->IsScript()) return Heap::undefined_value();
+  Handle<Object> script = Handle<Object>(fun->shared()->script(), isolate);
+  if (!script->IsScript()) return isolate->heap()->undefined_value();
 
   return *GetScriptWrapper(Handle<Script>::cast(script));
 }
 
 
-static MaybeObject* Runtime_FunctionGetSourceCode(Arguments args) {
+static MaybeObject* Runtime_FunctionGetSourceCode(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -1809,7 +1889,9 @@ static MaybeObject* Runtime_FunctionGetSourceCode(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionGetScriptSourcePosition(Arguments args) {
+static MaybeObject* Runtime_FunctionGetScriptSourcePosition(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -1819,7 +1901,9 @@ static MaybeObject* Runtime_FunctionGetScriptSourcePosition(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionGetPositionForOffset(Arguments args) {
+static MaybeObject* Runtime_FunctionGetPositionForOffset(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(Code, code, args[0]);
@@ -1832,19 +1916,21 @@ static MaybeObject* Runtime_FunctionGetPositionForOffset(Arguments args) {
 }
 
 
-
-static MaybeObject* Runtime_FunctionSetInstanceClassName(Arguments args) {
+static MaybeObject* Runtime_FunctionSetInstanceClassName(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(JSFunction, fun, args[0]);
   CONVERT_CHECKED(String, name, args[1]);
   fun->SetInstanceClassName(name);
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_FunctionSetLength(Arguments args) {
+static MaybeObject* Runtime_FunctionSetLength(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -1855,7 +1941,8 @@ static MaybeObject* Runtime_FunctionSetLength(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionSetPrototype(Arguments args) {
+static MaybeObject* Runtime_FunctionSetPrototype(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -1870,26 +1957,31 @@ static MaybeObject* Runtime_FunctionSetPrototype(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_FunctionIsAPIFunction(Arguments args) {
+static MaybeObject* Runtime_FunctionIsAPIFunction(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSFunction, f, args[0]);
-  return f->shared()->IsApiFunction() ? Heap::true_value()
-                                      : Heap::false_value();
+  return f->shared()->IsApiFunction() ? isolate->heap()->true_value()
+                                      : isolate->heap()->false_value();
 }
 
-static MaybeObject* Runtime_FunctionIsBuiltin(Arguments args) {
+
+static MaybeObject* Runtime_FunctionIsBuiltin(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSFunction, f, args[0]);
-  return f->IsBuiltin() ? Heap::true_value() : Heap::false_value();
+  return f->IsBuiltin() ? isolate->heap()->true_value() :
+                          isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_SetCode(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SetCode(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   CONVERT_ARG_CHECKED(JSFunction, target, 0);
@@ -1921,7 +2013,7 @@ static MaybeObject* Runtime_SetCode(Arguments args) {
     // SetCode is only used for built-in constructors like String,
     // Array, and Object, and some web code
     // doesn't like seeing source code for constructors.
-    target->shared()->set_script(Heap::undefined_value());
+    target->shared()->set_script(isolate->heap()->undefined_value());
     target->shared()->code()->set_optimizable(false);
     // Clear the optimization hints related to the compiled code as these are no
     // longer valid when the code is overwritten.
@@ -1932,7 +2024,7 @@ static MaybeObject* Runtime_SetCode(Arguments args) {
     // cross context contamination.
     int number_of_literals = fun->NumberOfLiterals();
     Handle<FixedArray> literals =
-        Factory::NewFixedArray(number_of_literals, TENURED);
+        isolate->factory()->NewFixedArray(number_of_literals, TENURED);
     if (number_of_literals > 0) {
       // Insert the object, regexp and array functions in the literals
       // array prefix.  These are the functions that will be used when
@@ -1943,7 +2035,7 @@ static MaybeObject* Runtime_SetCode(Arguments args) {
     // It's okay to skip the write barrier here because the literals
     // are guaranteed to be in old space.
     target->set_literals(*literals, SKIP_WRITE_BARRIER);
-    target->set_next_function_link(Heap::undefined_value());
+    target->set_next_function_link(isolate->heap()->undefined_value());
   }
 
   target->set_context(*context);
@@ -1951,29 +2043,33 @@ static MaybeObject* Runtime_SetCode(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_SetExpectedNumberOfProperties(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SetExpectedNumberOfProperties(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
   CONVERT_SMI_CHECKED(num, args[1]);
   RUNTIME_ASSERT(num >= 0);
   SetExpectedNofProperties(function, num);
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-MUST_USE_RESULT static MaybeObject* CharFromCode(Object* char_code) {
+MUST_USE_RESULT static MaybeObject* CharFromCode(Isolate* isolate,
+                                                 Object* char_code) {
   uint32_t code;
   if (char_code->ToArrayIndex(&code)) {
     if (code <= 0xffff) {
-      return Heap::LookupSingleCharacterStringFromCode(code);
+      return isolate->heap()->LookupSingleCharacterStringFromCode(code);
     }
   }
-  return Heap::empty_string();
+  return isolate->heap()->empty_string();
 }
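
CharFromCode takes the isolate so it can reach the heap's single-character string table. Its contract is easy to miss in the diff: a code only yields a one-character string when it converts to an array index and fits in the BMP (<= 0xffff); anything else silently yields the empty string. Below is a simplified, self-contained sketch of just the bounds logic; std::string stands in for the heap string and the UTF-16 case is elided.

    #include <cmath>
    #include <cstdint>
    #include <string>

    // Simplified mirror of the bounds checks in CharFromCode; the real code
    // returns a heap string from the single-character table.
    std::string CharFromCodeSketch(double char_code) {
      // Roughly what ToArrayIndex enforces: a non-negative integral value
      // that fits in a uint32.
      if (char_code < 0 || char_code > 4294967295.0 ||
          char_code != std::floor(char_code)) {
        return std::string();                    // not a usable index: ""
      }
      uint32_t code = static_cast<uint32_t>(char_code);
      if (code > 0xffff) return std::string();   // outside the BMP: ""
      return std::string(1, static_cast<char>(code));
    }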
 
 
-static MaybeObject* Runtime_StringCharCodeAt(Arguments args) {
+static MaybeObject* Runtime_StringCharCodeAt(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -1984,7 +2080,7 @@ static MaybeObject* Runtime_StringCharCodeAt(Arguments args) {
   uint32_t i = 0;
   if (index->IsSmi()) {
     int value = Smi::cast(index)->value();
-    if (value < 0) return Heap::nan_value();
+    if (value < 0) return isolate->heap()->nan_value();
     i = value;
   } else {
     ASSERT(index->IsHeapNumber());
@@ -2002,24 +2098,25 @@ static MaybeObject* Runtime_StringCharCodeAt(Arguments args) {
   subject = String::cast(flat);
 
   if (i >= static_cast<uint32_t>(subject->length())) {
-    return Heap::nan_value();
+    return isolate->heap()->nan_value();
   }
 
   return Smi::FromInt(subject->Get(i));
 }
 
 
-static MaybeObject* Runtime_CharFromCode(Arguments args) {
+static MaybeObject* Runtime_CharFromCode(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  return CharFromCode(args[0]);
+  return CharFromCode(isolate, args[0]);
 }
 
 
 class FixedArrayBuilder {
  public:
-  explicit FixedArrayBuilder(int initial_capacity)
-      : array_(Factory::NewFixedArrayWithHoles(initial_capacity)),
+  explicit FixedArrayBuilder(Isolate* isolate, int initial_capacity)
+      : array_(isolate->factory()->NewFixedArrayWithHoles(initial_capacity)),
         length_(0) {
     // Require a non-zero initial size. Ensures that doubling the size to
     // extend the array will work.
@@ -2049,7 +2146,7 @@ class FixedArrayBuilder {
         new_length *= 2;
       } while (new_length < required_length);
       Handle<FixedArray> extended_array =
-          Factory::NewFixedArrayWithHoles(new_length);
+          array_->GetIsolate()->factory()->NewFixedArrayWithHoles(new_length);
       array_->CopyTo(0, *extended_array, 0, length_);
       array_ = extended_array;
     }
@@ -2080,7 +2177,7 @@ class FixedArrayBuilder {
   }
 
   Handle<JSArray> ToJSArray() {
-    Handle<JSArray> result_array = Factory::NewJSArrayWithElements(array_);
+    Handle<JSArray> result_array = FACTORY->NewJSArrayWithElements(array_);
     result_array->set_length(Smi::FromInt(length_));
     return result_array;
   }
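
FixedArrayBuilder now allocates its backing FixedArray through a factory reached via the isolate, but the growth policy is unchanged: EnsureCapacity doubles the current capacity until the required length fits, which is why the constructor insists on a non-zero initial capacity. A self-contained sketch of that policy, with std::vector standing in for the FixedArray and resize standing in for the copy into a fresh array:

    #include <vector>

    // Sketch of FixedArrayBuilder::EnsureCapacity's doubling loop.
    void EnsureCapacitySketch(std::vector<int>* backing, int length, int elements) {
      int current_capacity = static_cast<int>(backing->size());
      int required = length + elements;
      if (required > current_capacity) {
        int new_capacity = current_capacity;  // must start non-zero for doubling
        do {
          new_capacity *= 2;
        } while (new_capacity < required);
        backing->resize(new_capacity);        // the real code copies into a new array
      }
    }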
@@ -2117,8 +2214,11 @@ typedef BitField<int,
 
 class ReplacementStringBuilder {
  public:
-  ReplacementStringBuilder(Handle<String> subject, int estimated_part_count)
-      : array_builder_(estimated_part_count),
+  ReplacementStringBuilder(Heap* heap,
+                           Handle<String> subject,
+                           int estimated_part_count)
+      : heap_(heap),
+        array_builder_(heap->isolate(), estimated_part_count),
         subject_(subject),
         character_count_(0),
         is_ascii_(subject->IsAsciiRepresentation()) {
@@ -2170,7 +2270,7 @@ class ReplacementStringBuilder {
 
   Handle<String> ToString() {
     if (array_builder_.length() == 0) {
-      return Factory::empty_string();
+      return heap_->isolate()->factory()->empty_string();
     }
 
     Handle<String> joined_string;
@@ -2211,12 +2311,14 @@ class ReplacementStringBuilder {
 
  private:
   Handle<String> NewRawAsciiString(int size) {
-    CALL_HEAP_FUNCTION(Heap::AllocateRawAsciiString(size), String);
+    CALL_HEAP_FUNCTION(heap_->isolate(),
+                       heap_->AllocateRawAsciiString(size), String);
   }
 
 
   Handle<String> NewRawTwoByteString(int size) {
-    CALL_HEAP_FUNCTION(Heap::AllocateRawTwoByteString(size), String);
+    CALL_HEAP_FUNCTION(heap_->isolate(),
+                       heap_->AllocateRawTwoByteString(size), String);
   }
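
Both raw-string helpers now hand the isolate to CALL_HEAP_FUNCTION alongside the allocation expression. The macro itself is defined in the handles code, not in this diff; roughly, it performs the allocation, retries after a garbage collection if the allocation asks for one, and wraps the result in a Handle. The sketch below shows only that retry shape, with stand-in types rather than MaybeObject*/Handle<T>.

    #include <cstdlib>

    // Stand-in heap; the real macro operates on MaybeObject* and Handle<T>.
    struct SketchHeap {
      bool needs_gc = true;
      void* AllocateRawString(unsigned size) {
        if (needs_gc) return nullptr;   // models an allocation asking for a GC
        return std::malloc(size);
      }
      void CollectAllGarbage() { needs_gc = false; }
    };

    // Rough shape of CALL_HEAP_FUNCTION: try, collect garbage on failure, retry.
    void* CallHeapFunctionSketch(SketchHeap* heap, unsigned size) {
      void* result = heap->AllocateRawString(size);
      if (result != nullptr) return result;
      heap->CollectAllGarbage();
      return heap->AllocateRawString(size);  // a second failure means real OOM
    }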
 
 
@@ -2226,6 +2328,7 @@ class ReplacementStringBuilder {
     array_builder_.Add(element);
   }
 
+  Heap* heap_;
   FixedArrayBuilder array_builder_;
   Handle<String> subject_;
   int character_count_;
@@ -2436,6 +2539,7 @@ void CompiledReplacement::Compile(Handle<String> replacement,
                             capture_count,
                             subject_length);
   }
+  Isolate* isolate = replacement->GetIsolate();
   // Find substrings of replacement string and create them as String objects.
   int substring_index = 0;
   for (int i = 0, n = parts_.length(); i < n; i++) {
@@ -2443,7 +2547,8 @@ void CompiledReplacement::Compile(Handle<String> replacement,
     if (tag <= 0) {  // A replacement string slice.
       int from = -tag;
       int to = parts_[i].data;
-      replacement_substrings_.Add(Factory::NewSubString(replacement, from, to));
+      replacement_substrings_.Add(
+          isolate->factory()->NewSubString(replacement, from, to));
       parts_[i].tag = REPLACEMENT_SUBSTRING;
       parts_[i].data = substring_index;
       substring_index++;
@@ -2496,6 +2601,7 @@ void CompiledReplacement::Apply(ReplacementStringBuilder* builder,
 
 
 MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
+    Isolate* isolate,
     String* subject,
     JSRegExp* regexp,
     String* replacement,
@@ -2503,7 +2609,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
   ASSERT(subject->IsFlat());
   ASSERT(replacement->IsFlat());
 
-  HandleScope handles;
+  HandleScope handles(isolate);
 
   int length = subject->length();
   Handle<String> subject_handle(subject);
@@ -2537,7 +2643,9 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
   // conservatively.
   int expected_parts =
       (compiled_replacement.parts() + 1) * (is_global ? 4 : 1) + 1;
-  ReplacementStringBuilder builder(subject_handle, expected_parts);
+  ReplacementStringBuilder builder(isolate->heap(),
+                                   subject_handle,
+                                   expected_parts);
 
   // Index of end of last match.
   int prev = 0;
@@ -2553,7 +2661,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
     // so its internal buffer can safely allocate a new handle if it grows.
     builder.EnsureCapacity(parts_added_per_loop);
 
-    HandleScope loop_scope;
+    HandleScope loop_scope(isolate);
     int start, end;
     {
       AssertNoAllocation match_info_array_is_not_in_a_handle;
@@ -2605,12 +2713,13 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
 
 template <typename ResultSeqString>
 MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
+    Isolate* isolate,
     String* subject,
     JSRegExp* regexp,
     JSArray* last_match_info) {
   ASSERT(subject->IsFlat());
 
-  HandleScope handles;
+  HandleScope handles(isolate);
 
   Handle<String> subject_handle(subject);
   Handle<JSRegExp> regexp_handle(regexp);
@@ -2624,7 +2733,6 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
 
   ASSERT(last_match_info_handle->HasFastElements());
 
-  HandleScope loop_scope;
   int start, end;
   {
     AssertNoAllocation match_info_array_is_not_in_a_handle;
@@ -2638,15 +2746,15 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
   int length = subject->length();
   int new_length = length - (end - start);
   if (new_length == 0) {
-    return Heap::empty_string();
+    return isolate->heap()->empty_string();
   }
   Handle<ResultSeqString> answer;
   if (ResultSeqString::kHasAsciiEncoding) {
-    answer =
-        Handle<ResultSeqString>::cast(Factory::NewRawAsciiString(new_length));
+    answer = Handle<ResultSeqString>::cast(
+        isolate->factory()->NewRawAsciiString(new_length));
   } else {
-    answer =
-        Handle<ResultSeqString>::cast(Factory::NewRawTwoByteString(new_length));
+    answer = Handle<ResultSeqString>::cast(
+        isolate->factory()->NewRawTwoByteString(new_length));
   }
 
   // If the regexp isn't global, only match once.
@@ -2694,7 +2802,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
     if (match->IsNull()) break;
 
     ASSERT(last_match_info_handle->HasFastElements());
-    HandleScope loop_scope;
+    HandleScope loop_scope(isolate);
     {
       AssertNoAllocation match_info_array_is_not_in_a_handle;
       FixedArray* match_info_array =
@@ -2714,7 +2822,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
   }
 
   if (position == 0) {
-    return Heap::empty_string();
+    return isolate->heap()->empty_string();
   }
 
   // Shorten string and fill
@@ -2726,13 +2834,15 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
   if (delta == 0) return *answer;
 
   Address end_of_string = answer->address() + string_size;
-  Heap::CreateFillerObjectAt(end_of_string, delta);
+  isolate->heap()->CreateFillerObjectAt(end_of_string, delta);
 
   return *answer;
 }
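
One detail worth calling out in StringReplaceRegExpWithEmptyString: the answer string is allocated for the full subject length before any matches are removed, so once the real character count is known the unused tail of the allocation is turned into a filler object via the isolate's heap (CreateFillerObjectAt), keeping the heap iterable instead of leaving a gap. The size arithmetic, sketched stand-alone with a hypothetical header size and no allocation-alignment rounding:

    #include <cstddef>

    // Hypothetical layout; the real SizeFor() also rounds up to the heap's
    // allocation alignment.
    const std::size_t kSeqStringHeaderSizeSketch = 16;

    std::size_t SeqStringSizeForSketch(int chars) {
      return kSeqStringHeaderSizeSketch + static_cast<std::size_t>(chars);
    }

    // Bytes handed back to the heap as a filler object after the answer string
    // is shortened from its allocated length to the characters actually used.
    std::size_t FillerDeltaSketch(int allocated_chars, int used_chars) {
      std::size_t allocated_size = SeqStringSizeForSketch(allocated_chars);
      std::size_t string_size = SeqStringSizeForSketch(used_chars);
      return allocated_size - string_size;   // 0 means nothing needs filling
    }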
 
 
-static MaybeObject* Runtime_StringReplaceRegExpWithString(Arguments args) {
+static MaybeObject* Runtime_StringReplaceRegExpWithString(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
 
   CONVERT_CHECKED(String, subject, args[0]);
@@ -2765,14 +2875,15 @@ static MaybeObject* Runtime_StringReplaceRegExpWithString(Arguments args) {
   if (replacement->length() == 0) {
     if (subject->HasOnlyAsciiChars()) {
       return StringReplaceRegExpWithEmptyString<SeqAsciiString>(
-          subject, regexp, last_match_info);
+          isolate, subject, regexp, last_match_info);
     } else {
       return StringReplaceRegExpWithEmptyString<SeqTwoByteString>(
-          subject, regexp, last_match_info);
+          isolate, subject, regexp, last_match_info);
     }
   }
 
-  return StringReplaceRegExpWithString(subject,
+  return StringReplaceRegExpWithString(isolate,
+                                       subject,
                                        regexp,
                                        replacement,
                                        last_match_info);
@@ -2782,7 +2893,8 @@ static MaybeObject* Runtime_StringReplaceRegExpWithString(Arguments args) {
 // Perform string match of pattern on subject, starting at start index.
 // Caller must ensure that 0 <= start_index <= sub->length(),
 // and should check that pat->length() + start_index <= sub->length().
-int Runtime::StringMatch(Handle<String> sub,
+int Runtime::StringMatch(Isolate* isolate,
+                         Handle<String> sub,
                          Handle<String> pat,
                          int start_index) {
   ASSERT(0 <= start_index);
@@ -2808,20 +2920,33 @@ int Runtime::StringMatch(Handle<String> sub,
   if (seq_pat->IsAsciiRepresentation()) {
     Vector<const char> pat_vector = seq_pat->ToAsciiVector();
     if (seq_sub->IsAsciiRepresentation()) {
-      return SearchString(seq_sub->ToAsciiVector(), pat_vector, start_index);
+      return SearchString(isolate,
+                          seq_sub->ToAsciiVector(),
+                          pat_vector,
+                          start_index);
     }
-    return SearchString(seq_sub->ToUC16Vector(), pat_vector, start_index);
+    return SearchString(isolate,
+                        seq_sub->ToUC16Vector(),
+                        pat_vector,
+                        start_index);
   }
   Vector<const uc16> pat_vector = seq_pat->ToUC16Vector();
   if (seq_sub->IsAsciiRepresentation()) {
-    return SearchString(seq_sub->ToAsciiVector(), pat_vector, start_index);
+    return SearchString(isolate,
+                        seq_sub->ToAsciiVector(),
+                        pat_vector,
+                        start_index);
   }
-  return SearchString(seq_sub->ToUC16Vector(), pat_vector, start_index);
+  return SearchString(isolate,
+                      seq_sub->ToUC16Vector(),
+                      pat_vector,
+                      start_index);
 }
 
 
-static MaybeObject* Runtime_StringIndexOf(Arguments args) {
-  HandleScope scope;  // create a new handle scope
+static MaybeObject* Runtime_StringIndexOf(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);  // create a new handle scope
   ASSERT(args.length() == 3);
 
   CONVERT_ARG_CHECKED(String, sub, 0);
@@ -2832,7 +2957,8 @@ static MaybeObject* Runtime_StringIndexOf(Arguments args) {
   if (!index->ToArrayIndex(&start_index)) return Smi::FromInt(-1);
 
   RUNTIME_ASSERT(start_index <= static_cast<uint32_t>(sub->length()));
-  int position = Runtime::StringMatch(sub, pat, start_index);
+  int position =
+      Runtime::StringMatch(isolate, sub, pat, start_index);
   return Smi::FromInt(position);
 }
 
@@ -2871,8 +2997,9 @@ static int StringMatchBackwards(Vector<const schar> subject,
   return -1;
 }
 
-static MaybeObject* Runtime_StringLastIndexOf(Arguments args) {
-  HandleScope scope;  // create a new handle scope
+static MaybeObject* Runtime_StringLastIndexOf(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);  // create a new handle scope
   ASSERT(args.length() == 3);
 
   CONVERT_ARG_CHECKED(String, sub, 0);
@@ -2928,7 +3055,8 @@ static MaybeObject* Runtime_StringLastIndexOf(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_StringLocaleCompare(Arguments args) {
+static MaybeObject* Runtime_StringLocaleCompare(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -2958,8 +3086,10 @@ static MaybeObject* Runtime_StringLocaleCompare(Arguments args) {
   str1->TryFlatten();
   str2->TryFlatten();
 
-  static StringInputBuffer buf1;
-  static StringInputBuffer buf2;
+  StringInputBuffer& buf1 =
+      *isolate->runtime_state()->string_locale_compare_buf1();
+  StringInputBuffer& buf2 =
+      *isolate->runtime_state()->string_locale_compare_buf2();
 
   buf1.Reset(str1);
   buf2.Reset(str2);
@@ -2974,7 +3104,8 @@ static MaybeObject* Runtime_StringLocaleCompare(Arguments args) {
 }
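
This hunk changes behaviour rather than just plumbing: the two function-local static StringInputBuffer objects were process-wide, so two isolates running StringLocaleCompare concurrently could clobber each other's buffer. They now live in per-isolate runtime state. All of the types in the sketch below are stand-ins; it only shows the before/after shape of where the buffers live.

    // Stand-in types; the real ones are StringInputBuffer, RuntimeState and
    // Isolate.
    struct BufferSketch {
      const char* current = nullptr;
      void Reset(const char* s) { current = s; }
    };
    struct RuntimeStateSketch {
      BufferSketch string_locale_compare_buf1;
      BufferSketch string_locale_compare_buf2;
    };
    struct IsolateSketch {
      RuntimeStateSketch state;
      RuntimeStateSketch* runtime_state() { return &state; }
    };

    void LocaleCompareSketch(IsolateSketch* isolate, const char* a, const char* b) {
      // Before: `static BufferSketch buf1;`, one buffer shared by the whole
      // process. After: each isolate owns its own pair.
      BufferSketch& buf1 = isolate->runtime_state()->string_locale_compare_buf1;
      BufferSketch& buf2 = isolate->runtime_state()->string_locale_compare_buf2;
      buf1.Reset(a);
      buf2.Reset(b);
      // ...character-by-character comparison proceeds as before...
    }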
 
 
-static MaybeObject* Runtime_SubString(Arguments args) {
+static MaybeObject* Runtime_SubString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -2996,12 +3127,13 @@ static MaybeObject* Runtime_SubString(Arguments args) {
   RUNTIME_ASSERT(end >= start);
   RUNTIME_ASSERT(start >= 0);
   RUNTIME_ASSERT(end <= value->length());
-  Counters::sub_string_runtime.Increment();
+  isolate->counters()->sub_string_runtime()->Increment();
   return value->SubString(start, end);
 }
 
 
-static MaybeObject* Runtime_StringMatch(Arguments args) {
+static MaybeObject* Runtime_StringMatch(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT_EQ(3, args.length());
 
   CONVERT_ARG_CHECKED(String, subject, 0);
@@ -3015,7 +3147,7 @@ static MaybeObject* Runtime_StringMatch(Arguments args) {
     return Failure::Exception();
   }
   if (match->IsNull()) {
-    return Heap::null_value();
+    return isolate->heap()->null_value();
   }
   int length = subject->length();
 
@@ -3040,14 +3172,14 @@ static MaybeObject* Runtime_StringMatch(Arguments args) {
     }
   } while (!match->IsNull());
   int matches = offsets.length() / 2;
-  Handle<FixedArray> elements = Factory::NewFixedArray(matches);
+  Handle<FixedArray> elements = isolate->factory()->NewFixedArray(matches);
   for (int i = 0; i < matches ; i++) {
     int from = offsets.at(i * 2);
     int to = offsets.at(i * 2 + 1);
-    Handle<String> match = Factory::NewSubString(subject, from, to);
+    Handle<String> match = isolate->factory()->NewSubString(subject, from, to);
     elements->set(i, *match);
   }
-  Handle<JSArray> result = Factory::NewJSArrayWithElements(elements);
+  Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(elements);
   result->set_length(Smi::FromInt(matches));
   return *result;
 }
@@ -3074,7 +3206,8 @@ static void SetLastMatchInfoNoCaptures(Handle<String> subject,
 
 
 template <typename SubjectChar, typename PatternChar>
-static bool SearchStringMultiple(Vector<const SubjectChar> subject,
+static bool SearchStringMultiple(Isolate* isolate,
+                                 Vector<const SubjectChar> subject,
                                  Vector<const PatternChar> pattern,
                                  String* pattern_string,
                                  FixedArrayBuilder* builder,
@@ -3083,7 +3216,7 @@ static bool SearchStringMultiple(Vector<const SubjectChar> subject,
   int subject_length = subject.length();
   int pattern_length = pattern.length();
   int max_search_start = subject_length - pattern_length;
-  StringSearch<PatternChar, SubjectChar> search(pattern);
+  StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
   while (pos <= max_search_start) {
     if (!builder->HasCapacity(kMaxBuilderEntriesPerRegExpMatch)) {
       *match_pos = pos;
@@ -3116,7 +3249,8 @@ static bool SearchStringMultiple(Vector<const SubjectChar> subject,
 }
 
 
-static bool SearchStringMultiple(Handle<String> subject,
+static bool SearchStringMultiple(Isolate* isolate,
+                                 Handle<String> subject,
                                  Handle<String> pattern,
                                  Handle<JSArray> last_match_info,
                                  FixedArrayBuilder* builder) {
@@ -3132,13 +3266,15 @@ static bool SearchStringMultiple(Handle<String> subject,
     if (subject->IsAsciiRepresentation()) {
       Vector<const char> subject_vector = subject->ToAsciiVector();
       if (pattern->IsAsciiRepresentation()) {
-        if (SearchStringMultiple(subject_vector,
+        if (SearchStringMultiple(isolate,
+                                 subject_vector,
                                  pattern->ToAsciiVector(),
                                  *pattern,
                                  builder,
                                  &match_pos)) break;
       } else {
-        if (SearchStringMultiple(subject_vector,
+        if (SearchStringMultiple(isolate,
+                                 subject_vector,
                                  pattern->ToUC16Vector(),
                                  *pattern,
                                  builder,
@@ -3147,13 +3283,15 @@ static bool SearchStringMultiple(Handle<String> subject,
     } else {
       Vector<const uc16> subject_vector = subject->ToUC16Vector();
       if (pattern->IsAsciiRepresentation()) {
-        if (SearchStringMultiple(subject_vector,
+        if (SearchStringMultiple(isolate,
+                                 subject_vector,
                                  pattern->ToAsciiVector(),
                                  *pattern,
                                  builder,
                                  &match_pos)) break;
       } else {
-        if (SearchStringMultiple(subject_vector,
+        if (SearchStringMultiple(isolate,
+                                 subject_vector,
                                  pattern->ToUC16Vector(),
                                  *pattern,
                                  builder,
@@ -3174,6 +3312,7 @@ static bool SearchStringMultiple(Handle<String> subject,
 
 
 static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
+    Isolate* isolate,
     Handle<String> subject,
     Handle<JSRegExp> regexp,
     Handle<JSArray> last_match_array,
@@ -3204,8 +3343,10 @@ static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
                                                   match_start);
       }
       match_end = register_vector[1];
-      HandleScope loop_scope;
-      builder->Add(*Factory::NewSubString(subject, match_start, match_end));
+      HandleScope loop_scope(isolate);
+      builder->Add(*isolate->factory()->NewSubString(subject,
+                                                     match_start,
+                                                     match_end));
       if (match_start != match_end) {
         pos = match_end;
       } else {
@@ -3238,6 +3379,7 @@ static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
 
 
 static RegExpImpl::IrregexpResult SearchRegExpMultiple(
+    Isolate* isolate,
     Handle<String> subject,
     Handle<JSRegExp> regexp,
     Handle<JSArray> last_match_array,
@@ -3281,31 +3423,32 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
 
       {
         // Avoid accumulating new handles inside loop.
-        HandleScope temp_scope;
+        HandleScope temp_scope(isolate);
         // Arguments array to replace function is match, captures, index and
         // subject, i.e., 3 + capture count in total.
-        Handle<FixedArray> elements = Factory::NewFixedArray(3 + capture_count);
-        Handle<String> match = Factory::NewSubString(subject,
-                                                     match_start,
-                                                     match_end);
+        Handle<FixedArray> elements =
+            isolate->factory()->NewFixedArray(3 + capture_count);
+        Handle<String> match = isolate->factory()->NewSubString(subject,
+                                                                match_start,
+                                                                match_end);
         elements->set(0, *match);
         for (int i = 1; i <= capture_count; i++) {
           int start = register_vector[i * 2];
           if (start >= 0) {
             int end = register_vector[i * 2 + 1];
             ASSERT(start <= end);
-            Handle<String> substring = Factory::NewSubString(subject,
-                                                             start,
-                                                             end);
+            Handle<String> substring = isolate->factory()->NewSubString(subject,
+                                                                        start,
+                                                                        end);
             elements->set(i, *substring);
           } else {
             ASSERT(register_vector[i * 2 + 1] < 0);
-            elements->set(i, Heap::undefined_value());
+            elements->set(i, isolate->heap()->undefined_value());
           }
         }
         elements->set(capture_count + 1, Smi::FromInt(match_start));
         elements->set(capture_count + 2, *subject);
-        builder->Add(*Factory::NewJSArrayWithElements(elements));
+        builder->Add(*isolate->factory()->NewJSArrayWithElements(elements));
       }
       // Swap register vectors, so the last successful match is in
       // prev_register_vector.
@@ -3356,9 +3499,10 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
 }
 
 
-static MaybeObject* Runtime_RegExpExecMultiple(Arguments args) {
+static MaybeObject* Runtime_RegExpExecMultiple(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
-  HandleScope handles;
+  HandleScope handles(isolate);
 
   CONVERT_ARG_CHECKED(String, subject, 1);
   if (!subject->IsFlat()) { FlattenString(subject); }
@@ -3373,7 +3517,7 @@ static MaybeObject* Runtime_RegExpExecMultiple(Arguments args) {
     result_elements =
         Handle<FixedArray>(FixedArray::cast(result_array->elements()));
   } else {
-    result_elements = Factory::NewFixedArrayWithHoles(16);
+    result_elements = isolate->factory()->NewFixedArrayWithHoles(16);
   }
   FixedArrayBuilder builder(result_elements);
 
@@ -3381,31 +3525,38 @@ static MaybeObject* Runtime_RegExpExecMultiple(Arguments args) {
     Handle<String> pattern(
         String::cast(regexp->DataAt(JSRegExp::kAtomPatternIndex)));
     ASSERT(pattern->IsFlat());
-    if (SearchStringMultiple(subject, pattern, last_match_info, &builder)) {
+    if (SearchStringMultiple(isolate, subject, pattern,
+                             last_match_info, &builder)) {
       return *builder.ToJSArray(result_array);
     }
-    return Heap::null_value();
+    return isolate->heap()->null_value();
   }
 
   ASSERT_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
 
   RegExpImpl::IrregexpResult result;
   if (regexp->CaptureCount() == 0) {
-    result = SearchRegExpNoCaptureMultiple(subject,
+    result = SearchRegExpNoCaptureMultiple(isolate,
+                                           subject,
                                            regexp,
                                            last_match_info,
                                            &builder);
   } else {
-    result = SearchRegExpMultiple(subject, regexp, last_match_info, &builder);
+    result = SearchRegExpMultiple(isolate,
+                                  subject,
+                                  regexp,
+                                  last_match_info,
+                                  &builder);
   }
   if (result == RegExpImpl::RE_SUCCESS) return *builder.ToJSArray(result_array);
-  if (result == RegExpImpl::RE_FAILURE) return Heap::null_value();
+  if (result == RegExpImpl::RE_FAILURE) return isolate->heap()->null_value();
   ASSERT_EQ(result, RegExpImpl::RE_EXCEPTION);
   return Failure::Exception();
 }
 
 
-static MaybeObject* Runtime_NumberToRadixString(Arguments args) {
+static MaybeObject* Runtime_NumberToRadixString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -3417,100 +3568,108 @@ static MaybeObject* Runtime_NumberToRadixString(Arguments args) {
       RUNTIME_ASSERT(radix <= 36);
       // Character array used for conversion.
       static const char kCharTable[] = "0123456789abcdefghijklmnopqrstuvwxyz";
-      return Heap::LookupSingleCharacterStringFromCode(kCharTable[value]);
+      return isolate->heap()->
+          LookupSingleCharacterStringFromCode(kCharTable[value]);
     }
   }
 
   // Slow case.
   CONVERT_DOUBLE_CHECKED(value, args[0]);
   if (isnan(value)) {
-    return Heap::AllocateStringFromAscii(CStrVector("NaN"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("NaN"));
   }
   if (isinf(value)) {
     if (value < 0) {
-      return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
+      return isolate->heap()->AllocateStringFromAscii(CStrVector("-Infinity"));
     }
-    return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("Infinity"));
   }
   CONVERT_DOUBLE_CHECKED(radix_number, args[1]);
   int radix = FastD2I(radix_number);
   RUNTIME_ASSERT(2 <= radix && radix <= 36);
   char* str = DoubleToRadixCString(value, radix);
-  MaybeObject* result = Heap::AllocateStringFromAscii(CStrVector(str));
+  MaybeObject* result =
+      isolate->heap()->AllocateStringFromAscii(CStrVector(str));
   DeleteArray(str);
   return result;
 }
 
 
-static MaybeObject* Runtime_NumberToFixed(Arguments args) {
+static MaybeObject* Runtime_NumberToFixed(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(value, args[0]);
   if (isnan(value)) {
-    return Heap::AllocateStringFromAscii(CStrVector("NaN"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("NaN"));
   }
   if (isinf(value)) {
     if (value < 0) {
-      return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
+      return isolate->heap()->AllocateStringFromAscii(CStrVector("-Infinity"));
     }
-    return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("Infinity"));
   }
   CONVERT_DOUBLE_CHECKED(f_number, args[1]);
   int f = FastD2I(f_number);
   RUNTIME_ASSERT(f >= 0);
   char* str = DoubleToFixedCString(value, f);
-  MaybeObject* result = Heap::AllocateStringFromAscii(CStrVector(str));
+  MaybeObject* res =
+      isolate->heap()->AllocateStringFromAscii(CStrVector(str));
   DeleteArray(str);
-  return result;
+  return res;
 }
 
 
-static MaybeObject* Runtime_NumberToExponential(Arguments args) {
+static MaybeObject* Runtime_NumberToExponential(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(value, args[0]);
   if (isnan(value)) {
-    return Heap::AllocateStringFromAscii(CStrVector("NaN"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("NaN"));
   }
   if (isinf(value)) {
     if (value < 0) {
-      return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
+      return isolate->heap()->AllocateStringFromAscii(CStrVector("-Infinity"));
     }
-    return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("Infinity"));
   }
   CONVERT_DOUBLE_CHECKED(f_number, args[1]);
   int f = FastD2I(f_number);
   RUNTIME_ASSERT(f >= -1 && f <= 20);
   char* str = DoubleToExponentialCString(value, f);
-  MaybeObject* result = Heap::AllocateStringFromAscii(CStrVector(str));
+  MaybeObject* res =
+      isolate->heap()->AllocateStringFromAscii(CStrVector(str));
   DeleteArray(str);
-  return result;
+  return res;
 }
 
 
-static MaybeObject* Runtime_NumberToPrecision(Arguments args) {
+static MaybeObject* Runtime_NumberToPrecision(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(value, args[0]);
   if (isnan(value)) {
-    return Heap::AllocateStringFromAscii(CStrVector("NaN"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("NaN"));
   }
   if (isinf(value)) {
     if (value < 0) {
-      return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
+      return isolate->heap()->AllocateStringFromAscii(CStrVector("-Infinity"));
     }
-    return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
+    return isolate->heap()->AllocateStringFromAscii(CStrVector("Infinity"));
   }
   CONVERT_DOUBLE_CHECKED(f_number, args[1]);
   int f = FastD2I(f_number);
   RUNTIME_ASSERT(f >= 1 && f <= 21);
   char* str = DoubleToPrecisionCString(value, f);
-  MaybeObject* result = Heap::AllocateStringFromAscii(CStrVector(str));
+  MaybeObject* res =
+      isolate->heap()->AllocateStringFromAscii(CStrVector(str));
   DeleteArray(str);
-  return result;
+  return res;
 }
 
 
@@ -3526,7 +3685,8 @@ static Handle<Object> GetCharAt(Handle<String> string, uint32_t index) {
 }
 
 
-MaybeObject* Runtime::GetElementOrCharAt(Handle<Object> object,
+MaybeObject* Runtime::GetElementOrCharAt(Isolate* isolate,
+                                         Handle<Object> object,
                                          uint32_t index) {
   // Handle [] indexing on Strings
   if (object->IsString()) {
@@ -3556,22 +3716,23 @@ MaybeObject* Runtime::GetElement(Handle<Object> object, uint32_t index) {
 }
 
 
-MaybeObject* Runtime::GetObjectProperty(Handle<Object> object,
+MaybeObject* Runtime::GetObjectProperty(Isolate* isolate,
+                                        Handle<Object> object,
                                         Handle<Object> key) {
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   if (object->IsUndefined() || object->IsNull()) {
     Handle<Object> args[2] = { key, object };
     Handle<Object> error =
-        Factory::NewTypeError("non_object_property_load",
-                              HandleVector(args, 2));
-    return Top::Throw(*error);
+        isolate->factory()->NewTypeError("non_object_property_load",
+                                         HandleVector(args, 2));
+    return isolate->Throw(*error);
   }
 
   // Check if the given key is an array index.
   uint32_t index;
   if (key->ToArrayIndex(&index)) {
-    return GetElementOrCharAt(object, index);
+    return GetElementOrCharAt(isolate, object, index);
   }
 
   // Convert the key to a string - possibly by calling back into JavaScript.
@@ -3589,7 +3750,7 @@ MaybeObject* Runtime::GetObjectProperty(Handle<Object> object,
   // Check if the name is trivially convertible to an index and get
   // the element if so.
   if (name->AsArrayIndex(&index)) {
-    return GetElementOrCharAt(object, index);
+    return GetElementOrCharAt(isolate, object, index);
   } else {
     PropertyAttributes attr;
     return object->GetProperty(*name, &attr);
@@ -3597,19 +3758,21 @@ MaybeObject* Runtime::GetObjectProperty(Handle<Object> object,
 }
 
 
-static MaybeObject* Runtime_GetProperty(Arguments args) {
+static MaybeObject* Runtime_GetProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   Handle<Object> object = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
 
-  return Runtime::GetObjectProperty(object, key);
+  return Runtime::GetObjectProperty(isolate, object, key);
 }
 
 
 // KeyedStringGetProperty is called from KeyedLoadIC::GenerateGeneric.
-static MaybeObject* Runtime_KeyedGetProperty(Arguments args) {
+static MaybeObject* Runtime_KeyedGetProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -3633,17 +3796,18 @@ static MaybeObject* Runtime_KeyedGetProperty(Arguments args) {
     if (receiver->HasFastProperties()) {
       // Attempt to use lookup cache.
       Map* receiver_map = receiver->map();
-      int offset = KeyedLookupCache::Lookup(receiver_map, key);
+      KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache();
+      int offset = keyed_lookup_cache->Lookup(receiver_map, key);
       if (offset != -1) {
         Object* value = receiver->FastPropertyAt(offset);
-        return value->IsTheHole() ? Heap::undefined_value() : value;
+        return value->IsTheHole() ? isolate->heap()->undefined_value() : value;
       }
       // Lookup cache miss.  Perform lookup and update the cache if appropriate.
       LookupResult result;
       receiver->LocalLookup(key, &result);
       if (result.IsProperty() && result.type() == FIELD) {
         int offset = result.GetFieldIndex();
-        KeyedLookupCache::Update(receiver_map, key, offset);
+        keyed_lookup_cache->Update(receiver_map, key, offset);
         return receiver->FastPropertyAt(offset);
       }
     } else {
@@ -3661,7 +3825,7 @@ static MaybeObject* Runtime_KeyedGetProperty(Arguments args) {
     }
   } else if (args[0]->IsString() && args[1]->IsSmi()) {
     // Fast case for string indexing using [] with a smi index.
-    HandleScope scope;
+    HandleScope scope(isolate);
     Handle<String> str = args.at<String>(0);
     int index = Smi::cast(args[1])->value();
     if (index >= 0 && index < str->length()) {
@@ -3671,7 +3835,8 @@ static MaybeObject* Runtime_KeyedGetProperty(Arguments args) {
   }
 
   // Fall back to GetObjectProperty.
-  return Runtime::GetObjectProperty(args.at<Object>(0),
+  return Runtime::GetObjectProperty(isolate,
+                                    args.at<Object>(0),
                                     args.at<Object>(1));
 }
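
`KeyedLookupCache::Lookup`/`Update` above stop being class statics and are reached through `isolate->keyed_lookup_cache()`, so the (map, name) to field-offset cache that speeds up keyed loads is private to one isolate and never sees entries written by another. A rough standalone sketch of such a cache (invented names, a hash map standing in for V8's fixed-size table):

#include <cstdint>
#include <string>
#include <unordered_map>

// Hedged sketch only: LookupCacheSketch is an invented name. It caches the
// field offset found for a (map identity, property name) pair; -1 means miss.
class LookupCacheSketch {
 public:
  int Lookup(const void* map, const std::string& name) const {
    auto it = entries_.find(Key(map, name));
    return it == entries_.end() ? -1 : it->second;
  }
  void Update(const void* map, const std::string& name, int offset) {
    entries_[Key(map, name)] = offset;
  }

 private:
  static std::string Key(const void* map, const std::string& name) {
    return std::to_string(reinterpret_cast<std::uintptr_t>(map)) + ":" + name;
  }
  // Owned by one isolate in this sketch; never shared across isolates.
  std::unordered_map<std::string, int> entries_;
};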
 
@@ -3681,9 +3846,11 @@ static MaybeObject* Runtime_KeyedGetProperty(Arguments args) {
 // Steps 9c & 12 - replace an existing data property with an accessor property.
 // Step 12 - update an existing accessor property with an accessor or generic
 //           descriptor.
-static MaybeObject* Runtime_DefineOrRedefineAccessorProperty(Arguments args) {
+static MaybeObject* Runtime_DefineOrRedefineAccessorProperty(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 5);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   CONVERT_CHECKED(String, name, args[1]);
   CONVERT_CHECKED(Smi, flag_setter, args[2]);
@@ -3718,9 +3885,11 @@ static MaybeObject* Runtime_DefineOrRedefineAccessorProperty(Arguments args) {
 // Steps 9b & 12 - replace an existing accessor property with a data property.
 // Step 12 - update an existing data property with a data or generic
 //           descriptor.
-static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
+static MaybeObject* Runtime_DefineOrRedefineDataProperty(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSObject, js_object, 0);
   CONVERT_ARG_CHECKED(String, name, 1);
   Handle<Object> obj_value = args.at<Object>(2);
@@ -3767,7 +3936,7 @@ static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
   if (result.IsProperty() &&
       (result.type() == CALLBACKS) &&
       result.GetCallbackObject()->IsAccessorInfo()) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   // Take special care when attributes are different and there is already
@@ -3792,23 +3961,28 @@ static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
                                                        attr);
   }
 
-  return Runtime::ForceSetObjectProperty(js_object, name, obj_value, attr);
+  return Runtime::ForceSetObjectProperty(isolate,
+                                         js_object,
+                                         name,
+                                         obj_value,
+                                         attr);
 }
 
 
-MaybeObject* Runtime::SetObjectProperty(Handle<Object> object,
+MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
+                                        Handle<Object> object,
                                         Handle<Object> key,
                                         Handle<Object> value,
                                         PropertyAttributes attr,
                                         StrictModeFlag strict_mode) {
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   if (object->IsUndefined() || object->IsNull()) {
     Handle<Object> args[2] = { key, object };
     Handle<Object> error =
-        Factory::NewTypeError("non_object_property_store",
-                              HandleVector(args, 2));
-    return Top::Throw(*error);
+        isolate->factory()->NewTypeError("non_object_property_store",
+                                         HandleVector(args, 2));
+    return isolate->Throw(*error);
   }
 
   // If the object isn't a JavaScript object, we ignore the store.
@@ -3862,11 +4036,12 @@ MaybeObject* Runtime::SetObjectProperty(Handle<Object> object,
 }
 
 
-MaybeObject* Runtime::ForceSetObjectProperty(Handle<JSObject> js_object,
+MaybeObject* Runtime::ForceSetObjectProperty(Isolate* isolate,
+                                             Handle<JSObject> js_object,
                                              Handle<Object> key,
                                              Handle<Object> value,
                                              PropertyAttributes attr) {
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   // Check if the given key is an array index.
   uint32_t index;
@@ -3911,9 +4086,10 @@ MaybeObject* Runtime::ForceSetObjectProperty(Handle<JSObject> js_object,
 }
 
 
-MaybeObject* Runtime::ForceDeleteObjectProperty(Handle<JSObject> js_object,
+MaybeObject* Runtime::ForceDeleteObjectProperty(Isolate* isolate,
+                                                Handle<JSObject> js_object,
                                                 Handle<Object> key) {
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   // Check if the given key is an array index.
   uint32_t index;
@@ -3925,7 +4101,7 @@ MaybeObject* Runtime::ForceDeleteObjectProperty(Handle<JSObject> js_object,
     // underlying string does nothing with the deletion, we can ignore
     // such deletions.
     if (js_object->IsStringObjectWithCharacterAt(index)) {
-      return Heap::true_value();
+      return isolate->heap()->true_value();
     }
 
     return js_object->DeleteElement(index, JSObject::FORCE_DELETION);
@@ -3947,7 +4123,8 @@ MaybeObject* Runtime::ForceDeleteObjectProperty(Handle<JSObject> js_object,
 }
 
 
-static MaybeObject* Runtime_SetProperty(Arguments args) {
+static MaybeObject* Runtime_SetProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   RUNTIME_ASSERT(args.length() == 4 || args.length() == 5);
 
@@ -3969,7 +4146,8 @@ static MaybeObject* Runtime_SetProperty(Arguments args) {
     strict_mode = static_cast<StrictModeFlag>(strict_unchecked);
   }
 
-  return Runtime::SetObjectProperty(object,
+  return Runtime::SetObjectProperty(isolate,
+                                    object,
                                     key,
                                     value,
                                     attributes,
@@ -3979,7 +4157,9 @@ static MaybeObject* Runtime_SetProperty(Arguments args) {
 
 // Set a local property, even if it is READ_ONLY.  If the property does not
 // exist, it will be added with attributes NONE.
-static MaybeObject* Runtime_IgnoreAttributesAndSetProperty(Arguments args) {
+static MaybeObject* Runtime_IgnoreAttributesAndSetProperty(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   RUNTIME_ASSERT(args.length() == 3 || args.length() == 4);
   CONVERT_CHECKED(JSObject, object, args[0]);
@@ -4000,7 +4180,8 @@ static MaybeObject* Runtime_IgnoreAttributesAndSetProperty(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_DeleteProperty(Arguments args) {
+static MaybeObject* Runtime_DeleteProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -4013,22 +4194,26 @@ static MaybeObject* Runtime_DeleteProperty(Arguments args) {
 }
 
 
-static Object* HasLocalPropertyImplementation(Handle<JSObject> object,
+static Object* HasLocalPropertyImplementation(Isolate* isolate,
+                                              Handle<JSObject> object,
                                               Handle<String> key) {
-  if (object->HasLocalProperty(*key)) return Heap::true_value();
+  if (object->HasLocalProperty(*key)) return isolate->heap()->true_value();
   // Handle hidden prototypes.  If there's a hidden prototype above this thing
   // then we have to check it for properties, because they are supposed to
   // look like they are on this object.
   Handle<Object> proto(object->GetPrototype());
   if (proto->IsJSObject() &&
       Handle<JSObject>::cast(proto)->map()->is_hidden_prototype()) {
-    return HasLocalPropertyImplementation(Handle<JSObject>::cast(proto), key);
+    return HasLocalPropertyImplementation(isolate,
+                                          Handle<JSObject>::cast(proto),
+                                          key);
   }
-  return Heap::false_value();
+  return isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_HasLocalProperty(Arguments args) {
+static MaybeObject* Runtime_HasLocalProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(String, key, args[1]);
@@ -4038,11 +4223,12 @@ static MaybeObject* Runtime_HasLocalProperty(Arguments args) {
   if (obj->IsJSObject()) {
     JSObject* object = JSObject::cast(obj);
     // Fast case - no interceptors.
-    if (object->HasRealNamedProperty(key)) return Heap::true_value();
+    if (object->HasRealNamedProperty(key)) return isolate->heap()->true_value();
     // Slow case.  Either it's not there or we have an interceptor.  We should
     // have handles for this kind of deal.
-    HandleScope scope;
-    return HasLocalPropertyImplementation(Handle<JSObject>(object),
+    HandleScope scope(isolate);
+    return HasLocalPropertyImplementation(isolate,
+                                          Handle<JSObject>(object),
                                           Handle<String>(key));
   } else if (obj->IsString()) {
     // Well, there is one exception:  Handle [] on strings.
@@ -4050,14 +4236,15 @@ static MaybeObject* Runtime_HasLocalProperty(Arguments args) {
     if (key->AsArrayIndex(&index)) {
       String* string = String::cast(obj);
       if (index < static_cast<uint32_t>(string->length()))
-        return Heap::true_value();
+        return isolate->heap()->true_value();
     }
   }
-  return Heap::false_value();
+  return isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_HasProperty(Arguments args) {
+static MaybeObject* Runtime_HasProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
 
@@ -4065,13 +4252,14 @@ static MaybeObject* Runtime_HasProperty(Arguments args) {
   if (args[0]->IsJSObject()) {
     JSObject* object = JSObject::cast(args[0]);
     CONVERT_CHECKED(String, key, args[1]);
-    if (object->HasProperty(key)) return Heap::true_value();
+    if (object->HasProperty(key)) return isolate->heap()->true_value();
   }
-  return Heap::false_value();
+  return isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_HasElement(Arguments args) {
+static MaybeObject* Runtime_HasElement(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation na;
   ASSERT(args.length() == 2);
 
@@ -4080,13 +4268,14 @@ static MaybeObject* Runtime_HasElement(Arguments args) {
     JSObject* object = JSObject::cast(args[0]);
     CONVERT_CHECKED(Smi, index_obj, args[1]);
     uint32_t index = index_obj->value();
-    if (object->HasElement(index)) return Heap::true_value();
+    if (object->HasElement(index)) return isolate->heap()->true_value();
   }
-  return Heap::false_value();
+  return isolate->heap()->false_value();
 }
 
 
-static MaybeObject* Runtime_IsPropertyEnumerable(Arguments args) {
+static MaybeObject* Runtime_IsPropertyEnumerable(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -4095,16 +4284,17 @@ static MaybeObject* Runtime_IsPropertyEnumerable(Arguments args) {
 
   uint32_t index;
   if (key->AsArrayIndex(&index)) {
-    return Heap::ToBoolean(object->HasElement(index));
+    return isolate->heap()->ToBoolean(object->HasElement(index));
   }
 
   PropertyAttributes att = object->GetLocalPropertyAttribute(key);
-  return Heap::ToBoolean(att != ABSENT && (att & DONT_ENUM) == 0);
+  return isolate->heap()->ToBoolean(att != ABSENT && (att & DONT_ENUM) == 0);
 }
 
 
-static MaybeObject* Runtime_GetPropertyNames(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetPropertyNames(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSObject, object, 0);
   return *GetKeysFor(object);
@@ -4116,14 +4306,15 @@ static MaybeObject* Runtime_GetPropertyNames(Arguments args) {
 // all enumerable properties of the object and its prototypes
 // have none, the map of the object. This is used to speed up
 // the check for deletions during a for-in.
-static MaybeObject* Runtime_GetPropertyNamesFast(Arguments args) {
+static MaybeObject* Runtime_GetPropertyNamesFast(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSObject, raw_object, args[0]);
 
   if (raw_object->IsSimpleEnum()) return raw_object->map();
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<JSObject> object(raw_object);
   Handle<FixedArray> content = GetKeysInFixedArrayFor(object,
                                                       INCLUDE_PROTOS);
@@ -4152,11 +4343,12 @@ static int LocalPrototypeChainLength(JSObject* obj) {
 
 // Return the names of the local named properties.
 // args[0]: object
-static MaybeObject* Runtime_GetLocalPropertyNames(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetLocalPropertyNames(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   if (!args[0]->IsJSObject()) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
 
@@ -4165,9 +4357,11 @@ static MaybeObject* Runtime_GetLocalPropertyNames(Arguments args) {
   if (obj->IsJSGlobalProxy()) {
     // Only collect names if access is permitted.
     if (obj->IsAccessCheckNeeded() &&
-        !Top::MayNamedAccess(*obj, Heap::undefined_value(), v8::ACCESS_KEYS)) {
-      Top::ReportFailedAccessCheck(*obj, v8::ACCESS_KEYS);
-      return *Factory::NewJSArray(0);
+        !isolate->MayNamedAccess(*obj,
+                                 isolate->heap()->undefined_value(),
+                                 v8::ACCESS_KEYS)) {
+      isolate->ReportFailedAccessCheck(*obj, v8::ACCESS_KEYS);
+      return *isolate->factory()->NewJSArray(0);
     }
     obj = Handle<JSObject>(JSObject::cast(obj->GetPrototype()));
   }
@@ -4182,11 +4376,11 @@ static MaybeObject* Runtime_GetLocalPropertyNames(Arguments args) {
   for (int i = 0; i < length; i++) {
     // Only collect names if access is permitted.
     if (jsproto->IsAccessCheckNeeded() &&
-        !Top::MayNamedAccess(*jsproto,
-                             Heap::undefined_value(),
-                             v8::ACCESS_KEYS)) {
-      Top::ReportFailedAccessCheck(*jsproto, v8::ACCESS_KEYS);
-      return *Factory::NewJSArray(0);
+        !isolate->MayNamedAccess(*jsproto,
+                                 isolate->heap()->undefined_value(),
+                                 v8::ACCESS_KEYS)) {
+      isolate->ReportFailedAccessCheck(*jsproto, v8::ACCESS_KEYS);
+      return *isolate->factory()->NewJSArray(0);
     }
     int n;
     n = jsproto->NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE));
@@ -4198,7 +4392,8 @@ static MaybeObject* Runtime_GetLocalPropertyNames(Arguments args) {
   }
 
   // Allocate an array with storage for all the property names.
-  Handle<FixedArray> names = Factory::NewFixedArray(total_property_count);
+  Handle<FixedArray> names =
+      isolate->factory()->NewFixedArray(total_property_count);
 
   // Get the property names.
   jsproto = obj;
@@ -4217,43 +4412,45 @@ static MaybeObject* Runtime_GetLocalPropertyNames(Arguments args) {
   // Filter out the name of the hidden properties object.
   if (proto_with_hidden_properties > 0) {
     Handle<FixedArray> old_names = names;
-    names = Factory::NewFixedArray(
+    names = isolate->factory()->NewFixedArray(
         names->length() - proto_with_hidden_properties);
     int dest_pos = 0;
     for (int i = 0; i < total_property_count; i++) {
       Object* name = old_names->get(i);
-      if (name == Heap::hidden_symbol()) {
+      if (name == isolate->heap()->hidden_symbol()) {
         continue;
       }
       names->set(dest_pos++, name);
     }
   }
 
-  return *Factory::NewJSArrayWithElements(names);
+  return *isolate->factory()->NewJSArrayWithElements(names);
 }
 
 
 // Return the names of the local indexed properties.
 // args[0]: object
-static MaybeObject* Runtime_GetLocalElementNames(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetLocalElementNames(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   if (!args[0]->IsJSObject()) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
 
   int n = obj->NumberOfLocalElements(static_cast<PropertyAttributes>(NONE));
-  Handle<FixedArray> names = Factory::NewFixedArray(n);
+  Handle<FixedArray> names = isolate->factory()->NewFixedArray(n);
   obj->GetLocalElementKeys(*names, static_cast<PropertyAttributes>(NONE));
-  return *Factory::NewJSArrayWithElements(names);
+  return *isolate->factory()->NewJSArrayWithElements(names);
 }
 
 
 // Return information on whether an object has a named or indexed interceptor.
 // args[0]: object
-static MaybeObject* Runtime_GetInterceptorInfo(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetInterceptorInfo(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   if (!args[0]->IsJSObject()) {
     return Smi::FromInt(0);
@@ -4270,8 +4467,10 @@ static MaybeObject* Runtime_GetInterceptorInfo(Arguments args) {
 
 // Return property names from named interceptor.
 // args[0]: object
-static MaybeObject* Runtime_GetNamedInterceptorPropertyNames(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetNamedInterceptorPropertyNames(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
 
@@ -4279,14 +4478,16 @@ static MaybeObject* Runtime_GetNamedInterceptorPropertyNames(Arguments args) {
     v8::Handle<v8::Array> result = GetKeysForNamedInterceptor(obj, obj);
     if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // Return element names from indexed interceptor.
 // args[0]: object
-static MaybeObject* Runtime_GetIndexedInterceptorElementNames(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetIndexedInterceptorElementNames(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
 
@@ -4294,28 +4495,29 @@ static MaybeObject* Runtime_GetIndexedInterceptorElementNames(Arguments args) {
     v8::Handle<v8::Array> result = GetKeysForIndexedInterceptor(obj, obj);
     if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_LocalKeys(Arguments args) {
+static MaybeObject* Runtime_LocalKeys(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT_EQ(args.length(), 1);
   CONVERT_CHECKED(JSObject, raw_object, args[0]);
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<JSObject> object(raw_object);
 
   if (object->IsJSGlobalProxy()) {
     // Do access checks before going to the global object.
     if (object->IsAccessCheckNeeded() &&
-        !Top::MayNamedAccess(*object, Heap::undefined_value(),
+        !isolate->MayNamedAccess(*object, isolate->heap()->undefined_value(),
                              v8::ACCESS_KEYS)) {
-      Top::ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
-      return *Factory::NewJSArray(0);
+      isolate->ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
+      return *isolate->factory()->NewJSArray(0);
     }
 
     Handle<Object> proto(object->GetPrototype());
     // If proxy is detached we simply return an empty array.
-    if (proto->IsNull()) return *Factory::NewJSArray(0);
+    if (proto->IsNull()) return *isolate->factory()->NewJSArray(0);
     object = Handle<JSObject>::cast(proto);
   }
 
@@ -4325,24 +4527,26 @@ static MaybeObject* Runtime_LocalKeys(Arguments args) {
   // property array and since the result is mutable we have to create
   // a fresh clone on each invocation.
   int length = contents->length();
-  Handle<FixedArray> copy = Factory::NewFixedArray(length);
+  Handle<FixedArray> copy = isolate->factory()->NewFixedArray(length);
   for (int i = 0; i < length; i++) {
     Object* entry = contents->get(i);
     if (entry->IsString()) {
       copy->set(i, entry);
     } else {
       ASSERT(entry->IsNumber());
-      HandleScope scope;
-      Handle<Object> entry_handle(entry);
-      Handle<Object> entry_str = Factory::NumberToString(entry_handle);
+      HandleScope scope(isolate);
+      Handle<Object> entry_handle(entry, isolate);
+      Handle<Object> entry_str =
+          isolate->factory()->NumberToString(entry_handle);
       copy->set(i, *entry_str);
     }
   }
-  return *Factory::NewJSArrayWithElements(copy);
+  return *isolate->factory()->NewJSArrayWithElements(copy);
 }
 
 
-static MaybeObject* Runtime_GetArgumentsProperty(Arguments args) {
+static MaybeObject* Runtime_GetArgumentsProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -4362,7 +4566,7 @@ static MaybeObject* Runtime_GetArgumentsProperty(Arguments args) {
   }
 
   // Convert the key to a string.
-  HandleScope scope;
+  HandleScope scope(isolate);
   bool exception = false;
   Handle<Object> converted =
       Execution::ToString(args.at<Object>(0), &exception);
@@ -4374,29 +4578,30 @@ static MaybeObject* Runtime_GetArgumentsProperty(Arguments args) {
     if (index < n) {
       return frame->GetParameter(index);
     } else {
-      return Top::initial_object_prototype()->GetElement(index);
+      return isolate->initial_object_prototype()->GetElement(index);
     }
   }
 
   // Handle special arguments properties.
-  if (key->Equals(Heap::length_symbol())) return Smi::FromInt(n);
-  if (key->Equals(Heap::callee_symbol())) {
+  if (key->Equals(isolate->heap()->length_symbol())) return Smi::FromInt(n);
+  if (key->Equals(isolate->heap()->callee_symbol())) {
     Object* function = frame->function();
     if (function->IsJSFunction() &&
         JSFunction::cast(function)->shared()->strict_mode()) {
-      return Top::Throw(*Factory::NewTypeError("strict_arguments_callee",
-                                               HandleVector<Object>(NULL, 0)));
+      return isolate->Throw(*isolate->factory()->NewTypeError(
+          "strict_arguments_callee", HandleVector<Object>(NULL, 0)));
     }
     return function;
   }
 
   // Lookup in the initial Object.prototype object.
-  return Top::initial_object_prototype()->GetProperty(*key);
+  return isolate->initial_object_prototype()->GetProperty(*key);
 }
 
 
-static MaybeObject* Runtime_ToFastProperties(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ToFastProperties(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   ASSERT(args.length() == 1);
   Handle<Object> object = args.at<Object>(0);
@@ -4411,8 +4616,9 @@ static MaybeObject* Runtime_ToFastProperties(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_ToSlowProperties(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ToSlowProperties(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   ASSERT(args.length() == 1);
   Handle<Object> object = args.at<Object>(0);
@@ -4424,7 +4630,8 @@ static MaybeObject* Runtime_ToSlowProperties(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_ToBool(Arguments args) {
+static MaybeObject* Runtime_ToBool(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -4434,37 +4641,40 @@ static MaybeObject* Runtime_ToBool(Arguments args) {
 
 // Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
 // Possible optimizations: put the type string into the oddballs.
-static MaybeObject* Runtime_Typeof(Arguments args) {
+static MaybeObject* Runtime_Typeof(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
 
   Object* obj = args[0];
-  if (obj->IsNumber()) return Heap::number_symbol();
+  if (obj->IsNumber()) return isolate->heap()->number_symbol();
   HeapObject* heap_obj = HeapObject::cast(obj);
 
   // typeof an undetectable object is 'undefined'
-  if (heap_obj->map()->is_undetectable()) return Heap::undefined_symbol();
+  if (heap_obj->map()->is_undetectable()) {
+    return isolate->heap()->undefined_symbol();
+  }
 
   InstanceType instance_type = heap_obj->map()->instance_type();
   if (instance_type < FIRST_NONSTRING_TYPE) {
-    return Heap::string_symbol();
+    return isolate->heap()->string_symbol();
   }
 
   switch (instance_type) {
     case ODDBALL_TYPE:
       if (heap_obj->IsTrue() || heap_obj->IsFalse()) {
-        return Heap::boolean_symbol();
+        return isolate->heap()->boolean_symbol();
       }
       if (heap_obj->IsNull()) {
-        return Heap::object_symbol();
+        return isolate->heap()->object_symbol();
       }
       ASSERT(heap_obj->IsUndefined());
-      return Heap::undefined_symbol();
+      return isolate->heap()->undefined_symbol();
     case JS_FUNCTION_TYPE: case JS_REGEXP_TYPE:
-      return Heap::function_symbol();
+      return isolate->heap()->function_symbol();
     default:
       // For any kind of object not handled above, the spec rule for
       // host objects gives that it is okay to return "object"
-      return Heap::object_symbol();
+      return isolate->heap()->object_symbol();
   }
 }
 
@@ -4491,7 +4701,8 @@ static int ParseDecimalInteger(const char*s, int from, int to) {
 }
 
 
-static MaybeObject* Runtime_StringToNumber(Arguments args) {
+static MaybeObject* Runtime_StringToNumber(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(String, subject, args[0]);
@@ -4507,21 +4718,21 @@ static MaybeObject* Runtime_StringToNumber(Arguments args) {
     int start_pos = (minus ? 1 : 0);
 
     if (start_pos == len) {
-      return Heap::nan_value();
+      return isolate->heap()->nan_value();
     } else if (data[start_pos] > '9') {
       // Fast check for a junk value. A valid string may start with
       // whitespace, a sign ('+' or '-'), the decimal point, a decimal digit
       // or the 'I' character ('Infinity'). All of those have character codes
       // not greater than '9' except 'I'.
       if (data[start_pos] != 'I') {
-        return Heap::nan_value();
+        return isolate->heap()->nan_value();
       }
     } else if (len - start_pos < 10 && AreDigits(data, start_pos, len)) {
       // The maximal/minimal smi has 10 digits. If the string has fewer digits
       // know it will fit into the smi-data type.
       int d = ParseDecimalInteger(data, start_pos, len);
       if (minus) {
-        if (d == 0) return Heap::minus_zero_value();
+        if (d == 0) return isolate->heap()->minus_zero_value();
         d = -d;
       } else if (!subject->HasHashCode() &&
                  len <= String::kMaxArrayIndexSize &&
@@ -4541,11 +4752,13 @@ static MaybeObject* Runtime_StringToNumber(Arguments args) {
   }
 
   // Slower case.
-  return Heap::NumberFromDouble(StringToDouble(subject, ALLOW_HEX));
+  return isolate->heap()->NumberFromDouble(StringToDouble(subject, ALLOW_HEX));
 }
 
 
-static MaybeObject* Runtime_StringFromCharCodeArray(Arguments args) {
+static MaybeObject* Runtime_StringFromCharCodeArray(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -4568,9 +4781,9 @@ static MaybeObject* Runtime_StringFromCharCodeArray(Arguments args) {
 
   MaybeObject* maybe_object = NULL;
   if (i == length) {  // The string is ASCII.
-    maybe_object = Heap::AllocateRawAsciiString(length);
+    maybe_object = isolate->heap()->AllocateRawAsciiString(length);
   } else {  // The string is not ASCII.
-    maybe_object = Heap::AllocateRawTwoByteString(length);
+    maybe_object = isolate->heap()->AllocateRawTwoByteString(length);
   }
 
   Object* object = NULL;
@@ -4625,7 +4838,8 @@ static bool IsNotEscaped(uint16_t character) {
 }
 
 
-static MaybeObject* Runtime_URIEscape(Arguments args) {
+static MaybeObject* Runtime_URIEscape(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   const char hex_chars[] = "0123456789ABCDEF";
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
@@ -4636,7 +4850,8 @@ static MaybeObject* Runtime_URIEscape(Arguments args) {
   int escaped_length = 0;
   int length = source->length();
   {
-    Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
+    Access<StringInputBuffer> buffer(
+        isolate->runtime_state()->string_input_buffer());
     buffer->Reset(source);
     while (buffer->has_more()) {
       uint16_t character = buffer->GetNext();
@@ -4650,7 +4865,7 @@ static MaybeObject* Runtime_URIEscape(Arguments args) {
       // We don't allow strings that are longer than a maximal length.
       ASSERT(String::kMaxLength < 0x7fffffff - 6);  // Cannot overflow.
       if (escaped_length > String::kMaxLength) {
-        Top::context()->mark_out_of_memory();
+        isolate->context()->mark_out_of_memory();
         return Failure::OutOfMemoryException();
       }
     }
@@ -4660,13 +4875,15 @@ static MaybeObject* Runtime_URIEscape(Arguments args) {
     return source;
   }
   Object* o;
-  { MaybeObject* maybe_o = Heap::AllocateRawAsciiString(escaped_length);
+  { MaybeObject* maybe_o =
+        isolate->heap()->AllocateRawAsciiString(escaped_length);
     if (!maybe_o->ToObject(&o)) return maybe_o;
   }
   String* destination = String::cast(o);
   int dest_position = 0;
 
-  Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
+  Access<StringInputBuffer> buffer(
+      isolate->runtime_state()->string_input_buffer());
   buffer->Rewind();
   while (buffer->has_more()) {
     uint16_t chr = buffer->GetNext();
@@ -4741,7 +4958,8 @@ static inline int Unescape(String* source,
 }
 
 
-static MaybeObject* Runtime_URIUnescape(Arguments args) {
+static MaybeObject* Runtime_URIUnescape(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(String, source, args[0]);
@@ -4765,9 +4983,10 @@ static MaybeObject* Runtime_URIUnescape(Arguments args) {
     return source;
 
   Object* o;
-  { MaybeObject* maybe_o = ascii ?
-                           Heap::AllocateRawAsciiString(unescaped_length) :
-                           Heap::AllocateRawTwoByteString(unescaped_length);
+  { MaybeObject* maybe_o =
+        ascii ?
+        isolate->heap()->AllocateRawAsciiString(unescaped_length) :
+        isolate->heap()->AllocateRawTwoByteString(unescaped_length);
     if (!maybe_o->ToObject(&o)) return maybe_o;
   }
   String* destination = String::cast(o);
@@ -4857,13 +5076,13 @@ MaybeObject* AllocateRawString(int length);
 
 template <>
 MaybeObject* AllocateRawString<SeqTwoByteString>(int length) {
-  return Heap::AllocateRawTwoByteString(length);
+  return HEAP->AllocateRawTwoByteString(length);
 }
 
 
 template <>
 MaybeObject* AllocateRawString<SeqAsciiString>(int length) {
-  return Heap::AllocateRawAsciiString(length);
+  return HEAP->AllocateRawAsciiString(length);
 }
 
 
@@ -4916,7 +5135,7 @@ static MaybeObject* SlowQuoteJsonString(Vector<const Char> characters) {
 template <typename Char, typename StringType, bool comma>
 static MaybeObject* QuoteJsonString(Vector<const Char> characters) {
   int length = characters.length();
-  Counters::quote_json_char_count.Increment(length);
+  COUNTERS->quote_json_char_count()->Increment(length);
   const int kSpaceForQuotes = 2 + (comma ? 1 : 0);
   int worst_case_length = length * kJsonQuoteWorstCaseBlowup + kSpaceForQuotes;
   if (worst_case_length > kMaxGuaranteedNewSpaceString) {
@@ -4928,7 +5147,7 @@ static MaybeObject* QuoteJsonString(Vector<const Char> characters) {
   if (!new_alloc->ToObject(&new_object)) {
     return new_alloc;
   }
-  if (!Heap::new_space()->Contains(new_object)) {
+  if (!HEAP->new_space()->Contains(new_object)) {
     // Even if our string is small enough to fit in new space we still have to
     // handle it being allocated in old space as may happen in the third
     // attempt.  See CALL_AND_RETRY in heap-inl.h and similar code in
@@ -4936,7 +5155,7 @@ static MaybeObject* QuoteJsonString(Vector<const Char> characters) {
     return SlowQuoteJsonString<Char, StringType, comma>(characters);
   }
   StringType* new_string = StringType::cast(new_object);
-  ASSERT(Heap::new_space()->Contains(new_string));
+  ASSERT(HEAP->new_space()->Contains(new_string));
 
   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
   Char* write_cursor = reinterpret_cast<Char*>(
@@ -4973,13 +5192,14 @@ static MaybeObject* QuoteJsonString(Vector<const Char> characters) {
   int final_length = static_cast<int>(
       write_cursor - reinterpret_cast<Char*>(
           new_string->address() + SeqAsciiString::kHeaderSize));
-  Heap::new_space()->ShrinkStringAtAllocationBoundary<StringType>(new_string,
+  HEAP->new_space()->ShrinkStringAtAllocationBoundary<StringType>(new_string,
                                                                   final_length);
   return new_string;
 }
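
`Counters::quote_json_char_count.Increment(...)` becoming `COUNTERS->quote_json_char_count()->Increment(...)`, and `Heap::` becoming `HEAP->`, apply the same rule in templated helpers that have no `isolate` parameter in scope: the upper-case macros presumably resolve the currently entered isolate and hand back its counters or heap. As a standalone analogy (invented names, a thread-local "current" pointer):

// Hedged sketch only: CounterSketch, ContextSketch and CURRENT_COUNTERS are
// invented names illustrating a macro that fetches per-instance state via a
// "current" pointer when no explicit context argument is available.
struct CounterSketch {
  long value = 0;
  void Increment(long n) { value += n; }
};

struct ContextSketch {
  CounterSketch quote_chars_;
  CounterSketch* quote_char_count() { return &quote_chars_; }
  static ContextSketch* Current() {
    static thread_local ContextSketch current;  // one per thread in this sketch
    return &current;
  }
};

#define CURRENT_COUNTERS (ContextSketch::Current())

// Usage inside a helper with no context parameter:
//   CURRENT_COUNTERS->quote_char_count()->Increment(length);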
 
 
-static MaybeObject* Runtime_QuoteJSONString(Arguments args) {
+static MaybeObject* Runtime_QuoteJSONString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   CONVERT_CHECKED(String, str, args[0]);
   if (!str->IsFlat()) {
@@ -4999,7 +5219,8 @@ static MaybeObject* Runtime_QuoteJSONString(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_QuoteJSONStringComma(Arguments args) {
+static MaybeObject* Runtime_QuoteJSONStringComma(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   CONVERT_CHECKED(String, str, args[0]);
   if (!str->IsFlat()) {
@@ -5018,8 +5239,8 @@ static MaybeObject* Runtime_QuoteJSONStringComma(Arguments args) {
   }
 }
 
-
-static MaybeObject* Runtime_StringParseInt(Arguments args) {
+static MaybeObject* Runtime_StringParseInt(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
 
   CONVERT_CHECKED(String, s, args[0]);
@@ -5029,11 +5250,12 @@ static MaybeObject* Runtime_StringParseInt(Arguments args) {
 
   RUNTIME_ASSERT(radix == 0 || (2 <= radix && radix <= 36));
   double value = StringToInt(s, radix);
-  return Heap::NumberFromDouble(value);
+  return isolate->heap()->NumberFromDouble(value);
 }
 
 
-static MaybeObject* Runtime_StringParseFloat(Arguments args) {
+static MaybeObject* Runtime_StringParseFloat(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   CONVERT_CHECKED(String, str, args[0]);
 
@@ -5041,16 +5263,13 @@ static MaybeObject* Runtime_StringParseFloat(Arguments args) {
   double value = StringToDouble(str, ALLOW_TRAILING_JUNK, OS::nan_value());
 
   // Create a number object from the value.
-  return Heap::NumberFromDouble(value);
+  return isolate->heap()->NumberFromDouble(value);
 }
 
 
-static unibrow::Mapping<unibrow::ToUppercase, 128> to_upper_mapping;
-static unibrow::Mapping<unibrow::ToLowercase, 128> to_lower_mapping;
-
-
 template <class Converter>
 MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
+    Isolate* isolate,
     String* s,
     int length,
     int input_string_length,
@@ -5068,8 +5287,8 @@ MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
   // dependent upper/lower conversions.
   Object* o;
   { MaybeObject* maybe_o = s->IsAsciiRepresentation()
-                   ? Heap::AllocateRawAsciiString(length)
-                   : Heap::AllocateRawTwoByteString(length);
+        ? isolate->heap()->AllocateRawAsciiString(length)
+        : isolate->heap()->AllocateRawTwoByteString(length);
     if (!maybe_o->ToObject(&o)) return maybe_o;
   }
   String* result = String::cast(o);
@@ -5077,7 +5296,8 @@ MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
 
   // Convert all characters to upper case, assuming that they will fit
   // in the buffer
-  Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
+  Access<StringInputBuffer> buffer(
+      isolate->runtime_state()->string_input_buffer());
   buffer->Reset(s);
   unibrow::uchar chars[Converter::kMaxWidth];
   // We can assume that the string is not empty
@@ -5124,7 +5344,7 @@ MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
         if (char_length == 0) char_length = 1;
         current_length += char_length;
         if (current_length > Smi::kMaxValue) {
-          Top::context()->mark_out_of_memory();
+          isolate->context()->mark_out_of_memory();
           return Failure::OutOfMemoryException();
         }
       }
@@ -5282,6 +5502,7 @@ struct ToUpperTraits {
 template <typename ConvertTraits>
 MUST_USE_RESULT static MaybeObject* ConvertCase(
     Arguments args,
+    Isolate* isolate,
     unibrow::Mapping<typename ConvertTraits::UnibrowConverter, 128>* mapping) {
   NoHandleAllocation ha;
   CONVERT_CHECKED(String, s, args[0]);
@@ -5299,7 +5520,7 @@ MUST_USE_RESULT static MaybeObject* ConvertCase(
   // dependent upper/lower conversions.
   if (s->IsSeqAsciiString()) {
     Object* o;
-    { MaybeObject* maybe_o = Heap::AllocateRawAsciiString(length);
+    { MaybeObject* maybe_o = isolate->heap()->AllocateRawAsciiString(length);
       if (!maybe_o->ToObject(&o)) return maybe_o;
     }
     SeqAsciiString* result = SeqAsciiString::cast(o);
@@ -5309,13 +5530,15 @@ MUST_USE_RESULT static MaybeObject* ConvertCase(
   }
 
   Object* answer;
-  { MaybeObject* maybe_answer = ConvertCaseHelper(s, length, length, mapping);
+  { MaybeObject* maybe_answer =
+        ConvertCaseHelper(isolate, s, length, length, mapping);
     if (!maybe_answer->ToObject(&answer)) return maybe_answer;
   }
   if (answer->IsSmi()) {
     // Retry with correct length.
     { MaybeObject* maybe_answer =
-          ConvertCaseHelper(s, Smi::cast(answer)->value(), length, mapping);
+          ConvertCaseHelper(isolate,
+                            s, Smi::cast(answer)->value(), length, mapping);
       if (!maybe_answer->ToObject(&answer)) return maybe_answer;
     }
   }
@@ -5323,13 +5546,17 @@ MUST_USE_RESULT static MaybeObject* ConvertCase(
 }
 
 
-static MaybeObject* Runtime_StringToLowerCase(Arguments args) {
-  return ConvertCase<ToLowerTraits>(args, &to_lower_mapping);
+static MaybeObject* Runtime_StringToLowerCase(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  return ConvertCase<ToLowerTraits>(
+      args, isolate, isolate->runtime_state()->to_lower_mapping());
 }
 
 
-static MaybeObject* Runtime_StringToUpperCase(Arguments args) {
-  return ConvertCase<ToUpperTraits>(args, &to_upper_mapping);
+static MaybeObject* Runtime_StringToUpperCase(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  return ConvertCase<ToUpperTraits>(
+      args, isolate, isolate->runtime_state()->to_upper_mapping());
 }
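
The file-scope `to_upper_mapping`/`to_lower_mapping` statics deleted a few hunks above (and the `runtime_string_input_buffer` used by the escape and case-conversion helpers) reappear as members of `isolate->runtime_state()`: mutable scratch tables and buffers that used to be shared by the whole process now belong to one isolate. A minimal sketch of that move (invented names, a plain string standing in for the scratch buffer):

#include <cstring>
#include <string>

// Hedged sketch only: RuntimeStateSketch and IsolateStateSketch are invented
// names standing in for per-isolate runtime state replacing file-level statics.
struct RuntimeStateSketch {
  std::string buffer;  // reusable per-isolate scratch buffer
};

struct IsolateStateSketch {
  RuntimeStateSketch state_;
  RuntimeStateSketch* runtime_state() { return &state_; }
};

// Callers receive the isolate and reuse its scratch state instead of a static.
std::string ToUpperCopy(IsolateStateSketch* isolate, const char* chars) {
  std::string& buf = isolate->runtime_state()->buffer;
  buf.assign(chars, chars + std::strlen(chars));
  for (char& c : buf) {
    if (c >= 'a' && c <= 'z') c = static_cast<char>(c - ('a' - 'A'));
  }
  return buf;  // returns a copy; the buffer stays with the isolate for reuse
}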
 
 
@@ -5338,7 +5565,8 @@ static inline bool IsTrimWhiteSpace(unibrow::uchar c) {
 }
 
 
-static MaybeObject* Runtime_StringTrim(Arguments args) {
+static MaybeObject* Runtime_StringTrim(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -5367,14 +5595,15 @@ static MaybeObject* Runtime_StringTrim(Arguments args) {
 
 
 template <typename SubjectChar, typename PatternChar>
-void FindStringIndices(Vector<const SubjectChar> subject,
+void FindStringIndices(Isolate* isolate,
+                       Vector<const SubjectChar> subject,
                        Vector<const PatternChar> pattern,
                        ZoneList<int>* indices,
                        unsigned int limit) {
   ASSERT(limit > 0);
   // Collect indices of pattern in subject, and the end-of-string index.
   // Stop after finding at most limit values.
-  StringSearch<PatternChar, SubjectChar> search(pattern);
+  StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
   int pattern_length = pattern.length();
   int index = 0;
   while (limit > 0) {
@@ -5387,9 +5616,10 @@ void FindStringIndices(Vector<const SubjectChar> subject,
 }
 
 
-static MaybeObject* Runtime_StringSplit(Arguments args) {
+static MaybeObject* Runtime_StringSplit(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
-  HandleScope handle_scope;
+  HandleScope handle_scope(isolate);
   CONVERT_ARG_CHECKED(String, subject, 0);
   CONVERT_ARG_CHECKED(String, pattern, 1);
   CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[2]);
@@ -5419,12 +5649,14 @@ static MaybeObject* Runtime_StringSplit(Arguments args) {
     if (subject->IsAsciiRepresentation()) {
       Vector<const char> subject_vector = subject->ToAsciiVector();
       if (pattern->IsAsciiRepresentation()) {
-        FindStringIndices(subject_vector,
+        FindStringIndices(isolate,
+                          subject_vector,
                           pattern->ToAsciiVector(),
                           &indices,
                           limit);
       } else {
-        FindStringIndices(subject_vector,
+        FindStringIndices(isolate,
+                          subject_vector,
                           pattern->ToUC16Vector(),
                           &indices,
                           limit);
@@ -5432,12 +5664,14 @@ static MaybeObject* Runtime_StringSplit(Arguments args) {
     } else {
       Vector<const uc16> subject_vector = subject->ToUC16Vector();
       if (pattern->IsAsciiRepresentation()) {
-        FindStringIndices(subject_vector,
+        FindStringIndices(isolate,
+                          subject_vector,
                           pattern->ToAsciiVector(),
                           &indices,
                           limit);
       } else {
-        FindStringIndices(subject_vector,
+        FindStringIndices(isolate,
+                          subject_vector,
                           pattern->ToUC16Vector(),
                           &indices,
                           limit);
@@ -5454,7 +5688,7 @@ static MaybeObject* Runtime_StringSplit(Arguments args) {
   // Create JSArray of substrings separated by separator.
   int part_count = indices.length();
 
-  Handle<JSArray> result = Factory::NewJSArray(part_count);
+  Handle<JSArray> result = isolate->factory()->NewJSArray(part_count);
   result->set_length(Smi::FromInt(part_count));
 
   ASSERT(result->HasFastElements());
@@ -5470,7 +5704,7 @@ static MaybeObject* Runtime_StringSplit(Arguments args) {
     HandleScope local_loop_handle;
     int part_end = indices.at(i);
     Handle<String> substring =
-        Factory::NewSubString(subject, part_start, part_end);
+        isolate->factory()->NewSubString(subject, part_start, part_end);
     elements->set(i, *substring);
     part_start = part_end + pattern_length;
   }
@@ -5483,17 +5717,18 @@ static MaybeObject* Runtime_StringSplit(Arguments args) {
 // one-char strings in the cache. Gives up on the first char that is
 // not in the cache and fills the remainder with smi zeros. Returns
 // the length of the successfully copied prefix.
-static int CopyCachedAsciiCharsToArray(const char* chars,
+static int CopyCachedAsciiCharsToArray(Heap* heap,
+                                       const char* chars,
                                        FixedArray* elements,
                                        int length) {
   AssertNoAllocation nogc;
-  FixedArray* ascii_cache = Heap::single_character_string_cache();
-  Object* undefined = Heap::undefined_value();
+  FixedArray* ascii_cache = heap->single_character_string_cache();
+  Object* undefined = heap->undefined_value();
   int i;
   for (i = 0; i < length; ++i) {
     Object* value = ascii_cache->get(chars[i]);
     if (value == undefined) break;
-    ASSERT(!Heap::InNewSpace(value));
+    ASSERT(!heap->InNewSpace(value));
     elements->set(i, value, SKIP_WRITE_BARRIER);
   }
   if (i < length) {
@@ -5513,8 +5748,9 @@ static int CopyCachedAsciiCharsToArray(const char* chars,
 
 // Converts a String to JSArray.
 // For example, "foo" => ["f", "o", "o"].
-static MaybeObject* Runtime_StringToArray(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_StringToArray(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CONVERT_ARG_CHECKED(String, s, 0);
   CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]);
@@ -5525,15 +5761,17 @@ static MaybeObject* Runtime_StringToArray(Arguments args) {
   Handle<FixedArray> elements;
   if (s->IsFlat() && s->IsAsciiRepresentation()) {
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateUninitializedFixedArray(length);
+    { MaybeObject* maybe_obj =
+          isolate->heap()->AllocateUninitializedFixedArray(length);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
-    elements = Handle<FixedArray>(FixedArray::cast(obj));
+    elements = Handle<FixedArray>(FixedArray::cast(obj), isolate);
 
     Vector<const char> chars = s->ToAsciiVector();
     // Note, this will initialize all elements (not only the prefix)
     // to prevent GC from seeing partially initialized array.
-    int num_copied_from_cache = CopyCachedAsciiCharsToArray(chars.start(),
+    int num_copied_from_cache = CopyCachedAsciiCharsToArray(isolate->heap(),
+                                                            chars.start(),
                                                             *elements,
                                                             length);
 
@@ -5542,7 +5780,7 @@ static MaybeObject* Runtime_StringToArray(Arguments args) {
       elements->set(i, *str);
     }
   } else {
-    elements = Factory::NewFixedArray(length);
+    elements = isolate->factory()->NewFixedArray(length);
     for (int i = 0; i < length; ++i) {
       Handle<Object> str = LookupSingleCharacterStringFromCode(s->Get(i));
       elements->set(i, *str);
@@ -5555,11 +5793,12 @@ static MaybeObject* Runtime_StringToArray(Arguments args) {
   }
 #endif
 
-  return *Factory::NewJSArrayWithElements(elements);
+  return *isolate->factory()->NewJSArrayWithElements(elements);
 }
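
`CopyCachedAsciiCharsToArray` above now takes the heap explicitly, but its job is unchanged: copy single-character strings out of the cache until the first miss, leave the remainder zero-filled, and report how long the copied prefix is so the caller can finish the tail with the slow lookup. The same shape in a standalone sketch (invented names, plain ints in place of string objects):

#include <cstddef>
#include <vector>

// Hedged sketch only: copy precomputed values from a cache until the first
// miss, pre-fill the rest with zeros, and return the copied prefix length.
// A zero cache entry means "not cached".
int CopyCachedPrefix(const std::vector<int>& cache,
                     const std::vector<unsigned char>& chars,
                     std::vector<int>* out) {
  out->assign(chars.size(), 0);   // never leave the output partially initialized
  std::size_t i = 0;
  for (; i < chars.size(); ++i) {
    int cached = cache[chars[i]];
    if (cached == 0) break;       // first miss: stop the fast path
    (*out)[i] = cached;
  }
  return static_cast<int>(i);     // length of the successfully copied prefix
}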
 
 
-static MaybeObject* Runtime_NewStringWrapper(Arguments args) {
+static MaybeObject* Runtime_NewStringWrapper(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(String, value, args[0]);
@@ -5567,36 +5806,40 @@ static MaybeObject* Runtime_NewStringWrapper(Arguments args) {
 }
 
 
-bool Runtime::IsUpperCaseChar(uint16_t ch) {
+bool Runtime::IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch) {
   unibrow::uchar chars[unibrow::ToUppercase::kMaxWidth];
-  int char_length = to_upper_mapping.get(ch, 0, chars);
+  int char_length = runtime_state->to_upper_mapping()->get(ch, 0, chars);
   return char_length == 0;
 }
 
 
-static MaybeObject* Runtime_NumberToString(Arguments args) {
+static MaybeObject* Runtime_NumberToString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   Object* number = args[0];
   RUNTIME_ASSERT(number->IsNumber());
 
-  return Heap::NumberToString(number);
+  return isolate->heap()->NumberToString(number);
 }
 
 
-static MaybeObject* Runtime_NumberToStringSkipCache(Arguments args) {
+static MaybeObject* Runtime_NumberToStringSkipCache(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   Object* number = args[0];
   RUNTIME_ASSERT(number->IsNumber());
 
-  return Heap::NumberToString(number, false);
+  return isolate->heap()->NumberToString(number, false);
 }
 
 
-static MaybeObject* Runtime_NumberToInteger(Arguments args) {
+static MaybeObject* Runtime_NumberToInteger(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -5606,11 +5849,13 @@ static MaybeObject* Runtime_NumberToInteger(Arguments args) {
   if (number > 0 && number <= Smi::kMaxValue) {
     return Smi::FromInt(static_cast<int>(number));
   }
-  return Heap::NumberFromDouble(DoubleToInteger(number));
+  return isolate->heap()->NumberFromDouble(DoubleToInteger(number));
 }
 
 
-static MaybeObject* Runtime_NumberToIntegerMapMinusZero(Arguments args) {
+static MaybeObject* Runtime_NumberToIntegerMapMinusZero(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -5625,20 +5870,22 @@ static MaybeObject* Runtime_NumberToIntegerMapMinusZero(Arguments args) {
   // Map both -0 and +0 to +0.
   if (double_value == 0) double_value = 0;
 
-  return Heap::NumberFromDouble(double_value);
+  return isolate->heap()->NumberFromDouble(double_value);
 }
 
 
-static MaybeObject* Runtime_NumberToJSUint32(Arguments args) {
+static MaybeObject* Runtime_NumberToJSUint32(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_NUMBER_CHECKED(int32_t, number, Uint32, args[0]);
-  return Heap::NumberFromUint32(number);
+  return isolate->heap()->NumberFromUint32(number);
 }
 
 
-static MaybeObject* Runtime_NumberToJSInt32(Arguments args) {
+static MaybeObject* Runtime_NumberToJSInt32(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -5648,13 +5895,14 @@ static MaybeObject* Runtime_NumberToJSInt32(Arguments args) {
   if (number > 0 && number <= Smi::kMaxValue) {
     return Smi::FromInt(static_cast<int>(number));
   }
-  return Heap::NumberFromInt32(DoubleToInt32(number));
+  return isolate->heap()->NumberFromInt32(DoubleToInt32(number));
 }
 
 
 // Converts a Number to a Smi, if possible. Returns NaN if the number is not
 // a small integer.
-static MaybeObject* Runtime_NumberToSmi(Arguments args) {
+static MaybeObject* Runtime_NumberToSmi(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -5669,75 +5917,83 @@ static MaybeObject* Runtime_NumberToSmi(Arguments args) {
       return Smi::FromInt(int_value);
     }
   }
-  return Heap::nan_value();
+  return isolate->heap()->nan_value();
 }
 
 
-static MaybeObject* Runtime_AllocateHeapNumber(Arguments args) {
+static MaybeObject* Runtime_AllocateHeapNumber(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 0);
-  return Heap::AllocateHeapNumber(0);
+  return isolate->heap()->AllocateHeapNumber(0);
 }
 
 
-static MaybeObject* Runtime_NumberAdd(Arguments args) {
+static MaybeObject* Runtime_NumberAdd(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  return Heap::NumberFromDouble(x + y);
+  return isolate->heap()->NumberFromDouble(x + y);
 }
 
 
-static MaybeObject* Runtime_NumberSub(Arguments args) {
+static MaybeObject* Runtime_NumberSub(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  return Heap::NumberFromDouble(x - y);
+  return isolate->heap()->NumberFromDouble(x - y);
 }
 
 
-static MaybeObject* Runtime_NumberMul(Arguments args) {
+static MaybeObject* Runtime_NumberMul(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  return Heap::NumberFromDouble(x * y);
+  return isolate->heap()->NumberFromDouble(x * y);
 }
 
 
-static MaybeObject* Runtime_NumberUnaryMinus(Arguments args) {
+static MaybeObject* Runtime_NumberUnaryMinus(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return Heap::NumberFromDouble(-x);
+  return isolate->heap()->NumberFromDouble(-x);
 }
 
 
-static MaybeObject* Runtime_NumberAlloc(Arguments args) {
+static MaybeObject* Runtime_NumberAlloc(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 0);
 
-  return Heap::NumberFromDouble(9876543210.0);
+  return isolate->heap()->NumberFromDouble(9876543210.0);
 }
 
 
-static MaybeObject* Runtime_NumberDiv(Arguments args) {
+static MaybeObject* Runtime_NumberDiv(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  return Heap::NumberFromDouble(x / y);
+  return isolate->heap()->NumberFromDouble(x / y);
 }
 
 
-static MaybeObject* Runtime_NumberMod(Arguments args) {
+static MaybeObject* Runtime_NumberMod(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -5746,17 +6002,18 @@ static MaybeObject* Runtime_NumberMod(Arguments args) {
 
   x = modulo(x, y);
   // NumberFromDouble may return a Smi instead of a Number object
-  return Heap::NumberFromDouble(x);
+  return isolate->heap()->NumberFromDouble(x);
 }
 
 
-static MaybeObject* Runtime_StringAdd(Arguments args) {
+static MaybeObject* Runtime_StringAdd(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(String, str1, args[0]);
   CONVERT_CHECKED(String, str2, args[1]);
-  Counters::string_add_runtime.Increment();
-  return Heap::AllocateConsString(str1, str2);
+  isolate->counters()->string_add_runtime()->Increment();
+  return isolate->heap()->AllocateConsString(str1, str2);
 }
 
 
@@ -5799,12 +6056,13 @@ static inline void StringBuilderConcatHelper(String* special,
 }
 
 
-static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
+static MaybeObject* Runtime_StringBuilderConcat(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
   CONVERT_CHECKED(JSArray, array, args[0]);
   if (!args[1]->IsSmi()) {
-    Top::context()->mark_out_of_memory();
+    isolate->context()->mark_out_of_memory();
     return Failure::OutOfMemoryException();
   }
   int array_length = Smi::cast(args[1])->value();
@@ -5815,7 +6073,7 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
 
   int special_length = special->length();
   if (!array->HasFastElements()) {
-    return Top::Throw(Heap::illegal_argument_symbol());
+    return isolate->Throw(isolate->heap()->illegal_argument_symbol());
   }
   FixedArray* fixed_array = FixedArray::cast(array->elements());
   if (fixed_array->length() < array_length) {
@@ -5823,7 +6081,7 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
   }
 
   if (array_length == 0) {
-    return Heap::empty_string();
+    return isolate->heap()->empty_string();
   } else if (array_length == 1) {
     Object* first = fixed_array->get(0);
     if (first->IsString()) return first;
@@ -5849,21 +6107,21 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
         // Get the position and check that it is a positive smi.
         i++;
         if (i >= array_length) {
-          return Top::Throw(Heap::illegal_argument_symbol());
+          return isolate->Throw(isolate->heap()->illegal_argument_symbol());
         }
         Object* next_smi = fixed_array->get(i);
         if (!next_smi->IsSmi()) {
-          return Top::Throw(Heap::illegal_argument_symbol());
+          return isolate->Throw(isolate->heap()->illegal_argument_symbol());
         }
         pos = Smi::cast(next_smi)->value();
         if (pos < 0) {
-          return Top::Throw(Heap::illegal_argument_symbol());
+          return isolate->Throw(isolate->heap()->illegal_argument_symbol());
         }
       }
       ASSERT(pos >= 0);
       ASSERT(len >= 0);
       if (pos > special_length || len > special_length - pos) {
-        return Top::Throw(Heap::illegal_argument_symbol());
+        return isolate->Throw(isolate->heap()->illegal_argument_symbol());
       }
       increment = len;
     } else if (elt->IsString()) {
@@ -5874,10 +6132,10 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
         ascii = false;
       }
     } else {
-      return Top::Throw(Heap::illegal_argument_symbol());
+      return isolate->Throw(isolate->heap()->illegal_argument_symbol());
     }
     if (increment > String::kMaxLength - position) {
-      Top::context()->mark_out_of_memory();
+      isolate->context()->mark_out_of_memory();
       return Failure::OutOfMemoryException();
     }
     position += increment;
@@ -5887,7 +6145,8 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
   Object* object;
 
   if (ascii) {
-    { MaybeObject* maybe_object = Heap::AllocateRawAsciiString(length);
+    { MaybeObject* maybe_object =
+          isolate->heap()->AllocateRawAsciiString(length);
       if (!maybe_object->ToObject(&object)) return maybe_object;
     }
     SeqAsciiString* answer = SeqAsciiString::cast(object);
@@ -5897,7 +6156,8 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
                               array_length);
     return answer;
   } else {
-    { MaybeObject* maybe_object = Heap::AllocateRawTwoByteString(length);
+    { MaybeObject* maybe_object =
+          isolate->heap()->AllocateRawTwoByteString(length);
       if (!maybe_object->ToObject(&object)) return maybe_object;
     }
     SeqTwoByteString* answer = SeqTwoByteString::cast(object);
@@ -5910,19 +6170,20 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
+static MaybeObject* Runtime_StringBuilderJoin(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
   CONVERT_CHECKED(JSArray, array, args[0]);
   if (!args[1]->IsSmi()) {
-    Top::context()->mark_out_of_memory();
+    isolate->context()->mark_out_of_memory();
     return Failure::OutOfMemoryException();
   }
   int array_length = Smi::cast(args[1])->value();
   CONVERT_CHECKED(String, separator, args[2]);
 
   if (!array->HasFastElements()) {
-    return Top::Throw(Heap::illegal_argument_symbol());
+    return isolate->Throw(isolate->heap()->illegal_argument_symbol());
   }
   FixedArray* fixed_array = FixedArray::cast(array->elements());
   if (fixed_array->length() < array_length) {
@@ -5930,7 +6191,7 @@ static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
   }
 
   if (array_length == 0) {
-    return Heap::empty_string();
+    return isolate->heap()->empty_string();
   } else if (array_length == 1) {
     Object* first = fixed_array->get(0);
     if (first->IsString()) return first;
@@ -5940,7 +6201,7 @@ static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
   int max_nof_separators =
       (String::kMaxLength + separator_length - 1) / separator_length;
   if (max_nof_separators < (array_length - 1)) {
-      Top::context()->mark_out_of_memory();
+      isolate->context()->mark_out_of_memory();
       return Failure::OutOfMemoryException();
   }
   int length = (array_length - 1) * separator_length;
@@ -5948,19 +6209,20 @@ static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
     Object* element_obj = fixed_array->get(i);
     if (!element_obj->IsString()) {
       // TODO(1161): handle this case.
-      return Top::Throw(Heap::illegal_argument_symbol());
+      return isolate->Throw(isolate->heap()->illegal_argument_symbol());
     }
     String* element = String::cast(element_obj);
     int increment = element->length();
     if (increment > String::kMaxLength - length) {
-      Top::context()->mark_out_of_memory();
+      isolate->context()->mark_out_of_memory();
       return Failure::OutOfMemoryException();
     }
     length += increment;
   }
 
   Object* object;
-  { MaybeObject* maybe_object = Heap::AllocateRawTwoByteString(length);
+  { MaybeObject* maybe_object =
+        isolate->heap()->AllocateRawTwoByteString(length);
     if (!maybe_object->ToObject(&object)) return maybe_object;
   }
   SeqTwoByteString* answer = SeqTwoByteString::cast(object);
@@ -5993,76 +6255,84 @@ static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_NumberOr(Arguments args) {
+static MaybeObject* Runtime_NumberOr(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromInt32(x | y);
+  return isolate->heap()->NumberFromInt32(x | y);
 }
 
 
-static MaybeObject* Runtime_NumberAnd(Arguments args) {
+static MaybeObject* Runtime_NumberAnd(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromInt32(x & y);
+  return isolate->heap()->NumberFromInt32(x & y);
 }
 
 
-static MaybeObject* Runtime_NumberXor(Arguments args) {
+static MaybeObject* Runtime_NumberXor(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromInt32(x ^ y);
+  return isolate->heap()->NumberFromInt32(x ^ y);
 }
 
 
-static MaybeObject* Runtime_NumberNot(Arguments args) {
+static MaybeObject* Runtime_NumberNot(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
-  return Heap::NumberFromInt32(~x);
+  return isolate->heap()->NumberFromInt32(~x);
 }
 
 
-static MaybeObject* Runtime_NumberShl(Arguments args) {
+static MaybeObject* Runtime_NumberShl(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromInt32(x << (y & 0x1f));
+  return isolate->heap()->NumberFromInt32(x << (y & 0x1f));
 }
 
 
-static MaybeObject* Runtime_NumberShr(Arguments args) {
+static MaybeObject* Runtime_NumberShr(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(uint32_t, x, Uint32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromUint32(x >> (y & 0x1f));
+  return isolate->heap()->NumberFromUint32(x >> (y & 0x1f));
 }
 
 
-static MaybeObject* Runtime_NumberSar(Arguments args) {
+static MaybeObject* Runtime_NumberSar(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
   CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
-  return Heap::NumberFromInt32(ArithmeticShiftRight(x, y & 0x1f));
+  return isolate->heap()->NumberFromInt32(ArithmeticShiftRight(x, y & 0x1f));
 }
 
 
-static MaybeObject* Runtime_NumberEquals(Arguments args) {
+static MaybeObject* Runtime_NumberEquals(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -6081,7 +6351,8 @@ static MaybeObject* Runtime_NumberEquals(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_StringEquals(Arguments args) {
+static MaybeObject* Runtime_StringEquals(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -6099,7 +6370,8 @@ static MaybeObject* Runtime_StringEquals(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_NumberCompare(Arguments args) {
+static MaybeObject* Runtime_NumberCompare(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -6114,15 +6386,12 @@ static MaybeObject* Runtime_NumberCompare(Arguments args) {
 
 // Compare two Smis as if they were converted to strings and then
 // compared lexicographically.
-static MaybeObject* Runtime_SmiLexicographicCompare(Arguments args) {
+static MaybeObject* Runtime_SmiLexicographicCompare(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
-  // Arrays for the individual characters of the two Smis.  Smis are
-  // 31 bit integers and 10 decimal digits are therefore enough.
-  static int x_elms[10];
-  static int y_elms[10];
-
   // Extract the integer values from the Smis.
   CONVERT_CHECKED(Smi, x, args[0]);
   CONVERT_CHECKED(Smi, y, args[1]);
@@ -6146,6 +6415,13 @@ static MaybeObject* Runtime_SmiLexicographicCompare(Arguments args) {
     y_value = -y_value;
   }
 
+  // Arrays for the individual characters of the two Smis.  Smis are
+  // 31 bit integers and 10 decimal digits are therefore enough.
+  // TODO(isolates): maybe we should simply allocate 20 bytes on the stack.
+  int* x_elms = isolate->runtime_state()->smi_lexicographic_compare_x_elms();
+  int* y_elms = isolate->runtime_state()->smi_lexicographic_compare_y_elms();
+
   // Convert the integers to arrays of their decimal digits.
   int x_index = 0;
   int y_index = 0;
@@ -6172,9 +6448,11 @@ static MaybeObject* Runtime_SmiLexicographicCompare(Arguments args) {
 }
 
 
-static Object* StringInputBufferCompare(String* x, String* y) {
-  static StringInputBuffer bufx;
-  static StringInputBuffer bufy;
+static Object* StringInputBufferCompare(RuntimeState* state,
+                                        String* x,
+                                        String* y) {
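+  // The scratch buffers are now kept per-isolate in RuntimeState.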
+  StringInputBuffer& bufx = *state->string_input_buffer_compare_bufx();
+  StringInputBuffer& bufy = *state->string_input_buffer_compare_bufy();
   bufx.Reset(x);
   bufy.Reset(y);
   while (bufx.has_more() && bufy.has_more()) {
@@ -6227,19 +6505,21 @@ static Object* FlatStringCompare(String* x, String* y) {
   } else {
     result = (r < 0) ? Smi::FromInt(LESS) : Smi::FromInt(GREATER);
   }
-  ASSERT(result == StringInputBufferCompare(x, y));
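+  // FlatStringCompare is not passed an Isolate*, so this debug-only
+  // cross-check fetches the current one to reach the RuntimeState buffers.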
+  ASSERT(result ==
+      StringInputBufferCompare(Isolate::Current()->runtime_state(), x, y));
   return result;
 }
 
 
-static MaybeObject* Runtime_StringCompare(Arguments args) {
+static MaybeObject* Runtime_StringCompare(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(String, x, args[0]);
   CONVERT_CHECKED(String, y, args[1]);
 
-  Counters::string_compare_runtime.Increment();
+  isolate->counters()->string_compare_runtime()->Increment();
 
   // A few fast case tests before we flatten.
   if (x == y) return Smi::FromInt(EQUAL);
@@ -6255,52 +6535,59 @@ static MaybeObject* Runtime_StringCompare(Arguments args) {
   else if (d > 0) return Smi::FromInt(GREATER);
 
   Object* obj;
-  { MaybeObject* maybe_obj = Heap::PrepareForCompare(x);
+  { MaybeObject* maybe_obj = isolate->heap()->PrepareForCompare(x);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
-  { MaybeObject* maybe_obj = Heap::PrepareForCompare(y);
+  { MaybeObject* maybe_obj = isolate->heap()->PrepareForCompare(y);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
 
   return (x->IsFlat() && y->IsFlat()) ? FlatStringCompare(x, y)
-                                      : StringInputBufferCompare(x, y);
+      : StringInputBufferCompare(isolate->runtime_state(), x, y);
 }
 
 
-static MaybeObject* Runtime_Math_acos(Arguments args) {
+static MaybeObject* Runtime_Math_acos(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_acos.Increment();
+  isolate->counters()->math_acos()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::ACOS, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::ACOS, x);
 }
 
 
-static MaybeObject* Runtime_Math_asin(Arguments args) {
+static MaybeObject* Runtime_Math_asin(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_asin.Increment();
+  isolate->counters()->math_asin()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::ASIN, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::ASIN, x);
 }
 
 
-static MaybeObject* Runtime_Math_atan(Arguments args) {
+static MaybeObject* Runtime_Math_atan(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_atan.Increment();
+  isolate->counters()->math_atan()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::ATAN, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::ATAN, x);
 }
 
 
-static MaybeObject* Runtime_Math_atan2(Arguments args) {
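+// Pi / 4, used by Runtime_Math_atan2 below when both arguments are infinite.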
+static const double kPiDividedBy4 = 0.78539816339744830962;
+
+
+static MaybeObject* Runtime_Math_atan2(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
-  Counters::math_atan2.Increment();
+  isolate->counters()->math_atan2()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   CONVERT_DOUBLE_CHECKED(y, args[1]);
@@ -6310,71 +6597,76 @@ static MaybeObject* Runtime_Math_atan2(Arguments args) {
     // is a multiple of Pi / 4. The sign of the result is determined
     // by the first argument (x) and the sign of the second argument
     // determines the multiplier: one or three.
-    static double kPiDividedBy4 = 0.78539816339744830962;
     int multiplier = (x < 0) ? -1 : 1;
     if (y < 0) multiplier *= 3;
     result = multiplier * kPiDividedBy4;
   } else {
     result = atan2(x, y);
   }
-  return Heap::AllocateHeapNumber(result);
+  return isolate->heap()->AllocateHeapNumber(result);
 }
 
 
-static MaybeObject* Runtime_Math_ceil(Arguments args) {
+static MaybeObject* Runtime_Math_ceil(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_ceil.Increment();
+  isolate->counters()->math_ceil()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return Heap::NumberFromDouble(ceiling(x));
+  return isolate->heap()->NumberFromDouble(ceiling(x));
 }
 
 
-static MaybeObject* Runtime_Math_cos(Arguments args) {
+static MaybeObject* Runtime_Math_cos(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_cos.Increment();
+  isolate->counters()->math_cos()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::COS, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::COS, x);
 }
 
 
-static MaybeObject* Runtime_Math_exp(Arguments args) {
+static MaybeObject* Runtime_Math_exp(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_exp.Increment();
+  isolate->counters()->math_exp()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::EXP, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::EXP, x);
 }
 
 
-static MaybeObject* Runtime_Math_floor(Arguments args) {
+static MaybeObject* Runtime_Math_floor(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_floor.Increment();
+  isolate->counters()->math_floor()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return Heap::NumberFromDouble(floor(x));
+  return isolate->heap()->NumberFromDouble(floor(x));
 }
 
 
-static MaybeObject* Runtime_Math_log(Arguments args) {
+static MaybeObject* Runtime_Math_log(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_log.Increment();
+  isolate->counters()->math_log()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::LOG, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::LOG, x);
 }
 
 
-static MaybeObject* Runtime_Math_pow(Arguments args) {
+static MaybeObject* Runtime_Math_pow(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
-  Counters::math_pow.Increment();
+  isolate->counters()->math_pow()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
 
@@ -6382,16 +6674,17 @@ static MaybeObject* Runtime_Math_pow(Arguments args) {
   // custom powi() function than the generic pow().
   if (args[1]->IsSmi()) {
     int y = Smi::cast(args[1])->value();
-    return Heap::NumberFromDouble(power_double_int(x, y));
+    return isolate->heap()->NumberFromDouble(power_double_int(x, y));
   }
 
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  return Heap::AllocateHeapNumber(power_double_double(x, y));
+  return isolate->heap()->AllocateHeapNumber(power_double_double(x, y));
 }
 
 // Fast version of Math.pow if we know that y is not an integer and
 // y is not -0.5 or 0.5. Used as slowcase from codegen.
-static MaybeObject* Runtime_Math_pow_cfunction(Arguments args) {
+static MaybeObject* Runtime_Math_pow_cfunction(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
   CONVERT_DOUBLE_CHECKED(x, args[0]);
@@ -6399,17 +6692,18 @@ static MaybeObject* Runtime_Math_pow_cfunction(Arguments args) {
   if (y == 0) {
     return Smi::FromInt(1);
   } else if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) {
-    return Heap::nan_value();
+    return isolate->heap()->nan_value();
   } else {
-    return Heap::AllocateHeapNumber(pow(x, y));
+    return isolate->heap()->AllocateHeapNumber(pow(x, y));
   }
 }
 
 
-static MaybeObject* Runtime_RoundNumber(Arguments args) {
+static MaybeObject* Runtime_RoundNumber(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_round.Increment();
+  isolate->counters()->math_round()->Increment();
 
   if (!args[0]->IsHeapNumber()) {
     // Must be smi. Return the argument unchanged for all the other types
@@ -6435,40 +6729,43 @@ static MaybeObject* Runtime_RoundNumber(Arguments args) {
     return number;
   }
 
-  if (sign && value >= -0.5) return Heap::minus_zero_value();
+  if (sign && value >= -0.5) return isolate->heap()->minus_zero_value();
 
   // Do not call NumberFromDouble() to avoid extra checks.
-  return Heap::AllocateHeapNumber(floor(value + 0.5));
+  return isolate->heap()->AllocateHeapNumber(floor(value + 0.5));
 }
 
 
-static MaybeObject* Runtime_Math_sin(Arguments args) {
+static MaybeObject* Runtime_Math_sin(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_sin.Increment();
+  isolate->counters()->math_sin()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::SIN, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::SIN, x);
 }
 
 
-static MaybeObject* Runtime_Math_sqrt(Arguments args) {
+static MaybeObject* Runtime_Math_sqrt(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_sqrt.Increment();
+  isolate->counters()->math_sqrt()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return Heap::AllocateHeapNumber(sqrt(x));
+  return isolate->heap()->AllocateHeapNumber(sqrt(x));
 }
 
 
-static MaybeObject* Runtime_Math_tan(Arguments args) {
+static MaybeObject* Runtime_Math_tan(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  Counters::math_tan.Increment();
+  isolate->counters()->math_tan()->Increment();
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return TranscendentalCache::Get(TranscendentalCache::TAN, x);
+  return isolate->transcendental_cache()->Get(TranscendentalCache::TAN, x);
 }
 
 
@@ -6517,7 +6814,8 @@ static int MakeDay(int year, int month, int day) {
 }
 
 
-static MaybeObject* Runtime_DateMakeDay(Arguments args) {
+static MaybeObject* Runtime_DateMakeDay(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -6816,7 +7114,8 @@ static inline void DateYMDFromTime(int date,
 }
 
 
-static MaybeObject* Runtime_DateYMDFromTime(Arguments args) {
+static MaybeObject* Runtime_DateYMDFromTime(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
@@ -6826,7 +7125,8 @@ static MaybeObject* Runtime_DateYMDFromTime(Arguments args) {
   int year, month, day;
   DateYMDFromTime(static_cast<int>(floor(t / 86400000)), year, month, day);
 
-  RUNTIME_ASSERT(res_array->elements()->map() == Heap::fixed_array_map());
+  RUNTIME_ASSERT(res_array->elements()->map() ==
+                 isolate->heap()->fixed_array_map());
   FixedArray* elms = FixedArray::cast(res_array->elements());
   RUNTIME_ASSERT(elms->length() == 3);
 
@@ -6834,11 +7134,12 @@ static MaybeObject* Runtime_DateYMDFromTime(Arguments args) {
   elms->set(1, Smi::FromInt(month));
   elms->set(2, Smi::FromInt(day));
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_NewArgumentsFast(Arguments args) {
+static MaybeObject* Runtime_NewArgumentsFast(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 3);
 
@@ -6847,20 +7148,21 @@ static MaybeObject* Runtime_NewArgumentsFast(Arguments args) {
   const int length = Smi::cast(args[2])->value();
 
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateArgumentsObject(callee, length);
+  { MaybeObject* maybe_result =
+        isolate->heap()->AllocateArgumentsObject(callee, length);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   // Allocate the elements if needed.
   if (length > 0) {
     // Allocate the fixed array.
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateRawFixedArray(length);
+    { MaybeObject* maybe_obj = isolate->heap()->AllocateRawFixedArray(length);
       if (!maybe_obj->ToObject(&obj)) return maybe_obj;
     }
 
     AssertNoAllocation no_gc;
     FixedArray* array = reinterpret_cast<FixedArray*>(obj);
-    array->set_map(Heap::fixed_array_map());
+    array->set_map(isolate->heap()->fixed_array_map());
     array->set_length(length);
 
     WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
@@ -6873,8 +7175,9 @@ static MaybeObject* Runtime_NewArgumentsFast(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_NewClosure(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_NewClosure(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(Context, context, 0);
   CONVERT_ARG_CHECKED(SharedFunctionInfo, shared, 1);
@@ -6886,15 +7189,15 @@ static MaybeObject* Runtime_NewClosure(Arguments args) {
   pretenure = pretenure || (context->global_context() == *context);
   PretenureFlag pretenure_flag = pretenure ? TENURED : NOT_TENURED;
   Handle<JSFunction> result =
-      Factory::NewFunctionFromSharedFunctionInfo(shared,
-                                                 context,
-                                                 pretenure_flag);
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
+                                                            context,
+                                                            pretenure_flag);
   return *result;
 }
 
-
-static MaybeObject* Runtime_NewObjectFromBound(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_NewObjectFromBound(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   // First argument is a function to use as a constructor.
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
@@ -6941,10 +7244,11 @@ static MaybeObject* Runtime_NewObjectFromBound(Arguments args) {
 }
 
 
-static void TrySettingInlineConstructStub(Handle<JSFunction> function) {
-  Handle<Object> prototype = Factory::null_value();
+static void TrySettingInlineConstructStub(Isolate* isolate,
+                                          Handle<JSFunction> function) {
+  Handle<Object> prototype = isolate->factory()->null_value();
   if (function->has_instance_prototype()) {
-    prototype = Handle<Object>(function->instance_prototype());
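+    // The explicit isolate avoids an Isolate::Current() lookup.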
+    prototype = Handle<Object>(function->instance_prototype(), isolate);
   }
   if (function->shared()->CanGenerateInlineConstructor(*prototype)) {
     ConstructStubCompiler compiler;
@@ -6957,8 +7261,9 @@ static void TrySettingInlineConstructStub(Handle<JSFunction> function) {
 }
 
 
-static MaybeObject* Runtime_NewObject(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_NewObject(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   Handle<Object> constructor = args.at<Object>(0);
@@ -6967,8 +7272,8 @@ static MaybeObject* Runtime_NewObject(Arguments args) {
   if (!constructor->IsJSFunction()) {
     Vector< Handle<Object> > arguments = HandleVector(&constructor, 1);
     Handle<Object> type_error =
-        Factory::NewTypeError("not_constructor", arguments);
-    return Top::Throw(*type_error);
+        isolate->factory()->NewTypeError("not_constructor", arguments);
+    return isolate->Throw(*type_error);
   }
 
   Handle<JSFunction> function = Handle<JSFunction>::cast(constructor);
@@ -6978,14 +7283,15 @@ static MaybeObject* Runtime_NewObject(Arguments args) {
   if (!function->should_have_prototype()) {
     Vector< Handle<Object> > arguments = HandleVector(&constructor, 1);
     Handle<Object> type_error =
-        Factory::NewTypeError("not_constructor", arguments);
-    return Top::Throw(*type_error);
+        isolate->factory()->NewTypeError("not_constructor", arguments);
+    return isolate->Throw(*type_error);
   }
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
+  Debug* debug = isolate->debug();
   // Handle stepping into constructors if step into is active.
-  if (Debug::StepInActive()) {
-    Debug::HandleStepIn(function, Handle<Object>::null(), 0, true);
+  if (debug->StepInActive()) {
+    debug->HandleStepIn(function, Handle<Object>::null(), 0, true);
   }
 #endif
 
@@ -6995,14 +7301,14 @@ static MaybeObject* Runtime_NewObject(Arguments args) {
       // called using 'new' and creates a new JSFunction object that
       // is returned.  The receiver object is only used for error
       // reporting if an error occurs when constructing the new
-      // JSFunction. Factory::NewJSObject() should not be used to
+      // JSFunction. FACTORY->NewJSObject() should not be used to
       // allocate JSFunctions since it does not properly initialize
       // the shared part of the function. Since the receiver is
       // ignored anyway, we use the global object as the receiver
       // instead of a new JSFunction object. This way, errors are
       // reported the same way whether or not 'Function' is called
       // using 'new'.
-      return Top::context()->global();
+      return isolate->context()->global();
     }
   }
 
@@ -7010,7 +7316,7 @@ static MaybeObject* Runtime_NewObject(Arguments args) {
   // available. We cannot use EnsureCompiled because that forces a
   // compilation through the shared function info which makes it
   // impossible for us to optimize.
-  Handle<SharedFunctionInfo> shared(function->shared());
+  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
   if (!function->is_compiled()) CompileLazy(function, CLEAR_EXCEPTION);
 
   if (!function->has_initial_map() &&
@@ -7022,34 +7328,36 @@ static MaybeObject* Runtime_NewObject(Arguments args) {
   }
 
   bool first_allocation = !shared->live_objects_may_exist();
-  Handle<JSObject> result = Factory::NewJSObject(function);
-  RETURN_IF_EMPTY_HANDLE(result);
+  Handle<JSObject> result = isolate->factory()->NewJSObject(function);
+  RETURN_IF_EMPTY_HANDLE(isolate, result);
   // Delay setting the stub if inobject slack tracking is in progress.
   if (first_allocation && !shared->IsInobjectSlackTrackingInProgress()) {
-    TrySettingInlineConstructStub(function);
+    TrySettingInlineConstructStub(isolate, function);
   }
 
-  Counters::constructed_objects.Increment();
-  Counters::constructed_objects_runtime.Increment();
+  isolate->counters()->constructed_objects()->Increment();
+  isolate->counters()->constructed_objects_runtime()->Increment();
 
   return *result;
 }
 
 
-static MaybeObject* Runtime_FinalizeInstanceSize(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_FinalizeInstanceSize(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
   function->shared()->CompleteInobjectSlackTracking();
-  TrySettingInlineConstructStub(function);
+  TrySettingInlineConstructStub(isolate, function);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_LazyCompile(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_LazyCompile(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   Handle<JSFunction> function = args.at<JSFunction>(0);
@@ -7079,20 +7387,21 @@ static MaybeObject* Runtime_LazyCompile(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_LazyRecompile(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_LazyRecompile(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   Handle<JSFunction> function = args.at<JSFunction>(0);
   // If the function is not optimizable or debugger is active continue using the
   // code from the full compiler.
   if (!function->shared()->code()->optimizable() ||
-      Debug::has_break_points()) {
+      isolate->debug()->has_break_points()) {
     if (FLAG_trace_opt) {
       PrintF("[failed to optimize ");
       function->PrintName();
       PrintF(": is code optimizable: %s, is debugger enabled: %s]\n",
           function->shared()->code()->optimizable() ? "T" : "F",
-          Debug::has_break_points() ? "T" : "F");
+          isolate->debug()->has_break_points() ? "T" : "F");
     }
     function->ReplaceCode(function->shared()->code());
     return function->code();
@@ -7110,14 +7419,15 @@ static MaybeObject* Runtime_LazyRecompile(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_NotifyDeoptimized(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_NotifyDeoptimized(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   RUNTIME_ASSERT(args[0]->IsSmi());
   Deoptimizer::BailoutType type =
       static_cast<Deoptimizer::BailoutType>(Smi::cast(args[0])->value());
-  Deoptimizer* deoptimizer = Deoptimizer::Grab();
-  ASSERT(Heap::IsAllocationAllowed());
+  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
+  ASSERT(isolate->heap()->IsAllocationAllowed());
   int frames = deoptimizer->output_count();
 
   JavaScriptFrameIterator it;
@@ -7130,24 +7440,24 @@ static MaybeObject* Runtime_NotifyDeoptimized(Arguments args) {
   delete deoptimizer;
 
   RUNTIME_ASSERT(frame->function()->IsJSFunction());
-  Handle<JSFunction> function(JSFunction::cast(frame->function()));
+  Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
   Handle<Object> arguments;
   for (int i = frame->ComputeExpressionsCount() - 1; i >= 0; --i) {
-    if (frame->GetExpression(i) == Heap::arguments_marker()) {
+    if (frame->GetExpression(i) == isolate->heap()->arguments_marker()) {
       if (arguments.is_null()) {
         // FunctionGetArguments can't throw an exception, so cast away the
         // doubt with an assert.
         arguments = Handle<Object>(
             Accessors::FunctionGetArguments(*function,
                                             NULL)->ToObjectUnchecked());
-        ASSERT(*arguments != Heap::null_value());
-        ASSERT(*arguments != Heap::undefined_value());
+        ASSERT(*arguments != isolate->heap()->null_value());
+        ASSERT(*arguments != isolate->heap()->undefined_value());
       }
       frame->SetExpression(i, *arguments);
     }
   }
 
-  CompilationCache::MarkForLazyOptimizing(function);
+  isolate->compilation_cache()->MarkForLazyOptimizing(function);
   if (type == Deoptimizer::EAGER) {
     RUNTIME_ASSERT(function->IsOptimized());
   } else {
@@ -7157,7 +7467,7 @@ static MaybeObject* Runtime_NotifyDeoptimized(Arguments args) {
   // Avoid doing too much work when running with --always-opt and keep
   // the optimized code around.
   if (FLAG_always_opt || type == Deoptimizer::LAZY) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   // Count the number of optimized activations of the function.
@@ -7180,31 +7490,35 @@ static MaybeObject* Runtime_NotifyDeoptimized(Arguments args) {
     }
     function->ReplaceCode(function->shared()->code());
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_NotifyOSR(Arguments args) {
-  Deoptimizer* deoptimizer = Deoptimizer::Grab();
+static MaybeObject* Runtime_NotifyOSR(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
   delete deoptimizer;
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DeoptimizeFunction(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DeoptimizeFunction(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
-  if (!function->IsOptimized()) return Heap::undefined_value();
+  if (!function->IsOptimized()) return isolate->heap()->undefined_value();
 
   Deoptimizer::DeoptimizeFunction(*function);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CompileForOnStackReplacement(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
 
@@ -7213,7 +7527,7 @@ static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
 
   // We have hit a back edge in an unoptimized frame for a function that was
   // selected for on-stack replacement.  Find the unoptimized code object.
-  Handle<Code> unoptimized(function->shared()->code());
+  Handle<Code> unoptimized(function->shared()->code(), isolate);
   // Keep track of whether we've succeeded in optimizing.
   bool succeeded = unoptimized->optimizable();
   if (succeeded) {
@@ -7237,7 +7551,7 @@ static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
     JavaScriptFrameIterator it;
     JavaScriptFrame* frame = it.frame();
     ASSERT(frame->function() == *function);
-    ASSERT(frame->code() == *unoptimized);
+    ASSERT(frame->LookupCode(isolate) == *unoptimized);
     ASSERT(unoptimized->contains(frame->pc()));
 
     // Use linear search of the unoptimized code's stack check table to find
@@ -7295,7 +7609,7 @@ static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
   StackCheckStub check_stub;
   Handle<Code> check_code = check_stub.GetCode();
   Handle<Code> replacement_code(
-      Builtins::builtin(Builtins::OnStackReplacement));
+      isolate->builtins()->builtin(Builtins::OnStackReplacement));
   Deoptimizer::RevertStackCheckCode(*unoptimized,
                                     *check_code,
                                     *replacement_code);
@@ -7318,40 +7632,45 @@ static MaybeObject* Runtime_CompileForOnStackReplacement(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_GetFunctionDelegate(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetFunctionDelegate(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   RUNTIME_ASSERT(!args[0]->IsJSFunction());
   return *Execution::GetFunctionDelegate(args.at<Object>(0));
 }
 
 
-static MaybeObject* Runtime_GetConstructorDelegate(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetConstructorDelegate(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   RUNTIME_ASSERT(!args[0]->IsJSFunction());
   return *Execution::GetConstructorDelegate(args.at<Object>(0));
 }
 
 
-static MaybeObject* Runtime_NewContext(Arguments args) {
+static MaybeObject* Runtime_NewContext(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSFunction, function, args[0]);
   int length = function->shared()->scope_info()->NumberOfContextSlots();
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateFunctionContext(length, function);
+  { MaybeObject* maybe_result =
+        isolate->heap()->AllocateFunctionContext(length, function);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
-  Top::set_context(Context::cast(result));
+  isolate->set_context(Context::cast(result));
 
   return result;  // non-failure
 }
 
 
-MUST_USE_RESULT static MaybeObject* PushContextHelper(Object* object,
+MUST_USE_RESULT static MaybeObject* PushContextHelper(Isolate* isolate,
+                                                      Object* object,
                                                       bool is_catch_context) {
   // Convert the object to a proper JavaScript object.
   Object* js_object = object;
@@ -7361,45 +7680,47 @@ MUST_USE_RESULT static MaybeObject* PushContextHelper(Object* object,
       if (!Failure::cast(maybe_js_object)->IsInternalError()) {
         return maybe_js_object;
       }
-      HandleScope scope;
-      Handle<Object> handle(object);
+      HandleScope scope(isolate);
+      Handle<Object> handle(object, isolate);
       Handle<Object> result =
-          Factory::NewTypeError("with_expression", HandleVector(&handle, 1));
-      return Top::Throw(*result);
+          isolate->factory()->NewTypeError("with_expression",
+                                           HandleVector(&handle, 1));
+      return isolate->Throw(*result);
     }
   }
 
   Object* result;
-  { MaybeObject* maybe_result =
-        Heap::AllocateWithContext(Top::context(),
-                                  JSObject::cast(js_object),
-                                  is_catch_context);
+  { MaybeObject* maybe_result = isolate->heap()->AllocateWithContext(
+      isolate->context(), JSObject::cast(js_object), is_catch_context);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
   Context* context = Context::cast(result);
-  Top::set_context(context);
+  isolate->set_context(context);
 
   return result;
 }
 
 
-static MaybeObject* Runtime_PushContext(Arguments args) {
+static MaybeObject* Runtime_PushContext(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  return PushContextHelper(args[0], false);
+  return PushContextHelper(isolate, args[0], false);
 }
 
 
-static MaybeObject* Runtime_PushCatchContext(Arguments args) {
+static MaybeObject* Runtime_PushCatchContext(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
-  return PushContextHelper(args[0], true);
+  return PushContextHelper(isolate, args[0], true);
 }
 
 
-static MaybeObject* Runtime_DeleteContextSlot(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DeleteContextSlot(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   CONVERT_ARG_CHECKED(Context, context, 0);
@@ -7412,12 +7733,12 @@ static MaybeObject* Runtime_DeleteContextSlot(Arguments args) {
 
   // If the slot was not found the result is true.
   if (holder.is_null()) {
-    return Heap::true_value();
+    return isolate->heap()->true_value();
   }
 
   // If the slot was found in a context, it should be DONT_DELETE.
   if (holder->IsContext()) {
-    return Heap::false_value();
+    return isolate->heap()->false_value();
   }
 
   // The slot was found in a JSObject, either a context extension object,
@@ -7465,17 +7786,19 @@ static inline ObjectPair MakePair(MaybeObject* x, MaybeObject* y) {
 #endif
 
 
-static inline MaybeObject* Unhole(MaybeObject* x,
+static inline MaybeObject* Unhole(Heap* heap,
+                                  MaybeObject* x,
                                   PropertyAttributes attributes) {
   ASSERT(!x->IsTheHole() || (attributes & READ_ONLY) != 0);
   USE(attributes);
-  return x->IsTheHole() ? Heap::undefined_value() : x;
+  return x->IsTheHole() ? heap->undefined_value() : x;
 }
 
 
-static JSObject* ComputeReceiverForNonGlobal(JSObject* holder) {
+static JSObject* ComputeReceiverForNonGlobal(Isolate* isolate,
+                                             JSObject* holder) {
   ASSERT(!holder->IsGlobalObject());
-  Context* top = Top::context();
+  Context* top = isolate->context();
   // Get the context extension function.
   JSFunction* context_extension_function =
       top->global_context()->context_extension_function();
@@ -7492,12 +7815,14 @@ static JSObject* ComputeReceiverForNonGlobal(JSObject* holder) {
 }
 
 
-static ObjectPair LoadContextSlotHelper(Arguments args, bool throw_error) {
-  HandleScope scope;
+static ObjectPair LoadContextSlotHelper(Arguments args,
+                                        Isolate* isolate,
+                                        bool throw_error) {
+  HandleScope scope(isolate);
   ASSERT_EQ(2, args.length());
 
   if (!args[0]->IsContext() || !args[1]->IsString()) {
-    return MakePair(Top::ThrowIllegalOperation(), NULL);
+    return MakePair(isolate->ThrowIllegalOperation(), NULL);
   }
   Handle<Context> context = args.at<Context>(0);
   Handle<String> name = args.at<String>(1);
@@ -7514,11 +7839,12 @@ static ObjectPair LoadContextSlotHelper(Arguments args, bool throw_error) {
     // If the "property" we were looking for is a local variable or an
     // argument in a context, the receiver is the global object; see
     // ECMA-262, 3rd., 10.1.6 and 10.2.3.
-    JSObject* receiver = Top::context()->global()->global_receiver();
+    JSObject* receiver =
+        isolate->context()->global()->global_receiver();
     MaybeObject* value = (holder->IsContext())
         ? Context::cast(*holder)->get(index)
         : JSObject::cast(*holder)->GetElement(index);
-    return MakePair(Unhole(value, attributes), receiver);
+    return MakePair(Unhole(isolate->heap(), value, attributes), receiver);
   }
 
   // If the holder is found, we read the property from it.
@@ -7529,9 +7855,9 @@ static ObjectPair LoadContextSlotHelper(Arguments args, bool throw_error) {
     if (object->IsGlobalObject()) {
       receiver = GlobalObject::cast(object)->global_receiver();
     } else if (context->is_exception_holder(*holder)) {
-      receiver = Top::context()->global()->global_receiver();
+      receiver = isolate->context()->global()->global_receiver();
     } else {
-      receiver = ComputeReceiverForNonGlobal(object);
+      receiver = ComputeReceiverForNonGlobal(isolate, object);
     }
     // No need to unhole the value here. This is taken care of by the
     // GetProperty function.
@@ -7542,30 +7868,36 @@ static ObjectPair LoadContextSlotHelper(Arguments args, bool throw_error) {
   if (throw_error) {
     // The property doesn't exist - throw exception.
     Handle<Object> reference_error =
-        Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
-    return MakePair(Top::Throw(*reference_error), NULL);
+        isolate->factory()->NewReferenceError("not_defined",
+                                              HandleVector(&name, 1));
+    return MakePair(isolate->Throw(*reference_error), NULL);
   } else {
     // The property doesn't exist - return undefined
-    return MakePair(Heap::undefined_value(), Heap::undefined_value());
+    return MakePair(isolate->heap()->undefined_value(),
+                    isolate->heap()->undefined_value());
   }
 }
 
 
-static ObjectPair Runtime_LoadContextSlot(Arguments args) {
-  return LoadContextSlotHelper(args, true);
+static ObjectPair Runtime_LoadContextSlot(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  return LoadContextSlotHelper(args, isolate, true);
 }
 
 
-static ObjectPair Runtime_LoadContextSlotNoReferenceError(Arguments args) {
-  return LoadContextSlotHelper(args, false);
+static ObjectPair Runtime_LoadContextSlotNoReferenceError(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  return LoadContextSlotHelper(args, isolate, false);
 }
 
 
-static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_StoreContextSlot(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 4);
 
-  Handle<Object> value(args[0]);
+  Handle<Object> value(args[0], isolate);
   CONVERT_ARG_CHECKED(Context, context, 1);
   CONVERT_ARG_CHECKED(String, name, 2);
   CONVERT_SMI_CHECKED(strict_unchecked, args[3]);
@@ -7587,16 +7919,16 @@ static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
       } else if (strict_mode == kStrictMode) {
         // Setting read only property in strict mode.
         Handle<Object> error =
-            Factory::NewTypeError("strict_cannot_assign",
-                                  HandleVector(&name, 1));
-        return Top::Throw(*error);
+            isolate->factory()->NewTypeError("strict_cannot_assign",
+                                             HandleVector(&name, 1));
+        return isolate->Throw(*error);
       }
     } else {
       ASSERT((attributes & READ_ONLY) == 0);
       Handle<Object> result =
           SetElement(Handle<JSObject>::cast(holder), index, value, strict_mode);
       if (result.is_null()) {
-        ASSERT(Top::has_pending_exception());
+        ASSERT(isolate->has_pending_exception());
         return Failure::Exception();
       }
     }
@@ -7614,7 +7946,7 @@ static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
     // The property was not found. It needs to be stored in the global context.
     ASSERT(attributes == ABSENT);
     attributes = NONE;
-    context_ext = Handle<JSObject>(Top::context()->global());
+    context_ext = Handle<JSObject>(isolate->context()->global());
   }
 
   // Set the property, but ignore if read_only variable on the context
@@ -7622,62 +7954,66 @@ static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
   if ((attributes & READ_ONLY) == 0 ||
       (context_ext->GetLocalPropertyAttribute(*name) == ABSENT)) {
     RETURN_IF_EMPTY_HANDLE(
+        isolate,
         SetProperty(context_ext, name, value, NONE, strict_mode));
   } else if (strict_mode == kStrictMode && (attributes & READ_ONLY) != 0) {
     // Setting read only property in strict mode.
     Handle<Object> error =
-        Factory::NewTypeError("strict_cannot_assign", HandleVector(&name, 1));
-    return Top::Throw(*error);
+      isolate->factory()->NewTypeError(
+          "strict_cannot_assign", HandleVector(&name, 1));
+    return isolate->Throw(*error);
   }
   return *value;
 }
 
 
-static MaybeObject* Runtime_Throw(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_Throw(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
-  return Top::Throw(args[0]);
+  return isolate->Throw(args[0]);
 }
 
 
-static MaybeObject* Runtime_ReThrow(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ReThrow(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
-  return Top::ReThrow(args[0]);
+  return isolate->ReThrow(args[0]);
 }
 
 
-static MaybeObject* Runtime_PromoteScheduledException(Arguments args) {
+static MaybeObject* Runtime_PromoteScheduledException(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT_EQ(0, args.length());
-  return Top::PromoteScheduledException();
+  return isolate->PromoteScheduledException();
 }
 
 
-static MaybeObject* Runtime_ThrowReferenceError(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ThrowReferenceError(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
-  Handle<Object> name(args[0]);
+  Handle<Object> name(args[0], isolate);
   Handle<Object> reference_error =
-    Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
-  return Top::Throw(*reference_error);
-}
-
-
-static MaybeObject* Runtime_StackOverflow(Arguments args) {
-  NoHandleAllocation na;
-  return Top::StackOverflow();
+    isolate->factory()->NewReferenceError("not_defined",
+                                          HandleVector(&name, 1));
+  return isolate->Throw(*reference_error);
 }
 
 
-static MaybeObject* Runtime_StackGuard(Arguments args) {
+static MaybeObject* Runtime_StackGuard(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
 
   // First check if this is a real stack overflow.
-  if (StackGuard::IsStackOverflow()) {
-    return Runtime_StackOverflow(args);
+  if (isolate->stack_guard()->IsStackOverflow()) {
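+    // Previously delegated to the now-removed Runtime_StackOverflow helper.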
+    NoHandleAllocation na;
+    return isolate->StackOverflow();
   }
 
   return Execution::HandleStackGuardInterrupt();
@@ -7770,22 +8106,25 @@ static void PrintTransition(Object* result) {
 }
 
 
-static MaybeObject* Runtime_TraceEnter(Arguments args) {
+static MaybeObject* Runtime_TraceEnter(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
   NoHandleAllocation ha;
   PrintTransition(NULL);
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_TraceExit(Arguments args) {
+static MaybeObject* Runtime_TraceExit(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   PrintTransition(args[0]);
   return args[0];  // return TOS
 }
 
 
-static MaybeObject* Runtime_DebugPrint(Arguments args) {
+static MaybeObject* Runtime_DebugPrint(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -7816,15 +8155,17 @@ static MaybeObject* Runtime_DebugPrint(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_DebugTrace(Arguments args) {
+static MaybeObject* Runtime_DebugTrace(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
   NoHandleAllocation ha;
-  Top::PrintStack();
-  return Heap::undefined_value();
+  isolate->PrintStack();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DateCurrentTime(Arguments args) {
+static MaybeObject* Runtime_DateCurrentTime(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 0);
 
@@ -7833,12 +8174,13 @@ static MaybeObject* Runtime_DateCurrentTime(Arguments args) {
   // time is milliseconds. Therefore, we floor the result of getting
   // the OS time.
   double millis = floor(OS::TimeCurrentMillis());
-  return Heap::NumberFromDouble(millis);
+  return isolate->heap()->NumberFromDouble(millis);
 }
 
 
-static MaybeObject* Runtime_DateParseString(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DateParseString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   CONVERT_ARG_CHECKED(String, str, 0);
@@ -7862,109 +8204,119 @@ static MaybeObject* Runtime_DateParseString(Arguments args) {
   if (result) {
     return *output;
   } else {
-    return Heap::null_value();
+    return isolate->heap()->null_value();
   }
 }
 
 
-static MaybeObject* Runtime_DateLocalTimezone(Arguments args) {
+static MaybeObject* Runtime_DateLocalTimezone(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
   const char* zone = OS::LocalTimezone(x);
-  return Heap::AllocateStringFromUtf8(CStrVector(zone));
+  return isolate->heap()->AllocateStringFromUtf8(CStrVector(zone));
 }
 
 
-static MaybeObject* Runtime_DateLocalTimeOffset(Arguments args) {
+static MaybeObject* Runtime_DateLocalTimeOffset(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 0);
 
-  return Heap::NumberFromDouble(OS::LocalTimeOffset());
+  return isolate->heap()->NumberFromDouble(OS::LocalTimeOffset());
 }
 
 
-static MaybeObject* Runtime_DateDaylightSavingsOffset(Arguments args) {
+static MaybeObject* Runtime_DateDaylightSavingsOffset(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
   CONVERT_DOUBLE_CHECKED(x, args[0]);
-  return Heap::NumberFromDouble(OS::DaylightSavingsOffset(x));
+  return isolate->heap()->NumberFromDouble(OS::DaylightSavingsOffset(x));
 }
 
 
-static MaybeObject* Runtime_GlobalReceiver(Arguments args) {
+static MaybeObject* Runtime_GlobalReceiver(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   Object* global = args[0];
-  if (!global->IsJSGlobalObject()) return Heap::null_value();
+  if (!global->IsJSGlobalObject()) return isolate->heap()->null_value();
   return JSGlobalObject::cast(global)->global_receiver();
 }
 
 
-static MaybeObject* Runtime_ParseJson(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ParseJson(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT_EQ(1, args.length());
   CONVERT_ARG_CHECKED(String, source, 0);
 
   Handle<Object> result = JsonParser::Parse(source);
   if (result.is_null()) {
     // Syntax error or stack overflow in scanner.
-    ASSERT(Top::has_pending_exception());
+    ASSERT(isolate->has_pending_exception());
     return Failure::Exception();
   }
   return *result;
 }
 
 
-static MaybeObject* Runtime_CompileString(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_CompileString(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT_EQ(1, args.length());
   CONVERT_ARG_CHECKED(String, source, 0);
 
   // Compile source string in the global context.
-  Handle<Context> context(Top::context()->global_context());
+  Handle<Context> context(isolate->context()->global_context());
   Handle<SharedFunctionInfo> shared = Compiler::CompileEval(source,
                                                             context,
                                                             true,
                                                             kNonStrictMode);
   if (shared.is_null()) return Failure::Exception();
   Handle<JSFunction> fun =
-      Factory::NewFunctionFromSharedFunctionInfo(shared, context, NOT_TENURED);
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
+                                                            context,
+                                                            NOT_TENURED);
   return *fun;
 }
 
 
-static ObjectPair CompileGlobalEval(Handle<String> source,
+static ObjectPair CompileGlobalEval(Isolate* isolate,
+                                    Handle<String> source,
                                     Handle<Object> receiver,
                                     StrictModeFlag strict_mode) {
   // Deal with a normal eval call with a string argument. Compile it
   // and return the compiled function bound in the local context.
   Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
       source,
-      Handle<Context>(Top::context()),
-      Top::context()->IsGlobalContext(),
+      Handle<Context>(isolate->context()),
+      isolate->context()->IsGlobalContext(),
       strict_mode);
   if (shared.is_null()) return MakePair(Failure::Exception(), NULL);
-  Handle<JSFunction> compiled = Factory::NewFunctionFromSharedFunctionInfo(
-      shared,
-      Handle<Context>(Top::context()),
-      NOT_TENURED);
+  Handle<JSFunction> compiled =
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(
+          shared, Handle<Context>(isolate->context()), NOT_TENURED);
   return MakePair(*compiled, *receiver);
 }
 
 
-static ObjectPair Runtime_ResolvePossiblyDirectEval(Arguments args) {
+static ObjectPair Runtime_ResolvePossiblyDirectEval(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<Object> callee = args.at<Object>(0);
   Handle<Object> receiver;  // Will be overwritten.
 
   // Compute the calling context.
-  Handle<Context> context = Handle<Context>(Top::context());
+  Handle<Context> context = Handle<Context>(isolate->context(), isolate);
 #ifdef DEBUG
-  // Make sure Top::context() agrees with the old code that traversed
+  // Make sure Isolate::context() agrees with the old code that traversed
   // the stack frames to compute the context.
   StackFrameLocator locator;
   JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
@@ -7976,25 +8328,28 @@ static ObjectPair Runtime_ResolvePossiblyDirectEval(Arguments args) {
   int index = -1;
   PropertyAttributes attributes = ABSENT;
   while (true) {
-    receiver = context->Lookup(Factory::eval_symbol(), FOLLOW_PROTOTYPE_CHAIN,
+    receiver = context->Lookup(isolate->factory()->eval_symbol(),
+                               FOLLOW_PROTOTYPE_CHAIN,
                                &index, &attributes);
     // Stop search when eval is found or when the global context is
     // reached.
     if (attributes != ABSENT || context->IsGlobalContext()) break;
     if (context->is_function_context()) {
-      context = Handle<Context>(Context::cast(context->closure()->context()));
+      context = Handle<Context>(Context::cast(context->closure()->context()),
+                                isolate);
     } else {
-      context = Handle<Context>(context->previous());
+      context = Handle<Context>(context->previous(), isolate);
     }
   }
 
   // If eval could not be resolved, it has been deleted and we need to
   // throw a reference error.
   if (attributes == ABSENT) {
-    Handle<Object> name = Factory::eval_symbol();
+    Handle<Object> name = isolate->factory()->eval_symbol();
     Handle<Object> reference_error =
-        Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
-    return MakePair(Top::Throw(*reference_error), NULL);
+        isolate->factory()->NewReferenceError("not_defined",
+                                              HandleVector(&name, 1));
+    return MakePair(isolate->Throw(*reference_error), NULL);
   }
 
   if (!context->IsGlobalContext()) {
@@ -8002,61 +8357,70 @@ static ObjectPair Runtime_ResolvePossiblyDirectEval(Arguments args) {
     // with the given arguments. This is not necessarily the global eval.
     if (receiver->IsContext()) {
       context = Handle<Context>::cast(receiver);
-      receiver = Handle<Object>(context->get(index));
+      receiver = Handle<Object>(context->get(index), isolate);
     } else if (receiver->IsJSContextExtensionObject()) {
-      receiver = Handle<JSObject>(Top::context()->global()->global_receiver());
+      receiver = Handle<JSObject>(
+          isolate->context()->global()->global_receiver(), isolate);
     }
     return MakePair(*callee, *receiver);
   }
 
   // 'eval' is bound in the global context, but it may have been overwritten.
   // Compare it to the builtin 'GlobalEval' function to make sure.
-  if (*callee != Top::global_context()->global_eval_fun() ||
+  if (*callee != isolate->global_context()->global_eval_fun() ||
       !args[1]->IsString()) {
-    return MakePair(*callee, Top::context()->global()->global_receiver());
+    return MakePair(*callee,
+                    isolate->context()->global()->global_receiver());
   }
 
   ASSERT(args[3]->IsSmi());
-  return CompileGlobalEval(args.at<String>(1),
+  return CompileGlobalEval(isolate,
+                           args.at<String>(1),
                            args.at<Object>(2),
                            static_cast<StrictModeFlag>(
                                 Smi::cast(args[3])->value()));
 }
 
 
-static ObjectPair Runtime_ResolvePossiblyDirectEvalNoLookup(Arguments args) {
+static ObjectPair Runtime_ResolvePossiblyDirectEvalNoLookup(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
 
-  HandleScope scope;
+  HandleScope scope(isolate);
   Handle<Object> callee = args.at<Object>(0);
 
   // 'eval' is bound in the global context, but it may have been overwritten.
   // Compare it to the builtin 'GlobalEval' function to make sure.
-  if (*callee != Top::global_context()->global_eval_fun() ||
+  if (*callee != isolate->global_context()->global_eval_fun() ||
       !args[1]->IsString()) {
-    return MakePair(*callee, Top::context()->global()->global_receiver());
+    return MakePair(*callee,
+                    isolate->context()->global()->global_receiver());
   }
 
   ASSERT(args[3]->IsSmi());
-  return CompileGlobalEval(args.at<String>(1),
+  return CompileGlobalEval(isolate,
+                           args.at<String>(1),
                            args.at<Object>(2),
                            static_cast<StrictModeFlag>(
                                 Smi::cast(args[3])->value()));
 }
 
 
-static MaybeObject* Runtime_SetNewFunctionAttributes(Arguments args) {
+static MaybeObject* Runtime_SetNewFunctionAttributes(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   // This utility adjusts the property attributes for newly created Function
   // object ("new Function(...)") by changing the map.
   // All it does is changing the prototype property to enumerable
   // as specified in ECMA262, 15.3.5.2.
-  HandleScope scope;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_CHECKED(JSFunction, func, 0);
 
   Handle<Map> map = func->shared()->strict_mode()
-                        ? Top::strict_mode_function_instance_map()
-                        : Top::function_instance_map();
+                        ? isolate->strict_mode_function_instance_map()
+                        : isolate->function_instance_map();
 
   ASSERT(func->map()->instance_type() == map->instance_type());
   ASSERT(func->map()->instance_size() == map->instance_size());
@@ -8065,7 +8429,8 @@ static MaybeObject* Runtime_SetNewFunctionAttributes(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_AllocateInNewSpace(Arguments args) {
+static MaybeObject* Runtime_AllocateInNewSpace(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   // Allocate a block of memory in NewSpace (filled with a filler).
   // Use as fallback for allocation in generated code when NewSpace
   // is full.
@@ -8074,13 +8439,13 @@ static MaybeObject* Runtime_AllocateInNewSpace(Arguments args) {
   int size = size_smi->value();
   RUNTIME_ASSERT(IsAligned(size, kPointerSize));
   RUNTIME_ASSERT(size > 0);
-  static const int kMinFreeNewSpaceAfterGC =
-      Heap::InitialSemiSpaceSize() * 3/4;
+  Heap* heap = isolate->heap();
+  const int kMinFreeNewSpaceAfterGC = heap->InitialSemiSpaceSize() * 3/4;
   RUNTIME_ASSERT(size <= kMinFreeNewSpaceAfterGC);
   Object* allocation;
-  { MaybeObject* maybe_allocation = Heap::new_space()->AllocateRaw(size);
+  { MaybeObject* maybe_allocation = heap->new_space()->AllocateRaw(size);
     if (maybe_allocation->ToObject(&allocation)) {
-      Heap::CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
+      heap->CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
     }
     return maybe_allocation;
   }
@@ -8090,7 +8455,8 @@ static MaybeObject* Runtime_AllocateInNewSpace(Arguments args) {
 // Push an object onto an array of objects if it is not already in the
 // array.  Returns true if the element was pushed on the stack and
 // false otherwise.
-static MaybeObject* Runtime_PushIfAbsent(Arguments args) {
+static MaybeObject* Runtime_PushIfAbsent(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(JSArray, array, args[0]);
   CONVERT_CHECKED(JSObject, element, args[1]);
@@ -8098,7 +8464,7 @@ static MaybeObject* Runtime_PushIfAbsent(Arguments args) {
   int length = Smi::cast(array->length())->value();
   FixedArray* elements = FixedArray::cast(array->elements());
   for (int i = 0; i < length; i++) {
-    if (elements->get(i) == element) return Heap::false_value();
+    if (elements->get(i) == element) return isolate->heap()->false_value();
   }
   Object* obj;
   // Strict not needed. Used for cycle detection in Array join implementation.
@@ -8106,7 +8472,7 @@ static MaybeObject* Runtime_PushIfAbsent(Arguments args) {
                                                    kNonStrictMode);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
-  return Heap::true_value();
+  return isolate->heap()->true_value();
 }
 
 
@@ -8123,9 +8489,12 @@ static MaybeObject* Runtime_PushIfAbsent(Arguments args) {
  */
 class ArrayConcatVisitor {
  public:
-  ArrayConcatVisitor(Handle<FixedArray> storage,
+  ArrayConcatVisitor(Isolate* isolate,
+                     Handle<FixedArray> storage,
                      bool fast_elements) :
-    storage_(Handle<FixedArray>::cast(GlobalHandles::Create(*storage))),
+      isolate_(isolate),
+      storage_(Handle<FixedArray>::cast(
+          isolate->global_handles()->Create(*storage))),
       index_offset_(0u),
       fast_elements_(fast_elements) { }
 
@@ -8152,7 +8521,7 @@ class ArrayConcatVisitor {
     ASSERT(!fast_elements_);
     Handle<NumberDictionary> dict(NumberDictionary::cast(*storage_));
     Handle<NumberDictionary> result =
-        Factory::DictionaryAtNumberPut(dict, index, elm);
+        isolate_->factory()->DictionaryAtNumberPut(dict, index, elm);
     if (!result.is_identical_to(dict)) {
       // Dictionary needed to grow.
       clear_storage();
@@ -8169,14 +8538,14 @@ class ArrayConcatVisitor {
   }
 
   Handle<JSArray> ToArray() {
-    Handle<JSArray> array = Factory::NewJSArray(0);
+    Handle<JSArray> array = isolate_->factory()->NewJSArray(0);
     Handle<Object> length =
-        Factory::NewNumber(static_cast<double>(index_offset_));
+        isolate_->factory()->NewNumber(static_cast<double>(index_offset_));
     Handle<Map> map;
     if (fast_elements_) {
-      map = Factory::GetFastElementsMap(Handle<Map>(array->map()));
+      map = isolate_->factory()->GetFastElementsMap(Handle<Map>(array->map()));
     } else {
-      map = Factory::GetSlowElementsMap(Handle<Map>(array->map()));
+      map = isolate_->factory()->GetSlowElementsMap(Handle<Map>(array->map()));
     }
     array->set_map(*map);
     array->set_length(*length);
@@ -8190,14 +8559,14 @@ class ArrayConcatVisitor {
     ASSERT(fast_elements_);
     Handle<FixedArray> current_storage(*storage_);
     Handle<NumberDictionary> slow_storage(
-        Factory::NewNumberDictionary(current_storage->length()));
+        isolate_->factory()->NewNumberDictionary(current_storage->length()));
     uint32_t current_length = static_cast<uint32_t>(current_storage->length());
     for (uint32_t i = 0; i < current_length; i++) {
       HandleScope loop_scope;
       Handle<Object> element(current_storage->get(i));
       if (!element->IsTheHole()) {
         Handle<NumberDictionary> new_storage =
-          Factory::DictionaryAtNumberPut(slow_storage, i, element);
+          isolate_->factory()->DictionaryAtNumberPut(slow_storage, i, element);
         if (!new_storage.is_identical_to(slow_storage)) {
           slow_storage = loop_scope.CloseAndEscape(new_storage);
         }
@@ -8209,13 +8578,16 @@ class ArrayConcatVisitor {
   }
 
   inline void clear_storage() {
-    GlobalHandles::Destroy(Handle<Object>::cast(storage_).location());
+    isolate_->global_handles()->Destroy(
+        Handle<Object>::cast(storage_).location());
   }
 
   inline void set_storage(FixedArray* storage) {
-    storage_ = Handle<FixedArray>::cast(GlobalHandles::Create(storage));
+    storage_ = Handle<FixedArray>::cast(
+        isolate_->global_handles()->Create(storage));
   }
 
+  Isolate* isolate_;
   Handle<FixedArray> storage_;  // Always a global handle.
   // Index after last seen index. Always less than or equal to
   // JSObject::kMaxElementCount.
@@ -8263,7 +8635,8 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
 
 
 template<class ExternalArrayClass, class ElementType>
-static void IterateExternalArrayElements(Handle<JSObject> receiver,
+static void IterateExternalArrayElements(Isolate* isolate,
+                                         Handle<JSObject> receiver,
                                          bool elements_are_ints,
                                          bool elements_are_guaranteed_smis,
                                          ArrayConcatVisitor* visitor) {
@@ -8288,15 +8661,15 @@ static void IterateExternalArrayElements(Handle<JSObject> receiver,
           visitor->visit(j, e);
         } else {
           Handle<Object> e =
-              Factory::NewNumber(static_cast<ElementType>(val));
+              isolate->factory()->NewNumber(static_cast<ElementType>(val));
           visitor->visit(j, e);
         }
       }
     }
   } else {
     for (uint32_t j = 0; j < len; j++) {
-      HandleScope loop_scope;
-      Handle<Object> e = Factory::NewNumber(array->get(j));
+      HandleScope loop_scope(isolate);
+      Handle<Object> e = isolate->factory()->NewNumber(array->get(j));
       visitor->visit(j, e);
     }
   }
@@ -8425,7 +8798,8 @@ static void CollectElementIndices(Handle<JSObject> object,
  * length.
  * Returns false if any access threw an exception, otherwise true.
  */
-static bool IterateElements(Handle<JSArray> receiver,
+static bool IterateElements(Isolate* isolate,
+                            Handle<JSArray> receiver,
                             ArrayConcatVisitor* visitor) {
   uint32_t length = static_cast<uint32_t>(receiver->length()->Number());
   switch (receiver->GetElementsKind()) {
@@ -8436,8 +8810,8 @@ static bool IterateElements(Handle<JSArray> receiver,
       int fast_length = static_cast<int>(length);
       ASSERT(fast_length <= elements->length());
       for (int j = 0; j < fast_length; j++) {
-        HandleScope loop_scope;
-        Handle<Object> element_value(elements->get(j));
+        HandleScope loop_scope(isolate);
+        Handle<Object> element_value(elements->get(j), isolate);
         if (!element_value->IsTheHole()) {
           visitor->visit(j, element_value);
         } else if (receiver->HasElement(j)) {
@@ -8483,37 +8857,37 @@ static bool IterateElements(Handle<JSArray> receiver,
     }
     case JSObject::EXTERNAL_BYTE_ELEMENTS: {
       IterateExternalArrayElements<ExternalByteArray, int8_t>(
-          receiver, true, true, visitor);
+          isolate, receiver, true, true, visitor);
       break;
     }
     case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: {
       IterateExternalArrayElements<ExternalUnsignedByteArray, uint8_t>(
-          receiver, true, true, visitor);
+          isolate, receiver, true, true, visitor);
       break;
     }
     case JSObject::EXTERNAL_SHORT_ELEMENTS: {
       IterateExternalArrayElements<ExternalShortArray, int16_t>(
-          receiver, true, true, visitor);
+          isolate, receiver, true, true, visitor);
       break;
     }
     case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: {
       IterateExternalArrayElements<ExternalUnsignedShortArray, uint16_t>(
-          receiver, true, true, visitor);
+          isolate, receiver, true, true, visitor);
       break;
     }
     case JSObject::EXTERNAL_INT_ELEMENTS: {
       IterateExternalArrayElements<ExternalIntArray, int32_t>(
-          receiver, true, false, visitor);
+          isolate, receiver, true, false, visitor);
       break;
     }
     case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: {
       IterateExternalArrayElements<ExternalUnsignedIntArray, uint32_t>(
-          receiver, true, false, visitor);
+          isolate, receiver, true, false, visitor);
       break;
     }
     case JSObject::EXTERNAL_FLOAT_ELEMENTS: {
       IterateExternalArrayElements<ExternalFloatArray, float>(
-          receiver, false, false, visitor);
+          isolate, receiver, false, false, visitor);
       break;
     }
     default:
@@ -8531,9 +8905,10 @@ static bool IterateElements(Handle<JSArray> receiver,
  * TODO(581): Fix non-compliance for very large concatenations and update to
  * following the ECMAScript 5 specification.
  */
-static MaybeObject* Runtime_ArrayConcat(Arguments args) {
+static MaybeObject* Runtime_ArrayConcat(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
-  HandleScope handle_scope;
+  HandleScope handle_scope(isolate);
 
   CONVERT_ARG_CHECKED(JSArray, arguments, 0);
   int argument_count = static_cast<int>(arguments->length()->Number());
@@ -8588,22 +8963,23 @@ static MaybeObject* Runtime_ArrayConcat(Arguments args) {
   if (fast_case) {
     // The backing storage array must have non-existing elements to
     // preserve holes across concat operations.
-    storage = Factory::NewFixedArrayWithHoles(estimate_result_length);
+    storage = isolate->factory()->NewFixedArrayWithHoles(
+        estimate_result_length);
   } else {
     // TODO(126): move 25% pre-allocation logic into Dictionary::Allocate
     uint32_t at_least_space_for = estimate_nof_elements +
                                   (estimate_nof_elements >> 2);
     storage = Handle<FixedArray>::cast(
-        Factory::NewNumberDictionary(at_least_space_for));
+        isolate->factory()->NewNumberDictionary(at_least_space_for));
   }
 
-  ArrayConcatVisitor visitor(storage, fast_case);
+  ArrayConcatVisitor visitor(isolate, storage, fast_case);
 
   for (int i = 0; i < argument_count; i++) {
     Handle<Object> obj(elements->get(i));
     if (obj->IsJSArray()) {
       Handle<JSArray> array = Handle<JSArray>::cast(obj);
-      if (!IterateElements(array, &visitor)) {
+      if (!IterateElements(isolate, array, &visitor)) {
         return Failure::Exception();
       }
     } else {
@@ -8618,7 +8994,8 @@ static MaybeObject* Runtime_ArrayConcat(Arguments args) {
 
 // This will not allocate (flatten the string), but it may run
 // very slowly for very deeply nested ConsStrings.  For debugging use only.
-static MaybeObject* Runtime_GlobalPrint(Arguments args) {
+static MaybeObject* Runtime_GlobalPrint(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -8636,7 +9013,8 @@ static MaybeObject* Runtime_GlobalPrint(Arguments args) {
 // and are followed by non-existing element. Does not change the length
 // property.
 // Returns the number of non-undefined elements collected.
-static MaybeObject* Runtime_RemoveArrayHoles(Arguments args) {
+static MaybeObject* Runtime_RemoveArrayHoles(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(JSObject, object, args[0]);
   CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]);
@@ -8645,14 +9023,15 @@ static MaybeObject* Runtime_RemoveArrayHoles(Arguments args) {
 
 
 // Move contents of argument 0 (an array) to argument 1 (an array)
-static MaybeObject* Runtime_MoveArrayContents(Arguments args) {
+static MaybeObject* Runtime_MoveArrayContents(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(JSArray, from, args[0]);
   CONVERT_CHECKED(JSArray, to, args[1]);
   HeapObject* new_elements = from->elements();
   MaybeObject* maybe_new_map;
-  if (new_elements->map() == Heap::fixed_array_map() ||
-      new_elements->map() == Heap::fixed_cow_array_map()) {
+  if (new_elements->map() == isolate->heap()->fixed_array_map() ||
+      new_elements->map() == isolate->heap()->fixed_cow_array_map()) {
     maybe_new_map = to->map()->GetFastElementsMap();
   } else {
     maybe_new_map = to->map()->GetSlowElementsMap();
@@ -8672,7 +9051,9 @@ static MaybeObject* Runtime_MoveArrayContents(Arguments args) {
 
 
 // How many elements does this object/array have?
-static MaybeObject* Runtime_EstimateNumberOfElements(Arguments args) {
+static MaybeObject* Runtime_EstimateNumberOfElements(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(JSObject, object, args[0]);
   HeapObject* elements = object->elements();
@@ -8686,8 +9067,9 @@ static MaybeObject* Runtime_EstimateNumberOfElements(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_SwapElements(Arguments args) {
-  HandleScope handle_scope;
+static MaybeObject* Runtime_SwapElements(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope handle_scope(isolate);
 
   ASSERT_EQ(3, args.length());
 
@@ -8698,19 +9080,21 @@ static MaybeObject* Runtime_SwapElements(Arguments args) {
   uint32_t index1, index2;
   if (!key1->ToArrayIndex(&index1)
       || !key2->ToArrayIndex(&index2)) {
-    return Top::ThrowIllegalOperation();
+    return isolate->ThrowIllegalOperation();
   }
 
   Handle<JSObject> jsobject = Handle<JSObject>::cast(object);
   Handle<Object> tmp1 = GetElement(jsobject, index1);
-  RETURN_IF_EMPTY_HANDLE(tmp1);
+  RETURN_IF_EMPTY_HANDLE(isolate, tmp1);
   Handle<Object> tmp2 = GetElement(jsobject, index2);
-  RETURN_IF_EMPTY_HANDLE(tmp2);
+  RETURN_IF_EMPTY_HANDLE(isolate, tmp2);
 
-  RETURN_IF_EMPTY_HANDLE(SetElement(jsobject, index1, tmp2, kStrictMode));
-  RETURN_IF_EMPTY_HANDLE(SetElement(jsobject, index2, tmp1, kStrictMode));
+  RETURN_IF_EMPTY_HANDLE(isolate,
+                         SetElement(jsobject, index1, tmp2, kStrictMode));
+  RETURN_IF_EMPTY_HANDLE(isolate,
+                         SetElement(jsobject, index2, tmp1, kStrictMode));
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
@@ -8719,9 +9103,10 @@ static MaybeObject* Runtime_SwapElements(Arguments args) {
 // intervals (pair of a negative integer (-start-1) followed by a
 // positive (length)) or undefined values.
 // Intervals can span over some keys that are not in the object.
-static MaybeObject* Runtime_GetArrayKeys(Arguments args) {
+static MaybeObject* Runtime_GetArrayKeys(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSObject, array, 0);
   CONVERT_NUMBER_CHECKED(uint32_t, length, Uint32, args[1]);
   if (array->elements()->IsDictionary()) {
@@ -8737,19 +9122,19 @@ static MaybeObject* Runtime_GetArrayKeys(Arguments args) {
         keys->set_undefined(i);
       }
     }
-    return *Factory::NewJSArrayWithElements(keys);
+    return *isolate->factory()->NewJSArrayWithElements(keys);
   } else {
     ASSERT(array->HasFastElements());
-    Handle<FixedArray> single_interval = Factory::NewFixedArray(2);
+    Handle<FixedArray> single_interval = isolate->factory()->NewFixedArray(2);
     // -1 means start of array.
     single_interval->set(0, Smi::FromInt(-1));
     uint32_t actual_length =
         static_cast<uint32_t>(FixedArray::cast(array->elements())->length());
     uint32_t min_length = actual_length < length ? actual_length : length;
     Handle<Object> length_object =
-        Factory::NewNumber(static_cast<double>(min_length));
+        isolate->factory()->NewNumber(static_cast<double>(min_length));
     single_interval->set(1, *length_object);
-    return *Factory::NewJSArrayWithElements(single_interval);
+    return *isolate->factory()->NewJSArrayWithElements(single_interval);
   }
 }
 
@@ -8759,7 +9144,8 @@ static MaybeObject* Runtime_GetArrayKeys(Arguments args) {
 // to the way accessors are implemented, it is set for both the getter
 // and setter on the first call to DefineAccessor and ignored on
 // subsequent calls.
-static MaybeObject* Runtime_DefineAccessor(Arguments args) {
+static MaybeObject* Runtime_DefineAccessor(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   RUNTIME_ASSERT(args.length() == 4 || args.length() == 5);
   // Compute attributes.
   PropertyAttributes attributes = NONE;
@@ -8779,7 +9165,8 @@ static MaybeObject* Runtime_DefineAccessor(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_LookupAccessor(Arguments args) {
+static MaybeObject* Runtime_LookupAccessor(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
   CONVERT_CHECKED(JSObject, obj, args[0]);
   CONVERT_CHECKED(String, name, args[1]);
@@ -8789,7 +9176,8 @@ static MaybeObject* Runtime_LookupAccessor(Arguments args) {
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-static MaybeObject* Runtime_DebugBreak(Arguments args) {
+static MaybeObject* Runtime_DebugBreak(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
   return Execution::DebugBreakHelper();
 }
@@ -8811,27 +9199,31 @@ static StackFrame::Id UnwrapFrameId(Smi* wrapped) {
 // args[0]: debug event listener function to set or null or undefined for
 //          clearing the event listener function
 // args[1]: object supplied during callback
-static MaybeObject* Runtime_SetDebugEventListener(Arguments args) {
+static MaybeObject* Runtime_SetDebugEventListener(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   RUNTIME_ASSERT(args[0]->IsJSFunction() ||
                  args[0]->IsUndefined() ||
                  args[0]->IsNull());
   Handle<Object> callback = args.at<Object>(0);
   Handle<Object> data = args.at<Object>(1);
-  Debugger::SetEventListener(callback, data);
+  isolate->debugger()->SetEventListener(callback, data);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_Break(Arguments args) {
+static MaybeObject* Runtime_Break(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
-  StackGuard::DebugBreak();
-  return Heap::undefined_value();
+  isolate->stack_guard()->DebugBreak();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
+static MaybeObject* DebugLookupResultValue(Heap* heap,
+                                           Object* receiver,
+                                           String* name,
                                            LookupResult* result,
                                            bool* caught_exception) {
   Object* value;
@@ -8839,7 +9231,7 @@ static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
     case NORMAL:
       value = result->holder()->GetNormalizedProperty(result);
       if (value->IsTheHole()) {
-        return Heap::undefined_value();
+        return heap->undefined_value();
       }
       return value;
     case FIELD:
@@ -8847,7 +9239,7 @@ static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
           JSObject::cast(
               result->holder())->FastPropertyAt(result->GetFieldIndex());
       if (value->IsTheHole()) {
-        return Heap::undefined_value();
+        return heap->undefined_value();
       }
       return value;
     case CONSTANT_FUNCTION:
@@ -8860,8 +9252,8 @@ static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
         if (!maybe_value->ToObject(&value)) {
           if (maybe_value->IsRetryAfterGC()) return maybe_value;
           ASSERT(maybe_value->IsException());
-          maybe_value = Top::pending_exception();
-          Top::clear_pending_exception();
+          maybe_value = heap->isolate()->pending_exception();
+          heap->isolate()->clear_pending_exception();
           if (caught_exception != NULL) {
             *caught_exception = true;
           }
@@ -8869,19 +9261,19 @@ static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
         }
         return value;
       } else {
-        return Heap::undefined_value();
+        return heap->undefined_value();
       }
     }
     case INTERCEPTOR:
     case MAP_TRANSITION:
     case CONSTANT_TRANSITION:
     case NULL_DESCRIPTOR:
-      return Heap::undefined_value();
+      return heap->undefined_value();
     default:
       UNREACHABLE();
   }
   UNREACHABLE();
-  return Heap::undefined_value();
+  return heap->undefined_value();
 }
 
 
@@ -8897,8 +9289,10 @@ static MaybeObject* DebugLookupResultValue(Object* receiver, String* name,
 // 4: Setter function if defined
 // Items 2-4 are only filled if the property has either a getter or a setter
 // defined through __defineGetter__ and/or __defineSetter__.
-static MaybeObject* Runtime_DebugGetPropertyDetails(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugGetPropertyDetails(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   ASSERT(args.length() == 2);
 
@@ -8911,9 +9305,9 @@ static MaybeObject* Runtime_DebugGetPropertyDetails(Arguments args) {
   // into the embedding application can occur, and the embedding application
   // could have the assumption that its own global context is the current
   // context and not some internal debugger context.
-  SaveContext save;
-  if (Debug::InDebugger()) {
-    Top::set_context(*Debug::debugger_entry()->GetContext());
+  SaveContext save(isolate);
+  if (isolate->debug()->InDebugger()) {
+    isolate->set_context(*isolate->debug()->debugger_entry()->GetContext());
   }
 
   // Skip the global proxy as it has no properties and always delegates to the
@@ -8927,17 +9321,17 @@ static MaybeObject* Runtime_DebugGetPropertyDetails(Arguments args) {
   // if so.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
-    Handle<FixedArray> details = Factory::NewFixedArray(2);
+    Handle<FixedArray> details = isolate->factory()->NewFixedArray(2);
     Object* element_or_char;
     { MaybeObject* maybe_element_or_char =
-          Runtime::GetElementOrCharAt(obj, index);
+          Runtime::GetElementOrCharAt(isolate, obj, index);
       if (!maybe_element_or_char->ToObject(&element_or_char)) {
         return maybe_element_or_char;
       }
     }
     details->set(0, element_or_char);
     details->set(1, PropertyDetails(NONE, NORMAL).AsSmi());
-    return *Factory::NewJSArrayWithElements(details);
+    return *isolate->factory()->NewJSArrayWithElements(details);
   }
 
   // Find the number of objects making up this.
@@ -8955,7 +9349,8 @@ static MaybeObject* Runtime_DebugGetPropertyDetails(Arguments args) {
       PropertyType result_type = result.type();
       Handle<Object> result_callback_obj;
       if (result_type == CALLBACKS) {
-        result_callback_obj = Handle<Object>(result.GetCallbackObject());
+        result_callback_obj = Handle<Object>(result.GetCallbackObject(),
+                                             isolate);
       }
       Smi* property_details = result.GetPropertyDetails().AsSmi();
       // DebugLookupResultValue can cause GC so details from LookupResult needs
@@ -8963,40 +9358,42 @@ static MaybeObject* Runtime_DebugGetPropertyDetails(Arguments args) {
       bool caught_exception = false;
       Object* raw_value;
       { MaybeObject* maybe_raw_value =
-            DebugLookupResultValue(*obj, *name, &result, &caught_exception);
+            DebugLookupResultValue(isolate->heap(), *obj, *name,
+                                   &result, &caught_exception);
         if (!maybe_raw_value->ToObject(&raw_value)) return maybe_raw_value;
       }
-      Handle<Object> value(raw_value);
+      Handle<Object> value(raw_value, isolate);
 
       // If the callback object is a fixed array then it contains JavaScript
       // getter and/or setter.
       bool hasJavaScriptAccessors = result_type == CALLBACKS &&
                                     result_callback_obj->IsFixedArray();
       Handle<FixedArray> details =
-          Factory::NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
+          isolate->factory()->NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
       details->set(0, *value);
       details->set(1, property_details);
       if (hasJavaScriptAccessors) {
         details->set(2,
-                     caught_exception ? Heap::true_value()
-                                      : Heap::false_value());
+                     caught_exception ? isolate->heap()->true_value()
+                                      : isolate->heap()->false_value());
         details->set(3, FixedArray::cast(*result_callback_obj)->get(0));
         details->set(4, FixedArray::cast(*result_callback_obj)->get(1));
       }
 
-      return *Factory::NewJSArrayWithElements(details);
+      return *isolate->factory()->NewJSArrayWithElements(details);
     }
     if (i < length - 1) {
       jsproto = Handle<JSObject>(JSObject::cast(jsproto->GetPrototype()));
     }
   }
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DebugGetProperty(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugGetProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   ASSERT(args.length() == 2);
 
@@ -9006,15 +9403,17 @@ static MaybeObject* Runtime_DebugGetProperty(Arguments args) {
   LookupResult result;
   obj->Lookup(*name, &result);
   if (result.IsProperty()) {
-    return DebugLookupResultValue(*obj, *name, &result, NULL);
+    return DebugLookupResultValue(isolate->heap(), *obj, *name, &result, NULL);
   }
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // Return the property type calculated from the property details.
 // args[0]: smi with property details.
-static MaybeObject* Runtime_DebugPropertyTypeFromDetails(Arguments args) {
+static MaybeObject* Runtime_DebugPropertyTypeFromDetails(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(Smi, details, args[0]);
   PropertyType type = PropertyDetails(details).type();
@@ -9024,7 +9423,9 @@ static MaybeObject* Runtime_DebugPropertyTypeFromDetails(Arguments args) {
 
 // Return the property attribute calculated from the property details.
 // args[0]: smi with property details.
-static MaybeObject* Runtime_DebugPropertyAttributesFromDetails(Arguments args) {
+static MaybeObject* Runtime_DebugPropertyAttributesFromDetails(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(Smi, details, args[0]);
   PropertyAttributes attributes = PropertyDetails(details).attributes();
@@ -9034,7 +9435,9 @@ static MaybeObject* Runtime_DebugPropertyAttributesFromDetails(Arguments args) {
 
 // Return the property insertion index calculated from the property details.
 // args[0]: smi with property details.
-static MaybeObject* Runtime_DebugPropertyIndexFromDetails(Arguments args) {
+static MaybeObject* Runtime_DebugPropertyIndexFromDetails(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
   CONVERT_CHECKED(Smi, details, args[0]);
   int index = PropertyDetails(details).index();
@@ -9045,8 +9448,10 @@ static MaybeObject* Runtime_DebugPropertyIndexFromDetails(Arguments args) {
 // Return property value from named interceptor.
 // args[0]: object
 // args[1]: property name
-static MaybeObject* Runtime_DebugNamedInterceptorPropertyValue(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugNamedInterceptorPropertyValue(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   RUNTIME_ASSERT(obj->HasNamedInterceptor());
@@ -9061,8 +9466,9 @@ static MaybeObject* Runtime_DebugNamedInterceptorPropertyValue(Arguments args) {
 // args[0]: object
 // args[1]: index
 static MaybeObject* Runtime_DebugIndexedInterceptorElementValue(
-    Arguments args) {
-  HandleScope scope;
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   RUNTIME_ASSERT(obj->HasIndexedInterceptor());
@@ -9072,31 +9478,35 @@ static MaybeObject* Runtime_DebugIndexedInterceptorElementValue(
 }
 
 
-static MaybeObject* Runtime_CheckExecutionState(Arguments args) {
+static MaybeObject* Runtime_CheckExecutionState(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() >= 1);
   CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]);
   // Check that the break id is valid.
-  if (Debug::break_id() == 0 || break_id != Debug::break_id()) {
-    return Top::Throw(Heap::illegal_execution_state_symbol());
+  if (isolate->debug()->break_id() == 0 ||
+      break_id != isolate->debug()->break_id()) {
+    return isolate->Throw(
+        isolate->heap()->illegal_execution_state_symbol());
   }
 
-  return Heap::true_value();
+  return isolate->heap()->true_value();
 }
 
 
-static MaybeObject* Runtime_GetFrameCount(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetFrameCount(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   // Check arguments.
   Object* result;
-  { MaybeObject* maybe_result = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_result = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
   // Count all frames which are relevant to debugging stack trace.
   int n = 0;
-  StackFrame::Id id = Debug::break_frame_id();
+  StackFrame::Id id = isolate->debug()->break_frame_id();
   if (id == StackFrame::NO_ID) {
     // If there is no JavaScript stack frame count is 0.
     return Smi::FromInt(0);
@@ -9134,22 +9544,24 @@ static const int kFrameDetailsFirstDynamicIndex = 9;
 // Arguments name, value
 // Locals name, value
 // Return value if any
-static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetFrameDetails(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   // Check arguments.
   Object* check;
-  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_check->ToObject(&check)) return maybe_check;
   }
   CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]);
+  Heap* heap = isolate->heap();
 
   // Find the relevant frame with the requested index.
-  StackFrame::Id id = Debug::break_frame_id();
+  StackFrame::Id id = isolate->debug()->break_frame_id();
   if (id == StackFrame::NO_ID) {
     // If there are no JavaScript stack frames return undefined.
-    return Heap::undefined_value();
+    return heap->undefined_value();
   }
   int count = 0;
   JavaScriptFrameIterator it(id);
@@ -9157,24 +9569,25 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
     if (count == index) break;
     count++;
   }
-  if (it.done()) return Heap::undefined_value();
+  if (it.done()) return heap->undefined_value();
 
   bool is_optimized_frame =
-      it.frame()->code()->kind() == Code::OPTIMIZED_FUNCTION;
+      it.frame()->LookupCode(isolate)->kind() == Code::OPTIMIZED_FUNCTION;
 
   // Traverse the saved contexts chain to find the active context for the
   // selected frame.
-  SaveContext* save = Top::save_context();
+  SaveContext* save = isolate->save_context();
   while (save != NULL && !save->below(it.frame())) {
     save = save->prev();
   }
   ASSERT(save != NULL);
 
   // Get the frame id.
-  Handle<Object> frame_id(WrapFrameId(it.frame()->id()));
+  Handle<Object> frame_id(WrapFrameId(it.frame()->id()), isolate);
 
   // Find source position.
-  int position = it.frame()->code()->SourcePosition(it.frame()->pc());
+  int position =
+      it.frame()->LookupCode(isolate)->SourcePosition(it.frame()->pc());
 
   // Check for constructor frame.
   bool constructor = it.frame()->IsConstructor();
@@ -9192,7 +9605,8 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
   // TODO(1240907): Hide compiler-introduced stack variables
   // (e.g. .result)?  For users of the debugger, they will probably be
   // confusing.
-  Handle<FixedArray> locals = Factory::NewFixedArray(info.NumberOfLocals() * 2);
+  Handle<FixedArray> locals =
+      isolate->factory()->NewFixedArray(info.NumberOfLocals() * 2);
 
   // Fill in the names of the locals.
   for (int i = 0; i < info.NumberOfLocals(); i++) {
@@ -9207,7 +9621,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
       //
       // TODO(1140): We should be able to get the correct values
       // for locals in optimized frames.
-      locals->set(i * 2 + 1, Heap::undefined_value());
+      locals->set(i * 2 + 1, isolate->heap()->undefined_value());
     } else if (i < info.number_of_stack_slots()) {
       // Get the value from the stack.
       locals->set(i * 2 + 1, it.frame()->GetExpression(i));
@@ -9228,12 +9642,12 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
   // frame or if the frame is optimized it cannot be at a return.
   bool at_return = false;
   if (!is_optimized_frame && index == 0) {
-    at_return = Debug::IsBreakAtReturn(it.frame());
+    at_return = isolate->debug()->IsBreakAtReturn(it.frame());
   }
 
   // If positioned just before return find the value to be returned and add it
   // to the frame information.
-  Handle<Object> return_value = Factory::undefined_value();
+  Handle<Object> return_value = isolate->factory()->undefined_value();
   if (at_return) {
     StackFrameIterator it2;
     Address internal_frame_sp = NULL;
@@ -9249,7 +9663,8 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
             // entering the debug break exit frame.
             if (internal_frame_sp != NULL) {
               return_value =
-                  Handle<Object>(Memory::Object_at(internal_frame_sp));
+                  Handle<Object>(Memory::Object_at(internal_frame_sp),
+                                 isolate);
               break;
             }
           }
@@ -9279,7 +9694,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
   int details_size = kFrameDetailsFirstDynamicIndex +
                      2 * (argument_count + info.NumberOfLocals()) +
                      (at_return ? 1 : 0);
-  Handle<FixedArray> details = Factory::NewFixedArray(details_size);
+  Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size);
 
   // Add the frame id.
   details->set(kFrameDetailsFrameIdIndex, *frame_id);
@@ -9298,18 +9713,19 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
   if (position != RelocInfo::kNoPosition) {
     details->set(kFrameDetailsSourcePositionIndex, Smi::FromInt(position));
   } else {
-    details->set(kFrameDetailsSourcePositionIndex, Heap::undefined_value());
+    details->set(kFrameDetailsSourcePositionIndex, heap->undefined_value());
   }
 
   // Add the constructor information.
-  details->set(kFrameDetailsConstructCallIndex, Heap::ToBoolean(constructor));
+  details->set(kFrameDetailsConstructCallIndex, heap->ToBoolean(constructor));
 
   // Add the at return information.
-  details->set(kFrameDetailsAtReturnIndex, Heap::ToBoolean(at_return));
+  details->set(kFrameDetailsAtReturnIndex, heap->ToBoolean(at_return));
 
   // Add information on whether this frame is invoked in the debugger context.
   details->set(kFrameDetailsDebuggerFrameIndex,
-               Heap::ToBoolean(*save->context() == *Debug::debug_context()));
+               heap->ToBoolean(*save->context() ==
+                   *isolate->debug()->debug_context()));
 
   // Fill the dynamic part.
   int details_index = kFrameDetailsFirstDynamicIndex;
@@ -9320,7 +9736,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
     if (i < info.number_of_parameters()) {
       details->set(details_index++, *info.parameter_name(i));
     } else {
-      details->set(details_index++, Heap::undefined_value());
+      details->set(details_index++, heap->undefined_value());
     }
 
     // Parameter value. If we are inspecting an optimized frame, use
@@ -9332,7 +9748,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
         (i < it.frame()->ComputeParametersCount())) {
       details->set(details_index++, it.frame()->GetParameter(i));
     } else {
-      details->set(details_index++, Heap::undefined_value());
+      details->set(details_index++, heap->undefined_value());
     }
   }
 
@@ -9349,7 +9765,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
   // Add the receiver (same as in function frame).
   // THIS MUST BE DONE LAST SINCE WE MIGHT ADVANCE
   // THE FRAME ITERATOR TO WRAP THE RECEIVER.
-  Handle<Object> receiver(it.frame()->receiver());
+  Handle<Object> receiver(it.frame()->receiver(), isolate);
   if (!receiver->IsJSObject()) {
     // If the receiver is NOT a JSObject we have hit an optimization
     // where a value object is not converted into a wrapped JS objects.
@@ -9359,17 +9775,19 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
     it.Advance();
     Handle<Context> calling_frames_global_context(
         Context::cast(Context::cast(it.frame()->context())->global_context()));
-    receiver = Factory::ToObject(receiver, calling_frames_global_context);
+    receiver =
+        isolate->factory()->ToObject(receiver, calling_frames_global_context);
   }
   details->set(kFrameDetailsReceiverIndex, *receiver);
 
   ASSERT_EQ(details_size, details_index);
-  return *Factory::NewJSArrayWithElements(details);
+  return *isolate->factory()->NewJSArrayWithElements(details);
 }
 
 
 // Copy all the context locals into an object used to materialize a scope.
 static bool CopyContextLocalsToScopeObject(
+    Isolate* isolate,
     Handle<SerializedScopeInfo> serialized_scope_info,
     ScopeInfo<>& scope_info,
     Handle<Context> context,
@@ -9382,11 +9800,13 @@ static bool CopyContextLocalsToScopeObject(
         *scope_info.context_slot_name(i), NULL);
 
     // Don't include the arguments shadow (.arguments) context variable.
-    if (*scope_info.context_slot_name(i) != Heap::arguments_shadow_symbol()) {
+    if (*scope_info.context_slot_name(i) !=
+        isolate->heap()->arguments_shadow_symbol()) {
       RETURN_IF_EMPTY_HANDLE_VALUE(
+          isolate,
           SetProperty(scope_object,
                       scope_info.context_slot_name(i),
-                      Handle<Object>(context->get(context_index)),
+                      Handle<Object>(context->get(context_index), isolate),
                       NONE,
                       kNonStrictMode),
           false);
@@ -9399,7 +9819,8 @@ static bool CopyContextLocalsToScopeObject(
 
 // Create a plain JSObject which materializes the local scope for the specified
 // frame.
-static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
+static Handle<JSObject> MaterializeLocalScope(Isolate* isolate,
+                                              JavaScriptFrame* frame) {
   Handle<JSFunction> function(JSFunction::cast(frame->function()));
   Handle<SharedFunctionInfo> shared(function->shared());
   Handle<SerializedScopeInfo> serialized_scope_info(shared->scope_info());
@@ -9407,14 +9828,16 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
 
   // Allocate and initialize a JSObject with all the arguments, stack locals
   // heap locals and extension properties of the debugged function.
-  Handle<JSObject> local_scope = Factory::NewJSObject(Top::object_function());
+  Handle<JSObject> local_scope =
+      isolate->factory()->NewJSObject(isolate->object_function());
 
   // First fill all parameters.
   for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
     RETURN_IF_EMPTY_HANDLE_VALUE(
+        isolate,
         SetProperty(local_scope,
                     scope_info.parameter_name(i),
-                    Handle<Object>(frame->GetParameter(i)),
+                    Handle<Object>(frame->GetParameter(i), isolate),
                     NONE,
                     kNonStrictMode),
         Handle<JSObject>());
@@ -9423,9 +9846,10 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
   // Second fill all stack locals.
   for (int i = 0; i < scope_info.number_of_stack_slots(); i++) {
     RETURN_IF_EMPTY_HANDLE_VALUE(
+        isolate,
         SetProperty(local_scope,
                     scope_info.stack_slot_name(i),
-                    Handle<Object>(frame->GetExpression(i)),
+                    Handle<Object>(frame->GetExpression(i), isolate),
                     NONE,
                     kNonStrictMode),
         Handle<JSObject>());
@@ -9434,7 +9858,8 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
   // Third fill all context locals.
   Handle<Context> frame_context(Context::cast(frame->context()));
   Handle<Context> function_context(frame_context->fcontext());
-  if (!CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
+  if (!CopyContextLocalsToScopeObject(isolate,
+                                      serialized_scope_info, scope_info,
                                       function_context, local_scope)) {
     return Handle<JSObject>();
   }
@@ -9451,6 +9876,7 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
         ASSERT(keys->get(i)->IsString());
         Handle<String> key(String::cast(keys->get(i)));
         RETURN_IF_EMPTY_HANDLE_VALUE(
+            isolate,
             SetProperty(local_scope,
                         key,
                         GetProperty(ext, key),
@@ -9466,7 +9892,8 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
 
 // Create a plain JSObject which materializes the closure content for the
 // context.
-static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
+static Handle<JSObject> MaterializeClosure(Isolate* isolate,
+                                           Handle<Context> context) {
   ASSERT(context->is_function_context());
 
   Handle<SharedFunctionInfo> shared(context->closure()->shared());
@@ -9475,12 +9902,13 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
 
   // Allocate and initialize a JSObject with all the content of this function
   // closure.
-  Handle<JSObject> closure_scope = Factory::NewJSObject(Top::object_function());
+  Handle<JSObject> closure_scope =
+      isolate->factory()->NewJSObject(isolate->object_function());
 
   // Check whether the arguments shadow object exists.
   int arguments_shadow_index =
-      shared->scope_info()->ContextSlotIndex(Heap::arguments_shadow_symbol(),
-                                             NULL);
+      shared->scope_info()->ContextSlotIndex(
+          isolate->heap()->arguments_shadow_symbol(), NULL);
   if (arguments_shadow_index >= 0) {
     // In this case all the arguments are available in the arguments shadow
     // object.
@@ -9490,9 +9918,10 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
       // We don't expect exception-throwing getters on the arguments shadow.
       Object* element = arguments_shadow->GetElement(i)->ToObjectUnchecked();
       RETURN_IF_EMPTY_HANDLE_VALUE(
+          isolate,
           SetProperty(closure_scope,
                       scope_info.parameter_name(i),
-                      Handle<Object>(element),
+                      Handle<Object>(element, isolate),
                       NONE,
                       kNonStrictMode),
           Handle<JSObject>());
@@ -9500,7 +9929,8 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
   }
 
   // Fill all context locals to the context extension.
-  if (!CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
+  if (!CopyContextLocalsToScopeObject(isolate,
+                                      serialized_scope_info, scope_info,
                                       context, closure_scope)) {
     return Handle<JSObject>();
   }
@@ -9514,7 +9944,8 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
       // Names of variables introduced by eval are strings.
       ASSERT(keys->get(i)->IsString());
       Handle<String> key(String::cast(keys->get(i)));
-      RETURN_IF_EMPTY_HANDLE_VALUE(
+      RETURN_IF_EMPTY_HANDLE_VALUE(
+          isolate,
           SetProperty(closure_scope,
                       key,
                       GetProperty(ext, key),
@@ -9545,8 +9976,9 @@ class ScopeIterator {
     ScopeTypeCatch
   };
 
-  explicit ScopeIterator(JavaScriptFrame* frame)
-    : frame_(frame),
+  ScopeIterator(Isolate* isolate, JavaScriptFrame* frame)
+    : isolate_(isolate),
+      frame_(frame),
       function_(JSFunction::cast(frame->function())),
       context_(Context::cast(frame->context())),
       local_done_(false),
@@ -9559,7 +9991,7 @@ class ScopeIterator {
       // Checking for the existence of .result seems fragile, but the scope info
       // saved with the code object does not otherwise have that information.
       int index = function_->shared()->scope_info()->
-          StackSlotIndex(Heap::result_symbol());
+          StackSlotIndex(isolate_->heap()->result_symbol());
       at_local_ = index < 0;
     } else if (context_->is_function_context()) {
       at_local_ = true;
@@ -9637,7 +10069,7 @@ class ScopeIterator {
         break;
       case ScopeIterator::ScopeTypeLocal:
         // Materialize the content of the local scope into a JSObject.
-        return MaterializeLocalScope(frame_);
+        return MaterializeLocalScope(isolate_, frame_);
         break;
       case ScopeIterator::ScopeTypeWith:
       case ScopeIterator::ScopeTypeCatch:
@@ -9646,7 +10078,7 @@ class ScopeIterator {
         break;
       case ScopeIterator::ScopeTypeClosure:
         // Materialize the content of the closure scope into a JSObject.
-        return MaterializeClosure(CurrentContext());
+        return MaterializeClosure(isolate_, CurrentContext());
         break;
     }
     UNREACHABLE();
@@ -9725,6 +10157,7 @@ class ScopeIterator {
 #endif
 
  private:
+  Isolate* isolate_;
   JavaScriptFrame* frame_;
   Handle<JSFunction> function_;
   Handle<Context> context_;
@@ -9735,13 +10168,14 @@ class ScopeIterator {
 };
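 
 // Note on the pattern used in the runtime entries below: the
 // RUNTIME_CALLING_CONVENTION and RUNTIME_GET_ISOLATE macros are presumably
 // defined elsewhere in this change (not in this file excerpt). A minimal
 // sketch of what they are assumed to expand to, for readability only:
 //
 //   #define RUNTIME_CALLING_CONVENTION Arguments args, Isolate* isolate
 //   #define RUNTIME_GET_ISOLATE ASSERT(isolate == Isolate::Current())
 //
 // i.e. every entry now receives an explicit Isolate* and uses it instead of
 // the former process-wide Heap::, Top:: and Factory:: statics.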
 
 
-static MaybeObject* Runtime_GetScopeCount(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetScopeCount(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   // Check arguments.
   Object* check;
-  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_check->ToObject(&check)) return maybe_check;
   }
   CONVERT_CHECKED(Smi, wrapped_id, args[1]);
@@ -9753,7 +10187,7 @@ static MaybeObject* Runtime_GetScopeCount(Arguments args) {
 
   // Count the visible scopes.
   int n = 0;
-  for (ScopeIterator it(frame); !it.Done(); it.Next()) {
+  for (ScopeIterator it(isolate, frame); !it.Done(); it.Next()) {
     n++;
   }
 
@@ -9773,13 +10207,14 @@ static const int kScopeDetailsSize = 2;
 // The array returned contains the following information:
 // 0: Scope type
 // 1: Scope object
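 // For instance (illustrative values, not taken from this change), a local
 // scope entry could come back as [ ScopeTypeLocal, { x: 1, y: 2 } ], where
 // the second element is the materialized scope object.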
-static MaybeObject* Runtime_GetScopeDetails(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetScopeDetails(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
 
   // Check arguments.
   Object* check;
-  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_check->ToObject(&check)) return maybe_check;
   }
   CONVERT_CHECKED(Smi, wrapped_id, args[1]);
@@ -9792,57 +10227,60 @@ static MaybeObject* Runtime_GetScopeDetails(Arguments args) {
 
   // Find the requested scope.
   int n = 0;
-  ScopeIterator it(frame);
+  ScopeIterator it(isolate, frame);
   for (; !it.Done() && n < index; it.Next()) {
     n++;
   }
   if (it.Done()) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   // Calculate the size of the result.
   int details_size = kScopeDetailsSize;
-  Handle<FixedArray> details = Factory::NewFixedArray(details_size);
+  Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size);
 
   // Fill in scope details.
   details->set(kScopeDetailsTypeIndex, Smi::FromInt(it.Type()));
   Handle<JSObject> scope_object = it.ScopeObject();
-  RETURN_IF_EMPTY_HANDLE(scope_object);
+  RETURN_IF_EMPTY_HANDLE(isolate, scope_object);
   details->set(kScopeDetailsObjectIndex, *scope_object);
 
-  return *Factory::NewJSArrayWithElements(details);
+  return *isolate->factory()->NewJSArrayWithElements(details);
 }
 
 
-static MaybeObject* Runtime_DebugPrintScopes(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugPrintScopes(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 0);
 
 #ifdef DEBUG
   // Print the scopes for the top frame.
   StackFrameLocator locator;
   JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
-  for (ScopeIterator it(frame); !it.Done(); it.Next()) {
+  for (ScopeIterator it(isolate, frame); !it.Done(); it.Next()) {
     it.DebugPrint();
   }
 #endif
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_GetThreadCount(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetThreadCount(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   // Check arguments.
   Object* result;
-  { MaybeObject* maybe_result = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_result = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
   // Count all archived V8 threads.
   int n = 0;
-  for (ThreadState* thread = ThreadState::FirstInUse();
+  for (ThreadState* thread =
+          isolate->thread_manager()->FirstThreadStateInUse();
        thread != NULL;
        thread = thread->Next()) {
     n++;
@@ -9864,70 +10302,78 @@ static const int kThreadDetailsSize = 2;
 // The array returned contains the following information:
 // 0: Is current thread?
 // 1: Thread id
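 // For example (illustrative only), the current thread is reported as
 // [ true, <current thread id> ] and an archived thread with id 3 as
 // [ false, 3 ].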
-static MaybeObject* Runtime_GetThreadDetails(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetThreadDetails(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
 
   // Check arguments.
   Object* check;
-  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_check->ToObject(&check)) return maybe_check;
   }
   CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]);
 
   // Allocate array for result.
-  Handle<FixedArray> details = Factory::NewFixedArray(kThreadDetailsSize);
+  Handle<FixedArray> details =
+      isolate->factory()->NewFixedArray(kThreadDetailsSize);
 
   // Thread index 0 is current thread.
   if (index == 0) {
     // Fill the details.
-    details->set(kThreadDetailsCurrentThreadIndex, Heap::true_value());
+    details->set(kThreadDetailsCurrentThreadIndex,
+                 isolate->heap()->true_value());
     details->set(kThreadDetailsThreadIdIndex,
-                 Smi::FromInt(ThreadManager::CurrentId()));
+                 Smi::FromInt(
+                     isolate->thread_manager()->CurrentId()));
   } else {
     // Find the thread with the requested index.
     int n = 1;
-    ThreadState* thread = ThreadState::FirstInUse();
+    ThreadState* thread =
+        isolate->thread_manager()->FirstThreadStateInUse();
     while (index != n && thread != NULL) {
       thread = thread->Next();
       n++;
     }
     if (thread == NULL) {
-      return Heap::undefined_value();
+      return isolate->heap()->undefined_value();
     }
 
     // Fill the details.
-    details->set(kThreadDetailsCurrentThreadIndex, Heap::false_value());
+    details->set(kThreadDetailsCurrentThreadIndex,
+                 isolate->heap()->false_value());
     details->set(kThreadDetailsThreadIdIndex, Smi::FromInt(thread->id()));
   }
 
   // Convert to JS array and return.
-  return *Factory::NewJSArrayWithElements(details);
+  return *isolate->factory()->NewJSArrayWithElements(details);
 }
 
 
 // Sets the disable break state
 // args[0]: disable break state
-static MaybeObject* Runtime_SetDisableBreak(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SetDisableBreak(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_BOOLEAN_CHECKED(disable_break, args[0]);
-  Debug::set_disable_break(disable_break);
-  return  Heap::undefined_value();
+  isolate->debug()->set_disable_break(disable_break);
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_GetBreakLocations(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetBreakLocations(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   CONVERT_ARG_CHECKED(JSFunction, fun, 0);
   Handle<SharedFunctionInfo> shared(fun->shared());
   // Find the number of break points
   Handle<Object> break_locations = Debug::GetSourceBreakLocations(shared);
-  if (break_locations->IsUndefined()) return Heap::undefined_value();
+  if (break_locations->IsUndefined()) return isolate->heap()->undefined_value();
   // Return array as JS array
-  return *Factory::NewJSArrayWithElements(
+  return *isolate->factory()->NewJSArrayWithElements(
       Handle<FixedArray>::cast(break_locations));
 }
 
@@ -9936,8 +10382,9 @@ static MaybeObject* Runtime_GetBreakLocations(Arguments args) {
 // args[0]: function
 // args[1]: number: break source position (within the function source)
 // args[2]: number: break point object
-static MaybeObject* Runtime_SetFunctionBreakPoint(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SetFunctionBreakPoint(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(JSFunction, fun, 0);
   Handle<SharedFunctionInfo> shared(fun->shared());
@@ -9946,13 +10393,15 @@ static MaybeObject* Runtime_SetFunctionBreakPoint(Arguments args) {
   Handle<Object> break_point_object_arg = args.at<Object>(2);
 
   // Set break point.
-  Debug::SetBreakPoint(shared, break_point_object_arg, &source_position);
+  isolate->debug()->SetBreakPoint(shared, break_point_object_arg,
+                                  &source_position);
 
   return Smi::FromInt(source_position);
 }
 
 
-Object* Runtime::FindSharedFunctionInfoInScript(Handle<Script> script,
+Object* Runtime::FindSharedFunctionInfoInScript(Isolate* isolate,
+                                                Handle<Script> script,
                                                 int position) {
   // Iterate the heap looking for SharedFunctionInfo generated from the
   // script. The inner most SharedFunctionInfo containing the source position
@@ -10011,7 +10460,7 @@ Object* Runtime::FindSharedFunctionInfoInScript(Handle<Script> script,
     }
 
     if (target.is_null()) {
-      return Heap::undefined_value();
+      return isolate->heap()->undefined_value();
     }
 
     // If the candidate found is compiled we are done. NOTE: when lazy
@@ -10035,8 +10484,9 @@ Object* Runtime::FindSharedFunctionInfoInScript(Handle<Script> script,
 // args[0]: script to set break point in
 // args[1]: number: break source position (within the script source)
 // args[2]: number: break point object
-static MaybeObject* Runtime_SetScriptBreakPoint(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_SetScriptBreakPoint(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(JSValue, wrapper, 0);
   CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
@@ -10048,7 +10498,7 @@ static MaybeObject* Runtime_SetScriptBreakPoint(Arguments args) {
   Handle<Script> script(Script::cast(wrapper->value()));
 
   Object* result = Runtime::FindSharedFunctionInfoInScript(
-      script, source_position);
+      isolate, script, source_position);
   if (!result->IsUndefined()) {
     Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result));
     // Find position within function. The script position might be before the
@@ -10059,33 +10509,35 @@ static MaybeObject* Runtime_SetScriptBreakPoint(Arguments args) {
     } else {
       position = source_position - shared->start_position();
     }
-    Debug::SetBreakPoint(shared, break_point_object_arg, &position);
+    isolate->debug()->SetBreakPoint(shared, break_point_object_arg, &position);
     position += shared->start_position();
     return Smi::FromInt(position);
   }
-  return  Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // Clear a break point
 // args[0]: number: break point object
-static MaybeObject* Runtime_ClearBreakPoint(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ClearBreakPoint(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   Handle<Object> break_point_object_arg = args.at<Object>(0);
 
   // Clear break point.
-  Debug::ClearBreakPoint(break_point_object_arg);
+  isolate->debug()->ClearBreakPoint(break_point_object_arg);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // Change the state of break on exceptions.
 // args[0]: Enum value indicating whether to affect caught/uncaught exceptions.
 // args[1]: Boolean indicating on/off.
-static MaybeObject* Runtime_ChangeBreakOnException(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ChangeBreakOnException(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   RUNTIME_ASSERT(args[0]->IsNumber());
   CONVERT_BOOLEAN_CHECKED(enable, args[1]);
@@ -10095,21 +10547,22 @@ static MaybeObject* Runtime_ChangeBreakOnException(Arguments args) {
   ExceptionBreakType type =
       static_cast<ExceptionBreakType>(NumberToUint32(args[0]));
   // Update break point state.
-  Debug::ChangeBreakOnException(type, enable);
-  return Heap::undefined_value();
+  isolate->debug()->ChangeBreakOnException(type, enable);
+  return isolate->heap()->undefined_value();
 }
 
 
 // Returns the state of break on exceptions
 // args[0]: boolean indicating uncaught exceptions
-static MaybeObject* Runtime_IsBreakOnException(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_IsBreakOnException(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   RUNTIME_ASSERT(args[0]->IsNumber());
 
   ExceptionBreakType type =
       static_cast<ExceptionBreakType>(NumberToUint32(args[0]));
-  bool result = Debug::IsBreakOnException(type);
+  bool result = isolate->debug()->IsBreakOnException(type);
   return Smi::FromInt(result);
 }
 
@@ -10119,16 +10572,17 @@ static MaybeObject* Runtime_IsBreakOnException(Arguments args) {
 // args[1]: step action from the enumeration StepAction
 // args[2]: number of times to perform the step, for step out it is the number
 //          of frames to step down.
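 // As an illustrative debugger-side call (argument names assumed, not part of
 // this change): PrepareStep(break_id, StepNext, 3) performs three "next"
 // steps, while PrepareStep(break_id, StepOut, 2) steps out of two frames.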
-static MaybeObject* Runtime_PrepareStep(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_PrepareStep(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   // Check arguments.
   Object* check;
-  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check = Runtime_CheckExecutionState(args, isolate);
     if (!maybe_check->ToObject(&check)) return maybe_check;
   }
   if (!args[1]->IsNumber() || !args[2]->IsNumber()) {
-    return Top::Throw(Heap::illegal_argument_symbol());
+    return isolate->Throw(isolate->heap()->illegal_argument_symbol());
   }
 
   // Get the step action and check validity.
@@ -10138,30 +10592,32 @@ static MaybeObject* Runtime_PrepareStep(Arguments args) {
       step_action != StepOut &&
       step_action != StepInMin &&
       step_action != StepMin) {
-    return Top::Throw(Heap::illegal_argument_symbol());
+    return isolate->Throw(isolate->heap()->illegal_argument_symbol());
   }
 
   // Get the number of steps.
   int step_count = NumberToInt32(args[2]);
   if (step_count < 1) {
-    return Top::Throw(Heap::illegal_argument_symbol());
+    return isolate->Throw(isolate->heap()->illegal_argument_symbol());
   }
 
   // Clear all current stepping setup.
-  Debug::ClearStepping();
+  isolate->debug()->ClearStepping();
 
   // Prepare step.
-  Debug::PrepareStep(static_cast<StepAction>(step_action), step_count);
-  return Heap::undefined_value();
+  isolate->debug()->PrepareStep(static_cast<StepAction>(step_action),
+                                step_count);
+  return isolate->heap()->undefined_value();
 }
 
 
 // Clear all stepping set by PrepareStep.
-static MaybeObject* Runtime_ClearStepping(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_ClearStepping(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 0);
-  Debug::ClearStepping();
-  return Heap::undefined_value();
+  isolate->debug()->ClearStepping();
+  return isolate->heap()->undefined_value();
 }
 
 
@@ -10178,15 +10634,15 @@ static Handle<Context> CopyWithContextChain(Handle<Context> context_chain,
   Handle<Context> previous(context_chain->previous());
   Handle<JSObject> extension(JSObject::cast(context_chain->extension()));
   Handle<Context> context = CopyWithContextChain(function_context, previous);
-  return Factory::NewWithContext(context,
-                                 extension,
-                                 context_chain->IsCatchContext());
+  return context->GetIsolate()->factory()->NewWithContext(
+      context, extension, context_chain->IsCatchContext());
 }
 
 
 // Helper function to find or create the arguments object for
 // Runtime_DebugEvaluate.
-static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
+static Handle<Object> GetArgumentsObject(Isolate* isolate,
+                                         JavaScriptFrame* frame,
                                          Handle<JSFunction> function,
                                          Handle<SerializedScopeInfo> scope_info,
                                          const ScopeInfo<>* sinfo,
@@ -10196,22 +10652,24 @@ static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
   // does not support eval) then create an 'arguments' object.
   int index;
   if (sinfo->number_of_stack_slots() > 0) {
-    index = scope_info->StackSlotIndex(Heap::arguments_symbol());
+    index = scope_info->StackSlotIndex(isolate->heap()->arguments_symbol());
     if (index != -1) {
-      return Handle<Object>(frame->GetExpression(index));
+      return Handle<Object>(frame->GetExpression(index), isolate);
     }
   }
 
   if (sinfo->number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
-    index = scope_info->ContextSlotIndex(Heap::arguments_symbol(), NULL);
+    index = scope_info->ContextSlotIndex(isolate->heap()->arguments_symbol(),
+                                         NULL);
     if (index != -1) {
-      return Handle<Object>(function_context->get(index));
+      return Handle<Object>(function_context->get(index), isolate);
     }
   }
 
   const int length = frame->ComputeParametersCount();
-  Handle<JSObject> arguments = Factory::NewArgumentsObject(function, length);
-  Handle<FixedArray> array = Factory::NewFixedArray(length);
+  Handle<JSObject> arguments =
+      isolate->factory()->NewArgumentsObject(function, length);
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(length);
 
   AssertNoAllocation no_gc;
   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
@@ -10223,6 +10681,10 @@ static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
 }
 
 
+static const char kSourceStr[] =
+    "(function(arguments,__source__){return eval(__source__);})";
+
+
 // Evaluate a piece of JavaScript in the context of a stack frame for
 // debugging. This is accomplished by creating a new context which in its
 // extension part has all the parameters and locals of the function on the
@@ -10234,14 +10696,16 @@ static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
 // stack frame presenting the same view of the values of parameters and
 // local variables as if the piece of JavaScript was evaluated at the point
 // where the function on the stack frame is currently stopped.
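 // Conceptually (a sketch only, not part of this change), the evaluation
 // boils down to something like:
 //
 //   var wrapper =
 //       (function(arguments, __source__) { return eval(__source__); });
 //   wrapper.call(frame_receiver, materialized_arguments, source);
 //
 // with the wrapper compiled in a context whose extension object holds the
 // materialized parameters and locals of the selected frame.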
-static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugEvaluate(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   // Check the execution state and decode arguments frame and source to be
   // evaluated.
   ASSERT(args.length() == 5);
   Object* check_result;
-  { MaybeObject* maybe_check_result = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check_result = Runtime_CheckExecutionState(args,
+                                                                  isolate);
     if (!maybe_check_result->ToObject(&check_result)) {
       return maybe_check_result;
     }
@@ -10264,13 +10728,13 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
 
   // Traverse the saved contexts chain to find the active context for the
   // selected frame.
-  SaveContext* save = Top::save_context();
+  SaveContext* save = isolate->save_context();
   while (save != NULL && !save->below(frame)) {
     save = save->prev();
   }
   ASSERT(save != NULL);
-  SaveContext savex;
-  Top::set_context(*(save->context()));
+  SaveContext savex(isolate);
+  isolate->set_context(*(save->context()));
 
   // Create the (empty) function replacing the function on the stack frame for
   // the purpose of evaluating in the context created below. It is important
@@ -10279,7 +10743,8 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
   // in Context::Lookup, where context slots for parameters and local variables
   // are looked at before the extension object.
   Handle<JSFunction> go_between =
-      Factory::NewFunction(Factory::empty_string(), Factory::undefined_value());
+      isolate->factory()->NewFunction(isolate->factory()->empty_string(),
+                                      isolate->factory()->undefined_value());
   go_between->set_context(function->context());
 #ifdef DEBUG
   ScopeInfo<> go_between_sinfo(go_between->shared()->scope_info());
@@ -10288,13 +10753,14 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
 #endif
 
   // Materialize the content of the local scope into a JSObject.
-  Handle<JSObject> local_scope = MaterializeLocalScope(frame);
-  RETURN_IF_EMPTY_HANDLE(local_scope);
+  Handle<JSObject> local_scope = MaterializeLocalScope(isolate, frame);
+  RETURN_IF_EMPTY_HANDLE(isolate, local_scope);
 
   // Allocate a new context for the debug evaluation and set the extension
   // object built.
   Handle<Context> context =
-      Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, go_between);
+      isolate->factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS,
+                                             go_between);
   context->set_extension(*local_scope);
   // Copy any with contexts present and chain them in front of this context.
   Handle<Context> frame_context(Context::cast(frame->context()));
@@ -10302,7 +10768,7 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
   context = CopyWithContextChain(frame_context, context);
 
   if (additional_context->IsJSObject()) {
-    context = Factory::NewWithContext(context,
+    context = isolate->factory()->NewWithContext(context,
         Handle<JSObject>::cast(additional_context), false);
   }
 
@@ -10311,12 +10777,10 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
   // 'arguments'. This is to have access to what would have been 'arguments' in
   // the function being debugged.
   // function(arguments,__source__) {return eval(__source__);}
-  static const char* source_str =
-      "(function(arguments,__source__){return eval(__source__);})";
-  static const int source_str_length = StrLength(source_str);
+
   Handle<String> function_source =
-      Factory::NewStringFromAscii(Vector<const char>(source_str,
-                                                     source_str_length));
+      isolate->factory()->NewStringFromAscii(
+          Vector<const char>(kSourceStr, sizeof(kSourceStr) - 1));
 
   // Currently, the eval code will be executed in non-strict mode,
   // even in the strict code context.
@@ -10327,17 +10791,18 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
                             kNonStrictMode);
   if (shared.is_null()) return Failure::Exception();
   Handle<JSFunction> compiled_function =
-      Factory::NewFunctionFromSharedFunctionInfo(shared, context);
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);
 
   // Invoke the result of the compilation to get the evaluation function.
   bool has_pending_exception;
-  Handle<Object> receiver(frame->receiver());
+  Handle<Object> receiver(frame->receiver(), isolate);
   Handle<Object> evaluation_function =
       Execution::Call(compiled_function, receiver, 0, NULL,
                       &has_pending_exception);
   if (has_pending_exception) return Failure::Exception();
 
-  Handle<Object> arguments = GetArgumentsObject(frame, function, scope_info,
+  Handle<Object> arguments = GetArgumentsObject(isolate, frame,
+                                                function, scope_info,
                                                 &sinfo, function_context);
 
   // Invoke the evaluation function and return the result.
@@ -10359,14 +10824,16 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_DebugEvaluateGlobal(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugEvaluateGlobal(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   // Check the execution state and decode arguments frame and source to be
   // evaluated.
   ASSERT(args.length() == 4);
   Object* check_result;
-  { MaybeObject* maybe_check_result = Runtime_CheckExecutionState(args);
+  { MaybeObject* maybe_check_result = Runtime_CheckExecutionState(args,
+                                                                  isolate);
     if (!maybe_check_result->ToObject(&check_result)) {
       return maybe_check_result;
     }
@@ -10379,28 +10846,30 @@ static MaybeObject* Runtime_DebugEvaluateGlobal(Arguments args) {
   DisableBreak disable_break_save(disable_break);
 
   // Enter the top context from before the debugger was invoked.
-  SaveContext save;
+  SaveContext save(isolate);
   SaveContext* top = &save;
-  while (top != NULL && *top->context() == *Debug::debug_context()) {
+  while (top != NULL && *top->context() == *isolate->debug()->debug_context()) {
     top = top->prev();
   }
   if (top != NULL) {
-    Top::set_context(*top->context());
+    isolate->set_context(*top->context());
   }
 
   // Get the global context now set to the top context from before the
   // debugger was invoked.
-  Handle<Context> context = Top::global_context();
+  Handle<Context> context = isolate->global_context();
 
   bool is_global = true;
 
   if (additional_context->IsJSObject()) {
     // Create a function context first, then put 'with' context on top of it.
-    Handle<JSFunction> go_between = Factory::NewFunction(
-        Factory::empty_string(), Factory::undefined_value());
+    Handle<JSFunction> go_between = isolate->factory()->NewFunction(
+        isolate->factory()->empty_string(),
+        isolate->factory()->undefined_value());
     go_between->set_context(*context);
     context =
-        Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, go_between);
+        isolate->factory()->NewFunctionContext(
+            Context::MIN_CONTEXT_SLOTS, go_between);
     context->set_extension(JSObject::cast(*additional_context));
     is_global = false;
   }
@@ -10412,12 +10881,13 @@ static MaybeObject* Runtime_DebugEvaluateGlobal(Arguments args) {
       Compiler::CompileEval(source, context, is_global, kNonStrictMode);
   if (shared.is_null()) return Failure::Exception();
   Handle<JSFunction> compiled_function =
-      Handle<JSFunction>(Factory::NewFunctionFromSharedFunctionInfo(shared,
-                                                                    context));
+      Handle<JSFunction>(
+          isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
+                                                                context));
 
   // Invoke the result of the compilation to get the evaluation function.
   bool has_pending_exception;
-  Handle<Object> receiver = Top::global();
+  Handle<Object> receiver = isolate->global();
   Handle<Object> result =
     Execution::Call(compiled_function, receiver, 0, NULL,
                     &has_pending_exception);
@@ -10426,12 +10896,13 @@ static MaybeObject* Runtime_DebugEvaluateGlobal(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_DebugGetLoadedScripts(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_DebugGetLoadedScripts(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 0);
 
   // Fill the script objects.
-  Handle<FixedArray> instances = Debug::GetLoadedScripts();
+  Handle<FixedArray> instances = isolate->debug()->GetLoadedScripts();
 
   // Convert the script objects to proper JS objects.
   for (int i = 0; i < instances->length(); i++) {
@@ -10446,7 +10917,8 @@ static MaybeObject* Runtime_DebugGetLoadedScripts(Arguments args) {
   }
 
   // Return result as a JS array.
-  Handle<JSObject> result = Factory::NewJSObject(Top::array_function());
+  Handle<JSObject> result =
+      isolate->factory()->NewJSObject(isolate->array_function());
   Handle<JSArray>::cast(result)->SetContent(*instances);
   return *result;
 }
@@ -10526,11 +10998,12 @@ static int DebugReferencedBy(JSObject* target,
 // args[0]: the object to find references to
 // args[1]: constructor function for instances to exclude (Mirror)
 // args[2]: the maximum number of objects to return
-static MaybeObject* Runtime_DebugReferencedBy(Arguments args) {
+static MaybeObject* Runtime_DebugReferencedBy(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
 
   // First perform a full GC in order to avoid references from dead objects.
-  Heap::CollectAllGarbage(false);
+  isolate->heap()->CollectAllGarbage(false);
 
   // Check parameters.
   CONVERT_CHECKED(JSObject, target, args[0]);
@@ -10542,7 +11015,7 @@ static MaybeObject* Runtime_DebugReferencedBy(Arguments args) {
 
   // Get the constructor function for context extension and arguments array.
   JSObject* arguments_boilerplate =
-      Top::context()->global_context()->arguments_boilerplate();
+      isolate->context()->global_context()->arguments_boilerplate();
   JSFunction* arguments_function =
       JSFunction::cast(arguments_boilerplate->map()->constructor());
 
@@ -10553,7 +11026,7 @@ static MaybeObject* Runtime_DebugReferencedBy(Arguments args) {
 
   // Allocate an array to hold the result.
   Object* object;
-  { MaybeObject* maybe_object = Heap::AllocateFixedArray(count);
+  { MaybeObject* maybe_object = isolate->heap()->AllocateFixedArray(count);
     if (!maybe_object->ToObject(&object)) return maybe_object;
   }
   FixedArray* instances = FixedArray::cast(object);
@@ -10564,8 +11037,8 @@ static MaybeObject* Runtime_DebugReferencedBy(Arguments args) {
 
   // Return result as JS array.
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateJSObject(
-        Top::context()->global_context()->array_function());
+  { MaybeObject* maybe_result = isolate->heap()->AllocateJSObject(
+      isolate->context()->global_context()->array_function());
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSArray::cast(result)->SetContent(instances);
@@ -10606,11 +11079,12 @@ static int DebugConstructedBy(JSFunction* constructor, int max_references,
 // Scan the heap for objects constructed by a specific function.
 // args[0]: the constructor to find instances of
 // args[1]: the maximum number of objects to return
-static MaybeObject* Runtime_DebugConstructedBy(Arguments args) {
+static MaybeObject* Runtime_DebugConstructedBy(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
 
   // First perform a full GC in order to avoid dead objects.
-  Heap::CollectAllGarbage(false);
+  isolate->heap()->CollectAllGarbage(false);
 
   // Check parameters.
   CONVERT_CHECKED(JSFunction, constructor, args[0]);
@@ -10623,7 +11097,7 @@ static MaybeObject* Runtime_DebugConstructedBy(Arguments args) {
 
   // Allocate an array to hold the result.
   Object* object;
-  { MaybeObject* maybe_object = Heap::AllocateFixedArray(count);
+  { MaybeObject* maybe_object = isolate->heap()->AllocateFixedArray(count);
     if (!maybe_object->ToObject(&object)) return maybe_object;
   }
   FixedArray* instances = FixedArray::cast(object);
@@ -10633,8 +11107,8 @@ static MaybeObject* Runtime_DebugConstructedBy(Arguments args) {
 
   // Return result as JS array.
   Object* result;
-  { MaybeObject* maybe_result = Heap::AllocateJSObject(
-        Top::context()->global_context()->array_function());
+  { MaybeObject* maybe_result = isolate->heap()->AllocateJSObject(
+          isolate->context()->global_context()->array_function());
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   JSArray::cast(result)->SetContent(instances);
@@ -10644,7 +11118,8 @@ static MaybeObject* Runtime_DebugConstructedBy(Arguments args) {
 
 // Find the effective prototype object as returned by __proto__.
 // args[0]: the object to find the prototype for.
-static MaybeObject* Runtime_DebugGetPrototype(Arguments args) {
+static MaybeObject* Runtime_DebugGetPrototype(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
 
   CONVERT_CHECKED(JSObject, obj, args[0]);
@@ -10654,16 +11129,19 @@ static MaybeObject* Runtime_DebugGetPrototype(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_SystemBreak(Arguments args) {
+static MaybeObject* Runtime_SystemBreak(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
   CPU::DebugBreak();
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DebugDisassembleFunction(Arguments args) {
+static MaybeObject* Runtime_DebugDisassembleFunction(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef DEBUG
-  HandleScope scope;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   // Get the function and make sure it is compiled.
   CONVERT_ARG_CHECKED(JSFunction, func, 0);
@@ -10673,13 +11151,15 @@ static MaybeObject* Runtime_DebugDisassembleFunction(Arguments args) {
   }
   func->code()->PrintLn();
 #endif  // DEBUG
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_DebugDisassembleConstructor(Arguments args) {
+static MaybeObject* Runtime_DebugDisassembleConstructor(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef DEBUG
-  HandleScope scope;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   // Get the function and make sure it is compiled.
   CONVERT_ARG_CHECKED(JSFunction, func, 0);
@@ -10689,11 +11169,13 @@ static MaybeObject* Runtime_DebugDisassembleConstructor(Arguments args) {
   }
   shared->construct_stub()->PrintLn();
 #endif  // DEBUG
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_FunctionGetInferredName(Arguments args) {
+static MaybeObject* Runtime_FunctionGetInferredName(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
 
@@ -10703,7 +11185,7 @@ static MaybeObject* Runtime_FunctionGetInferredName(Arguments args) {
 
 
 static int FindSharedFunctionInfosForScript(Script* script,
-                                     FixedArray* buffer) {
+                                            FixedArray* buffer) {
   AssertNoAllocation no_allocations;
 
   int counter = 0;
@@ -10730,9 +11212,10 @@ static int FindSharedFunctionInfosForScript(Script* script,
 // to this script. Returns JSArray of SharedFunctionInfo wrapped
 // in OpaqueReferences.
 static MaybeObject* Runtime_LiveEditFindSharedFunctionInfosForScript(
-    Arguments args) {
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_CHECKED(JSValue, script_value, args[0]);
 
   Handle<Script> script = Handle<Script>(Script::cast(script_value->value()));
@@ -10740,14 +11223,14 @@ static MaybeObject* Runtime_LiveEditFindSharedFunctionInfosForScript(
   const int kBufferSize = 32;
 
   Handle<FixedArray> array;
-  array = Factory::NewFixedArray(kBufferSize);
+  array = isolate->factory()->NewFixedArray(kBufferSize);
   int number = FindSharedFunctionInfosForScript(*script, *array);
   if (number > kBufferSize) {
-    array = Factory::NewFixedArray(number);
+    array = isolate->factory()->NewFixedArray(number);
     FindSharedFunctionInfosForScript(*script, *array);
   }
 
-  Handle<JSArray> result = Factory::NewJSArrayWithElements(array);
+  Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(array);
   result->set_length(Smi::FromInt(number));
 
   LiveEdit::WrapSharedFunctionInfos(result);
@@ -10762,16 +11245,18 @@ static MaybeObject* Runtime_LiveEditFindSharedFunctionInfosForScript(
 // Returns a JSArray of compilation infos. The array is ordered so that each
 // function with all its descendants is always stored in a contiguous range,
 // with the function itself going first. The root function is a script
 // function.
-static MaybeObject* Runtime_LiveEditGatherCompileInfo(Arguments args) {
+static MaybeObject* Runtime_LiveEditGatherCompileInfo(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_CHECKED(JSValue, script, args[0]);
   CONVERT_ARG_CHECKED(String, source, 1);
   Handle<Script> script_handle = Handle<Script>(Script::cast(script->value()));
 
   JSArray* result =  LiveEdit::GatherCompileInfo(script_handle, source);
 
-  if (Top::has_pending_exception()) {
+  if (isolate->has_pending_exception()) {
     return Failure::Exception();
   }
 
@@ -10781,12 +11266,13 @@ static MaybeObject* Runtime_LiveEditGatherCompileInfo(Arguments args) {
 // Changes the source of the script to a new_source.
 // If old_script_name is provided (i.e. is a String), also creates a copy of
 // the script with its original source and sends notification to debugger.
-static MaybeObject* Runtime_LiveEditReplaceScript(Arguments args) {
+static MaybeObject* Runtime_LiveEditReplaceScript(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_CHECKED(JSValue, original_script_value, args[0]);
   CONVERT_ARG_CHECKED(String, new_source, 1);
-  Handle<Object> old_script_name(args[2]);
+  Handle<Object> old_script_name(args[2], isolate);
 
   CONVERT_CHECKED(Script, original_script_pointer,
                   original_script_value->value());
@@ -10800,23 +11286,27 @@ static MaybeObject* Runtime_LiveEditReplaceScript(Arguments args) {
     Handle<Script> script_handle(Script::cast(old_script));
     return *(GetScriptWrapper(script_handle));
   } else {
-    return Heap::null_value();
+    return isolate->heap()->null_value();
   }
 }
 
 
-static MaybeObject* Runtime_LiveEditFunctionSourceUpdated(Arguments args) {
+static MaybeObject* Runtime_LiveEditFunctionSourceUpdated(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 1);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSArray, shared_info, 0);
   return LiveEdit::FunctionSourceUpdated(shared_info);
 }
 
 
 // Replaces code of SharedFunctionInfo with a new one.
-static MaybeObject* Runtime_LiveEditReplaceFunctionCode(Arguments args) {
+static MaybeObject* Runtime_LiveEditReplaceFunctionCode(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSArray, new_compile_info, 0);
   CONVERT_ARG_CHECKED(JSArray, shared_info, 1);
 
@@ -10824,17 +11314,19 @@ static MaybeObject* Runtime_LiveEditReplaceFunctionCode(Arguments args) {
 }
 
 // Connects SharedFunctionInfo to another script.
-static MaybeObject* Runtime_LiveEditFunctionSetScript(Arguments args) {
+static MaybeObject* Runtime_LiveEditFunctionSetScript(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
-  Handle<Object> function_object(args[0]);
-  Handle<Object> script_object(args[1]);
+  HandleScope scope(isolate);
+  Handle<Object> function_object(args[0], isolate);
+  Handle<Object> script_object(args[1], isolate);
 
   if (function_object->IsJSValue()) {
     Handle<JSValue> function_wrapper = Handle<JSValue>::cast(function_object);
     if (script_object->IsJSValue()) {
       CONVERT_CHECKED(Script, script, JSValue::cast(*script_object)->value());
-      script_object = Handle<Object>(script);
+      script_object = Handle<Object>(script, isolate);
     }
 
     LiveEdit::SetFunctionScript(function_wrapper, script_object);
@@ -10843,15 +11335,17 @@ static MaybeObject* Runtime_LiveEditFunctionSetScript(Arguments args) {
     // and we check it in this function.
   }
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // In the code of a parent function, replaces the original function (stored
 // as an embedded object) with a substitution one.
-static MaybeObject* Runtime_LiveEditReplaceRefToNestedFunction(Arguments args) {
+static MaybeObject* Runtime_LiveEditReplaceRefToNestedFunction(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 3);
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   CONVERT_ARG_CHECKED(JSValue, parent_wrapper, 0);
   CONVERT_ARG_CHECKED(JSValue, orig_wrapper, 1);
@@ -10860,7 +11354,7 @@ static MaybeObject* Runtime_LiveEditReplaceRefToNestedFunction(Arguments args) {
   LiveEdit::ReplaceRefToNestedFunction(parent_wrapper, orig_wrapper,
                                        subst_wrapper);
 
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
@@ -10869,9 +11363,11 @@ static MaybeObject* Runtime_LiveEditReplaceRefToNestedFunction(Arguments args) {
 // array of groups of 3 numbers:
 // (change_begin, change_end, change_end_new_position).
 // Each group describes a change in text; groups are sorted by change_begin.
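 // For instance (illustrative numbers only), a group [ 10, 15, 12 ] means the
 // original text between positions 10 and 15 now ends at position 12, i.e.
 // that range shrank by three characters.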
-static MaybeObject* Runtime_LiveEditPatchFunctionPositions(Arguments args) {
+static MaybeObject* Runtime_LiveEditPatchFunctionPositions(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSArray, shared_array, 0);
   CONVERT_ARG_CHECKED(JSArray, position_change_array, 1);
 
@@ -10883,9 +11379,11 @@ static MaybeObject* Runtime_LiveEditPatchFunctionPositions(Arguments args) {
 // checks that none of them have activations on stacks (of any thread).
 // Returns array of the same length with corresponding results of
 // LiveEdit::FunctionPatchabilityStatus type.
-static MaybeObject* Runtime_LiveEditCheckAndDropActivations(Arguments args) {
+static MaybeObject* Runtime_LiveEditCheckAndDropActivations(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSArray, shared_array, 0);
   CONVERT_BOOLEAN_CHECKED(do_drop, args[1]);
 
@@ -10895,9 +11393,10 @@ static MaybeObject* Runtime_LiveEditCheckAndDropActivations(Arguments args) {
 // Compares two strings line-by-line, then token-wise, and returns the diff
 // as a JSArray of triplets (pos1, pos1_end, pos2_end) describing the list
 // of diff chunks.
-static MaybeObject* Runtime_LiveEditCompareStrings(Arguments args) {
+static MaybeObject* Runtime_LiveEditCompareStrings(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(String, s1, 0);
   CONVERT_ARG_CHECKED(String, s2, 1);
 
@@ -10905,20 +11404,21 @@ static MaybeObject* Runtime_LiveEditCompareStrings(Arguments args) {
 }
 
 
-
 // A testing entry. Returns statement position which is the closest to
 // source_position.
-static MaybeObject* Runtime_GetFunctionCodePositionFromSource(Arguments args) {
+static MaybeObject* Runtime_GetFunctionCodePositionFromSource(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
   CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
 
-  Handle<Code> code(function->code());
+  Handle<Code> code(function->code(), isolate);
 
   if (code->kind() != Code::FUNCTION &&
       code->kind() != Code::OPTIMIZED_FUNCTION) {
-    return Heap::undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   RelocIterator it(*code, RelocInfo::ModeMask(RelocInfo::STATEMENT_POSITION));
@@ -10945,9 +11445,10 @@ static MaybeObject* Runtime_GetFunctionCodePositionFromSource(Arguments args) {
 // Calls specified function with or without entering the debugger.
 // This is used in unit tests to run code as if debugger is entered or simply
 // to have a stack with C++ frame in the middle.
-static MaybeObject* Runtime_ExecuteInDebugContext(Arguments args) {
+static MaybeObject* Runtime_ExecuteInDebugContext(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
-  HandleScope scope;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
   CONVERT_BOOLEAN_CHECKED(without_debugger, args[1]);
 
@@ -10955,11 +11456,11 @@ static MaybeObject* Runtime_ExecuteInDebugContext(Arguments args) {
   bool pending_exception;
   {
     if (without_debugger) {
-      result = Execution::Call(function, Top::global(), 0, NULL,
+      result = Execution::Call(function, isolate->global(), 0, NULL,
                                &pending_exception);
     } else {
       EnterDebugger enter_debugger;
-      result = Execution::Call(function, Top::global(), 0, NULL,
+      result = Execution::Call(function, isolate->global(), 0, NULL,
                                &pending_exception);
     }
   }
@@ -10972,61 +11473,68 @@ static MaybeObject* Runtime_ExecuteInDebugContext(Arguments args) {
 
 
 // Sets a v8 flag.
-static MaybeObject* Runtime_SetFlags(Arguments args) {
+static MaybeObject* Runtime_SetFlags(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(String, arg, args[0]);
   SmartPointer<char> flags =
       arg->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
   FlagList::SetFlagsFromString(*flags, StrLength(*flags));
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
 // Performs a GC.
 // Presently, it only does a full GC.
-static MaybeObject* Runtime_CollectGarbage(Arguments args) {
-  Heap::CollectAllGarbage(true);
-  return Heap::undefined_value();
+static MaybeObject* Runtime_CollectGarbage(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  isolate->heap()->CollectAllGarbage(true);
+  return isolate->heap()->undefined_value();
 }
 
 
 // Gets the current heap usage.
-static MaybeObject* Runtime_GetHeapUsage(Arguments args) {
-  int usage = static_cast<int>(Heap::SizeOfObjects());
+static MaybeObject* Runtime_GetHeapUsage(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  int usage = static_cast<int>(isolate->heap()->SizeOfObjects());
   if (!Smi::IsValid(usage)) {
-    return *Factory::NewNumberFromInt(usage);
+    return *isolate->factory()->NewNumberFromInt(usage);
   }
   return Smi::FromInt(usage);
 }
 
 
 // Tells whether live object lists are enabled in this build.
-static MaybeObject* Runtime_HasLOLEnabled(Arguments args) {
+static MaybeObject* Runtime_HasLOLEnabled(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
-  return Heap::true_value();
+  return isolate->heap()->true_value();
 #else
-  return Heap::false_value();
+  return isolate->heap()->false_value();
 #endif
 }
 
 
 // Captures a live object list from the present heap.
-static MaybeObject* Runtime_CaptureLOL(Arguments args) {
+static MaybeObject* Runtime_CaptureLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   return LiveObjectList::Capture();
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Deletes the specified live object list.
-static MaybeObject* Runtime_DeleteLOL(Arguments args) {
+static MaybeObject* Runtime_DeleteLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   CONVERT_SMI_CHECKED(id, args[0]);
   bool success = LiveObjectList::Delete(id);
-  return success ? Heap::true_value() : Heap::false_value();
+  return success ? isolate->heap()->true_value() :
+                   isolate->heap()->false_value();
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
@@ -11036,7 +11544,8 @@ static MaybeObject* Runtime_DeleteLOL(Arguments args) {
 // specified by id1 and id2.
 // If id1 is 0 (i.e. not a valid lol), then the whole of lol id2 will be
 // dumped.
-static MaybeObject* Runtime_DumpLOL(Arguments args) {
+static MaybeObject* Runtime_DumpLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_SMI_CHECKED(id1, args[0]);
@@ -11047,40 +11556,43 @@ static MaybeObject* Runtime_DumpLOL(Arguments args) {
   EnterDebugger enter_debugger;
   return LiveObjectList::Dump(id1, id2, start, count, filter_obj);
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Gets the specified object as requested by the debugger.
 // This is only used for obj ids shown in live object lists.
-static MaybeObject* Runtime_GetLOLObj(Arguments args) {
+static MaybeObject* Runtime_GetLOLObj(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   CONVERT_SMI_CHECKED(obj_id, args[0]);
   Object* result = LiveObjectList::GetObj(obj_id);
   return result;
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Gets the obj id for the specified address if valid.
 // This is only used for obj ids shown in live object lists.
-static MaybeObject* Runtime_GetLOLObjId(Arguments args) {
+static MaybeObject* Runtime_GetLOLObjId(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_ARG_CHECKED(String, address, 0);
   Object* result = LiveObjectList::GetObjId(address);
   return result;
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Gets the retainers that keep the specified object alive.
-static MaybeObject* Runtime_GetLOLObjRetainers(Arguments args) {
+static MaybeObject* Runtime_GetLOLObjRetainers(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_SMI_CHECKED(obj_id, args[0]);
@@ -11114,13 +11626,14 @@ static MaybeObject* Runtime_GetLOLObjRetainers(Arguments args) {
                                          limit,
                                          filter_obj);
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Gets the reference path between 2 objects.
-static MaybeObject* Runtime_GetLOLPath(Arguments args) {
+static MaybeObject* Runtime_GetLOLPath(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_SMI_CHECKED(obj_id1, args[0]);
@@ -11136,45 +11649,48 @@ static MaybeObject* Runtime_GetLOLPath(Arguments args) {
       LiveObjectList::GetPath(obj_id1, obj_id2, instance_filter);
   return result;
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Generates the response to a debugger request for a list of all
 // previously captured live object lists.
-static MaybeObject* Runtime_InfoLOL(Arguments args) {
+static MaybeObject* Runtime_InfoLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   CONVERT_SMI_CHECKED(start, args[0]);
   CONVERT_SMI_CHECKED(count, args[1]);
   return LiveObjectList::Info(start, count);
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Gets a dump of the specified object as requested by the debugger.
 // This is only used for obj ids shown in live object lists.
-static MaybeObject* Runtime_PrintLOLObj(Arguments args) {
+static MaybeObject* Runtime_PrintLOLObj(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_SMI_CHECKED(obj_id, args[0]);
   Object* result = LiveObjectList::PrintObj(obj_id);
   return result;
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
 
 // Resets and releases all previously captured live object lists.
-static MaybeObject* Runtime_ResetLOL(Arguments args) {
+static MaybeObject* Runtime_ResetLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   LiveObjectList::Reset();
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
@@ -11184,7 +11700,8 @@ static MaybeObject* Runtime_ResetLOL(Arguments args) {
 // specified by id1 and id2.
 // If id1 is 0 (i.e. not a valid lol), then the whole of lol id2 will be
 // summarized.
-static MaybeObject* Runtime_SummarizeLOL(Arguments args) {
+static MaybeObject* Runtime_SummarizeLOL(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
 #ifdef LIVE_OBJECT_LIST
   HandleScope scope;
   CONVERT_SMI_CHECKED(id1, args[0]);
@@ -11194,7 +11711,7 @@ static MaybeObject* Runtime_SummarizeLOL(Arguments args) {
   EnterDebugger enter_debugger;
   return LiveObjectList::Summarize(id1, id2, filter_obj);
 #else
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 #endif
 }
 
@@ -11202,25 +11719,27 @@ static MaybeObject* Runtime_SummarizeLOL(Arguments args) {
 
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-static MaybeObject* Runtime_ProfilerResume(Arguments args) {
+static MaybeObject* Runtime_ProfilerResume(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(Smi, smi_modules, args[0]);
   CONVERT_CHECKED(Smi, smi_tag, args[1]);
   v8::V8::ResumeProfilerEx(smi_modules->value(), smi_tag->value());
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_ProfilerPause(Arguments args) {
+static MaybeObject* Runtime_ProfilerPause(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
 
   CONVERT_CHECKED(Smi, smi_modules, args[0]);
   CONVERT_CHECKED(Smi, smi_tag, args[1]);
   v8::V8::PauseProfilerEx(smi_modules->value(), smi_tag->value());
-  return Heap::undefined_value();
+  return isolate->heap()->undefined_value();
 }
 
 #endif  // ENABLE_LOGGING_AND_PROFILING
@@ -11250,7 +11769,7 @@ static Handle<Object> Runtime_GetScriptFromScriptName(
   }
 
   // If no script with the requested script data is found return undefined.
-  if (script.is_null()) return Factory::undefined_value();
+  if (script.is_null()) return FACTORY->undefined_value();
 
   // Return the script found.
   return GetScriptWrapper(script);
@@ -11260,8 +11779,9 @@ static Handle<Object> Runtime_GetScriptFromScriptName(
 // Get the script object from script data. NOTE: Regarding performance
 // see the NOTE for GetScriptFromScriptData.
 // args[0]: script data for the script to find the source for
-static MaybeObject* Runtime_GetScript(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_GetScript(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
 
   ASSERT(args.length() == 1);
 
@@ -11305,17 +11825,19 @@ static bool ShowFrameInStackTrace(StackFrame* raw_frame, Object* caller,
 // Collect the raw data for a stack trace.  Returns an array of 4
 // element segments each containing a receiver, function, code and
 // native code offset.
-static MaybeObject* Runtime_CollectStackTrace(Arguments args) {
+static MaybeObject* Runtime_CollectStackTrace(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT_EQ(args.length(), 2);
   Handle<Object> caller = args.at<Object>(0);
   CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
 
-  HandleScope scope;
+  HandleScope scope(isolate);
+  Factory* factory = isolate->factory();
 
   limit = Max(limit, 0);  // Ensure that limit is not negative.
   int initial_size = Min(limit, 10);
   Handle<FixedArray> elements =
-      Factory::NewFixedArrayWithHoles(initial_size * 4);
+      factory->NewFixedArrayWithHoles(initial_size * 4);
 
   StackFrameIterator iter;
   // If the caller parameter is a function we skip frames until we're
@@ -11334,7 +11856,7 @@ static MaybeObject* Runtime_CollectStackTrace(Arguments args) {
         if (cursor + 4 > elements->length()) {
           int new_capacity = JSObject::NewElementsCapacity(elements->length());
           Handle<FixedArray> new_elements =
-              Factory::NewFixedArrayWithHoles(new_capacity);
+              factory->NewFixedArrayWithHoles(new_capacity);
           for (int i = 0; i < cursor; i++) {
             new_elements->set(i, elements->get(i));
           }
@@ -11354,36 +11876,40 @@ static MaybeObject* Runtime_CollectStackTrace(Arguments args) {
     }
     iter.Advance();
   }
-  Handle<JSArray> result = Factory::NewJSArrayWithElements(elements);
+  Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
   result->set_length(Smi::FromInt(cursor));
   return *result;
 }
 
 
 // Returns V8 version as a string.
-static MaybeObject* Runtime_GetV8Version(Arguments args) {
+static MaybeObject* Runtime_GetV8Version(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT_EQ(args.length(), 0);
 
   NoHandleAllocation ha;
 
   const char* version_string = v8::V8::GetVersion();
 
-  return Heap::AllocateStringFromAscii(CStrVector(version_string), NOT_TENURED);
+  return isolate->heap()->AllocateStringFromAscii(CStrVector(version_string),
+                                                  NOT_TENURED);
 }
 
 
-static MaybeObject* Runtime_Abort(Arguments args) {
+static MaybeObject* Runtime_Abort(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   OS::PrintError("abort: %s\n", reinterpret_cast<char*>(args[0]) +
                                     Smi::cast(args[1])->value());
-  Top::PrintStack();
+  isolate->PrintStack();
   OS::Abort();
   UNREACHABLE();
   return NULL;
 }
 
 
-static MaybeObject* Runtime_GetFromCache(Arguments args) {
+static MaybeObject* Runtime_GetFromCache(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   // This is only called from codegen, so checks might be more lax.
   CONVERT_CHECKED(JSFunctionResultCache, cache, args[0]);
   Object* key = args[1];
@@ -11417,7 +11943,7 @@ static MaybeObject* Runtime_GetFromCache(Arguments args) {
   }
 
   // There is no value in the cache.  Invoke the function and cache result.
-  HandleScope scope;
+  HandleScope scope(isolate);
 
   Handle<JSFunctionResultCache> cache_handle(cache);
   Handle<Object> key_handle(key);
@@ -11426,7 +11952,7 @@ static MaybeObject* Runtime_GetFromCache(Arguments args) {
     Handle<JSFunction> factory(JSFunction::cast(
           cache_handle->get(JSFunctionResultCache::kFactoryIndex)));
     // TODO(antonm): consider passing a receiver when constructing a cache.
-    Handle<Object> receiver(Top::global_context()->global());
+    Handle<Object> receiver(isolate->global_context()->global());
     // This handle is neither shared nor used later, so it's safe.
     Object** argv[] = { key_handle.location() };
     bool pending_exception = false;
@@ -11475,39 +12001,46 @@ static MaybeObject* Runtime_GetFromCache(Arguments args) {
 }
 
 
-static MaybeObject* Runtime_NewMessageObject(Arguments args) {
-  HandleScope scope;
+static MaybeObject* Runtime_NewMessageObject(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
+  HandleScope scope(isolate);
   CONVERT_ARG_CHECKED(String, type, 0);
   CONVERT_ARG_CHECKED(JSArray, arguments, 1);
-  return *Factory::NewJSMessageObject(type,
-                                      arguments,
-                                      0,
-                                      0,
-                                      Factory::undefined_value(),
-                                      Factory::undefined_value(),
-                                      Factory::undefined_value());
+  return *isolate->factory()->NewJSMessageObject(
+      type,
+      arguments,
+      0,
+      0,
+      isolate->factory()->undefined_value(),
+      isolate->factory()->undefined_value(),
+      isolate->factory()->undefined_value());
 }
 
 
-static MaybeObject* Runtime_MessageGetType(Arguments args) {
+static MaybeObject* Runtime_MessageGetType(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSMessageObject, message, args[0]);
   return message->type();
 }
 
 
-static MaybeObject* Runtime_MessageGetArguments(Arguments args) {
+static MaybeObject* Runtime_MessageGetArguments(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSMessageObject, message, args[0]);
   return message->arguments();
 }
 
 
-static MaybeObject* Runtime_MessageGetStartPosition(Arguments args) {
+static MaybeObject* Runtime_MessageGetStartPosition(
+    RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSMessageObject, message, args[0]);
   return Smi::FromInt(message->start_position());
 }
 
 
-static MaybeObject* Runtime_MessageGetScript(Arguments args) {
+static MaybeObject* Runtime_MessageGetScript(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   CONVERT_CHECKED(JSMessageObject, message, args[0]);
   return message->script();
 }
@@ -11516,7 +12049,8 @@ static MaybeObject* Runtime_MessageGetScript(Arguments args) {
 #ifdef DEBUG
 // ListNatives is ONLY used by fuzz-natives.js in debug mode.
 // Exclude the code in release mode.
-static MaybeObject* Runtime_ListNatives(Arguments args) {
+static MaybeObject* Runtime_ListNatives(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 0);
   HandleScope scope;
 #define COUNT_ENTRY(Name, argc, ressize) + 1
@@ -11525,7 +12059,8 @@ static MaybeObject* Runtime_ListNatives(Arguments args) {
       INLINE_FUNCTION_LIST(COUNT_ENTRY)
       INLINE_RUNTIME_FUNCTION_LIST(COUNT_ENTRY);
 #undef COUNT_ENTRY
-  Handle<FixedArray> elements = Factory::NewFixedArray(entry_count);
+  Factory* factory = isolate->factory();
+  Handle<FixedArray> elements = factory->NewFixedArray(entry_count);
   int index = 0;
   bool inline_runtime_functions = false;
 #define ADD_ENTRY(Name, argc, ressize)                                       \
@@ -11534,16 +12069,16 @@ static MaybeObject* Runtime_ListNatives(Arguments args) {
     Handle<String> name;                                                     \
     /* Inline runtime functions have an underscore in front of the name. */  \
     if (inline_runtime_functions) {                                          \
-      name = Factory::NewStringFromAscii(                                    \
+      name = factory->NewStringFromAscii(                                    \
           Vector<const char>("_" #Name, StrLength("_" #Name)));              \
     } else {                                                                 \
-      name = Factory::NewStringFromAscii(                                    \
+      name = factory->NewStringFromAscii(                                    \
           Vector<const char>(#Name, StrLength(#Name)));                      \
     }                                                                        \
-    Handle<FixedArray> pair_elements = Factory::NewFixedArray(2);            \
+    Handle<FixedArray> pair_elements = factory->NewFixedArray(2);            \
     pair_elements->set(0, *name);                                            \
     pair_elements->set(1, Smi::FromInt(argc));                               \
-    Handle<JSArray> pair = Factory::NewJSArrayWithElements(pair_elements);   \
+    Handle<JSArray> pair = factory->NewJSArrayWithElements(pair_elements);   \
     elements->set(index++, *pair);                                           \
   }
   inline_runtime_functions = false;
@@ -11553,23 +12088,24 @@ static MaybeObject* Runtime_ListNatives(Arguments args) {
   INLINE_RUNTIME_FUNCTION_LIST(ADD_ENTRY)
 #undef ADD_ENTRY
   ASSERT_EQ(index, entry_count);
-  Handle<JSArray> result = Factory::NewJSArrayWithElements(elements);
+  Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
   return *result;
 }
 #endif
 
 
-static MaybeObject* Runtime_Log(Arguments args) {
+static MaybeObject* Runtime_Log(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(String, format, args[0]);
   CONVERT_CHECKED(JSArray, elms, args[1]);
   Vector<const char> chars = format->ToAsciiVector();
-  Logger::LogRuntime(chars, elms);
-  return Heap::undefined_value();
+  LOGGER->LogRuntime(chars, elms);
+  return isolate->heap()->undefined_value();
 }
 
 
-static MaybeObject* Runtime_IS_VAR(Arguments args) {
+static MaybeObject* Runtime_IS_VAR(RUNTIME_CALLING_CONVENTION) {
   UNREACHABLE();  // implemented as macro in the parser
   return NULL;
 }
@@ -11587,20 +12123,22 @@ static MaybeObject* Runtime_IS_VAR(Arguments args) {
   { Runtime::kInline##name, Runtime::INLINE,     \
     "_" #name, NULL, number_of_args, result_size },
 
-Runtime::Function kIntrinsicFunctions[] = {
+static const Runtime::Function kIntrinsicFunctions[] = {
   RUNTIME_FUNCTION_LIST(F)
   INLINE_FUNCTION_LIST(I)
   INLINE_RUNTIME_FUNCTION_LIST(I)
 };
 
 
-MaybeObject* Runtime::InitializeIntrinsicFunctionNames(Object* dictionary) {
+MaybeObject* Runtime::InitializeIntrinsicFunctionNames(Heap* heap,
+                                                       Object* dictionary) {
+  ASSERT(Isolate::Current()->heap() == heap);
   ASSERT(dictionary != NULL);
   ASSERT(StringDictionary::cast(dictionary)->NumberOfElements() == 0);
   for (int i = 0; i < kNumFunctions; ++i) {
     Object* name_symbol;
     { MaybeObject* maybe_name_symbol =
-          Heap::LookupAsciiSymbol(kIntrinsicFunctions[i].name);
+          heap->LookupAsciiSymbol(kIntrinsicFunctions[i].name);
       if (!maybe_name_symbol->ToObject(&name_symbol)) return maybe_name_symbol;
     }
     StringDictionary* string_dictionary = StringDictionary::cast(dictionary);
@@ -11619,10 +12157,11 @@ MaybeObject* Runtime::InitializeIntrinsicFunctionNames(Object* dictionary) {
 }
 
 
-Runtime::Function* Runtime::FunctionForSymbol(Handle<String> name) {
-  int entry = Heap::intrinsic_function_names()->FindEntry(*name);
+const Runtime::Function* Runtime::FunctionForSymbol(Handle<String> name) {
+  Heap* heap = name->GetHeap();
+  int entry = heap->intrinsic_function_names()->FindEntry(*name);
   if (entry != kNotFound) {
-    Object* smi_index = Heap::intrinsic_function_names()->ValueAt(entry);
+    Object* smi_index = heap->intrinsic_function_names()->ValueAt(entry);
     int function_index = Smi::cast(smi_index)->value();
     return &(kIntrinsicFunctions[function_index]);
   }
@@ -11630,7 +12169,7 @@ Runtime::Function* Runtime::FunctionForSymbol(Handle<String> name) {
 }
 
 
-Runtime::Function* Runtime::FunctionForId(Runtime::FunctionId id) {
+const Runtime::Function* Runtime::FunctionForId(Runtime::FunctionId id) {
   return &(kIntrinsicFunctions[static_cast<int>(id)]);
 }
 
@@ -11640,12 +12179,12 @@ void Runtime::PerformGC(Object* result) {
   if (failure->IsRetryAfterGC()) {
     // Try to do a garbage collection; ignore it if it fails. The C
     // entry stub will throw an out-of-memory exception in that case.
-    Heap::CollectGarbage(failure->allocation_space());
+    HEAP->CollectGarbage(failure->allocation_space());
   } else {
     // Handle last resort GC and make sure to allow future allocations
     // to grow the heap without causing GCs (if possible).
-    Counters::gc_last_resort_from_js.Increment();
-    Heap::CollectAllGarbage(false);
+    COUNTERS->gc_last_resort_from_js()->Increment();
+    HEAP->CollectAllGarbage(false);
   }
 }
 
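Every runtime.cc hunk above applies the same mechanical change: the entry point switches from a bare Arguments parameter to RUNTIME_CALLING_CONVENTION, immediately runs RUNTIME_GET_ISOLATE, and then replaces static Heap::/Factory::/Top:: calls with the corresponding isolate->heap(), isolate->factory(), or isolate-level method. The macro definitions are outside this excerpt; a minimal sketch of the pattern, assuming they expand to roughly an extra Isolate* parameter plus a consistency check, looks like this:

  // Sketch only; the real macro definitions live elsewhere in this patch.
  #define RUNTIME_CALLING_CONVENTION Arguments args, Isolate* isolate  // assumed
  #define RUNTIME_GET_ISOLATE ASSERT(isolate == Isolate::Current())    // assumed

  // Before: every runtime function went through process-global statics.
  static MaybeObject* Runtime_Example(Arguments args) {
    return Heap::undefined_value();
  }

  // After: the heap, factory, counters etc. hang off the explicit isolate.
  static MaybeObject* Runtime_Example(RUNTIME_CALLING_CONVENTION) {
    RUNTIME_GET_ISOLATE;
    return isolate->heap()->undefined_value();
  }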
index 8e73d5c4a23bf5ab21c32644800bf7715f7cbd6b..58062ca404935d802c4e8328ffedeed80491f976 100644 (file)
@@ -28,6 +28,8 @@
 #ifndef V8_RUNTIME_H_
 #define V8_RUNTIME_H_
 
+#include "zone.h"
+
 namespace v8 {
 namespace internal {
 
@@ -412,7 +414,6 @@ namespace internal {
 #define RUNTIME_FUNCTION_LIST_DEBUG(F)
 #endif
 
-
 // ----------------------------------------------------------------------------
 // RUNTIME_FUNCTION_LIST defines all runtime functions accessed
 // either directly by id (via the code generator), or indirectly
@@ -482,6 +483,57 @@ namespace internal {
 //---------------------------------------------------------------------------
 // Runtime provides access to all C++ runtime functions.
 
+class RuntimeState {
+ public:
+
+  StaticResource<StringInputBuffer>* string_input_buffer() {
+    return &string_input_buffer_;
+  }
+  unibrow::Mapping<unibrow::ToUppercase, 128>* to_upper_mapping() {
+    return &to_upper_mapping_;
+  }
+  unibrow::Mapping<unibrow::ToLowercase, 128>* to_lower_mapping() {
+    return &to_lower_mapping_;
+  }
+  StringInputBuffer* string_input_buffer_compare_bufx() {
+    return &string_input_buffer_compare_bufx_;
+  }
+  StringInputBuffer* string_input_buffer_compare_bufy() {
+    return &string_input_buffer_compare_bufy_;
+  }
+  StringInputBuffer* string_locale_compare_buf1() {
+    return &string_locale_compare_buf1_;
+  }
+  StringInputBuffer* string_locale_compare_buf2() {
+    return &string_locale_compare_buf2_;
+  }
+  int* smi_lexicographic_compare_x_elms() {
+    return smi_lexicographic_compare_x_elms_;
+  }
+  int* smi_lexicographic_compare_y_elms() {
+    return smi_lexicographic_compare_y_elms_;
+  }
+
+ private:
+  RuntimeState() {}
+  // Non-reentrant string buffer for efficient general use in the runtime.
+  StaticResource<StringInputBuffer> string_input_buffer_;
+  unibrow::Mapping<unibrow::ToUppercase, 128> to_upper_mapping_;
+  unibrow::Mapping<unibrow::ToLowercase, 128> to_lower_mapping_;
+  StringInputBuffer string_input_buffer_compare_bufx_;
+  StringInputBuffer string_input_buffer_compare_bufy_;
+  StringInputBuffer string_locale_compare_buf1_;
+  StringInputBuffer string_locale_compare_buf2_;
+  int smi_lexicographic_compare_x_elms_[10];
+  int smi_lexicographic_compare_y_elms_[10];
+
+  friend class Isolate;
+  friend class Runtime;
+
+  DISALLOW_COPY_AND_ASSIGN(RuntimeState);
+};
+
+
 class Runtime : public AllStatic {
  public:
   enum FunctionId {
@@ -525,30 +577,35 @@ class Runtime : public AllStatic {
   // retried with a new, empty StringDictionary, not with the same one.
   // Alternatively, heap initialization can be completely restarted.
   MUST_USE_RESULT static MaybeObject* InitializeIntrinsicFunctionNames(
-      Object* dictionary);
+      Heap* heap, Object* dictionary);
 
   // Get the intrinsic function with the given name, which must be a symbol.
-  static Function* FunctionForSymbol(Handle<String> name);
+  static const Function* FunctionForSymbol(Handle<String> name);
 
   // Get the intrinsic function with the given FunctionId.
-  static Function* FunctionForId(FunctionId id);
+  static const Function* FunctionForId(FunctionId id);
 
   // General-purpose helper functions for runtime system.
-  static int StringMatch(Handle<String> sub, Handle<String> pat, int index);
+  static int StringMatch(Isolate* isolate,
+                         Handle<String> sub,
+                         Handle<String> pat,
+                         int index);
 
-  static bool IsUpperCaseChar(uint16_t ch);
+  static bool IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch);
 
   // TODO(1240886): The following three methods are *not* handle safe,
   // but accept handle arguments. This seems fragile.
 
   // Support getting the characters in a string using [] notation as
   // in Firefox/SpiderMonkey, Safari and Opera.
-  MUST_USE_RESULT static MaybeObject* GetElementOrCharAt(Handle<Object> object,
+  MUST_USE_RESULT static MaybeObject* GetElementOrCharAt(Isolate* isolate,
+                                                         Handle<Object> object,
                                                          uint32_t index);
   MUST_USE_RESULT static MaybeObject* GetElement(Handle<Object> object,
                                                  uint32_t index);
 
   MUST_USE_RESULT static MaybeObject* SetObjectProperty(
+      Isolate* isolate,
       Handle<Object> object,
       Handle<Object> key,
       Handle<Object> value,
@@ -556,27 +613,31 @@ class Runtime : public AllStatic {
       StrictModeFlag strict_mode);
 
   MUST_USE_RESULT static MaybeObject* ForceSetObjectProperty(
+      Isolate* isolate,
       Handle<JSObject> object,
       Handle<Object> key,
       Handle<Object> value,
       PropertyAttributes attr);
 
   MUST_USE_RESULT static MaybeObject* ForceDeleteObjectProperty(
+      Isolate* isolate,
       Handle<JSObject> object,
       Handle<Object> key);
 
-  MUST_USE_RESULT static MaybeObject* GetObjectProperty(Handle<Object> object,
-                                                        Handle<Object> key);
+  MUST_USE_RESULT static MaybeObject* GetObjectProperty(
+      Isolate* isolate,
+      Handle<Object> object,
+      Handle<Object> key);
 
   // This function is used in FunctionNameUsing* tests.
-  static Object* FindSharedFunctionInfoInScript(Handle<Script> script,
+  static Object* FindSharedFunctionInfoInScript(Isolate* isolate,
+                                                Handle<Script> script,
                                                 int position);
 
   // Helper functions used by stubs.
   static void PerformGC(Object* result);
 };
 
-
 } }  // namespace v8::internal
 
 #endif  // V8_RUNTIME_H_
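The new RuntimeState class above gathers what used to be file-level statics in runtime.cc (string input buffers, case-mapping tables, the smi lexicographic-compare scratch arrays) into a single object that only the Isolate can construct. The Runtime::IsUpperCaseChar declaration now takes a RuntimeState*; one plausible body for it, written only to show how these accessors are meant to be used (the real definition is in runtime.cc, outside this excerpt):

  // Illustrative only: the actual definition is not part of this excerpt.
  bool Runtime::IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch) {
    unibrow::uchar chars[unibrow::ToUppercase::kMaxWidth];
    // If mapping the character to upper case produces nothing, it already
    // is upper case (or has no case at all).
    int char_length = runtime_state->to_upper_mapping()->get(ch, 0, chars);
    return char_length == 0;
  }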
index d2ec54c38874c9782c35f0ca9663482024787318..28cf6e64c9d8ff320c834c425ccba3319dc66aa0 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "v8.h"
+
 #include "safepoint-table.h"
 
 #include "deoptimizer.h"
 #include "disasm.h"
 #include "macro-assembler.h"
+#include "zone-inl.h"
 
 namespace v8 {
 namespace internal {
index 2cb19942c4daf33b9eeae941b7d92601bd67d677..08450ea36c85a4cebf4fc9488fd5abcf1e621b77 100644 (file)
 #ifndef V8_SAFEPOINT_TABLE_H_
 #define V8_SAFEPOINT_TABLE_H_
 
-#include "v8.h"
-
 #include "heap.h"
 #include "zone.h"
-#include "zone-inl.h"
 
 namespace v8 {
 namespace internal {
index 80bca4e28260253d62b6aa49b2b403cab81df32b..324a0e02e5905627b4e7791659b1148bddeb3241 100644 (file)
 
 // Features shared by parsing and pre-parsing scanners.
 
+#include "v8.h"
+
+/*
+TODO(isolates): I include v8.h instead of these because we need Isolate and
+some classes (NativeAllocationChecker) have been moved into isolate.h.
 #include "../include/v8stdint.h"
+*/
 #include "scanner-base.h"
 #include "char-predicates-inl.h"
 
@@ -35,16 +41,6 @@ namespace v8 {
 namespace internal {
 
 // ----------------------------------------------------------------------------
-// Character predicates
-
-unibrow::Predicate<IdentifierStart, 128> ScannerConstants::kIsIdentifierStart;
-unibrow::Predicate<IdentifierPart, 128> ScannerConstants::kIsIdentifierPart;
-unibrow::Predicate<unibrow::WhiteSpace, 128> ScannerConstants::kIsWhiteSpace;
-unibrow::Predicate<unibrow::LineTerminator, 128>
-  ScannerConstants::kIsLineTerminator;
-
-StaticResource<ScannerConstants::Utf8Decoder> ScannerConstants::utf8_decoder_;
-
 // Compound predicates.
 
 bool ScannerConstants::IsIdentifier(unibrow::CharacterStream* buffer) {
@@ -64,8 +60,10 @@ bool ScannerConstants::IsIdentifier(unibrow::CharacterStream* buffer) {
 // ----------------------------------------------------------------------------
 // Scanner
 
-Scanner::Scanner()
-  : octal_pos_(kNoOctalLocation) { }
+Scanner::Scanner(Isolate* isolate)
+    : scanner_constants_(isolate->scanner_constants()),
+      octal_pos_(kNoOctalLocation) {
+}
 
 
 uc32 Scanner::ScanHexEscape(uc32 c, int length) {
@@ -122,7 +120,7 @@ uc32 Scanner::ScanOctalEscape(uc32 c, int length) {
 // ----------------------------------------------------------------------------
 // JavaScriptScanner
 
-JavaScriptScanner::JavaScriptScanner() : Scanner() {}
+JavaScriptScanner::JavaScriptScanner(Isolate* isolate) : Scanner(isolate) {}
 
 
 Token::Value JavaScriptScanner::Next() {
@@ -151,9 +149,9 @@ bool JavaScriptScanner::SkipWhiteSpace() {
   while (true) {
     // We treat byte-order marks (BOMs) as whitespace for better
     // compatibility with Spidermonkey and other JavaScript engines.
-    while (ScannerConstants::kIsWhiteSpace.get(c0_) || IsByteOrderMark(c0_)) {
+    while (scanner_constants_->IsWhiteSpace(c0_) || IsByteOrderMark(c0_)) {
       // IsWhiteSpace() includes line terminators!
-      if (ScannerConstants::kIsLineTerminator.get(c0_)) {
+      if (scanner_constants_->IsLineTerminator(c0_)) {
         // Ignore line terminators, but remember them. This is necessary
         // for automatic semicolon insertion.
         has_line_terminator_before_next_ = true;
@@ -193,7 +191,7 @@ Token::Value JavaScriptScanner::SkipSingleLineComment() {
   // separately by the lexical grammar and becomes part of the
   // stream of input elements for the syntactic grammar (see
   // ECMA-262, section 7.4, page 12).
-  while (c0_ >= 0 && !ScannerConstants::kIsLineTerminator.get(c0_)) {
+  while (c0_ >= 0 && !scanner_constants_->IsLineTerminator(c0_)) {
     Advance();
   }
 
@@ -458,7 +456,7 @@ void JavaScriptScanner::Scan() {
         break;
 
       default:
-        if (ScannerConstants::kIsIdentifierStart.get(c0_)) {
+        if (scanner_constants_->IsIdentifierStart(c0_)) {
           token = ScanIdentifierOrKeyword();
         } else if (IsDecimalDigit(c0_)) {
           token = ScanNumber(false);
@@ -506,7 +504,7 @@ void JavaScriptScanner::ScanEscape() {
   Advance();
 
   // Skip escaped newlines.
-  if (ScannerConstants::kIsLineTerminator.get(c)) {
+  if (scanner_constants_->IsLineTerminator(c)) {
     // Allow CR+LF newlines in multiline string literals.
     if (IsCarriageReturn(c) && IsLineFeed(c0_)) Advance();
     // Allow LF+CR newlines in multiline string literals.
@@ -549,7 +547,7 @@ Token::Value JavaScriptScanner::ScanString() {
 
   LiteralScope literal(this);
   while (c0_ != quote && c0_ >= 0
-         && !ScannerConstants::kIsLineTerminator.get(c0_)) {
+         && !scanner_constants_->IsLineTerminator(c0_)) {
     uc32 c = c0_;
     Advance();
     if (c == '\\') {
@@ -648,7 +646,7 @@ Token::Value JavaScriptScanner::ScanNumber(bool seen_period) {
   // not be an identifier start or a decimal digit; see ECMA-262
   // section 7.8.3, page 17 (note that we read only one decimal digit
   // if the value is 0).
-  if (IsDecimalDigit(c0_) || ScannerConstants::kIsIdentifierStart.get(c0_))
+  if (IsDecimalDigit(c0_) || scanner_constants_->IsIdentifierStart(c0_))
     return Token::ILLEGAL;
 
   literal.Complete();
@@ -670,14 +668,14 @@ uc32 JavaScriptScanner::ScanIdentifierUnicodeEscape() {
 
 
 Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
-  ASSERT(ScannerConstants::kIsIdentifierStart.get(c0_));
+  ASSERT(scanner_constants_->IsIdentifierStart(c0_));
   LiteralScope literal(this);
   KeywordMatcher keyword_match;
   // Scan identifier start character.
   if (c0_ == '\\') {
     uc32 c = ScanIdentifierUnicodeEscape();
     // Only allow legal identifier start characters.
-    if (!ScannerConstants::kIsIdentifierStart.get(c)) return Token::ILLEGAL;
+    if (!scanner_constants_->IsIdentifierStart(c)) return Token::ILLEGAL;
     AddLiteralChar(c);
     return ScanIdentifierSuffix(&literal);
   }
@@ -690,7 +688,7 @@ Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
   }
 
   // Scan the rest of the identifier characters.
-  while (ScannerConstants::kIsIdentifierPart.get(c0_)) {
+  while (scanner_constants_->IsIdentifierPart(c0_)) {
     if (c0_ != '\\') {
       uc32 next_char = c0_;
       Advance();
@@ -708,11 +706,11 @@ Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
 
 Token::Value JavaScriptScanner::ScanIdentifierSuffix(LiteralScope* literal) {
   // Scan the rest of the identifier characters.
-  while (ScannerConstants::kIsIdentifierPart.get(c0_)) {
+  while (scanner_constants_->IsIdentifierPart(c0_)) {
     if (c0_ == '\\') {
       uc32 c = ScanIdentifierUnicodeEscape();
       // Only allow legal identifier part characters.
-      if (!ScannerConstants::kIsIdentifierPart.get(c)) return Token::ILLEGAL;
+      if (!scanner_constants_->IsIdentifierPart(c)) return Token::ILLEGAL;
       AddLiteralChar(c);
     } else {
       AddLiteralChar(c0_);
@@ -742,10 +740,10 @@ bool JavaScriptScanner::ScanRegExpPattern(bool seen_equal) {
     AddLiteralChar('=');
 
   while (c0_ != '/' || in_character_class) {
-    if (ScannerConstants::kIsLineTerminator.get(c0_) || c0_ < 0) return false;
+    if (scanner_constants_->IsLineTerminator(c0_) || c0_ < 0) return false;
     if (c0_ == '\\') {  // Escape sequence.
       AddLiteralCharAdvance();
-      if (ScannerConstants::kIsLineTerminator.get(c0_) || c0_ < 0) return false;
+      if (scanner_constants_->IsLineTerminator(c0_) || c0_ < 0) return false;
       AddLiteralCharAdvance();
       // If the escape allows more characters, i.e., \x??, \u????, or \c?,
       // only "safe" characters are allowed (letters, digits, underscore),
@@ -771,7 +769,7 @@ bool JavaScriptScanner::ScanRegExpPattern(bool seen_equal) {
 bool JavaScriptScanner::ScanRegExpFlags() {
   // Scan regular expression flags.
   LiteralScope literal(this);
-  while (ScannerConstants::kIsIdentifierPart.get(c0_)) {
+  while (scanner_constants_->IsIdentifierPart(c0_)) {
     if (c0_ == '\\') {
       uc32 c = ScanIdentifierUnicodeEscape();
       if (c != static_cast<uc32>(unibrow::Utf8::kBadChar)) {
index f5fe7f7cefe295a33bd2590220c433735d39d64e..7203569770aa7987e0a8b73e1e2322b320c55303 100644 (file)
@@ -119,26 +119,34 @@ class UC16CharacterStream {
 };
 
 
+class ScannerConstants {
 // ---------------------------------------------------------------------
 // Constants used by scanners.
-
-class ScannerConstants : AllStatic {
  public:
   typedef unibrow::Utf8InputBuffer<1024> Utf8Decoder;
 
-  static StaticResource<Utf8Decoder>* utf8_decoder() {
+  StaticResource<Utf8Decoder>* utf8_decoder() {
     return &utf8_decoder_;
   }
 
-  static unibrow::Predicate<IdentifierStart, 128> kIsIdentifierStart;
-  static unibrow::Predicate<IdentifierPart, 128> kIsIdentifierPart;
-  static unibrow::Predicate<unibrow::LineTerminator, 128> kIsLineTerminator;
-  static unibrow::Predicate<unibrow::WhiteSpace, 128> kIsWhiteSpace;
+  bool IsIdentifierStart(unibrow::uchar c) { return kIsIdentifierStart.get(c); }
+  bool IsIdentifierPart(unibrow::uchar c) { return kIsIdentifierPart.get(c); }
+  bool IsLineTerminator(unibrow::uchar c) { return kIsLineTerminator.get(c); }
+  bool IsWhiteSpace(unibrow::uchar c) { return kIsWhiteSpace.get(c); }
 
-  static bool IsIdentifier(unibrow::CharacterStream* buffer);
+  bool IsIdentifier(unibrow::CharacterStream* buffer);
 
  private:
-  static StaticResource<Utf8Decoder> utf8_decoder_;
+  ScannerConstants() {}
+
+  unibrow::Predicate<IdentifierStart, 128> kIsIdentifierStart;
+  unibrow::Predicate<IdentifierPart, 128> kIsIdentifierPart;
+  unibrow::Predicate<unibrow::LineTerminator, 128> kIsLineTerminator;
+  unibrow::Predicate<unibrow::WhiteSpace, 128> kIsWhiteSpace;
+  StaticResource<Utf8Decoder> utf8_decoder_;
+
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(ScannerConstants);
 };
 
 // ----------------------------------------------------------------------------
@@ -238,6 +246,8 @@ class LiteralBuffer {
   bool is_ascii_;
   int position_;
   Vector<byte> backing_store_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiteralBuffer);
 };
 
 
@@ -263,7 +273,7 @@ class Scanner {
     bool complete_;
   };
 
-  Scanner();
+  explicit Scanner(Isolate* isolate);
 
   // Returns the current token again.
   Token::Value current_token() { return current_.token; }
@@ -418,6 +428,8 @@ class Scanner {
     return source_->pos() - kCharacterLookaheadBufferSize;
   }
 
+  ScannerConstants* scanner_constants_;
+
   // Buffers collecting literal strings, numbers, etc.
   LiteralBuffer literal_buffer1_;
   LiteralBuffer literal_buffer2_;
@@ -462,7 +474,7 @@ class JavaScriptScanner : public Scanner {
     bool complete_;
   };
 
-  JavaScriptScanner();
+  explicit JavaScriptScanner(Isolate* isolate);
 
   // Returns the next token.
   Token::Value Next();
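The scanner changes follow the same recipe: the unibrow character-class predicates stop being ScannerConstants statics and become members of a per-isolate ScannerConstants object, which each scanner caches in scanner_constants_ at construction time. A minimal sketch of driving a scanner under the new interface, using only the constructors and methods shown above (ScanAllTokens itself is a hypothetical helper):

  // Sketch only: counts tokens to show the per-isolate wiring.
  static int ScanAllTokens(Isolate* isolate, UC16CharacterStream* stream) {
    V8JavaScriptScanner scanner(isolate);  // constructor now takes the isolate
    scanner.Initialize(stream);
    int tokens = 0;
    while (scanner.Next() != Token::EOS) {
      // Character classification inside Next() now goes through the
      // isolate's ScannerConstants instance rather than static predicates.
      tokens++;
    }
    return tokens;
  }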
index d54d9f91f9a56c4cda311ca5e7647863dbb17bdb..d1520b58248a3f11bdd53ceeb6cc662b8e3ee070 100755 (executable)
@@ -328,8 +328,6 @@ void Scanner::LiteralScope::Complete() {
 // ----------------------------------------------------------------------------
 // V8JavaScriptScanner
 
-V8JavaScriptScanner::V8JavaScriptScanner() : JavaScriptScanner() { }
-
 
 void V8JavaScriptScanner::Initialize(UC16CharacterStream* source) {
   source_ = source;
@@ -347,7 +345,7 @@ void V8JavaScriptScanner::Initialize(UC16CharacterStream* source) {
 // ----------------------------------------------------------------------------
 // JsonScanner
 
-JsonScanner::JsonScanner() : Scanner() { }
+JsonScanner::JsonScanner(Isolate* isolate) : Scanner(isolate) { }
 
 
 void JsonScanner::Initialize(UC16CharacterStream* source) {
@@ -576,11 +574,10 @@ Token::Value JsonScanner::ScanJsonIdentifier(const char* text,
     Advance();
     text++;
   }
-  if (ScannerConstants::kIsIdentifierPart.get(c0_)) return Token::ILLEGAL;
+  if (scanner_constants_->IsIdentifierPart(c0_)) return Token::ILLEGAL;
   literal.Complete();
   return token;
 }
 
 
-
 } }  // namespace v8::internal
index cf2084f55e9002666d571f6dbb096e1e4ccc7b8d..b61a0688ec4ca78ddc9103a6856b866a9633323c 100644 (file)
@@ -134,14 +134,16 @@ class ExternalTwoByteStringUC16CharacterStream: public UC16CharacterStream {
 
 class V8JavaScriptScanner : public JavaScriptScanner {
  public:
-  V8JavaScriptScanner();
+  explicit V8JavaScriptScanner(Isolate* isolate)
+      : JavaScriptScanner(isolate) {}
+
   void Initialize(UC16CharacterStream* source);
 };
 
 
 class JsonScanner : public Scanner {
  public:
-  JsonScanner();
+  explicit JsonScanner(Isolate* isolate);
 
   void Initialize(UC16CharacterStream* source);
 
index e06235af76f712d46b2f87d6cd9508badfe3a3ad..58e2ad280e033ad8d8f2d2ab8f3598612ec9d103 100644 (file)
@@ -50,7 +50,7 @@ static int CompareLocal(Variable* const* v, Variable* const* w) {
 
 template<class Allocator>
 ScopeInfo<Allocator>::ScopeInfo(Scope* scope)
-    : function_name_(Factory::empty_symbol()),
+    : function_name_(FACTORY->empty_symbol()),
       calls_eval_(scope->calls_eval()),
       parameters_(scope->num_parameters()),
       stack_slots_(scope->num_stack_slots()),
@@ -141,7 +141,7 @@ ScopeInfo<Allocator>::ScopeInfo(Scope* scope)
              context_slots_.length());
       ASSERT(var->AsSlot()->index() - Context::MIN_CONTEXT_SLOTS ==
              context_modes_.length());
-      context_slots_.Add(Factory::empty_symbol());
+      context_slots_.Add(FACTORY->empty_symbol());
       context_modes_.Add(Variable::INTERNAL);
     }
   }
@@ -238,7 +238,7 @@ static Object** ReadList(Object** p,
 
 template<class Allocator>
 ScopeInfo<Allocator>::ScopeInfo(SerializedScopeInfo* data)
-  : function_name_(Factory::empty_symbol()),
+  : function_name_(FACTORY->empty_symbol()),
     parameters_(4),
     stack_slots_(8),
     context_slots_(8),
@@ -309,7 +309,7 @@ Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() {
                stack_slots_.length();
 
   Handle<SerializedScopeInfo> data(
-      SerializedScopeInfo::cast(*Factory::NewFixedArray(length, TENURED)));
+      SerializedScopeInfo::cast(*FACTORY->NewFixedArray(length, TENURED)));
   AssertNoAllocation nogc;
 
   Object** p0 = data->data_start();
@@ -357,7 +357,7 @@ Handle<SerializedScopeInfo> SerializedScopeInfo::Create(Scope* scope) {
 
 
 SerializedScopeInfo* SerializedScopeInfo::Empty() {
-  return reinterpret_cast<SerializedScopeInfo*>(Heap::empty_fixed_array());
+  return reinterpret_cast<SerializedScopeInfo*>(HEAP->empty_fixed_array());
 }
 
 
@@ -448,7 +448,8 @@ int SerializedScopeInfo::StackSlotIndex(String* name) {
 
 int SerializedScopeInfo::ContextSlotIndex(String* name, Variable::Mode* mode) {
   ASSERT(name->IsSymbol());
-  int result = ContextSlotCache::Lookup(this, name, mode);
+  Isolate* isolate = GetIsolate();
+  int result = isolate->context_slot_cache()->Lookup(this, name, mode);
   if (result != ContextSlotCache::kNotFound) return result;
   if (length() > 0) {
     // Slots start after length entry.
@@ -465,13 +466,13 @@ int SerializedScopeInfo::ContextSlotIndex(String* name, Variable::Mode* mode) {
         Variable::Mode mode_value = static_cast<Variable::Mode>(v);
         if (mode != NULL) *mode = mode_value;
         result = static_cast<int>((p - p0) >> 1) + Context::MIN_CONTEXT_SLOTS;
-        ContextSlotCache::Update(this, name, mode_value, result);
+        isolate->context_slot_cache()->Update(this, name, mode_value, result);
         return result;
       }
       p += 2;
     }
   }
-  ContextSlotCache::Update(this, name, Variable::INTERNAL, -1);
+  isolate->context_slot_cache()->Update(this, name, Variable::INTERNAL, -1);
   return -1;
 }
 
@@ -547,7 +548,7 @@ void ContextSlotCache::Update(Object* data,
                               int slot_index) {
   String* symbol;
   ASSERT(slot_index > kNotFound);
-  if (Heap::LookupSymbolIfExists(name, &symbol)) {
+  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
     int index = Hash(data, symbol);
     Key& key = keys_[index];
     key.data = data;
@@ -566,12 +567,6 @@ void ContextSlotCache::Clear() {
 }
 
 
-ContextSlotCache::Key ContextSlotCache::keys_[ContextSlotCache::kLength];
-
-
-uint32_t ContextSlotCache::values_[ContextSlotCache::kLength];
-
-
 #ifdef DEBUG
 
 void ContextSlotCache::ValidateEntry(Object* data,
@@ -579,7 +574,7 @@ void ContextSlotCache::ValidateEntry(Object* data,
                                      Variable::Mode mode,
                                      int slot_index) {
   String* symbol;
-  if (Heap::LookupSymbolIfExists(name, &symbol)) {
+  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
     int index = Hash(data, name);
     Key& key = keys_[index];
     ASSERT(key.data == data);
index dd49a4e08250405a756d155ac4dd39c3dfe184f9..cc9f8165a260eecf57260f644327c26e956285f3 100644 (file)
@@ -114,7 +114,7 @@ class SerializedScopeInfo : public FixedArray {
 
   // Does this scope have an arguments shadow?
   bool HasArgumentsShadow() {
-    return StackSlotIndex(Heap::arguments_shadow_symbol()) >= 0;
+    return StackSlotIndex(GetHeap()->arguments_shadow_symbol()) >= 0;
   }
 
   // Return the number of stack slots for code.
@@ -173,28 +173,36 @@ class ContextSlotCache {
  public:
   // Lookup context slot index for (data, name).
   // If absent, kNotFound is returned.
-  static int Lookup(Object* data,
-                    String* name,
-                    Variable::Mode* mode);
+  int Lookup(Object* data,
+             String* name,
+             Variable::Mode* mode);
 
   // Update an element in the cache.
-  static void Update(Object* data,
-                     String* name,
-                     Variable::Mode mode,
-                     int slot_index);
+  void Update(Object* data,
+              String* name,
+              Variable::Mode mode,
+              int slot_index);
 
   // Clear the cache.
-  static void Clear();
+  void Clear();
 
   static const int kNotFound = -2;
  private:
+  ContextSlotCache() {
+    for (int i = 0; i < kLength; ++i) {
+      keys_[i].data = NULL;
+      keys_[i].name = NULL;
+      values_[i] = kNotFound;
+    }
+  }
+
   inline static int Hash(Object* data, String* name);
 
 #ifdef DEBUG
-  static void ValidateEntry(Object* data,
-                            String* name,
-                            Variable::Mode mode,
-                            int slot_index);
+  void ValidateEntry(Object* data,
+                     String* name,
+                     Variable::Mode mode,
+                     int slot_index);
 #endif
 
   static const int kLength = 256;
@@ -228,8 +236,11 @@ class ContextSlotCache {
     uint32_t value_;
   };
 
-  static Key keys_[kLength];
-  static uint32_t values_[kLength];
+  Key keys_[kLength];
+  uint32_t values_[kLength];
+
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(ContextSlotCache);
 };
 
 
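With the static keys_/values_ arrays removed, ContextSlotCache becomes ordinary per-isolate state, and callers reach it through isolate->context_slot_cache() exactly as the ContextSlotIndex hunk in scopeinfo.cc above does. A condensed sketch of that lookup/update protocol (FindSlotSlowly is a hypothetical stand-in for the scan over the serialized scope info):

  // Sketch mirroring SerializedScopeInfo::ContextSlotIndex above.
  static int FindSlotSlowly(Object* data, String* name,
                            Variable::Mode* mode);  // hypothetical slow path
  static int CachedContextSlotIndex(Isolate* isolate, Object* data,
                                    String* name, Variable::Mode* mode) {
    ContextSlotCache* cache = isolate->context_slot_cache();
    int result = cache->Lookup(data, name, mode);
    if (result != ContextSlotCache::kNotFound) return result;  // cache hit
    Variable::Mode found = Variable::INTERNAL;
    result = FindSlotSlowly(data, name, &found);
    if (mode != NULL) *mode = found;
    cache->Update(data, name, found, result);  // remember for next time
    return result;
  }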
index cffbef6d1f8455bd89eb1caabde85d669f8d5331..f4bcaa8fba6d8c2750227a74199e2e3100875f5f 100644 (file)
@@ -40,12 +40,14 @@ namespace internal {
 // ----------------------------------------------------------------------------
 // A Zone allocator for use with LocalsMap.
 
+// TODO(isolates): It is probably worth it to change the Allocator class to
+//                 take a pointer to an isolate.
 class ZoneAllocator: public Allocator {
  public:
   /* nothing to do */
   virtual ~ZoneAllocator()  {}
 
-  virtual void* New(size_t size)  { return Zone::New(static_cast<int>(size)); }
+  virtual void* New(size_t size)  { return ZONE->New(static_cast<int>(size)); }
 
   /* ignored - Zone is freed in one fell swoop */
   virtual void Delete(void* p)  {}
@@ -159,16 +161,19 @@ Scope::Scope(Scope* inner_scope, SerializedScopeInfo* scope_info)
   // This scope's arguments shadow (if present) is context-allocated if an inner
   // scope accesses this one's parameters.  Allocate the arguments_shadow_
   // variable if necessary.
+  Isolate* isolate = Isolate::Current();
   Variable::Mode mode;
   int arguments_shadow_index =
-      scope_info_->ContextSlotIndex(Heap::arguments_shadow_symbol(), &mode);
+      scope_info_->ContextSlotIndex(
+          isolate->heap()->arguments_shadow_symbol(), &mode);
   if (arguments_shadow_index >= 0) {
     ASSERT(mode == Variable::INTERNAL);
-    arguments_shadow_ = new Variable(this,
-                                     Factory::arguments_shadow_symbol(),
-                                     Variable::INTERNAL,
-                                     true,
-                                     Variable::ARGUMENTS);
+    arguments_shadow_ = new Variable(
+        this,
+        isolate->factory()->arguments_shadow_symbol(),
+        Variable::INTERNAL,
+        true,
+        Variable::ARGUMENTS);
     arguments_shadow_->set_rewrite(
         new Slot(arguments_shadow_, Slot::CONTEXT, arguments_shadow_index));
     arguments_shadow_->set_is_used(true);
@@ -213,7 +218,7 @@ bool Scope::Analyze(CompilationInfo* info) {
   top->AllocateVariables(info->calling_context());
 
 #ifdef DEBUG
-  if (Bootstrapper::IsActive()
+  if (info->isolate()->bootstrapper()->IsActive()
           ? FLAG_print_builtin_scopes
           : FLAG_print_scopes) {
     info->function()->scope()->Print();
@@ -245,7 +250,7 @@ void Scope::Initialize(bool inside_with) {
   // such parameter is 'this' which is passed on the stack when
   // invoking scripts
   Variable* var =
-      variables_.Declare(this, Factory::this_symbol(), Variable::VAR,
+      variables_.Declare(this, FACTORY->this_symbol(), Variable::VAR,
                          false, Variable::THIS);
   var->set_rewrite(new Slot(var, Slot::PARAMETER, -1));
   receiver_ = var;
@@ -254,7 +259,7 @@ void Scope::Initialize(bool inside_with) {
     // Declare 'arguments' variable which exists in all functions.
     // Note that it might never be accessed, in which case it won't be
     // allocated during variable allocation.
-    variables_.Declare(this, Factory::arguments_symbol(), Variable::VAR,
+    variables_.Declare(this, FACTORY->arguments_symbol(), Variable::VAR,
                        true, Variable::ARGUMENTS);
   }
 }
@@ -269,7 +274,7 @@ Variable* Scope::LocalLookup(Handle<String> name) {
 
   // We should never look up 'arguments' in this scope
   // as it is implicitly present in any scope.
-  ASSERT(*name != *Factory::arguments_symbol());
+  ASSERT(*name != *FACTORY->arguments_symbol());
 
   // Assert that there is no local slot with the given name.
   ASSERT(scope_info_->StackSlotIndex(*name) < 0);
@@ -868,7 +873,7 @@ bool Scope::MustAllocateInContext(Variable* var) {
 
 bool Scope::HasArgumentsParameter() {
   for (int i = 0; i < params_.length(); i++) {
-    if (params_[i]->name().is_identical_to(Factory::arguments_symbol()))
+    if (params_[i]->name().is_identical_to(FACTORY->arguments_symbol()))
       return true;
   }
   return false;
@@ -887,7 +892,7 @@ void Scope::AllocateHeapSlot(Variable* var) {
 
 void Scope::AllocateParameterLocals() {
   ASSERT(is_function_scope());
-  Variable* arguments = LocalLookup(Factory::arguments_symbol());
+  Variable* arguments = LocalLookup(FACTORY->arguments_symbol());
   ASSERT(arguments != NULL);  // functions have 'arguments' declared implicitly
 
   // Parameters are rewritten to arguments[i] if 'arguments' is used in
@@ -942,7 +947,7 @@ void Scope::AllocateParameterLocals() {
     // variable may be allocated in the heap-allocated context (temporaries
     // are never allocated in the context).
     arguments_shadow_ = new Variable(this,
-                                     Factory::arguments_shadow_symbol(),
+                                     FACTORY->arguments_shadow_symbol(),
                                      Variable::INTERNAL,
                                      true,
                                      Variable::ARGUMENTS);
@@ -1008,7 +1013,7 @@ void Scope::AllocateParameterLocals() {
 void Scope::AllocateNonParameterLocal(Variable* var) {
   ASSERT(var->scope() == this);
   ASSERT(var->rewrite() == NULL ||
-         (!var->IsVariable(Factory::result_symbol())) ||
+         (!var->IsVariable(FACTORY->result_symbol())) ||
          (var->AsSlot() == NULL || var->AsSlot()->type() != Slot::LOCAL));
   if (var->rewrite() == NULL && MustAllocate(var)) {
     if (MustAllocateInContext(var)) {
index 10994df9c855cf503fd7e8bcb86580d41916ac47..24622b4b46bcadda8248afd34aea2c20c8eafef3 100644 (file)
@@ -228,7 +228,7 @@ class Scope: public ZoneObject {
   // A new variable proxy corresponding to the (function) receiver.
   VariableProxy* receiver() const {
     VariableProxy* proxy =
-        new VariableProxy(Factory::this_symbol(), true, false);
+        new VariableProxy(FACTORY->this_symbol(), true, false);
     proxy->BindTo(receiver_);
     return proxy;
   }
@@ -426,7 +426,7 @@ class Scope: public ZoneObject {
                    SerializedScopeInfo* scope_info) {
     outer_scope_ = outer_scope;
     type_ = type;
-    scope_name_ = Factory::empty_symbol();
+    scope_name_ = FACTORY->empty_symbol();
     dynamics_ = NULL;
     receiver_ = NULL;
     function_ = NULL;
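The upper-case HEAP, FACTORY and ZONE names used throughout scopeinfo.cc, scopes.cc and scopes.h are no longer class names; they read as shorthands for the current isolate's components, for call sites that have no Isolate* readily at hand. Their definitions are not part of this excerpt; a sketch under that assumption:

  // Assumed shape of the shortcuts (the real definitions are elsewhere,
  // presumably in isolate.h):
  // #define HEAP    (v8::internal::Isolate::Current()->heap())
  // #define FACTORY (v8::internal::Isolate::Current()->factory())
  // #define ZONE    (v8::internal::Isolate::Current()->zone())

  static void ShortcutExample() {
    // Under that assumption the two handles below refer to the same symbol.
    Handle<String> a = FACTORY->empty_symbol();
    Handle<String> b = Isolate::Current()->factory()->empty_symbol();
    ASSERT(a.is_identical_to(b));
  }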
index 16d27598c6b4b6c11a6170cd2be790f1bc3c32c1..acf13fb770e46ff613a9f5ee7592a08413c4e936 100644 (file)
@@ -38,7 +38,6 @@
 #include "serialize.h"
 #include "stub-cache.h"
 #include "v8threads.h"
-#include "top.h"
 #include "bootstrapper.h"
 
 namespace v8 {
@@ -68,9 +67,14 @@ static int* GetInternalPointer(StatsCounter* counter) {
 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
 class ExternalReferenceTable {
  public:
-  static ExternalReferenceTable* instance() {
-    if (!instance_) instance_ = new ExternalReferenceTable();
-    return instance_;
+  static ExternalReferenceTable* instance(Isolate* isolate) {
+    ExternalReferenceTable* external_reference_table =
+        isolate->external_reference_table();
+    if (external_reference_table == NULL) {
+      external_reference_table = new ExternalReferenceTable(isolate);
+      isolate->set_external_reference_table(external_reference_table);
+    }
+    return external_reference_table;
   }
 
   int size() const { return refs_.length(); }
@@ -84,9 +88,9 @@ class ExternalReferenceTable {
   int max_id(int code) { return max_id_[code]; }
 
  private:
-  static ExternalReferenceTable* instance_;
-
-  ExternalReferenceTable() : refs_(64) { PopulateTable(); }
+  explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) {
+    PopulateTable(isolate);
+  }
   ~ExternalReferenceTable() { }
 
   struct ExternalReferenceEntry {
@@ -95,7 +99,7 @@ class ExternalReferenceTable {
     const char* name;
   };
 
-  void PopulateTable();
+  void PopulateTable(Isolate* isolate);
 
   // For a few types of references, we can get their address from their id.
   void AddFromId(TypeCode type, uint16_t id, const char* name);
@@ -108,9 +112,6 @@ class ExternalReferenceTable {
 };
 
 
-ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL;
-
-
 void ExternalReferenceTable::AddFromId(TypeCode type,
                                        uint16_t id,
                                        const char* name) {
@@ -159,7 +160,7 @@ void ExternalReferenceTable::Add(Address address,
 }
 
 
-void ExternalReferenceTable::PopulateTable() {
+void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
   for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
     max_id_[type_code] = 0;
   }
@@ -225,19 +226,19 @@ void ExternalReferenceTable::PopulateTable() {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Debug addresses
-  Add(Debug_Address(Debug::k_after_break_target_address).address(),
+  Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
       DEBUG_ADDRESS,
       Debug::k_after_break_target_address << kDebugIdShift,
       "Debug::after_break_target_address()");
-  Add(Debug_Address(Debug::k_debug_break_slot_address).address(),
+  Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
       DEBUG_ADDRESS,
       Debug::k_debug_break_slot_address << kDebugIdShift,
       "Debug::debug_break_slot_address()");
-  Add(Debug_Address(Debug::k_debug_break_return_address).address(),
+  Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
       DEBUG_ADDRESS,
       Debug::k_debug_break_return_address << kDebugIdShift,
       "Debug::debug_break_return_address()");
-  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(),
+  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
       DEBUG_ADDRESS,
       Debug::k_restarter_frame_function_pointer << kDebugIdShift,
       "Debug::restarter_frame_function_pointer_address()");
@@ -245,14 +246,14 @@ void ExternalReferenceTable::PopulateTable() {
 
   // Stat counters
   struct StatsRefTableEntry {
-    StatsCounter* counter;
+    StatsCounter* (Counters::*counter)();
     uint16_t id;
     const char* name;
   };
 
-  static const StatsRefTableEntry stats_ref_table[] = {
+  const StatsRefTableEntry stats_ref_table[] = {
 #define COUNTER_ENTRY(name, caption) \
-  { &Counters::name, \
+  { &Counters::name,    \
     Counters::k_##name, \
     "Counters::" #name },
 
@@ -261,33 +262,28 @@ void ExternalReferenceTable::PopulateTable() {
 #undef COUNTER_ENTRY
   };  // end of stats_ref_table[].
 
+  Counters* counters = isolate->counters();
   for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
-    Add(reinterpret_cast<Address>(
-            GetInternalPointer(stats_ref_table[i].counter)),
+    Add(reinterpret_cast<Address>(GetInternalPointer(
+            (counters->*(stats_ref_table[i].counter))())),
         STATS_COUNTER,
         stats_ref_table[i].id,
         stats_ref_table[i].name);
   }
 
   // Top addresses
-  const char* top_address_format = "Top::%s";
 
   const char* AddressNames[] = {
-#define C(name) #name,
-    TOP_ADDRESS_LIST(C)
-    TOP_ADDRESS_LIST_PROF(C)
+#define C(name) "Isolate::" #name,
+    ISOLATE_ADDRESS_LIST(C)
+    ISOLATE_ADDRESS_LIST_PROF(C)
     NULL
 #undef C
   };
 
-  int top_format_length = StrLength(top_address_format) - 2;
-  for (uint16_t i = 0; i < Top::k_top_address_count; ++i) {
-    const char* address_name = AddressNames[i];
-    Vector<char> name =
-        Vector<char>::New(top_format_length + StrLength(address_name) + 1);
-    const char* chars = name.start();
-    OS::SNPrintF(name, top_address_format, address_name);
-    Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
+  for (uint16_t i = 0; i < Isolate::k_isolate_address_count; ++i) {
+    Add(isolate->get_address_from_id((Isolate::AddressId)i),
+        TOP_ADDRESS, i, AddressNames[i]);
   }
 
   // Accessors
@@ -300,20 +296,22 @@ void ExternalReferenceTable::PopulateTable() {
   ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
 #undef ACCESSOR_DESCRIPTOR_DECLARATION
 
+  StubCache* stub_cache = isolate->stub_cache();
+
   // Stub cache tables
-  Add(SCTableReference::keyReference(StubCache::kPrimary).address(),
+  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
       STUB_CACHE_TABLE,
       1,
       "StubCache::primary_->key");
-  Add(SCTableReference::valueReference(StubCache::kPrimary).address(),
+  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
       STUB_CACHE_TABLE,
       2,
       "StubCache::primary_->value");
-  Add(SCTableReference::keyReference(StubCache::kSecondary).address(),
+  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
       STUB_CACHE_TABLE,
       3,
       "StubCache::secondary_->key");
-  Add(SCTableReference::valueReference(StubCache::kSecondary).address(),
+  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
       STUB_CACHE_TABLE,
       4,
       "StubCache::secondary_->value");
@@ -327,12 +325,10 @@ void ExternalReferenceTable::PopulateTable() {
       RUNTIME_ENTRY,
       2,
       "V8::FillHeapNumberWithRandom");
-
   Add(ExternalReference::random_uint32_function().address(),
       RUNTIME_ENTRY,
       3,
       "V8::Random");
-
   Add(ExternalReference::delete_handle_scope_extensions().address(),
       RUNTIME_ENTRY,
       4,
@@ -486,33 +482,38 @@ void ExternalReferenceTable::PopulateTable() {
       UNCLASSIFIED,
       36,
       "LDoubleConstant::one_half");
-  Add(ExternalReference::address_of_minus_zero().address(),
+  Add(ExternalReference::isolate_address().address(),
       UNCLASSIFIED,
       37,
+      "isolate");
+  Add(ExternalReference::address_of_minus_zero().address(),
+      UNCLASSIFIED,
+      38,
       "LDoubleConstant::minus_zero");
   Add(ExternalReference::address_of_negative_infinity().address(),
       UNCLASSIFIED,
-      38,
+      39,
       "LDoubleConstant::negative_infinity");
   Add(ExternalReference::power_double_double_function().address(),
       UNCLASSIFIED,
-      39,
+      40,
       "power_double_double_function");
   Add(ExternalReference::power_double_int_function().address(),
       UNCLASSIFIED,
-      40,
+      41,
       "power_double_int_function");
   Add(ExternalReference::arguments_marker_location().address(),
       UNCLASSIFIED,
-      41,
+      42,
       "Factory::arguments_marker().location()");
 }
 
 
 ExternalReferenceEncoder::ExternalReferenceEncoder()
-    : encodings_(Match) {
+    : encodings_(Match),
+      isolate_(Isolate::Current()) {
   ExternalReferenceTable* external_references =
-      ExternalReferenceTable::instance();
+      ExternalReferenceTable::instance(isolate_);
   for (int i = 0; i < external_references->size(); ++i) {
     Put(external_references->address(i), i);
   }
@@ -522,20 +523,22 @@ ExternalReferenceEncoder::ExternalReferenceEncoder()
 uint32_t ExternalReferenceEncoder::Encode(Address key) const {
   int index = IndexOf(key);
   ASSERT(key == NULL || index >= 0);
-  return index >=0 ? ExternalReferenceTable::instance()->code(index) : 0;
+  return index >=0 ?
+         ExternalReferenceTable::instance(isolate_)->code(index) : 0;
 }
 
 
 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
   int index = IndexOf(key);
-  return index >=0 ? ExternalReferenceTable::instance()->name(index) : NULL;
+  return index >= 0 ?
+      ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
 }
 
 
 int ExternalReferenceEncoder::IndexOf(Address key) const {
   if (key == NULL) return -1;
   HashMap::Entry* entry =
-      const_cast<HashMap &>(encodings_).Lookup(key, Hash(key), false);
+      const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
   return entry == NULL
       ? -1
       : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
@@ -549,9 +552,10 @@ void ExternalReferenceEncoder::Put(Address key, int index) {
 
 
 ExternalReferenceDecoder::ExternalReferenceDecoder()
-    : encodings_(NewArray<Address*>(kTypeCodeCount)) {
+    : encodings_(NewArray<Address*>(kTypeCodeCount)),
+      isolate_(Isolate::Current()) {
   ExternalReferenceTable* external_references =
-      ExternalReferenceTable::instance();
+      ExternalReferenceTable::instance(isolate_);
   for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
     int max = external_references->max_id(type) + 1;
     encodings_[type] = NewArray<Address>(max + 1);
@@ -572,10 +576,12 @@ ExternalReferenceDecoder::~ExternalReferenceDecoder() {
 
 bool Serializer::serialization_enabled_ = false;
 bool Serializer::too_late_to_enable_now_ = false;
-ExternalReferenceDecoder* Deserializer::external_reference_decoder_ = NULL;
 
 
-Deserializer::Deserializer(SnapshotByteSource* source) : source_(source) {
+Deserializer::Deserializer(SnapshotByteSource* source)
+    : isolate_(NULL),
+      source_(source),
+      external_reference_decoder_(NULL) {
 }
 
 
@@ -654,27 +660,31 @@ HeapObject* Deserializer::GetAddressFromStart(int space) {
 
 
 void Deserializer::Deserialize() {
+  isolate_ = Isolate::Current();
   // Don't GC while deserializing - just expand the heap.
   AlwaysAllocateScope always_allocate;
   // Don't use the free lists while deserializing.
   LinearAllocationScope allocate_linearly;
   // No active threads.
-  ASSERT_EQ(NULL, ThreadState::FirstInUse());
+  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
   // No active handles.
-  ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty());
+  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
   // Make sure the entire partial snapshot cache is traversed, filling it with
   // valid object pointers.
-  partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity;
+  isolate_->set_serialize_partial_snapshot_cache_length(
+      Isolate::kPartialSnapshotCacheCapacity);
   ASSERT_EQ(NULL, external_reference_decoder_);
   external_reference_decoder_ = new ExternalReferenceDecoder();
-  Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG);
-  Heap::IterateWeakRoots(this, VISIT_ALL);
+  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
+  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
 
-  Heap::set_global_contexts_list(Heap::undefined_value());
+  isolate_->heap()->set_global_contexts_list(
+      isolate_->heap()->undefined_value());
 }
 
 
 void Deserializer::DeserializePartial(Object** root) {
+  isolate_ = Isolate::Current();
   // Don't GC while deserializing - just expand the heap.
   AlwaysAllocateScope always_allocate;
   // Don't use the free lists while deserializing.
@@ -688,7 +698,7 @@ void Deserializer::DeserializePartial(Object** root) {
 
 Deserializer::~Deserializer() {
   ASSERT(source_->AtEOF());
-  if (external_reference_decoder_ != NULL) {
+  if (external_reference_decoder_) {
     delete external_reference_decoder_;
     external_reference_decoder_ = NULL;
   }
@@ -719,12 +729,12 @@ void Deserializer::ReadObject(int space_number,
   Object** current = reinterpret_cast<Object**>(address);
   Object** limit = current + (size >> kPointerSizeLog2);
   if (FLAG_log_snapshot_positions) {
-    LOG(SnapshotPositionEvent(address, source_->position()));
+    LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
 #ifdef DEBUG
-  bool is_codespace = (space == Heap::code_space()) ||
-      ((space == Heap::lo_space()) && (space_number == kLargeCode));
+  bool is_codespace = (space == HEAP->code_space()) ||
+      ((space == HEAP->lo_space()) && (space_number == kLargeCode));
   ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
 #endif
 }
@@ -736,20 +746,20 @@ void Deserializer::ReadObject(int space_number,
 #define ASSIGN_DEST_SPACE(space_number)                                        \
   Space* dest_space;                                                           \
   if (space_number == NEW_SPACE) {                                             \
-    dest_space = Heap::new_space();                                            \
+    dest_space = isolate->heap()->new_space();                                \
   } else if (space_number == OLD_POINTER_SPACE) {                              \
-    dest_space = Heap::old_pointer_space();                                    \
+    dest_space = isolate->heap()->old_pointer_space();                         \
   } else if (space_number == OLD_DATA_SPACE) {                                 \
-    dest_space = Heap::old_data_space();                                       \
+    dest_space = isolate->heap()->old_data_space();                            \
   } else if (space_number == CODE_SPACE) {                                     \
-    dest_space = Heap::code_space();                                           \
+    dest_space = isolate->heap()->code_space();                                \
   } else if (space_number == MAP_SPACE) {                                      \
-    dest_space = Heap::map_space();                                            \
+    dest_space = isolate->heap()->map_space();                                 \
   } else if (space_number == CELL_SPACE) {                                     \
-    dest_space = Heap::cell_space();                                           \
+    dest_space = isolate->heap()->cell_space();                                \
   } else {                                                                     \
     ASSERT(space_number >= LO_SPACE);                                          \
-    dest_space = Heap::lo_space();                                             \
+    dest_space = isolate->heap()->lo_space();                                  \
   }
 
 
@@ -760,6 +770,7 @@ void Deserializer::ReadChunk(Object** current,
                              Object** limit,
                              int source_space,
                              Address address) {
+  Isolate* const isolate = isolate_;
   while (current < limit) {
     int data = source_->Get();
     switch (data) {
@@ -788,14 +799,15 @@ void Deserializer::ReadChunk(Object** current,
             ReadObject(space_number, dest_space, &new_object);                 \
           } else if (where == kRootArray) {                                    \
             int root_id = source_->GetInt();                                   \
-            new_object = Heap::roots_address()[root_id];                       \
+            new_object = isolate->heap()->roots_address()[root_id];            \
           } else if (where == kPartialSnapshotCache) {                         \
             int cache_index = source_->GetInt();                               \
-            new_object = partial_snapshot_cache_[cache_index];                 \
+            new_object = isolate->serialize_partial_snapshot_cache()           \
+                [cache_index];                                                 \
           } else if (where == kExternalReference) {                            \
             int reference_id = source_->GetInt();                              \
-            Address address =                                                  \
-                external_reference_decoder_->Decode(reference_id);             \
+            Address address = external_reference_decoder_->                    \
+                Decode(reference_id);                                          \
             new_object = reinterpret_cast<Object*>(address);                   \
           } else if (where == kBackref) {                                      \
             emit_write_barrier =                                               \
@@ -833,7 +845,7 @@ void Deserializer::ReadChunk(Object** current,
           }                                                                    \
         }                                                                      \
         if (emit_write_barrier) {                                              \
-          Heap::RecordWrite(address, static_cast<int>(                         \
+          isolate->heap()->RecordWrite(address, static_cast<int>(              \
               reinterpret_cast<Address>(current) - address));                  \
         }                                                                      \
         if (!current_was_incremented) {                                        \
@@ -997,7 +1009,8 @@ void Deserializer::ReadChunk(Object** current,
         int index = source_->Get();
         Vector<const char> source_vector = Natives::GetScriptSource(index);
         NativesExternalStringResource* resource =
-            new NativesExternalStringResource(source_vector.start());
+            new NativesExternalStringResource(
+                isolate->bootstrapper(), source_vector.start());
         *current++ = reinterpret_cast<Object*>(resource);
         break;
       }
@@ -1062,6 +1075,9 @@ Serializer::Serializer(SnapshotByteSink* sink)
       current_root_index_(0),
       external_reference_encoder_(new ExternalReferenceEncoder),
       large_object_total_(0) {
+  // The serializer is meant to be used only to generate initial heap images
+  // from a context in which there is only one isolate.
+  ASSERT(Isolate::Current()->IsDefaultIsolate());
   for (int i = 0; i <= LAST_SPACE; i++) {
     fullness_[i] = 0;
   }
@@ -1074,35 +1090,40 @@ Serializer::~Serializer() {
 
 
 void StartupSerializer::SerializeStrongReferences() {
+  Isolate* isolate = Isolate::Current();
   // No active threads.
-  CHECK_EQ(NULL, ThreadState::FirstInUse());
+  CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse());
   // No active or weak handles.
-  CHECK(HandleScopeImplementer::instance()->blocks()->is_empty());
-  CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles());
+  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
+  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
   // We don't support serializing installed extensions.
-  for (RegisteredExtension* ext = RegisteredExtension::first_extension();
+  for (RegisteredExtension* ext = v8::RegisteredExtension::first_extension();
        ext != NULL;
        ext = ext->next()) {
     CHECK_NE(v8::INSTALLED, ext->state());
   }
-  Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG);
+  HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
 }
 
 
 void PartialSerializer::Serialize(Object** object) {
   this->VisitPointer(object);
+  Isolate* isolate = Isolate::Current();
 
   // After we have done the partial serialization the partial snapshot cache
   // will contain some references needed to decode the partial snapshot.  We
   // fill it up with undefineds so it has a predictable length so the
   // deserialization code doesn't need to know the length.
-  for (int index = partial_snapshot_cache_length_;
-       index < kPartialSnapshotCacheCapacity;
+  for (int index = isolate->serialize_partial_snapshot_cache_length();
+       index < Isolate::kPartialSnapshotCacheCapacity;
        index++) {
-    partial_snapshot_cache_[index] = Heap::undefined_value();
-    startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]);
+    isolate->serialize_partial_snapshot_cache()[index] =
+        isolate->heap()->undefined_value();
+    startup_serializer_->VisitPointer(
+        &isolate->serialize_partial_snapshot_cache()[index]);
   }
-  partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity;
+  isolate->set_serialize_partial_snapshot_cache_length(
+      Isolate::kPartialSnapshotCacheCapacity);
 }
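This padding pairs with Deserializer::Deserialize() above, which sets the per-isolate cache length straight to Isolate::kPartialSnapshotCacheCapacity before visiting roots. A sketch of the invariant the loop establishes, using the accessor names from this diff:

    // After PartialSerializer::Serialize() every cache slot is populated, so
    // the deserializer can assume full capacity without reading a length.
    ASSERT(isolate->serialize_partial_snapshot_cache_length() ==
           Isolate::kPartialSnapshotCacheCapacity);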
 
 
@@ -1121,11 +1142,6 @@ void Serializer::VisitPointers(Object** start, Object** end) {
 }
 
 
-Object* SerializerDeserializer::partial_snapshot_cache_[
-    kPartialSnapshotCacheCapacity];
-int SerializerDeserializer::partial_snapshot_cache_length_ = 0;
-
-
 // This ensures that the partial snapshot cache keeps things alive during GC and
 // tracks their movement.  When it is called during serialization of the startup
 // snapshot the partial snapshot is empty, so nothing happens.  When the partial
@@ -1135,9 +1151,11 @@ int SerializerDeserializer::partial_snapshot_cache_length_ = 0;
 // deserialization we therefore need to visit the cache array.  This fills it up
 // with pointers to deserialized objects.
 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
+  Isolate* isolate = Isolate::Current();
   visitor->VisitPointers(
-      &partial_snapshot_cache_[0],
-      &partial_snapshot_cache_[partial_snapshot_cache_length_]);
+      isolate->serialize_partial_snapshot_cache(),
+      &isolate->serialize_partial_snapshot_cache()[
+          isolate->serialize_partial_snapshot_cache_length()]);
 }
 
 
@@ -1145,33 +1163,39 @@ void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
 // the root iteration code (above) will iterate over array elements, writing the
 // references to deserialized objects in them.
 void SerializerDeserializer::SetSnapshotCacheSize(int size) {
-  partial_snapshot_cache_length_ = size;
+  Isolate::Current()->set_serialize_partial_snapshot_cache_length(size);
 }
 
 
 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
-  for (int i = 0; i < partial_snapshot_cache_length_; i++) {
-    Object* entry = partial_snapshot_cache_[i];
+  Isolate* isolate = Isolate::Current();
+
+  for (int i = 0;
+       i < isolate->serialize_partial_snapshot_cache_length();
+       i++) {
+    Object* entry = isolate->serialize_partial_snapshot_cache()[i];
     if (entry == heap_object) return i;
   }
 
   // We didn't find the object in the cache.  So we add it to the cache and
   // then visit the pointer so that it becomes part of the startup snapshot
   // and we can refer to it from the partial snapshot.
-  int length = partial_snapshot_cache_length_;
-  CHECK(length < kPartialSnapshotCacheCapacity);
-  partial_snapshot_cache_[length] = heap_object;
-  startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]);
+  int length = isolate->serialize_partial_snapshot_cache_length();
+  CHECK(length < Isolate::kPartialSnapshotCacheCapacity);
+  isolate->serialize_partial_snapshot_cache()[length] = heap_object;
+  startup_serializer_->VisitPointer(
+      &isolate->serialize_partial_snapshot_cache()[length]);
   // We don't recurse from the startup snapshot generator into the partial
   // snapshot generator.
-  ASSERT(length == partial_snapshot_cache_length_);
-  return partial_snapshot_cache_length_++;
+  ASSERT(length == isolate->serialize_partial_snapshot_cache_length());
+  isolate->set_serialize_partial_snapshot_cache_length(length + 1);
+  return length;
 }
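A sketch of how the index computed above is consumed, assuming only the accessor names introduced in this diff: the serializer writes the cache index into the sink, and the kPartialSnapshotCache case of Deserializer::ReadChunk() (earlier in this diff) indexes the same per-isolate array. The tag string below is illustrative, not taken from the patch:

    // Serialization side: refer to an object via the partial snapshot cache.
    int cache_index = PartialSnapshotCacheIndex(heap_object);
    sink_->PutInt(cache_index, "PartialSnapshotCacheIndex");

    // Deserialization side (kPartialSnapshotCache case in ReadChunk above):
    //   int cache_index = source_->GetInt();
    //   new_object = isolate->serialize_partial_snapshot_cache()[cache_index];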
 
 
 int PartialSerializer::RootIndex(HeapObject* heap_object) {
   for (int i = 0; i < Heap::kRootListLength; i++) {
-    Object* root = Heap::roots_address()[i];
+    Object* root = HEAP->roots_address()[i];
     if (root == heap_object) return i;
   }
   return kInvalidRootIndex;
@@ -1254,13 +1278,13 @@ void StartupSerializer::SerializeObject(
 
 
 void StartupSerializer::SerializeWeakReferences() {
-  for (int i = partial_snapshot_cache_length_;
-       i < kPartialSnapshotCacheCapacity;
+  for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length();
+       i < Isolate::kPartialSnapshotCacheCapacity;
        i++) {
     sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization");
     sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index");
   }
-  Heap::IterateWeakRoots(this, VISIT_ALL);
+  HEAP->IterateWeakRoots(this, VISIT_ALL);
 }
 
 
@@ -1321,7 +1345,8 @@ void Serializer::ObjectSerializer::Serialize() {
              "ObjectSerialization");
   sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
 
-  LOG(SnapshotPositionEvent(object_->address(), sink_->Position()));
+  LOG(i::Isolate::Current(),
+      SnapshotPositionEvent(object_->address(), sink_->Position()));
 
   // Mark this object as already serialized.
   bool start_new_page;
@@ -1422,7 +1447,7 @@ void Serializer::ObjectSerializer::VisitExternalAsciiString(
   Address references_start = reinterpret_cast<Address>(resource_pointer);
   OutputRawData(references_start);
   for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
-    Object* source = Heap::natives_source_cache()->get(i);
+    Object* source = HEAP->natives_source_cache()->get(i);
     if (!source->IsUndefined()) {
       ExternalAsciiString* string = ExternalAsciiString::cast(source);
       typedef v8::String::ExternalAsciiStringResource Resource;
@@ -1472,7 +1497,7 @@ void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
 int Serializer::SpaceOfObject(HeapObject* object) {
   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
     AllocationSpace s = static_cast<AllocationSpace>(i);
-    if (Heap::InSpace(object, s)) {
+    if (HEAP->InSpace(object, s)) {
       if (i == LO_SPACE) {
         if (object->IsCode()) {
           return kLargeCode;
@@ -1493,7 +1518,7 @@ int Serializer::SpaceOfObject(HeapObject* object) {
 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
     AllocationSpace s = static_cast<AllocationSpace>(i);
-    if (Heap::InSpace(object, s)) {
+    if (HEAP->InSpace(object, s)) {
       return i;
     }
   }
index e80c302d02162ea6772a1b4d4c51547e41b9dc03..07c0a255f75de2f53ff4478d337672ba75533298 100644 (file)
@@ -79,6 +79,8 @@ class ExternalReferenceEncoder {
   static bool Match(void* key1, void* key2) { return key1 == key2; }
 
   void Put(Address key, int index);
+
+  Isolate* isolate_;
 };
 
 
@@ -105,6 +107,8 @@ class ExternalReferenceDecoder {
   void Put(uint32_t key, Address value) {
     *Lookup(key) = value;
   }
+
+  Isolate* isolate_;
 };
 
 
@@ -144,7 +148,7 @@ class SnapshotByteSource {
 // This only works for objects in the first page of a space.  Don't use this for
 // things in newspace since it bypasses the write barrier.
 
-static const int k64 = (sizeof(uintptr_t) - 4) / 4;
+RLYSTC const int k64 = (sizeof(uintptr_t) - 4) / 4;
 
 #define COMMON_REFERENCE_PATTERNS(f)                               \
   f(kNumberOfSpaces, 2, (11 - k64))                                \
@@ -177,8 +181,8 @@ static const int k64 = (sizeof(uintptr_t) - 4) / 4;
 // both.
 class SerializerDeserializer: public ObjectVisitor {
  public:
-  static void Iterate(ObjectVisitor* visitor);
-  static void SetSnapshotCacheSize(int size);
+  RLYSTC void Iterate(ObjectVisitor* visitor);
+  RLYSTC void SetSnapshotCacheSize(int size);
 
  protected:
   // Where the pointed-to object can be found:
@@ -216,40 +220,36 @@ class SerializerDeserializer: public ObjectVisitor {
 
   // Misc.
   // Raw data to be copied from the snapshot.
-  static const int kRawData = 0x30;
+  RLYSTC const int kRawData = 0x30;
   // Some common raw lengths: 0x31-0x3f
   // A tag emitted at strategic points in the snapshot to delineate sections.
   // If the deserializer does not find these at the expected moments then it
   // is an indication that the snapshot and the VM do not fit together.
   // Examine the build process for architecture, version or configuration
   // mismatches.
-  static const int kSynchronize = 0x70;
+  RLYSTC const int kSynchronize = 0x70;
   // Used for the source code of the natives, which is in the executable, but
   // is referred to from external strings in the snapshot.
-  static const int kNativesStringResource = 0x71;
-  static const int kNewPage = 0x72;
+  RLYSTC const int kNativesStringResource = 0x71;
+  RLYSTC const int kNewPage = 0x72;
   // 0x73-0x7f                            Free.
   // 0xb0-0xbf                            Free.
   // 0xf0-0xff                            Free.
 
 
-  static const int kLargeData = LAST_SPACE;
-  static const int kLargeCode = kLargeData + 1;
-  static const int kLargeFixedArray = kLargeCode + 1;
-  static const int kNumberOfSpaces = kLargeFixedArray + 1;
-  static const int kAnyOldSpace = -1;
+  RLYSTC const int kLargeData = LAST_SPACE;
+  RLYSTC const int kLargeCode = kLargeData + 1;
+  RLYSTC const int kLargeFixedArray = kLargeCode + 1;
+  RLYSTC const int kNumberOfSpaces = kLargeFixedArray + 1;
+  RLYSTC const int kAnyOldSpace = -1;
 
   // A bitmask for getting the space out of an instruction.
-  static const int kSpaceMask = 15;
+  RLYSTC const int kSpaceMask = 15;
 
-  static inline bool SpaceIsLarge(int space) { return space >= kLargeData; }
-  static inline bool SpaceIsPaged(int space) {
+  RLYSTC inline bool SpaceIsLarge(int space) { return space >= kLargeData; }
+  RLYSTC inline bool SpaceIsPaged(int space) {
     return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE;
   }
-
-  static int partial_snapshot_cache_length_;
-  static const int kPartialSnapshotCacheCapacity = 1400;
-  static Object* partial_snapshot_cache_[];
 };
 
 
@@ -313,6 +313,9 @@ class Deserializer: public SerializerDeserializer {
   Address Allocate(int space_number, Space* space, int size);
   void ReadObject(int space_number, Space* space, Object** write_back);
 
+  // Cached current isolate.
+  Isolate* isolate_;
+
   // Keep track of the pages in the paged spaces.
   // (In large object space we are keeping track of individual objects
   // rather than pages.)  In new space we just need the address of the
@@ -320,7 +323,6 @@ class Deserializer: public SerializerDeserializer {
   List<Address> pages_[SerializerDeserializer::kNumberOfSpaces];
 
   SnapshotByteSource* source_;
-  static ExternalReferenceDecoder* external_reference_decoder_;
   // This is the address of the next object that will be allocated in each
   // space.  It is used to calculate the addresses of back-references.
   Address high_water_[LAST_SPACE + 1];
@@ -329,6 +331,8 @@ class Deserializer: public SerializerDeserializer {
   // START_NEW_PAGE_SERIALIZATION tag.
   Address last_object_address_;
 
+  ExternalReferenceDecoder* external_reference_decoder_;
+
   DISALLOW_COPY_AND_ASSIGN(Deserializer);
 };
 
@@ -376,19 +380,19 @@ class SerializationAddressMapper {
   }
 
  private:
-  static bool SerializationMatchFun(void* key1, void* key2) {
+  RLYSTC bool SerializationMatchFun(void* key1, void* key2) {
     return key1 == key2;
   }
 
-  static uint32_t Hash(HeapObject* obj) {
+  RLYSTC uint32_t Hash(HeapObject* obj) {
     return static_cast<int32_t>(reinterpret_cast<intptr_t>(obj->address()));
   }
 
-  static void* Key(HeapObject* obj) {
+  RLYSTC void* Key(HeapObject* obj) {
     return reinterpret_cast<void*>(obj->address());
   }
 
-  static void* Value(int v) {
+  RLYSTC void* Value(int v) {
     return reinterpret_cast<void*>(v);
   }
 
@@ -398,7 +402,8 @@ class SerializationAddressMapper {
 };
 
 
-class Serializer : public SerializerDeserializer {
+// There can be only one serializer per V8 process.
+STATIC_CLASS Serializer : public SerializerDeserializer {
  public:
   explicit Serializer(SnapshotByteSink* sink);
   ~Serializer();
@@ -410,25 +415,25 @@ class Serializer : public SerializerDeserializer {
     return fullness_[space];
   }
 
-  static void Enable() {
+  RLYSTC void Enable() {
     if (!serialization_enabled_) {
       ASSERT(!too_late_to_enable_now_);
     }
     serialization_enabled_ = true;
   }
 
-  static void Disable() { serialization_enabled_ = false; }
+  RLYSTC void Disable() { serialization_enabled_ = false; }
   // Call this when you have made use of the fact that there is no serialization
   // going on.
-  static void TooLateToEnableNow() { too_late_to_enable_now_ = true; }
-  static bool enabled() { return serialization_enabled_; }
+  RLYSTC void TooLateToEnableNow() { too_late_to_enable_now_ = true; }
+  RLYSTC bool enabled() { return serialization_enabled_; }
   SerializationAddressMapper* address_mapper() { return &address_mapper_; }
 #ifdef DEBUG
   virtual void Synchronize(const char* tag);
 #endif
 
  protected:
-  static const int kInvalidRootIndex = -1;
+  RLYSTC const int kInvalidRootIndex = -1;
   virtual int RootIndex(HeapObject* heap_object) = 0;
   virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0;
 
@@ -483,11 +488,11 @@ class Serializer : public SerializerDeserializer {
   // object space it may return kLargeCode or kLargeFixedArray in order
   // to indicate to the deserializer what kind of large object allocation
   // to make.
-  static int SpaceOfObject(HeapObject* object);
+  RLYSTC int SpaceOfObject(HeapObject* object);
   // This just returns the space of the object.  It will return LO_SPACE
   // for all large objects since you can't check the type of the object
   // once the map has been used for the serialization address.
-  static int SpaceOfAlreadySerializedObject(HeapObject* object);
+  RLYSTC int SpaceOfAlreadySerializedObject(HeapObject* object);
   int Allocate(int space, int size, bool* new_page_started);
   int EncodeExternalReference(Address addr) {
     return external_reference_encoder_->Encode(addr);
@@ -501,9 +506,9 @@ class Serializer : public SerializerDeserializer {
   SnapshotByteSink* sink_;
   int current_root_index_;
   ExternalReferenceEncoder* external_reference_encoder_;
-  static bool serialization_enabled_;
+  RLYSTC bool serialization_enabled_;
   // Did we already make use of the fact that serialization was not enabled?
-  static bool too_late_to_enable_now_;
+  RLYSTC bool too_late_to_enable_now_;
   int large_object_total_;
   SerializationAddressMapper address_mapper_;
 
@@ -539,7 +544,7 @@ class PartialSerializer : public Serializer {
     ASSERT(!o->IsScript());
     return o->IsString() || o->IsSharedFunctionInfo() ||
            o->IsHeapNumber() || o->IsCode() ||
-           o->map() == Heap::fixed_cow_array_map();
+           o->map() == HEAP->fixed_cow_array_map();
   }
 
  private:
@@ -555,7 +560,7 @@ class StartupSerializer : public Serializer {
     // strong roots have been serialized we can create a partial snapshot
    // which will repopulate the cache with objects needed by that partial
     // snapshot.
-    partial_snapshot_cache_length_ = 0;
+    Isolate::Current()->set_serialize_partial_snapshot_cache_length(0);
   }
   // Serialize the current state of the heap.  The order is:
   // 1) Strong references.
index f1106e138d1721bf6028be75475a688e1a1a891f..7f82895802db2e9bee043f2ef3c38d5c43ed27cf 100644 (file)
@@ -64,7 +64,7 @@ Handle<Context> Snapshot::NewContextFromSnapshot() {
   if (context_size_ == 0) {
     return Handle<Context>();
   }
-  Heap::ReserveSpace(new_space_used_,
+  HEAP->ReserveSpace(new_space_used_,
                      pointer_space_used_,
                      data_space_used_,
                      code_space_used_,
index 9f77c20f52c928d206163dc8242a42442517a882..bedd186e507118f568b7a534669fc81cd0046203 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include "isolate.h"
+
 #ifndef V8_SNAPSHOT_H_
 #define V8_SNAPSHOT_H_
 
 namespace v8 {
 namespace internal {
 
-class Snapshot {
+STATIC_CLASS Snapshot {
  public:
   // Initialize the VM from the given snapshot file. If snapshot_file is
   // NULL, use the internal snapshot instead. Returns false if no snapshot
index b5ee1e40eac4b45f3c9ffc1879fa347808352885..4afd80eee1b17f5029ab6f21ed8e7a9d3cee1e00 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_SPACES_INL_H_
 #define V8_SPACES_INL_H_
 
+#include "isolate.h"
 #include "memory.h"
 #include "spaces.h"
 
@@ -56,18 +57,18 @@ Page* PageIterator::next() {
 // Page
 
 Page* Page::next_page() {
-  return MemoryAllocator::GetNextPage(this);
+  return heap_->isolate()->memory_allocator()->GetNextPage(this);
 }
 
 
 Address Page::AllocationTop() {
-  PagedSpace* owner = MemoryAllocator::PageOwner(this);
+  PagedSpace* owner = heap_->isolate()->memory_allocator()->PageOwner(this);
   return owner->PageAllocationTop(this);
 }
 
 
 Address Page::AllocationWatermark() {
-  PagedSpace* owner = MemoryAllocator::PageOwner(this);
+  PagedSpace* owner = heap_->isolate()->memory_allocator()->PageOwner(this);
   if (this == owner->AllocationTopPage()) {
     return owner->top();
   }
@@ -82,7 +83,7 @@ uint32_t Page::AllocationWatermarkOffset() {
 
 
 void Page::SetAllocationWatermark(Address allocation_watermark) {
-  if ((Heap::gc_state() == Heap::SCAVENGE) && IsWatermarkValid()) {
+  if ((heap_->gc_state() == Heap::SCAVENGE) && IsWatermarkValid()) {
     // When iterating intergenerational references during scavenge
     // we might decide to promote an encountered young object.
     // We will allocate a space for such an object and put it
@@ -219,23 +220,26 @@ void Page::ClearRegionMarks(Address start, Address end, bool reaches_limit) {
 }
 
 
-void Page::FlipMeaningOfInvalidatedWatermarkFlag() {
-  watermark_invalidated_mark_ ^= 1 << WATERMARK_INVALIDATED;
+void Page::FlipMeaningOfInvalidatedWatermarkFlag(Heap* heap) {
+  heap->page_watermark_invalidated_mark_ ^= 1 << WATERMARK_INVALIDATED;
 }
 
 
 bool Page::IsWatermarkValid() {
-  return (flags_ & (1 << WATERMARK_INVALIDATED)) != watermark_invalidated_mark_;
+  return (flags_ & (1 << WATERMARK_INVALIDATED)) !=
+      heap_->page_watermark_invalidated_mark_;
 }
 
 
 void Page::InvalidateWatermark(bool value) {
   if (value) {
     flags_ = (flags_ & ~(1 << WATERMARK_INVALIDATED)) |
-             watermark_invalidated_mark_;
+             heap_->page_watermark_invalidated_mark_;
   } else {
-    flags_ = (flags_ & ~(1 << WATERMARK_INVALIDATED)) |
-             (watermark_invalidated_mark_ ^ (1 << WATERMARK_INVALIDATED));
+    flags_ =
+        (flags_ & ~(1 << WATERMARK_INVALIDATED)) |
+        (heap_->page_watermark_invalidated_mark_ ^
+         (1 << WATERMARK_INVALIDATED));
   }
 
   ASSERT(IsWatermarkValid() == !value);
@@ -264,7 +268,7 @@ void Page::ClearPageFlags() {
 void Page::ClearGCFields() {
   InvalidateWatermark(true);
   SetAllocationWatermark(ObjectAreaStart());
-  if (Heap::gc_state() == Heap::SCAVENGE) {
+  if (heap_->gc_state() == Heap::SCAVENGE) {
     SetCachedAllocationWatermark(ObjectAreaStart());
   }
   SetRegionMarks(kAllRegionsCleanMarks);
@@ -308,6 +312,7 @@ void MemoryAllocator::ChunkInfo::init(Address a, size_t s, PagedSpace* o) {
   size_ = s;
   owner_ = o;
   executable_ = (o == NULL) ? NOT_EXECUTABLE : o->executable();
+  owner_identity_ = (o == NULL) ? FIRST_SPACE : o->identity();
 }
 
 
@@ -408,15 +413,7 @@ void MemoryAllocator::UnprotectChunkFromPage(Page* page) {
 bool PagedSpace::Contains(Address addr) {
   Page* p = Page::FromAddress(addr);
   if (!p->is_valid()) return false;
-  return MemoryAllocator::IsPageInSpace(p, this);
-}
-
-
-bool PagedSpace::SafeContains(Address addr) {
-  if (!MemoryAllocator::SafeIsInAPageChunk(addr)) return false;
-  Page* p = Page::FromAddress(addr);
-  if (!p->is_valid()) return false;
-  return MemoryAllocator::IsPageInSpace(p, this);
+  return heap()->isolate()->memory_allocator()->IsPageInSpace(p, this);
 }
 
 
@@ -477,7 +474,9 @@ Address LargeObjectChunk::GetStartAddress() {
 
 
 void LargeObjectChunk::Free(Executability executable) {
-  MemoryAllocator::FreeRawMemory(address(), size(), executable);
+  Isolate* isolate =
+      Page::FromAddress(RoundUp(address(), Page::kPageSize))->heap_->isolate();
+  isolate->memory_allocator()->FreeRawMemory(address(), size(), executable);
 }
 
 // -----------------------------------------------------------------------------
@@ -501,6 +500,12 @@ MaybeObject* NewSpace::AllocateRawInternal(int size_in_bytes,
 }
 
 
+intptr_t LargeObjectSpace::Available() {
+  return LargeObjectChunk::ObjectSizeFor(
+      heap()->isolate()->memory_allocator()->Available());
+}
+
+
 template <typename StringType>
 void NewSpace::ShrinkStringAtAllocationBoundary(String* string, int length) {
   ASSERT(length <= string->length());
@@ -514,9 +519,9 @@ void NewSpace::ShrinkStringAtAllocationBoundary(String* string, int length) {
 
 
 bool FreeListNode::IsFreeListNode(HeapObject* object) {
-  return object->map() == Heap::raw_unchecked_byte_array_map()
-      || object->map() == Heap::raw_unchecked_one_pointer_filler_map()
-      || object->map() == Heap::raw_unchecked_two_pointer_filler_map();
+  return object->map() == HEAP->raw_unchecked_byte_array_map()
+      || object->map() == HEAP->raw_unchecked_one_pointer_filler_map()
+      || object->map() == HEAP->raw_unchecked_two_pointer_filler_map();
 }
 
 } }  // namespace v8::internal
index 2d9687a72f00c6c8f2499ff366027958311712c7..11fb4ad557166a762245f6ef3be4ca3e4030a827 100644 (file)
@@ -42,8 +42,6 @@ namespace internal {
          && (info).top <= (space).high()              \
          && (info).limit == (space).high())
 
-intptr_t Page::watermark_invalidated_mark_ = 1 << Page::WATERMARK_INVALIDATED;
-
 // ----------------------------------------------------------------------------
 // HeapObjectIterator
 
@@ -149,10 +147,14 @@ PageIterator::PageIterator(PagedSpace* space, Mode mode) : space_(space) {
 // -----------------------------------------------------------------------------
 // CodeRange
 
-List<CodeRange::FreeBlock> CodeRange::free_list_(0);
-List<CodeRange::FreeBlock> CodeRange::allocation_list_(0);
-int CodeRange::current_allocation_block_index_ = 0;
-VirtualMemory* CodeRange::code_range_ = NULL;
+
+CodeRange::CodeRange()
+    : code_range_(NULL),
+      free_list_(0),
+      allocation_list_(0),
+      current_allocation_block_index_(0),
+      isolate_(NULL) {
+}
 
 
 bool CodeRange::Setup(const size_t requested) {
@@ -168,7 +170,7 @@ bool CodeRange::Setup(const size_t requested) {
 
   // We are sure that we have mapped a block of requested addresses.
   ASSERT(code_range_->size() == requested);
-  LOG(NewEvent("CodeRange", code_range_->address(), requested));
+  LOG(isolate_, NewEvent("CodeRange", code_range_->address(), requested));
   allocation_list_.Add(FreeBlock(code_range_->address(), code_range_->size()));
   current_allocation_block_index_ = 0;
   return true;
@@ -271,24 +273,24 @@ void CodeRange::TearDown() {
 // -----------------------------------------------------------------------------
 // MemoryAllocator
 //
-intptr_t MemoryAllocator::capacity_ = 0;
-intptr_t MemoryAllocator::capacity_executable_ = 0;
-intptr_t MemoryAllocator::size_ = 0;
-intptr_t MemoryAllocator::size_executable_ = 0;
-
-List<MemoryAllocator::MemoryAllocationCallbackRegistration>
-  MemoryAllocator::memory_allocation_callbacks_;
-
-VirtualMemory* MemoryAllocator::initial_chunk_ = NULL;
 
 // 270 is an estimate based on the static default heap size of a pair of 256K
 // semispaces and a 64M old generation.
 const int kEstimatedNumberOfChunks = 270;
-List<MemoryAllocator::ChunkInfo> MemoryAllocator::chunks_(
-    kEstimatedNumberOfChunks);
-List<int> MemoryAllocator::free_chunk_ids_(kEstimatedNumberOfChunks);
-int MemoryAllocator::max_nof_chunks_ = 0;
-int MemoryAllocator::top_ = 0;
+
+
+MemoryAllocator::MemoryAllocator()
+    : capacity_(0),
+      capacity_executable_(0),
+      size_(0),
+      size_executable_(0),
+      initial_chunk_(NULL),
+      chunks_(kEstimatedNumberOfChunks),
+      free_chunk_ids_(kEstimatedNumberOfChunks),
+      max_nof_chunks_(0),
+      top_(0),
+      isolate_(NULL) {
+}
 
 
 void MemoryAllocator::Push(int free_chunk_id) {
@@ -334,11 +336,6 @@ bool MemoryAllocator::Setup(intptr_t capacity, intptr_t capacity_executable) {
 }
 
 
-bool MemoryAllocator::SafeIsInAPageChunk(Address addr) {
-  return InInitialChunk(addr) || InAllocatedChunks(addr);
-}
-
-
 void MemoryAllocator::TearDown() {
   for (int i = 0; i < max_nof_chunks_; i++) {
     if (chunks_[i].address() != NULL) DeleteChunk(i);
@@ -347,15 +344,11 @@ void MemoryAllocator::TearDown() {
   free_chunk_ids_.Clear();
 
   if (initial_chunk_ != NULL) {
-    LOG(DeleteEvent("InitialChunk", initial_chunk_->address()));
+    LOG(isolate_, DeleteEvent("InitialChunk", initial_chunk_->address()));
     delete initial_chunk_;
     initial_chunk_ = NULL;
   }
 
-  FreeChunkTables(&chunk_table_[0],
-                  kChunkTableTopLevelEntries,
-                  kChunkTableLevels);
-
   ASSERT(top_ == max_nof_chunks_);  // all chunks are free
   top_ = 0;
   capacity_ = 0;
@@ -365,22 +358,6 @@ void MemoryAllocator::TearDown() {
 }
 
 
-void MemoryAllocator::FreeChunkTables(uintptr_t* array, int len, int level) {
-  for (int i = 0; i < len; i++) {
-    if (array[i] != kUnusedChunkTableEntry) {
-      uintptr_t* subarray = reinterpret_cast<uintptr_t*>(array[i]);
-      if (level > 1) {
-        array[i] = kUnusedChunkTableEntry;
-        FreeChunkTables(subarray, 1 << kChunkTableBitsPerLevel, level - 1);
-      } else {
-        array[i] = kUnusedChunkTableEntry;
-      }
-      delete[] subarray;
-    }
-  }
-}
-
-
 void* MemoryAllocator::AllocateRawMemory(const size_t requested,
                                          size_t* allocated,
                                          Executability executable) {
@@ -393,14 +370,15 @@ void* MemoryAllocator::AllocateRawMemory(const size_t requested,
     // Check executable memory limit.
     if (size_executable_ + requested >
         static_cast<size_t>(capacity_executable_)) {
-      LOG(StringEvent("MemoryAllocator::AllocateRawMemory",
+      LOG(isolate_,
+          StringEvent("MemoryAllocator::AllocateRawMemory",
                       "V8 Executable Allocation capacity exceeded"));
       return NULL;
     }
     // Allocate executable memory either from code range or from the
     // OS.
-    if (CodeRange::exists()) {
-      mem = CodeRange::AllocateRawMemory(requested, allocated);
+    if (isolate_->code_range()->exists()) {
+      mem = isolate_->code_range()->AllocateRawMemory(requested, allocated);
     } else {
       mem = OS::Allocate(requested, allocated, true);
     }
@@ -415,7 +393,7 @@ void* MemoryAllocator::AllocateRawMemory(const size_t requested,
 #ifdef DEBUG
   ZapBlock(reinterpret_cast<Address>(mem), alloced);
 #endif
-  Counters::memory_allocated.Increment(alloced);
+  COUNTERS->memory_allocated()->Increment(alloced);
   return mem;
 }
 
@@ -426,12 +404,12 @@ void MemoryAllocator::FreeRawMemory(void* mem,
 #ifdef DEBUG
   ZapBlock(reinterpret_cast<Address>(mem), length);
 #endif
-  if (CodeRange::contains(static_cast<Address>(mem))) {
-    CodeRange::FreeRawMemory(mem, length);
+  if (isolate_->code_range()->contains(static_cast<Address>(mem))) {
+    isolate_->code_range()->FreeRawMemory(mem, length);
   } else {
     OS::Free(mem, length);
   }
-  Counters::memory_allocated.Decrement(static_cast<int>(length));
+  COUNTERS->memory_allocated()->Decrement(static_cast<int>(length));
   size_ -= static_cast<int>(length);
   if (executable == EXECUTABLE) size_executable_ -= static_cast<int>(length);
 
@@ -498,7 +476,8 @@ void* MemoryAllocator::ReserveInitialChunk(const size_t requested) {
 
   // We are sure that we have mapped a block of requested addresses.
   ASSERT(initial_chunk_->size() == requested);
-  LOG(NewEvent("InitialChunk", initial_chunk_->address(), requested));
+  LOG(isolate_,
+      NewEvent("InitialChunk", initial_chunk_->address(), requested));
   size_ += static_cast<int>(requested);
   return initial_chunk_->address();
 }
@@ -522,14 +501,14 @@ Page* MemoryAllocator::AllocatePages(int requested_pages,
 
   void* chunk = AllocateRawMemory(chunk_size, &chunk_size, owner->executable());
   if (chunk == NULL) return Page::FromAddress(NULL);
-  LOG(NewEvent("PagedChunk", chunk, chunk_size));
+  LOG(isolate_, NewEvent("PagedChunk", chunk, chunk_size));
 
   *allocated_pages = PagesInChunk(static_cast<Address>(chunk), chunk_size);
   // We may 'lose' a page due to alignment.
   ASSERT(*allocated_pages >= kPagesPerChunk - 1);
   if (*allocated_pages == 0) {
     FreeRawMemory(chunk, chunk_size, owner->executable());
-    LOG(DeleteEvent("PagedChunk", chunk));
+    LOG(isolate_, DeleteEvent("PagedChunk", chunk));
     return Page::FromAddress(NULL);
   }
 
@@ -540,8 +519,6 @@ Page* MemoryAllocator::AllocatePages(int requested_pages,
   PerformAllocationCallback(space, kAllocationActionAllocate, chunk_size);
   Page* new_pages = InitializePagesInChunk(chunk_id, *allocated_pages, owner);
 
-  AddToAllocatedChunks(static_cast<Address>(chunk), chunk_size);
-
   return new_pages;
 }
 
@@ -560,7 +537,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
 #ifdef DEBUG
   ZapBlock(start, size);
 #endif
-  Counters::memory_allocated.Increment(static_cast<int>(size));
+  COUNTERS->memory_allocated()->Increment(static_cast<int>(size));
 
   // So long as we correctly overestimated the number of chunks we should not
   // run out of chunk ids.
@@ -584,7 +561,7 @@ bool MemoryAllocator::CommitBlock(Address start,
 #ifdef DEBUG
   ZapBlock(start, size);
 #endif
-  Counters::memory_allocated.Increment(static_cast<int>(size));
+  COUNTERS->memory_allocated()->Increment(static_cast<int>(size));
   return true;
 }
 
@@ -597,7 +574,7 @@ bool MemoryAllocator::UncommitBlock(Address start, size_t size) {
   ASSERT(InInitialChunk(start + size - 1));
 
   if (!initial_chunk_->Uncommit(start, size)) return false;
-  Counters::memory_allocated.Decrement(static_cast<int>(size));
+  COUNTERS->memory_allocated()->Decrement(static_cast<int>(size));
   return true;
 }
 
@@ -628,6 +605,7 @@ Page* MemoryAllocator::InitializePagesInChunk(int chunk_id, int pages_in_chunk,
   Address page_addr = low;
   for (int i = 0; i < pages_in_chunk; i++) {
     Page* p = Page::FromAddress(page_addr);
+    p->heap_ = owner->heap();
     p->opaque_header = OffsetFrom(page_addr + Page::kPageSize) | chunk_id;
     p->InvalidateWatermark(true);
     p->SetIsLargeObjectPage(false);
@@ -697,11 +675,10 @@ void MemoryAllocator::DeleteChunk(int chunk_id) {
     // TODO(1240712): VirtualMemory::Uncommit has a return value which
     // is ignored here.
     initial_chunk_->Uncommit(c.address(), c.size());
-    Counters::memory_allocated.Decrement(static_cast<int>(c.size()));
+    COUNTERS->memory_allocated()->Decrement(static_cast<int>(c.size()));
   } else {
-    RemoveFromAllocatedChunks(c.address(), c.size());
-    LOG(DeleteEvent("PagedChunk", c.address()));
-    ObjectSpace space = static_cast<ObjectSpace>(1 << c.owner()->identity());
+    LOG(isolate_, DeleteEvent("PagedChunk", c.address()));
+    ObjectSpace space = static_cast<ObjectSpace>(1 << c.owner_identity());
     size_t size = c.size();
     FreeRawMemory(c.address(), size, c.executable());
     PerformAllocationCallback(space, kAllocationActionFree, size);
@@ -813,131 +790,14 @@ Page* MemoryAllocator::RelinkPagesInChunk(int chunk_id,
 }
 
 
-void MemoryAllocator::AddToAllocatedChunks(Address addr, intptr_t size) {
-  ASSERT(size == kChunkSize);
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  AddChunkUsingAddress(int_address, int_address);
-  AddChunkUsingAddress(int_address, int_address + size - 1);
-}
-
-
-void MemoryAllocator::AddChunkUsingAddress(uintptr_t chunk_start,
-                                           uintptr_t chunk_index_base) {
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      chunk_index_base,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kCreateTablesAsNeeded);
-  int index = FineGrainedIndexForAddress(chunk_index_base);
-  if (fine_grained[index] != kUnusedChunkTableEntry) index++;
-  ASSERT(fine_grained[index] == kUnusedChunkTableEntry);
-  fine_grained[index] = chunk_start;
-}
-
-
-void MemoryAllocator::RemoveFromAllocatedChunks(Address addr, intptr_t size) {
-  ASSERT(size == kChunkSize);
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  RemoveChunkFoundUsingAddress(int_address, int_address);
-  RemoveChunkFoundUsingAddress(int_address, int_address + size - 1);
-}
-
-
-void MemoryAllocator::RemoveChunkFoundUsingAddress(
-    uintptr_t chunk_start,
-    uintptr_t chunk_index_base) {
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      chunk_index_base,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kDontCreateTables);
-  // Can't remove an entry that's not there.
-  ASSERT(fine_grained != kUnusedChunkTableEntry);
-  int index = FineGrainedIndexForAddress(chunk_index_base);
-  ASSERT(fine_grained[index] != kUnusedChunkTableEntry);
-  if (fine_grained[index] != chunk_start) {
-    index++;
-    ASSERT(fine_grained[index] == chunk_start);
-    fine_grained[index] = kUnusedChunkTableEntry;
-  } else {
-    // If only one of the entries is used it must be the first, since
-    // InAllocatedChunks relies on that.  Move things around so that this is
-    // the case.
-    fine_grained[index] = fine_grained[index + 1];
-    fine_grained[index + 1] = kUnusedChunkTableEntry;
-  }
-}
-
-
-bool MemoryAllocator::InAllocatedChunks(Address addr) {
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      int_address,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kDontCreateTables);
-  if (fine_grained == NULL) return false;
-  int index = FineGrainedIndexForAddress(int_address);
-  if (fine_grained[index] == kUnusedChunkTableEntry) return false;
-  uintptr_t entry = fine_grained[index];
-  if (entry <= int_address && entry + kChunkSize > int_address) return true;
-  index++;
-  if (fine_grained[index] == kUnusedChunkTableEntry) return false;
-  entry = fine_grained[index];
-  if (entry <= int_address && entry + kChunkSize > int_address) return true;
-  return false;
-}
-
-
-uintptr_t* MemoryAllocator::AllocatedChunksFinder(
-    uintptr_t* table,
-    uintptr_t address,
-    int bit_position,
-    CreateTables create_as_needed) {
-  if (bit_position == kChunkSizeLog2) {
-    return table;
-  }
-  ASSERT(bit_position >= kChunkSizeLog2 + kChunkTableBitsPerLevel);
-  int index =
-      ((address >> bit_position) &
-       ((V8_INTPTR_C(1) << kChunkTableBitsPerLevel) - 1));
-  uintptr_t more_fine_grained_address =
-      address & ((V8_INTPTR_C(1) << bit_position) - 1);
-  ASSERT((table == chunk_table_ && index < kChunkTableTopLevelEntries) ||
-         (table != chunk_table_ && index < 1 << kChunkTableBitsPerLevel));
-  uintptr_t* more_fine_grained_table =
-      reinterpret_cast<uintptr_t*>(table[index]);
-  if (more_fine_grained_table == kUnusedChunkTableEntry) {
-    if (create_as_needed == kDontCreateTables) return NULL;
-    int words_needed = 1 << kChunkTableBitsPerLevel;
-    if (bit_position == kChunkTableBitsPerLevel + kChunkSizeLog2) {
-      words_needed =
-          (1 << kChunkTableBitsPerLevel) * kChunkTableFineGrainedWordsPerEntry;
-    }
-    more_fine_grained_table = new uintptr_t[words_needed];
-    for (int i = 0; i < words_needed; i++) {
-      more_fine_grained_table[i] = kUnusedChunkTableEntry;
-    }
-    table[index] = reinterpret_cast<uintptr_t>(more_fine_grained_table);
-  }
-  return AllocatedChunksFinder(
-      more_fine_grained_table,
-      more_fine_grained_address,
-      bit_position - kChunkTableBitsPerLevel,
-      create_as_needed);
-}
-
-
-uintptr_t MemoryAllocator::chunk_table_[kChunkTableTopLevelEntries];
-
-
 // -----------------------------------------------------------------------------
 // PagedSpace implementation
 
-PagedSpace::PagedSpace(intptr_t max_capacity,
+PagedSpace::PagedSpace(Heap* heap,
+                       intptr_t max_capacity,
                        AllocationSpace id,
                        Executability executable)
-    : Space(id, executable) {
+    : Space(heap, id, executable) {
   max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
                   * Page::kObjectAreaSize;
   accounting_stats_.Clear();
@@ -958,15 +818,17 @@ bool PagedSpace::Setup(Address start, size_t size) {
   // contain at least one page, ignore it and allocate instead.
   int pages_in_chunk = PagesInChunk(start, size);
   if (pages_in_chunk > 0) {
-    first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
-                                               Page::kPageSize * pages_in_chunk,
-                                               this, &num_pages);
+    first_page_ = Isolate::Current()->memory_allocator()->CommitPages(
+        RoundUp(start, Page::kPageSize),
+        Page::kPageSize * pages_in_chunk,
+        this, &num_pages);
   } else {
     int requested_pages =
         Min(MemoryAllocator::kPagesPerChunk,
             static_cast<int>(max_capacity_ / Page::kObjectAreaSize));
     first_page_ =
-        MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
+        Isolate::Current()->memory_allocator()->AllocatePages(
+            requested_pages, &num_pages, this);
     if (!first_page_->is_valid()) return false;
   }
 
@@ -999,7 +861,7 @@ bool PagedSpace::HasBeenSetup() {
 
 
 void PagedSpace::TearDown() {
-  MemoryAllocator::FreeAllPages(this);
+  Isolate::Current()->memory_allocator()->FreeAllPages(this);
   first_page_ = NULL;
   accounting_stats_.Clear();
 }
@@ -1010,8 +872,9 @@ void PagedSpace::TearDown() {
 void PagedSpace::Protect() {
   Page* page = first_page_;
   while (page->is_valid()) {
-    MemoryAllocator::ProtectChunkFromPage(page);
-    page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
+    Isolate::Current()->memory_allocator()->ProtectChunkFromPage(page);
+    page = Isolate::Current()->memory_allocator()->
+        FindLastPageInSameChunk(page)->next_page();
   }
 }
 
@@ -1019,8 +882,9 @@ void PagedSpace::Protect() {
 void PagedSpace::Unprotect() {
   Page* page = first_page_;
   while (page->is_valid()) {
-    MemoryAllocator::UnprotectChunkFromPage(page);
-    page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
+    Isolate::Current()->memory_allocator()->UnprotectChunkFromPage(page);
+    page = Isolate::Current()->memory_allocator()->
+        FindLastPageInSameChunk(page)->next_page();
   }
 }
 
@@ -1038,7 +902,7 @@ void PagedSpace::MarkAllPagesClean() {
 MaybeObject* PagedSpace::FindObject(Address addr) {
   // Note: this function can only be called before or after mark-compact GC
   // because it accesses map pointers.
-  ASSERT(!MarkCompactCollector::in_use());
+  ASSERT(!heap()->mark_compact_collector()->in_use());
 
   if (!Contains(addr)) return Failure::Exception();
 
@@ -1158,13 +1022,14 @@ bool PagedSpace::Expand(Page* last_page) {
   if (available_pages < MemoryAllocator::kPagesPerChunk) return false;
 
   int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
-  Page* p = MemoryAllocator::AllocatePages(desired_pages, &desired_pages, this);
+  Page* p = heap()->isolate()->memory_allocator()->AllocatePages(
+      desired_pages, &desired_pages, this);
   if (!p->is_valid()) return false;
 
   accounting_stats_.ExpandSpace(desired_pages * Page::kObjectAreaSize);
   ASSERT(Capacity() <= max_capacity_);
 
-  MemoryAllocator::SetNextPage(last_page, p);
+  heap()->isolate()->memory_allocator()->SetNextPage(last_page, p);
 
    // Sequentially clear region marks of new pages and cache the
   // new last page in the space.
@@ -1207,8 +1072,9 @@ void PagedSpace::Shrink() {
   }
 
   // Free pages after top_page.
-  Page* p = MemoryAllocator::FreePages(top_page->next_page());
-  MemoryAllocator::SetNextPage(top_page, p);
+  Page* p = heap()->isolate()->memory_allocator()->
+      FreePages(top_page->next_page());
+  heap()->isolate()->memory_allocator()->SetNextPage(top_page, p);
 
   // Find out how many pages we failed to free and update last_page_.
   // Please note pages can only be freed in whole chunks.
@@ -1230,7 +1096,8 @@ bool PagedSpace::EnsureCapacity(int capacity) {
   Page* last_page = AllocationTopPage();
   Page* next_page = last_page->next_page();
   while (next_page->is_valid()) {
-    last_page = MemoryAllocator::FindLastPageInSameChunk(next_page);
+    last_page = heap()->isolate()->memory_allocator()->
+        FindLastPageInSameChunk(next_page);
     next_page = last_page->next_page();
   }
 
@@ -1239,7 +1106,8 @@ bool PagedSpace::EnsureCapacity(int capacity) {
     if (!Expand(last_page)) return false;
     ASSERT(last_page->next_page()->is_valid());
     last_page =
-        MemoryAllocator::FindLastPageInSameChunk(last_page->next_page());
+        heap()->isolate()->memory_allocator()->FindLastPageInSameChunk(
+            last_page->next_page());
   } while (Capacity() < capacity);
 
   return true;
@@ -1259,7 +1127,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
   // space.
   ASSERT(allocation_info_.VerifyPagedAllocation());
   Page* top_page = Page::FromAllocationTop(allocation_info_.top);
-  ASSERT(MemoryAllocator::IsPageInSpace(top_page, this));
+  ASSERT(heap()->isolate()->memory_allocator()->IsPageInSpace(top_page, this));
 
   // Loop over all the pages.
   bool above_allocation_top = false;
@@ -1284,7 +1152,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
         // be in map space.
         Map* map = object->map();
         ASSERT(map->IsMap());
-        ASSERT(Heap::map_space()->Contains(map));
+        ASSERT(heap()->map_space()->Contains(map));
 
         // Perform space-specific object verification.
         VerifyObject(object);
@@ -1320,8 +1188,8 @@ bool NewSpace::Setup(Address start, int size) {
   // start and size. The provided space is divided into two semi-spaces.
   // To support fast containment testing in the new space, the size of
   // this chunk must be a power of two and it must be aligned to its size.
-  int initial_semispace_capacity = Heap::InitialSemiSpaceSize();
-  int maximum_semispace_capacity = Heap::MaxSemiSpaceSize();
+  int initial_semispace_capacity = heap()->InitialSemiSpaceSize();
+  int maximum_semispace_capacity = heap()->MaxSemiSpaceSize();
 
   ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
   ASSERT(IsPowerOf2(maximum_semispace_capacity));
@@ -1337,7 +1205,7 @@ bool NewSpace::Setup(Address start, int size) {
 #undef SET_NAME
 #endif
 
-  ASSERT(size == 2 * Heap::ReservedSemiSpaceSize());
+  ASSERT(size == 2 * heap()->ReservedSemiSpaceSize());
   ASSERT(IsAddressAligned(start, size, 0));
 
   if (!to_space_.Setup(start,
@@ -1392,16 +1260,16 @@ void NewSpace::TearDown() {
 #ifdef ENABLE_HEAP_PROTECTION
 
 void NewSpace::Protect() {
-  MemoryAllocator::Protect(ToSpaceLow(), Capacity());
-  MemoryAllocator::Protect(FromSpaceLow(), Capacity());
+  heap()->isolate()->memory_allocator()->Protect(ToSpaceLow(), Capacity());
+  heap()->isolate()->memory_allocator()->Protect(FromSpaceLow(), Capacity());
 }
 
 
 void NewSpace::Unprotect() {
-  MemoryAllocator::Unprotect(ToSpaceLow(), Capacity(),
-                             to_space_.executable());
-  MemoryAllocator::Unprotect(FromSpaceLow(), Capacity(),
-                             from_space_.executable());
+  heap()->isolate()->memory_allocator()->Unprotect(ToSpaceLow(), Capacity(),
+                                                   to_space_.executable());
+  heap()->isolate()->memory_allocator()->Unprotect(FromSpaceLow(), Capacity(),
+                                                   from_space_.executable());
 }
 
 #endif
@@ -1495,7 +1363,7 @@ void NewSpace::Verify() {
     // be in map space.
     Map* map = object->map();
     ASSERT(map->IsMap());
-    ASSERT(Heap::map_space()->Contains(map));
+    ASSERT(heap()->map_space()->Contains(map));
 
     // The object should not be code or a map.
     ASSERT(!object->IsMap());
@@ -1520,7 +1388,8 @@ void NewSpace::Verify() {
 
 bool SemiSpace::Commit() {
   ASSERT(!is_committed());
-  if (!MemoryAllocator::CommitBlock(start_, capacity_, executable())) {
+  if (!heap()->isolate()->memory_allocator()->CommitBlock(
+      start_, capacity_, executable())) {
     return false;
   }
   committed_ = true;
@@ -1530,7 +1399,8 @@ bool SemiSpace::Commit() {
 
 bool SemiSpace::Uncommit() {
   ASSERT(is_committed());
-  if (!MemoryAllocator::UncommitBlock(start_, capacity_)) {
+  if (!heap()->isolate()->memory_allocator()->UncommitBlock(
+      start_, capacity_)) {
     return false;
   }
   committed_ = false;
@@ -1576,7 +1446,8 @@ bool SemiSpace::Grow() {
   int maximum_extra = maximum_capacity_ - capacity_;
   int extra = Min(RoundUp(capacity_, static_cast<int>(OS::AllocateAlignment())),
                   maximum_extra);
-  if (!MemoryAllocator::CommitBlock(high(), extra, executable())) {
+  if (!heap()->isolate()->memory_allocator()->CommitBlock(
+      high(), extra, executable())) {
     return false;
   }
   capacity_ += extra;
@@ -1589,7 +1460,8 @@ bool SemiSpace::GrowTo(int new_capacity) {
   ASSERT(new_capacity > capacity_);
   size_t delta = new_capacity - capacity_;
   ASSERT(IsAligned(delta, OS::AllocateAlignment()));
-  if (!MemoryAllocator::CommitBlock(high(), delta, executable())) {
+  if (!heap()->isolate()->memory_allocator()->CommitBlock(
+      high(), delta, executable())) {
     return false;
   }
   capacity_ = new_capacity;
@@ -1602,7 +1474,8 @@ bool SemiSpace::ShrinkTo(int new_capacity) {
   ASSERT(new_capacity < capacity_);
   size_t delta = capacity_ - new_capacity;
   ASSERT(IsAligned(delta, OS::AllocateAlignment()));
-  if (!MemoryAllocator::UncommitBlock(high() - delta, delta)) {
+  if (!heap()->isolate()->memory_allocator()->UncommitBlock(
+      high() - delta, delta)) {
     return false;
   }
   capacity_ = new_capacity;
@@ -1650,36 +1523,32 @@ void SemiSpaceIterator::Initialize(NewSpace* space, Address start,
 
 
 #ifdef DEBUG
-// A static array of histogram info for each type.
-static HistogramInfo heap_histograms[LAST_TYPE+1];
-static JSObject::SpillInformation js_spill_information;
-
 // heap_histograms is shared, always clear it before using it.
 static void ClearHistograms() {
+  Isolate* isolate = Isolate::Current();
   // We reset the name each time, though it hasn't changed.
-#define DEF_TYPE_NAME(name) heap_histograms[name].set_name(#name);
+#define DEF_TYPE_NAME(name) isolate->heap_histograms()[name].set_name(#name);
   INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
 #undef DEF_TYPE_NAME
 
-#define CLEAR_HISTOGRAM(name) heap_histograms[name].clear();
+#define CLEAR_HISTOGRAM(name) isolate->heap_histograms()[name].clear();
   INSTANCE_TYPE_LIST(CLEAR_HISTOGRAM)
 #undef CLEAR_HISTOGRAM
 
-  js_spill_information.Clear();
+  isolate->js_spill_information()->Clear();
 }
 
 
-static int code_kind_statistics[Code::NUMBER_OF_KINDS];
-
-
 static void ClearCodeKindStatistics() {
+  Isolate* isolate = Isolate::Current();
   for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
-    code_kind_statistics[i] = 0;
+    isolate->code_kind_statistics()[i] = 0;
   }
 }
 
 
 static void ReportCodeKindStatistics() {
+  Isolate* isolate = Isolate::Current();
   const char* table[Code::NUMBER_OF_KINDS] = { NULL };
 
 #define CASE(name)                            \
@@ -1710,8 +1579,9 @@ static void ReportCodeKindStatistics() {
 
   PrintF("\n   Code kind histograms: \n");
   for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
-    if (code_kind_statistics[i] > 0) {
-      PrintF("     %-20s: %10d bytes\n", table[i], code_kind_statistics[i]);
+    if (isolate->code_kind_statistics()[i] > 0) {
+      PrintF("     %-20s: %10d bytes\n", table[i],
+          isolate->code_kind_statistics()[i]);
     }
   }
   PrintF("\n");
@@ -1719,14 +1589,16 @@ static void ReportCodeKindStatistics() {
 
 
 static int CollectHistogramInfo(HeapObject* obj) {
+  Isolate* isolate = Isolate::Current();
   InstanceType type = obj->map()->instance_type();
   ASSERT(0 <= type && type <= LAST_TYPE);
-  ASSERT(heap_histograms[type].name() != NULL);
-  heap_histograms[type].increment_number(1);
-  heap_histograms[type].increment_bytes(obj->Size());
+  ASSERT(isolate->heap_histograms()[type].name() != NULL);
+  isolate->heap_histograms()[type].increment_number(1);
+  isolate->heap_histograms()[type].increment_bytes(obj->Size());
 
   if (FLAG_collect_heap_spill_statistics && obj->IsJSObject()) {
-    JSObject::cast(obj)->IncrementSpillStatistics(&js_spill_information);
+    JSObject::cast(obj)->IncrementSpillStatistics(
+        isolate->js_spill_information());
   }
 
   return obj->Size();
@@ -1734,13 +1606,14 @@ static int CollectHistogramInfo(HeapObject* obj) {
 
 
 static void ReportHistogram(bool print_spill) {
+  Isolate* isolate = Isolate::Current();
   PrintF("\n  Object Histogram:\n");
   for (int i = 0; i <= LAST_TYPE; i++) {
-    if (heap_histograms[i].number() > 0) {
+    if (isolate->heap_histograms()[i].number() > 0) {
       PrintF("    %-34s%10d (%10d bytes)\n",
-             heap_histograms[i].name(),
-             heap_histograms[i].number(),
-             heap_histograms[i].bytes());
+             isolate->heap_histograms()[i].name(),
+             isolate->heap_histograms()[i].number(),
+             isolate->heap_histograms()[i].bytes());
     }
   }
   PrintF("\n");
@@ -1749,8 +1622,8 @@ static void ReportHistogram(bool print_spill) {
   int string_number = 0;
   int string_bytes = 0;
 #define INCREMENT(type, size, name, camel_name)      \
-    string_number += heap_histograms[type].number(); \
-    string_bytes += heap_histograms[type].bytes();
+    string_number += isolate->heap_histograms()[type].number(); \
+    string_bytes += isolate->heap_histograms()[type].bytes();
   STRING_TYPE_LIST(INCREMENT)
 #undef INCREMENT
   if (string_number > 0) {
@@ -1759,7 +1632,7 @@ static void ReportHistogram(bool print_spill) {
   }
 
   if (FLAG_collect_heap_spill_statistics && print_spill) {
-    js_spill_information.Print();
+    isolate->js_spill_information()->Print();
   }
 }
 #endif  // DEBUG
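
The hunks above replace the old file-scope statics (heap_histograms, js_spill_information, code_kind_statistics) with storage owned by the isolate, reached through accessors such as isolate->heap_histograms(). Below is a minimal standalone sketch of that pattern; MyIsolate, kTypeCount and RecordObject are made-up stand-ins, not the V8 declarations:

// Sketch only: per-"isolate" storage for statistics that used to be
// file-scope statics.  The accessor returns a pointer to the embedded
// array, so the existing index syntax (stats()[type]) keeps working.
#include <cstdio>

static const int kTypeCount = 4;

struct HistogramInfo {
  const char* name;
  int number;
  int bytes;
  void Clear() { name = NULL; number = 0; bytes = 0; }
};

class MyIsolate {                       // hypothetical stand-in for v8::internal::Isolate
 public:
  static MyIsolate* Current() { static MyIsolate isolate; return &isolate; }
  HistogramInfo* heap_histograms() { return heap_histograms_; }
  int* code_kind_statistics() { return code_kind_statistics_; }
 private:
  MyIsolate() {
    for (int i = 0; i < kTypeCount; i++) {
      heap_histograms_[i].Clear();
      code_kind_statistics_[i] = 0;
    }
  }
  HistogramInfo heap_histograms_[kTypeCount];
  int code_kind_statistics_[kTypeCount];
};

static void RecordObject(int type, int size) {
  MyIsolate* isolate = MyIsolate::Current();
  isolate->heap_histograms()[type].number += 1;   // same [type] indexing as before
  isolate->heap_histograms()[type].bytes += size;
}

int main() {
  RecordObject(1, 64);
  RecordObject(1, 32);
  std::printf("type 1: %d objects, %d bytes\n",
              MyIsolate::Current()->heap_histograms()[1].number,
              MyIsolate::Current()->heap_histograms()[1].bytes);
  return 0;
}
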
@@ -1788,8 +1661,9 @@ void NewSpace::CollectStatistics() {
 
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-static void DoReportStatistics(HistogramInfo* info, const char* description) {
-  LOG(HeapSampleBeginEvent("NewSpace", description));
+static void DoReportStatistics(Isolate* isolate,
+                               HistogramInfo* info, const char* description) {
+  LOG(isolate, HeapSampleBeginEvent("NewSpace", description));
   // Lump all the string types together.
   int string_number = 0;
   int string_bytes = 0;
@@ -1799,17 +1673,19 @@ static void DoReportStatistics(HistogramInfo* info, const char* description) {
   STRING_TYPE_LIST(INCREMENT)
 #undef INCREMENT
   if (string_number > 0) {
-    LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
+    LOG(isolate,
+        HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
   }
 
   // Then do the other types.
   for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
     if (info[i].number() > 0) {
-      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
+      LOG(isolate,
+          HeapSampleItemEvent(info[i].name(), info[i].number(),
                               info[i].bytes()));
     }
   }
-  LOG(HeapSampleEndEvent("NewSpace", description));
+  LOG(isolate, HeapSampleEndEvent("NewSpace", description));
 }
 #endif  // ENABLE_LOGGING_AND_PROFILING
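
Every LOG(Event(...)) call in these hunks gains an isolate argument, so the event is recorded by the logger owned by that isolate rather than by a process-wide logger. The macro's real definition is not part of this section; the sketch below only illustrates the assumed shape of such a two-argument macro, with Logger and MyIsolate as stand-in types:

// Sketch (not the real V8 macro): LOG evaluates the event expression as a
// member call on the logger owned by the isolate passed as first argument.
#include <cstdio>

class Logger {
 public:
  bool is_logging() const { return true; }
  void HeapSampleBeginEvent(const char* space, const char* kind) {
    std::printf("heap-sample-begin,\"%s\",\"%s\"\n", space, kind);
  }
  void HeapSampleEndEvent(const char* space, const char* kind) {
    std::printf("heap-sample-end,\"%s\",\"%s\"\n", space, kind);
  }
};

class MyIsolate {                       // hypothetical stand-in for the isolate
 public:
  Logger* logger() { return &logger_; }
 private:
  Logger logger_;
};

#define LOG(isolate, Call)                        \
  do {                                            \
    Logger* logger = (isolate)->logger();         \
    if (logger->is_logging()) logger->Call;       \
  } while (false)

int main() {
  MyIsolate isolate;
  LOG(&isolate, HeapSampleBeginEvent("NewSpace", "allocated"));
  LOG(&isolate, HeapSampleEndEvent("NewSpace", "allocated"));
  return 0;
}
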
 
@@ -1836,8 +1712,9 @@ void NewSpace::ReportStatistics() {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (FLAG_log_gc) {
-    DoReportStatistics(allocated_histogram_, "allocated");
-    DoReportStatistics(promoted_histogram_, "promoted");
+    Isolate* isolate = ISOLATE;
+    DoReportStatistics(isolate, allocated_histogram_, "allocated");
+    DoReportStatistics(isolate, promoted_histogram_, "promoted");
   }
 #endif  // ENABLE_LOGGING_AND_PROFILING
 }
@@ -1875,14 +1752,14 @@ void FreeListNode::set_size(int size_in_bytes) {
   // field and a next pointer, we give it a filler map that gives it the
   // correct size.
   if (size_in_bytes > ByteArray::kHeaderSize) {
-    set_map(Heap::raw_unchecked_byte_array_map());
+    set_map(HEAP->raw_unchecked_byte_array_map());
     // Can't use ByteArray::cast because it fails during deserialization.
     ByteArray* this_as_byte_array = reinterpret_cast<ByteArray*>(this);
     this_as_byte_array->set_length(ByteArray::LengthFor(size_in_bytes));
   } else if (size_in_bytes == kPointerSize) {
-    set_map(Heap::raw_unchecked_one_pointer_filler_map());
+    set_map(HEAP->raw_unchecked_one_pointer_filler_map());
   } else if (size_in_bytes == 2 * kPointerSize) {
-    set_map(Heap::raw_unchecked_two_pointer_filler_map());
+    set_map(HEAP->raw_unchecked_two_pointer_filler_map());
   } else {
     UNREACHABLE();
   }
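
FreeListNode::set_size and the hunks that follow read the filler maps through HEAP-> where the old code used static Heap:: calls; ISOLATE and FACTORY appear the same way elsewhere in this patch. These shorthands are presumably thin wrappers around Isolate::Current(); their real definitions are not in this section, so the sketch below shows only the assumed shape, with stand-in types:

// Assumed shape of the HEAP / FACTORY / ISOLATE convenience macros used at
// the call sites in this patch; the real definitions live elsewhere.
#include <cstdio>

class Heap { public: const char* byte_array_map() { return "byte_array_map"; } };
class Factory { };

class MyIsolate {                      // stand-in for v8::internal::Isolate
 public:
  static MyIsolate* Current() { static MyIsolate isolate; return &isolate; }
  Heap* heap() { return &heap_; }
  Factory* factory() { return &factory_; }
 private:
  Heap heap_;
  Factory factory_;
};

#define ISOLATE (MyIsolate::Current())
#define HEAP    (MyIsolate::Current()->heap())
#define FACTORY (MyIsolate::Current()->factory())

int main() {
  // Call sites keep a terse, global-looking spelling while the data is per isolate.
  std::printf("%s\n", HEAP->byte_array_map());
  return 0;
}
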
@@ -1893,7 +1770,7 @@ void FreeListNode::set_size(int size_in_bytes) {
 
 Address FreeListNode::next() {
   ASSERT(IsFreeListNode(this));
-  if (map() == Heap::raw_unchecked_byte_array_map()) {
+  if (map() == HEAP->raw_unchecked_byte_array_map()) {
     ASSERT(Size() >= kNextOffset + kPointerSize);
     return Memory::Address_at(address() + kNextOffset);
   } else {
@@ -1904,7 +1781,7 @@ Address FreeListNode::next() {
 
 void FreeListNode::set_next(Address next) {
   ASSERT(IsFreeListNode(this));
-  if (map() == Heap::raw_unchecked_byte_array_map()) {
+  if (map() == HEAP->raw_unchecked_byte_array_map()) {
     ASSERT(Size() >= kNextOffset + kPointerSize);
     Memory::Address_at(address() + kNextOffset) = next;
   } else {
@@ -1945,7 +1822,7 @@ void OldSpaceFreeList::RebuildSizeList() {
 
 int OldSpaceFreeList::Free(Address start, int size_in_bytes) {
 #ifdef DEBUG
-  MemoryAllocator::ZapBlock(start, size_in_bytes);
+  Isolate::Current()->memory_allocator()->ZapBlock(start, size_in_bytes);
 #endif
   FreeListNode* node = FreeListNode::FromAddress(start);
   node->set_size(size_in_bytes);
@@ -2089,10 +1966,10 @@ void FixedSizeFreeList::Reset() {
 
 void FixedSizeFreeList::Free(Address start) {
 #ifdef DEBUG
-  MemoryAllocator::ZapBlock(start, object_size_);
+  Isolate::Current()->memory_allocator()->ZapBlock(start, object_size_);
 #endif
   // We only use the freelists with mark-sweep.
-  ASSERT(!MarkCompactCollector::IsCompacting());
+  ASSERT(!HEAP->mark_compact_collector()->IsCompacting());
   FreeListNode* node = FreeListNode::FromAddress(start);
   node->set_size(object_size_);
   node->set_next(NULL);
@@ -2219,13 +2096,14 @@ void PagedSpace::FreePages(Page* prev, Page* last) {
     first_page_ = last->next_page();
   } else {
     first = prev->next_page();
-    MemoryAllocator::SetNextPage(prev, last->next_page());
+    heap()->isolate()->memory_allocator()->SetNextPage(
+        prev, last->next_page());
   }
 
   // Attach it after the last page.
-  MemoryAllocator::SetNextPage(last_page_, first);
+  heap()->isolate()->memory_allocator()->SetNextPage(last_page_, first);
   last_page_ = last;
-  MemoryAllocator::SetNextPage(last, NULL);
+  heap()->isolate()->memory_allocator()->SetNextPage(last, NULL);
 
   // Clean them up.
   do {
@@ -2264,10 +2142,8 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
   if (page_list_is_chunk_ordered_) return;
 
   Page* new_last_in_use = Page::FromAddress(NULL);
-  MemoryAllocator::RelinkPageListInChunkOrder(this,
-                                              &first_page_,
-                                              &last_page_,
-                                              &new_last_in_use);
+  heap()->isolate()->memory_allocator()->RelinkPageListInChunkOrder(
+      this, &first_page_, &last_page_, &new_last_in_use);
   ASSERT(new_last_in_use->is_valid());
 
   if (new_last_in_use != last_in_use) {
@@ -2284,7 +2160,7 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
         accounting_stats_.AllocateBytes(size_in_bytes);
         DeallocateBlock(start, size_in_bytes, add_to_freelist);
       } else {
-        Heap::CreateFillerObjectAt(start, size_in_bytes);
+        heap()->CreateFillerObjectAt(start, size_in_bytes);
       }
     }
 
@@ -2311,7 +2187,7 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
         accounting_stats_.AllocateBytes(size_in_bytes);
         DeallocateBlock(start, size_in_bytes, add_to_freelist);
       } else {
-        Heap::CreateFillerObjectAt(start, size_in_bytes);
+        heap()->CreateFillerObjectAt(start, size_in_bytes);
       }
     }
   }
@@ -2340,7 +2216,7 @@ bool PagedSpace::ReserveSpace(int bytes) {
   int bytes_left_to_reserve = bytes;
   while (bytes_left_to_reserve > 0) {
     if (!reserved_page->next_page()->is_valid()) {
-      if (Heap::OldGenerationAllocationLimitReached()) return false;
+      if (heap()->OldGenerationAllocationLimitReached()) return false;
       Expand(reserved_page);
     }
     bytes_left_to_reserve -= Page::kPageSize;
@@ -2358,7 +2234,7 @@ bool PagedSpace::ReserveSpace(int bytes) {
 // You have to call this last, since the implementation from PagedSpace
 // doesn't know that memory was 'promised' to large object space.
 bool LargeObjectSpace::ReserveSpace(int bytes) {
-  return Heap::OldGenerationSpaceAvailable() >= bytes;
+  return heap()->OldGenerationSpaceAvailable() >= bytes;
 }
 
 
@@ -2377,7 +2253,7 @@ HeapObject* OldSpace::SlowAllocateRaw(int size_in_bytes) {
 
   // There is no next page in this space.  Try free list allocation unless that
   // is currently forbidden.
-  if (!Heap::linear_allocation()) {
+  if (!heap()->linear_allocation()) {
     int wasted_bytes;
     Object* result;
     MaybeObject* maybe = free_list_.Allocate(size_in_bytes, &wasted_bytes);
@@ -2404,7 +2280,8 @@ HeapObject* OldSpace::SlowAllocateRaw(int size_in_bytes) {
   // Free list allocation failed and there is no next page.  Fail if we have
   // hit the old generation size limit that should cause a garbage
   // collection.
-  if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+  if (!heap()->always_allocate() &&
+      heap()->OldGenerationAllocationLimitReached()) {
     return NULL;
   }
 
@@ -2467,28 +2344,14 @@ void OldSpace::DeallocateBlock(Address start,
 
 
 #ifdef DEBUG
-struct CommentStatistic {
-  const char* comment;
-  int size;
-  int count;
-  void Clear() {
-    comment = NULL;
-    size = 0;
-    count = 0;
-  }
-};
-
-
-// must be small, since an iteration is used for lookup
-const int kMaxComments = 64;
-static CommentStatistic comments_statistics[kMaxComments+1];
-
-
 void PagedSpace::ReportCodeStatistics() {
+  Isolate* isolate = Isolate::Current();
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   ReportCodeKindStatistics();
   PrintF("Code comment statistics (\"   [ comment-txt   :    size/   "
          "count  (average)\"):\n");
-  for (int i = 0; i <= kMaxComments; i++) {
+  for (int i = 0; i <= CommentStatistic::kMaxComments; i++) {
     const CommentStatistic& cs = comments_statistics[i];
     if (cs.size > 0) {
       PrintF("   %-30s: %10d/%6d     (%d)\n", cs.comment, cs.size, cs.count,
@@ -2500,23 +2363,30 @@ void PagedSpace::ReportCodeStatistics() {
 
 
 void PagedSpace::ResetCodeStatistics() {
+  Isolate* isolate = Isolate::Current();
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   ClearCodeKindStatistics();
-  for (int i = 0; i < kMaxComments; i++) comments_statistics[i].Clear();
-  comments_statistics[kMaxComments].comment = "Unknown";
-  comments_statistics[kMaxComments].size = 0;
-  comments_statistics[kMaxComments].count = 0;
+  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
+    comments_statistics[i].Clear();
+  }
+  comments_statistics[CommentStatistic::kMaxComments].comment = "Unknown";
+  comments_statistics[CommentStatistic::kMaxComments].size = 0;
+  comments_statistics[CommentStatistic::kMaxComments].count = 0;
 }
 
 
-// Adds comment to 'comment_statistics' table. Performance OK sa long as
+// Adds comment to 'comment_statistics' table. Performance OK as long as
 // 'kMaxComments' is small
-static void EnterComment(const char* comment, int delta) {
+static void EnterComment(Isolate* isolate, const char* comment, int delta) {
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   // Do not count empty comments
   if (delta <= 0) return;
-  CommentStatistic* cs = &comments_statistics[kMaxComments];
+  CommentStatistic* cs = &comments_statistics[CommentStatistic::kMaxComments];
   // Search for a free or matching entry in 'comments_statistics': 'cs'
   // points to result.
-  for (int i = 0; i < kMaxComments; i++) {
+  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
     if (comments_statistics[i].comment == NULL) {
       cs = &comments_statistics[i];
       cs->comment = comment;
@@ -2534,7 +2404,7 @@ static void EnterComment(const char* comment, int delta) {
 
 // Call for each nested comment start (start marked with '[ xxx', end marked
 // with ']').  RelocIterator 'it' must point to a comment reloc info.
-static void CollectCommentStatistics(RelocIterator* it) {
+static void CollectCommentStatistics(Isolate* isolate, RelocIterator* it) {
   ASSERT(!it->done());
   ASSERT(it->rinfo()->rmode() == RelocInfo::COMMENT);
   const char* tmp = reinterpret_cast<const char*>(it->rinfo()->data());
@@ -2559,13 +2429,13 @@ static void CollectCommentStatistics(RelocIterator* it) {
       flat_delta += static_cast<int>(it->rinfo()->pc() - prev_pc);
       if (txt[0] == ']') break;  // End of nested  comment
       // A new comment
-      CollectCommentStatistics(it);
+      CollectCommentStatistics(isolate, it);
       // Skip code that was covered with previous comment
       prev_pc = it->rinfo()->pc();
     }
     it->next();
   }
-  EnterComment(comment_txt, flat_delta);
+  EnterComment(isolate, comment_txt, flat_delta);
 }
 
 
@@ -2573,18 +2443,19 @@ static void CollectCommentStatistics(RelocIterator* it) {
 // - by code kind
 // - by code comment
 void PagedSpace::CollectCodeStatistics() {
+  Isolate* isolate = heap()->isolate();
   HeapObjectIterator obj_it(this);
   for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
     if (obj->IsCode()) {
       Code* code = Code::cast(obj);
-      code_kind_statistics[code->kind()] += code->Size();
+      isolate->code_kind_statistics()[code->kind()] += code->Size();
       RelocIterator it(code);
       int delta = 0;
       const byte* prev_pc = code->instruction_start();
       while (!it.done()) {
         if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
           delta += static_cast<int>(it.rinfo()->pc() - prev_pc);
-          CollectCommentStatistics(&it);
+          CollectCommentStatistics(isolate, &it);
           prev_pc = it.rinfo()->pc();
         }
         it.next();
@@ -2593,7 +2464,7 @@ void PagedSpace::CollectCodeStatistics() {
       ASSERT(code->instruction_start() <= prev_pc &&
              prev_pc <= code->instruction_end());
       delta += static_cast<int>(code->instruction_end() - prev_pc);
-      EnterComment("NoComment", delta);
+      EnterComment(isolate, "NoComment", delta);
     }
   }
 }
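
CollectCodeStatistics and EnterComment now take the isolate explicitly and accumulate into the CommentStatistic table held there (the struct is declared near the end of spaces.h in this patch), with slot kMaxComments reserved as an "Unknown" overflow bucket. A self-contained sketch of that bookkeeping, with a tiny table and strcmp-based matching standing in for the real lookup:

// Sketch of the comment-statistics table: linear search for a free or
// matching slot, with the last slot reserved as an "Unknown" catch-all.
#include <cstdio>
#include <cstring>

struct CommentStatistic {
  const char* comment;
  int size;
  int count;
  static const int kMaxComments = 4;   // kept small on purpose; lookup is a linear scan
};

static CommentStatistic table[CommentStatistic::kMaxComments + 1];

static void ResetComments() {
  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
    table[i].comment = NULL;
    table[i].size = 0;
    table[i].count = 0;
  }
  table[CommentStatistic::kMaxComments].comment = "Unknown";
  table[CommentStatistic::kMaxComments].size = 0;
  table[CommentStatistic::kMaxComments].count = 0;
}

static void EnterComment(const char* comment, int delta) {
  if (delta <= 0) return;                      // do not count empty comments
  CommentStatistic* cs = &table[CommentStatistic::kMaxComments];
  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
    if (table[i].comment == NULL) {            // free slot: claim it
      cs = &table[i];
      cs->comment = comment;
      break;
    } else if (std::strcmp(table[i].comment, comment) == 0) {
      cs = &table[i];                          // existing slot: accumulate
      break;
    }
  }
  cs->size += delta;                           // overflow falls into "Unknown"
  cs->count += 1;
}

int main() {
  ResetComments();
  EnterComment("[ inlined call", 24);
  EnterComment("[ inlined call", 8);
  EnterComment("NoComment", 100);
  for (int i = 0; i <= CommentStatistic::kMaxComments; i++) {
    if (table[i].size > 0) {
      std::printf("%-20s: %d bytes / %d\n", table[i].comment, table[i].size, table[i].count);
    }
  }
  return 0;
}
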
@@ -2687,7 +2558,7 @@ HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) {
   // There is no next page in this space.  Try free list allocation unless
   // that is currently forbidden.  The fixed space free list implicitly assumes
   // that all free blocks are of the fixed size.
-  if (!Heap::linear_allocation()) {
+  if (!heap()->linear_allocation()) {
     Object* result;
     MaybeObject* maybe = free_list_.Allocate();
     if (maybe->ToObject(&result)) {
@@ -2711,7 +2582,8 @@ HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) {
   // Free list allocation failed and there is no next page.  Fail if we have
   // hit the old generation size limit that should cause a garbage
   // collection.
-  if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+  if (!heap()->always_allocate() &&
+      heap()->OldGenerationAllocationLimitReached()) {
     return NULL;
   }
 
@@ -2813,7 +2685,7 @@ void MapSpace::VerifyObject(HeapObject* object) {
 void CellSpace::VerifyObject(HeapObject* object) {
   // The object should be a global object property cell or a free-list node.
   ASSERT(object->IsJSGlobalPropertyCell() ||
-         object->map() == Heap::two_pointer_filler_map());
+         object->map() == heap()->two_pointer_filler_map());
 }
 #endif
 
@@ -2850,28 +2722,33 @@ LargeObjectChunk* LargeObjectChunk::New(int size_in_bytes,
                                         Executability executable) {
   size_t requested = ChunkSizeFor(size_in_bytes);
   size_t size;
-  void* mem = MemoryAllocator::AllocateRawMemory(requested, &size, executable);
+  Isolate* isolate = Isolate::Current();
+  void* mem = isolate->memory_allocator()->AllocateRawMemory(
+      requested, &size, executable);
   if (mem == NULL) return NULL;
 
   // The start of the chunk may be overlayed with a page so we have to
   // make sure that the page flags fit in the size field.
   ASSERT((size & Page::kPageFlagMask) == 0);
 
-  LOG(NewEvent("LargeObjectChunk", mem, size));
+  LOG(isolate, NewEvent("LargeObjectChunk", mem, size));
   if (size < requested) {
-    MemoryAllocator::FreeRawMemory(mem, size, executable);
-    LOG(DeleteEvent("LargeObjectChunk", mem));
+    isolate->memory_allocator()->FreeRawMemory(
+        mem, size, executable);
+    LOG(isolate, DeleteEvent("LargeObjectChunk", mem));
     return NULL;
   }
 
   ObjectSpace space = (executable == EXECUTABLE)
       ? kObjectSpaceCodeSpace
       : kObjectSpaceLoSpace;
-  MemoryAllocator::PerformAllocationCallback(
+  isolate->memory_allocator()->PerformAllocationCallback(
       space, kAllocationActionAllocate, size);
 
   LargeObjectChunk* chunk = reinterpret_cast<LargeObjectChunk*>(mem);
   chunk->size_ = size;
+  Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
+  page->heap_ = Isolate::Current()->heap();
   return chunk;
 }
 
@@ -2887,8 +2764,8 @@ int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
 // -----------------------------------------------------------------------------
 // LargeObjectSpace
 
-LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
-    : Space(id, NOT_EXECUTABLE),  // Managed on a per-allocation basis
+LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
+    : Space(heap, id, NOT_EXECUTABLE),  // Managed on a per-allocation basis
       first_chunk_(NULL),
       size_(0),
       page_count_(0),
@@ -2908,15 +2785,17 @@ void LargeObjectSpace::TearDown() {
   while (first_chunk_ != NULL) {
     LargeObjectChunk* chunk = first_chunk_;
     first_chunk_ = first_chunk_->next();
-    LOG(DeleteEvent("LargeObjectChunk", chunk->address()));
+    LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk->address()));
     Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
     Executability executable =
         page->IsPageExecutable() ? EXECUTABLE : NOT_EXECUTABLE;
     ObjectSpace space = kObjectSpaceLoSpace;
     if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
     size_t size = chunk->size();
-    MemoryAllocator::FreeRawMemory(chunk->address(), size, executable);
-    MemoryAllocator::PerformAllocationCallback(
+    heap()->isolate()->memory_allocator()->FreeRawMemory(chunk->address(),
+                                                         size,
+                                                         executable);
+    heap()->isolate()->memory_allocator()->PerformAllocationCallback(
         space, kAllocationActionFree, size);
   }
 
@@ -2931,7 +2810,8 @@ void LargeObjectSpace::TearDown() {
 void LargeObjectSpace::Protect() {
   LargeObjectChunk* chunk = first_chunk_;
   while (chunk != NULL) {
-    MemoryAllocator::Protect(chunk->address(), chunk->size());
+    heap()->isolate()->memory_allocator()->Protect(chunk->address(),
+                                                   chunk->size());
     chunk = chunk->next();
   }
 }
@@ -2941,8 +2821,8 @@ void LargeObjectSpace::Unprotect() {
   LargeObjectChunk* chunk = first_chunk_;
   while (chunk != NULL) {
     bool is_code = chunk->GetObject()->IsCode();
-    MemoryAllocator::Unprotect(chunk->address(), chunk->size(),
-                               is_code ? EXECUTABLE : NOT_EXECUTABLE);
+    heap()->isolate()->memory_allocator()->Unprotect(chunk->address(),
+        chunk->size(), is_code ? EXECUTABLE : NOT_EXECUTABLE);
     chunk = chunk->next();
   }
 }
@@ -2957,7 +2837,8 @@ MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,
 
   // Check if we want to force a GC before growing the old space further.
   // If so, fail the allocation.
-  if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+  if (!heap()->always_allocate() &&
+      heap()->OldGenerationAllocationLimitReached()) {
     return Failure::RetryAfterGC(identity());
   }
 
@@ -3062,22 +2943,22 @@ void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) {
         // Iterate regions of the first normal page covering object.
         uint32_t first_region_number = page->GetRegionNumberForAddress(start);
         newmarks |=
-            Heap::IterateDirtyRegions(marks >> first_region_number,
-                                      start,
-                                      end,
-                                      &Heap::IteratePointersInDirtyRegion,
-                                      copy_object) << first_region_number;
+            heap()->IterateDirtyRegions(marks >> first_region_number,
+                                        start,
+                                        end,
+                                        &Heap::IteratePointersInDirtyRegion,
+                                        copy_object) << first_region_number;
 
         start = end;
         end = start + Page::kPageSize;
         while (end <= object_end) {
           // Iterate next 32 regions.
           newmarks |=
-              Heap::IterateDirtyRegions(marks,
-                                        start,
-                                        end,
-                                        &Heap::IteratePointersInDirtyRegion,
-                                        copy_object);
+              heap()->IterateDirtyRegions(marks,
+                                          start,
+                                          end,
+                                          &Heap::IteratePointersInDirtyRegion,
+                                          copy_object);
           start = end;
           end = start + Page::kPageSize;
         }
@@ -3086,11 +2967,11 @@ void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) {
           // Iterate the last piece of an object which is less than
           // Page::kPageSize.
           newmarks |=
-              Heap::IterateDirtyRegions(marks,
-                                        start,
-                                        object_end,
-                                        &Heap::IteratePointersInDirtyRegion,
-                                        copy_object);
+              heap()->IterateDirtyRegions(marks,
+                                          start,
+                                          object_end,
+                                          &Heap::IteratePointersInDirtyRegion,
+                                          copy_object);
         }
 
         page->SetRegionMarks(newmarks);
@@ -3107,7 +2988,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
     HeapObject* object = current->GetObject();
     if (object->IsMarked()) {
       object->ClearMark();
-      MarkCompactCollector::tracer()->decrement_marked_count();
+      heap()->mark_compact_collector()->tracer()->decrement_marked_count();
       previous = current;
       current = current->next();
     } else {
@@ -3127,7 +3008,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
       }
 
       // Free the chunk.
-      MarkCompactCollector::ReportDeleteIfNeeded(object);
+      heap()->mark_compact_collector()->ReportDeleteIfNeeded(object);
       LiveObjectList::ProcessNonLive(object);
 
       size_ -= static_cast<int>(chunk_size);
@@ -3135,10 +3016,12 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
       page_count_--;
       ObjectSpace space = kObjectSpaceLoSpace;
       if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
-      MemoryAllocator::FreeRawMemory(chunk_address, chunk_size, executable);
-      MemoryAllocator::PerformAllocationCallback(space, kAllocationActionFree,
-                                                 size_);
-      LOG(DeleteEvent("LargeObjectChunk", chunk_address));
+      heap()->isolate()->memory_allocator()->FreeRawMemory(chunk_address,
+                                                           chunk_size,
+                                                           executable);
+      heap()->isolate()->memory_allocator()->PerformAllocationCallback(
+          space, kAllocationActionFree, size_);
+      LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk_address));
     }
   }
 }
@@ -3146,7 +3029,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
 
 bool LargeObjectSpace::Contains(HeapObject* object) {
   Address address = object->address();
-  if (Heap::new_space()->Contains(address)) {
+  if (heap()->new_space()->Contains(address)) {
     return false;
   }
   Page* page = Page::FromAddress(address);
@@ -3175,7 +3058,7 @@ void LargeObjectSpace::Verify() {
     // in map space.
     Map* map = object->map();
     ASSERT(map->IsMap());
-    ASSERT(Heap::map_space()->Contains(map));
+    ASSERT(heap()->map_space()->Contains(map));
 
     // We have only code, sequential strings, external strings
     // (sequential strings that have been morphed into external
@@ -3202,9 +3085,9 @@ void LargeObjectSpace::Verify() {
         Object* element = array->get(j);
         if (element->IsHeapObject()) {
           HeapObject* element_object = HeapObject::cast(element);
-          ASSERT(Heap::Contains(element_object));
+          ASSERT(heap()->Contains(element_object));
           ASSERT(element_object->map()->IsMap());
-          if (Heap::InNewSpace(element_object)) {
+          if (heap()->InNewSpace(element_object)) {
             Address array_addr = object->address();
             Address element_addr = array_addr + FixedArray::kHeaderSize +
                 j * kPointerSize;
@@ -3243,11 +3126,12 @@ void LargeObjectSpace::ReportStatistics() {
 
 
 void LargeObjectSpace::CollectCodeStatistics() {
+  Isolate* isolate = heap()->isolate();
   LargeObjectIterator obj_it(this);
   for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
     if (obj->IsCode()) {
       Code* code = Code::cast(obj);
-      code_kind_statistics[code->kind()] += code->Size();
+      isolate->code_kind_statistics()[code->kind()] += code->Size();
     }
   }
 }
index 6165255fd47a27c89b8db68325f38e404edbb94d..358555b5110ef62e6ded99f0430c7ab13e9fb9ad 100644 (file)
@@ -34,6 +34,8 @@
 namespace v8 {
 namespace internal {
 
+class Isolate;
+
 // -----------------------------------------------------------------------------
 // Heap structures:
 //
@@ -241,7 +243,7 @@ class Page {
   static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
 
   static const int kPageHeaderSize = kPointerSize + kPointerSize + kIntSize +
-    kIntSize + kPointerSize;
+    kIntSize + kPointerSize + kPointerSize;
 
   // The start offset of the object area in a page. Aligned to both maps and
   // code alignment to be suitable for both.
@@ -286,7 +288,7 @@ class Page {
   // This invariant guarantees that after flipping flag meaning at the
   // beginning of scavenge all pages in use will be marked as having valid
   // watermark.
-  static inline void FlipMeaningOfInvalidatedWatermarkFlag();
+  static inline void FlipMeaningOfInvalidatedWatermarkFlag(Heap* heap);
 
   // Returns true if the page allocation watermark was not altered during
   // scavenge.
@@ -312,11 +314,6 @@ class Page {
   STATIC_CHECK(kBitsPerInt - kAllocationWatermarkOffsetShift >=
                kAllocationWatermarkOffsetBits);
 
-  // This field contains the meaning of the WATERMARK_INVALIDATED flag.
-  // Instead of clearing this flag from all pages we just flip
-  // its meaning at the beginning of a scavenge.
-  static intptr_t watermark_invalidated_mark_;
-
   //---------------------------------------------------------------------------
   // Page header description.
   //
@@ -353,6 +350,8 @@ class Page {
   // During scavenge collection this field is used to store allocation watermark
   // if it is altered during scavenge.
   Address mc_first_forwarded;
+
+  Heap* heap_;
 };
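
The page header gains a Heap* back-pointer (and kPageHeaderSize grows by one word to match); LargeObjectChunk::New earlier in this patch stamps it into the page that overlays each large-object chunk. Combined with Page::FromAddress, which masks an address down to its page boundary, any object can reach its owning heap without global state. A simplified, self-contained sketch of the masking idea (page size and header layout here are made up):

// Sketch: store a back-pointer in the page header and recover it from any
// object address by masking off the low bits of the address.
#include <cassert>
#include <cstdint>
#include <cstdio>

static const uintptr_t kPageSize = 1 << 13;               // 8K pages in this sketch
static const uintptr_t kPageAlignmentMask = kPageSize - 1;

struct Heap { const char* name; };

struct Page {
  Heap* heap_;                                             // the new back-pointer
  static Page* FromAddress(uintptr_t addr) {
    return reinterpret_cast<Page*>(addr & ~kPageAlignmentMask);
  }
};

int main() {
  Heap heap = { "main heap" };

  // Carve one aligned "page" out of a buffer and stamp the back-pointer.
  static char buffer[2 * kPageSize];
  uintptr_t base = reinterpret_cast<uintptr_t>(buffer);
  uintptr_t aligned = (base + kPageAlignmentMask) & ~kPageAlignmentMask;
  Page* page = reinterpret_cast<Page*>(aligned);
  page->heap_ = &heap;

  // Any address inside the page finds the owning heap in O(1).
  uintptr_t object_addr = aligned + 256;
  assert(Page::FromAddress(object_addr)->heap_ == &heap);
  std::printf("object at +256 belongs to: %s\n",
              Page::FromAddress(object_addr)->heap_->name);
  return 0;
}
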
 
 
@@ -360,11 +359,13 @@ class Page {
 // Space is the abstract superclass for all allocation spaces.
 class Space : public Malloced {
  public:
-  Space(AllocationSpace id, Executability executable)
-      : id_(id), executable_(executable) {}
+  Space(Heap* heap, AllocationSpace id, Executability executable)
+      : heap_(heap), id_(id), executable_(executable) {}
 
   virtual ~Space() {}
 
+  Heap* heap() const { return heap_; }
+
   // Does the space need executable memory?
   Executability executable() { return executable_; }
 
@@ -397,6 +398,7 @@ class Space : public Malloced {
   virtual bool ReserveSpace(int bytes) = 0;
 
  private:
+  Heap* heap_;
   AllocationSpace id_;
   Executability executable_;
 };
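
Space now stores the owning Heap* and exposes it via heap(); every concrete space constructor in this header (PagedSpace, SemiSpace, NewSpace, OldSpace, FixedSpace, MapSpace, CellSpace, LargeObjectSpace) grows a leading Heap* parameter to thread it through, which is what the spaces.cc hunks earlier in this patch use in place of the old static Heap:: calls. A minimal sketch of that constructor chain with stand-in classes:

// Sketch of the constructor threading: every space remembers which heap
// owns it, so instance methods can reach per-heap state via heap().
#include <cstdio>

class Heap;

class Space {
 public:
  Space(Heap* heap, const char* id) : heap_(heap), id_(id) {}
  virtual ~Space() {}
  Heap* heap() const { return heap_; }
  const char* id() const { return id_; }
 private:
  Heap* heap_;
  const char* id_;
};

class PagedSpace : public Space {
 public:
  PagedSpace(Heap* heap, const char* id) : Space(heap, id) {}
};

class OldSpace : public PagedSpace {
 public:
  explicit OldSpace(Heap* heap) : PagedSpace(heap, "old_space") {}
  bool ReserveSpace(int bytes);      // uses heap() instead of static Heap:: calls
};

class Heap {
 public:
  Heap() : old_space_(this) {}
  bool OldGenerationAllocationLimitReached() const { return false; }
  OldSpace* old_space() { return &old_space_; }
 private:
  OldSpace old_space_;
};

bool OldSpace::ReserveSpace(int bytes) {
  // Same shape as the ReserveSpace hunk above: ask *our* heap, not a global one.
  if (heap()->OldGenerationAllocationLimitReached()) return false;
  return bytes >= 0;
}

int main() {
  Heap heap;
  std::printf("reserved: %s\n", heap.old_space()->ReserveSpace(4096) ? "yes" : "no");
  return 0;
}
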
@@ -409,19 +411,19 @@ class Space : public Malloced {
 // displacements cover the entire 4GB virtual address space.  On 64-bit
 // platforms, we support this using the CodeRange object, which reserves and
 // manages a range of virtual memory.
-class CodeRange : public AllStatic {
+class CodeRange {
  public:
   // Reserves a range of virtual memory, but does not commit any of it.
   // Can only be called once, at heap initialization time.
   // Returns false on failure.
-  static bool Setup(const size_t requested_size);
+  bool Setup(const size_t requested_size);
 
   // Frees the range of virtual memory, and frees the data structures used to
   // manage it.
-  static void TearDown();
+  void TearDown();
 
-  static bool exists() { return code_range_ != NULL; }
-  static bool contains(Address address) {
+  bool exists() { return code_range_ != NULL; }
+  bool contains(Address address) {
     if (code_range_ == NULL) return false;
     Address start = static_cast<Address>(code_range_->address());
     return start <= address && address < start + code_range_->size();
@@ -430,13 +432,15 @@ class CodeRange : public AllStatic {
   // Allocates a chunk of memory from the large-object portion of
   // the code range.  On platforms with no separate code range, should
   // not be called.
-  MUST_USE_RESULT static void* AllocateRawMemory(const size_t requested,
-                                                 size_t* allocated);
-  static void FreeRawMemory(void* buf, size_t length);
+  MUST_USE_RESULT void* AllocateRawMemory(const size_t requested,
+                                          size_t* allocated);
+  void FreeRawMemory(void* buf, size_t length);
 
  private:
+  CodeRange();
+
   // The reserved range of virtual memory that all code objects are put in.
-  static VirtualMemory* code_range_;
+  VirtualMemory* code_range_;
   // Plain old data class, just a struct plus a constructor.
   class FreeBlock {
    public:
@@ -452,20 +456,26 @@ class CodeRange : public AllStatic {
   // Freed blocks of memory are added to the free list.  When the allocation
   // list is exhausted, the free list is sorted and merged to make the new
   // allocation list.
-  static List<FreeBlock> free_list_;
+  List<FreeBlock> free_list_;
   // Memory is allocated from the free blocks on the allocation list.
   // The block at current_allocation_block_index_ is the current block.
-  static List<FreeBlock> allocation_list_;
-  static int current_allocation_block_index_;
+  List<FreeBlock> allocation_list_;
+  int current_allocation_block_index_;
 
   // Finds a block on the allocation list that contains at least the
   // requested amount of memory.  If none is found, sorts and merges
   // the existing free memory blocks, and searches again.
   // If none can be found, terminates V8 with FatalProcessOutOfMemory.
-  static void GetNextAllocationBlock(size_t requested);
+  void GetNextAllocationBlock(size_t requested);
   // Compares the start addresses of two free blocks.
   static int CompareFreeBlockAddress(const FreeBlock* left,
                                      const FreeBlock* right);
+
+  friend class Isolate;
+
+  Isolate* isolate_;
+
+  DISALLOW_COPY_AND_ASSIGN(CodeRange);
 };
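
CodeRange stops being an AllStatic collection of globals: every member loses its static qualifier, the constructor becomes private, copying is disallowed, and Isolate is made a friend so only the isolate can construct and own the object. MemoryAllocator below gets the same treatment. A compact sketch of that ownership pattern (CodeRangeLike and MyIsolate are made-up names):

// Sketch: a formerly all-static component turned into an object that only
// the (stand-in) isolate may construct; everyone else uses an accessor.
#include <cstddef>
#include <cstdio>

class MyIsolate;

class CodeRangeLike {
 public:
  bool Setup(size_t requested_size) { reserved_ = requested_size; return true; }
  bool exists() const { return reserved_ != 0; }
  size_t reserved() const { return reserved_; }
 private:
  CodeRangeLike() : reserved_(0) {}       // not constructible by arbitrary code
  CodeRangeLike(const CodeRangeLike&);    // copying disallowed (pre-C++11 style)
  void operator=(const CodeRangeLike&);
  size_t reserved_;
  friend class MyIsolate;                 // only the isolate may create one
};

class MyIsolate {
 public:
  CodeRangeLike* code_range() { return &code_range_; }
 private:
  CodeRangeLike code_range_;
};

int main() {
  MyIsolate isolate;
  isolate.code_range()->Setup(512u * 1024u * 1024u);
  std::printf("code range exists: %s, %lu bytes reserved\n",
              isolate.code_range()->exists() ? "yes" : "no",
              static_cast<unsigned long>(isolate.code_range()->reserved()));
  return 0;
}
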
 
 
@@ -493,14 +503,14 @@ class CodeRange : public AllStatic {
 //
 
 
-class MemoryAllocator : public AllStatic {
+class MemoryAllocator {
  public:
   // Initializes its internal bookkeeping structures.
   // Max capacity of the total space and executable memory limit.
-  static bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
+  bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
 
   // Deletes valid chunks.
-  static void TearDown();
+  void TearDown();
 
   // Reserves an initial address range of virtual memory to be split between
   // the two new space semispaces, the old space, and the map space.  The
@@ -511,7 +521,7 @@ class MemoryAllocator : public AllStatic {
   // address of the initial chunk if successful, with the side effect of
   // setting the initial chunk, or else NULL if unsuccessful and leaves the
   // initial chunk NULL.
-  static void* ReserveInitialChunk(const size_t requested);
+  void* ReserveInitialChunk(const size_t requested);
 
   // Commits pages from an as-yet-unmanaged block of virtual memory into a
   // paged space.  The block should be part of the initial chunk reserved via
@@ -520,24 +530,24 @@ class MemoryAllocator : public AllStatic {
   // address is non-null and that it is big enough to hold at least one
   // page-aligned page.  The call always succeeds, and num_pages is always
   // greater than zero.
-  static Page* CommitPages(Address start, size_t size, PagedSpace* owner,
-                           int* num_pages);
+  Page* CommitPages(Address start, size_t size, PagedSpace* owner,
+                    int* num_pages);
 
   // Commit a contiguous block of memory from the initial chunk.  Assumes that
   // the address is not NULL, the size is greater than zero, and that the
   // block is contained in the initial chunk.  Returns true if it succeeded
   // and false otherwise.
-  static bool CommitBlock(Address start, size_t size, Executability executable);
+  bool CommitBlock(Address start, size_t size, Executability executable);
 
   // Uncommit a contiguous block of memory [start..(start+size)[.
   // start is not NULL, the size is greater than zero, and the
   // block is contained in the initial chunk.  Returns true if it succeeded
   // and false otherwise.
-  static bool UncommitBlock(Address start, size_t size);
+  bool UncommitBlock(Address start, size_t size);
 
   // Zaps a contiguous block of memory [start..(start+size)[ thus
   // filling it up with a recognizable non-NULL bit pattern.
-  static void ZapBlock(Address start, size_t size);
+  void ZapBlock(Address start, size_t size);
 
   // Attempts to allocate the requested (non-zero) number of pages from the
   // OS.  Fewer pages might be allocated than requested. If it fails to
@@ -548,8 +558,8 @@ class MemoryAllocator : public AllStatic {
   // number of allocated pages is returned in the output parameter
   // allocated_pages.  If the PagedSpace owner is executable and there is
   // a code range, the pages are allocated from the code range.
-  static Page* AllocatePages(int requested_pages, int* allocated_pages,
-                             PagedSpace* owner);
+  Page* AllocatePages(int requested_pages, int* allocated_pages,
+                      PagedSpace* owner);
 
   // Frees pages from a given page and after. Requires pages to be
   // linked in chunk-order (see comment for class).
@@ -558,10 +568,10 @@ class MemoryAllocator : public AllStatic {
   // Otherwise, the function searches a page after 'p' that is
   // the first page of a chunk. Pages after the found page
   // are freed and the function returns 'p'.
-  static Page* FreePages(Page* p);
+  Page* FreePages(Page* p);
 
   // Frees all pages owned by given space.
-  static void FreeAllPages(PagedSpace* space);
+  void FreeAllPages(PagedSpace* space);
 
   // Allocates and frees raw memory of certain size.
   // These are just thin wrappers around OS::Allocate and OS::Free,
@@ -569,96 +579,83 @@ class MemoryAllocator : public AllStatic {
   // If the flag is EXECUTABLE and a code range exists, the requested
   // memory is allocated from the code range.  If a code range exists
   // and the freed memory is in it, the code range manages the freed memory.
-  MUST_USE_RESULT static void* AllocateRawMemory(const size_t requested,
-                                                 size_t* allocated,
-                                                 Executability executable);
-  static void FreeRawMemory(void* buf,
-                            size_t length,
-                            Executability executable);
-  static void PerformAllocationCallback(ObjectSpace space,
-                                        AllocationAction action,
-                                        size_t size);
-
-  static void AddMemoryAllocationCallback(MemoryAllocationCallback callback,
-                                          ObjectSpace space,
-                                          AllocationAction action);
-  static void RemoveMemoryAllocationCallback(
-      MemoryAllocationCallback callback);
-  static bool MemoryAllocationCallbackRegistered(
-      MemoryAllocationCallback callback);
+  MUST_USE_RESULT void* AllocateRawMemory(const size_t requested,
+                                          size_t* allocated,
+                                          Executability executable);
+  void FreeRawMemory(void* buf,
+                     size_t length,
+                     Executability executable);
+  void PerformAllocationCallback(ObjectSpace space,
+                                 AllocationAction action,
+                                 size_t size);
+
+  void AddMemoryAllocationCallback(MemoryAllocationCallback callback,
+                                   ObjectSpace space,
+                                   AllocationAction action);
+  void RemoveMemoryAllocationCallback(MemoryAllocationCallback callback);
+  bool MemoryAllocationCallbackRegistered(MemoryAllocationCallback callback);
 
   // Returns the maximum available bytes of heaps.
-  static intptr_t Available() {
-    return capacity_ < size_ ? 0 : capacity_ - size_;
-  }
+  intptr_t Available() { return capacity_ < size_ ? 0 : capacity_ - size_; }
 
   // Returns allocated spaces in bytes.
-  static intptr_t Size() { return size_; }
+  intptr_t Size() { return size_; }
 
   // Returns the maximum available executable bytes of heaps.
-  static intptr_t AvailableExecutable() {
+  intptr_t AvailableExecutable() {
     if (capacity_executable_ < size_executable_) return 0;
     return capacity_executable_ - size_executable_;
   }
 
   // Returns allocated executable spaces in bytes.
-  static intptr_t SizeExecutable() { return size_executable_; }
+  intptr_t SizeExecutable() { return size_executable_; }
 
   // Returns maximum available bytes that the old space can have.
-  static intptr_t MaxAvailable() {
+  intptr_t MaxAvailable() {
     return (Available() / Page::kPageSize) * Page::kObjectAreaSize;
   }
 
-  // Sanity check on a pointer.
-  static bool SafeIsInAPageChunk(Address addr);
-
   // Links two pages.
-  static inline void SetNextPage(Page* prev, Page* next);
+  inline void SetNextPage(Page* prev, Page* next);
 
   // Returns the next page of a given page.
-  static inline Page* GetNextPage(Page* p);
+  inline Page* GetNextPage(Page* p);
 
   // Checks whether a page belongs to a space.
-  static inline bool IsPageInSpace(Page* p, PagedSpace* space);
+  inline bool IsPageInSpace(Page* p, PagedSpace* space);
 
   // Returns the space that owns the given page.
-  static inline PagedSpace* PageOwner(Page* page);
+  inline PagedSpace* PageOwner(Page* page);
 
   // Finds the first/last page in the same chunk as a given page.
-  static Page* FindFirstPageInSameChunk(Page* p);
-  static Page* FindLastPageInSameChunk(Page* p);
+  Page* FindFirstPageInSameChunk(Page* p);
+  Page* FindLastPageInSameChunk(Page* p);
 
   // Relinks list of pages owned by space to make it chunk-ordered.
   // Returns new first and last pages of space.
   // Also returns last page in relinked list which has WasInUsedBeforeMC
   // flag set.
-  static void RelinkPageListInChunkOrder(PagedSpace* space,
-                                         Page** first_page,
-                                         Page** last_page,
-                                         Page** last_page_in_use);
+  void RelinkPageListInChunkOrder(PagedSpace* space,
+                                  Page** first_page,
+                                  Page** last_page,
+                                  Page** last_page_in_use);
 
 #ifdef ENABLE_HEAP_PROTECTION
   // Protect/unprotect a block of memory by marking it read-only/writable.
-  static inline void Protect(Address start, size_t size);
-  static inline void Unprotect(Address start, size_t size,
-                               Executability executable);
+  inline void Protect(Address start, size_t size);
+  inline void Unprotect(Address start, size_t size,
+                        Executability executable);
 
   // Protect/unprotect a chunk given a page in the chunk.
-  static inline void ProtectChunkFromPage(Page* page);
-  static inline void UnprotectChunkFromPage(Page* page);
+  inline void ProtectChunkFromPage(Page* page);
+  inline void UnprotectChunkFromPage(Page* page);
 #endif
 
 #ifdef DEBUG
   // Reports statistic info of the space.
-  static void ReportStatistics();
+  void ReportStatistics();
 #endif
 
-  static void AddToAllocatedChunks(Address addr, intptr_t size);
-  static void RemoveFromAllocatedChunks(Address addr, intptr_t size);
-  // Note: This only checks the regular chunks, not the odd-sized initial
-  // chunk.
-  static bool InAllocatedChunks(Address addr);
-
   // Due to encoding limitation, we can only have 8K chunks.
   static const int kMaxNofChunks = 1 << kPageSizeBits;
   // If a chunk has at least 16 pages, the maximum heap size is about
@@ -678,29 +675,21 @@ class MemoryAllocator : public AllStatic {
 #endif
 
  private:
+  MemoryAllocator();
+
   static const int kChunkSize = kPagesPerChunk * Page::kPageSize;
   static const int kChunkSizeLog2 = kPagesPerChunkLog2 + kPageSizeBits;
-  static const int kChunkTableTopLevelEntries =
-      1 << (sizeof(intptr_t) * kBitsPerByte - kChunkSizeLog2 -
-          (kChunkTableLevels - 1) * kChunkTableBitsPerLevel);
-
-  // The chunks are not chunk-size aligned so for a given chunk-sized area of
-  // memory there can be two chunks that cover it.
-  static const int kChunkTableFineGrainedWordsPerEntry = 2;
-  static const uintptr_t kUnusedChunkTableEntry = 0;
 
   // Maximum space size in bytes.
-  static intptr_t capacity_;
+  intptr_t capacity_;
   // Maximum subset of capacity_ that can be executable
-  static intptr_t capacity_executable_;
-
-  // Top level table to track whether memory is part of a chunk or not.
-  static uintptr_t chunk_table_[kChunkTableTopLevelEntries];
+  intptr_t capacity_executable_;
 
   // Allocated space size in bytes.
-  static intptr_t size_;
+  intptr_t size_;
+
   // Allocated executable space size in bytes.
-  static intptr_t size_executable_;
+  intptr_t size_executable_;
 
   struct MemoryAllocationCallbackRegistration {
     MemoryAllocationCallbackRegistration(MemoryAllocationCallback callback,
@@ -713,11 +702,11 @@ class MemoryAllocator : public AllStatic {
     AllocationAction action;
   };
   // A list of callbacks that are triggered when memory is allocated or freed.
-  static List<MemoryAllocationCallbackRegistration>
+  List<MemoryAllocationCallbackRegistration>
       memory_allocation_callbacks_;
 
   // The initial chunk of virtual memory.
-  static VirtualMemory* initial_chunk_;
+  VirtualMemory* initial_chunk_;
 
   // Allocated chunk info: chunk start address, chunk size, and owning space.
   class ChunkInfo BASE_EMBEDDED {
@@ -725,7 +714,8 @@ class MemoryAllocator : public AllStatic {
     ChunkInfo() : address_(NULL),
                   size_(0),
                   owner_(NULL),
-                  executable_(NOT_EXECUTABLE) {}
+                  executable_(NOT_EXECUTABLE),
+                  owner_identity_(FIRST_SPACE) {}
     inline void init(Address a, size_t s, PagedSpace* o);
     Address address() { return address_; }
     size_t size() { return size_; }
@@ -733,74 +723,60 @@ class MemoryAllocator : public AllStatic {
     // We save executability of the owner to allow using it
     // when collecting stats after the owner has been destroyed.
     Executability executable() const { return executable_; }
+    AllocationSpace owner_identity() const { return owner_identity_; }
 
    private:
     Address address_;
     size_t size_;
     PagedSpace* owner_;
     Executability executable_;
+    AllocationSpace owner_identity_;
   };
 
   // Chunks_, free_chunk_ids_ and top_ act as a stack of free chunk ids.
-  static List<ChunkInfo> chunks_;
-  static List<int> free_chunk_ids_;
-  static int max_nof_chunks_;
-  static int top_;
+  List<ChunkInfo> chunks_;
+  List<int> free_chunk_ids_;
+  int max_nof_chunks_;
+  int top_;
 
   // Push/pop a free chunk id onto/from the stack.
-  static void Push(int free_chunk_id);
-  static int Pop();
-  static bool OutOfChunkIds() { return top_ == 0; }
+  void Push(int free_chunk_id);
+  int Pop();
+  bool OutOfChunkIds() { return top_ == 0; }
 
   // Frees a chunk.
-  static void DeleteChunk(int chunk_id);
-
-  // Helpers to maintain and query the chunk tables.
-  static void AddChunkUsingAddress(
-      uintptr_t chunk_start,        // Where the chunk starts.
-      uintptr_t chunk_index_base);  // Used to place the chunk in the tables.
-  static void RemoveChunkFoundUsingAddress(
-      uintptr_t chunk_start,        // Where the chunk starts.
-      uintptr_t chunk_index_base);  // Used to locate the entry in the tables.
-  // Controls whether the lookup creates intermediate levels of tables as
-  // needed.
-  enum CreateTables { kDontCreateTables, kCreateTablesAsNeeded };
-  static uintptr_t* AllocatedChunksFinder(uintptr_t* table,
-                                          uintptr_t address,
-                                          int bit_position,
-                                          CreateTables create_as_needed);
-  static void FreeChunkTables(uintptr_t* array, int length, int level);
-  static int FineGrainedIndexForAddress(uintptr_t address) {
-    int index = ((address >> kChunkSizeLog2) &
-        ((1 << kChunkTableBitsPerLevel) - 1));
-    return index * kChunkTableFineGrainedWordsPerEntry;
-  }
-
+  void DeleteChunk(int chunk_id);
 
   // Basic check whether a chunk id is in the valid range.
-  static inline bool IsValidChunkId(int chunk_id);
+  inline bool IsValidChunkId(int chunk_id);
 
   // Checks whether a chunk id identifies an allocated chunk.
-  static inline bool IsValidChunk(int chunk_id);
+  inline bool IsValidChunk(int chunk_id);
 
   // Returns the chunk id that a page belongs to.
-  static inline int GetChunkId(Page* p);
+  inline int GetChunkId(Page* p);
 
   // True if the address lies in the initial chunk.
-  static inline bool InInitialChunk(Address address);
+  inline bool InInitialChunk(Address address);
 
   // Initializes pages in a chunk. Returns the first page address.
   // This function and GetChunkId() are provided for the mark-compact
   // collector to rebuild page headers in the from space, which is
   // used as a marking stack and its page headers are destroyed.
-  static Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
-                                      PagedSpace* owner);
+  Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
+                               PagedSpace* owner);
 
-  static Page* RelinkPagesInChunk(int chunk_id,
-                                  Address chunk_start,
-                                  size_t chunk_size,
-                                  Page* prev,
-                                  Page** last_page_in_use);
+  Page* RelinkPagesInChunk(int chunk_id,
+                           Address chunk_start,
+                           size_t chunk_size,
+                           Page* prev,
+                           Page** last_page_in_use);
+
+  friend class Isolate;
+
+  Isolate* isolate_;
+
+  DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
 };
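
The memory_allocation_callbacks_ list and PerformAllocationCallback become instance state of the allocator. Each registration records the callback together with the object space and action it was registered for, and PerformAllocationCallback walks the list invoking the entries that match; the exact matching rule is not visible in this hunk, so the sketch below simply masks the enum values (AllocatorLike is a stand-in type with simplified signatures):

// Sketch of the allocation-callback registry: registrations are filtered
// by (space, action) when an allocation or free is reported.
#include <cstddef>
#include <cstdio>
#include <vector>

enum ObjectSpace { kObjectSpaceCodeSpace = 1, kObjectSpaceLoSpace = 2 };
enum AllocationAction { kAllocationActionAllocate = 1, kAllocationActionFree = 2 };

typedef void (*MemoryAllocationCallback)(ObjectSpace space,
                                         AllocationAction action,
                                         int size);

class AllocatorLike {
 public:
  void AddMemoryAllocationCallback(MemoryAllocationCallback callback,
                                   ObjectSpace space,
                                   AllocationAction action) {
    Registration r = { callback, space, action };
    registrations_.push_back(r);
  }
  void PerformAllocationCallback(ObjectSpace space,
                                 AllocationAction action,
                                 int size) {
    for (size_t i = 0; i < registrations_.size(); i++) {
      const Registration& r = registrations_[i];
      // Invoke only the callbacks registered for this space and action.
      if ((r.space & space) == space && (r.action & action) == action) {
        r.callback(space, action, size);
      }
    }
  }
 private:
  struct Registration {
    MemoryAllocationCallback callback;
    ObjectSpace space;
    AllocationAction action;
  };
  std::vector<Registration> registrations_;
};

static void OnLargeObjectEvent(ObjectSpace space, AllocationAction action, int size) {
  std::printf("lo-space %s: %d bytes\n",
              action == kAllocationActionAllocate ? "allocate" : "free", size);
}

int main() {
  AllocatorLike allocator;
  allocator.AddMemoryAllocationCallback(OnLargeObjectEvent,
                                        kObjectSpaceLoSpace,
                                        kAllocationActionAllocate);
  allocator.PerformAllocationCallback(kObjectSpaceLoSpace, kAllocationActionAllocate, 4096);
  allocator.PerformAllocationCallback(kObjectSpaceCodeSpace, kAllocationActionAllocate, 128);
  return 0;
}
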
 
 
@@ -1048,7 +1024,8 @@ class AllocationStats BASE_EMBEDDED {
 class PagedSpace : public Space {
  public:
   // Creates a space with a maximum capacity, and an id.
-  PagedSpace(intptr_t max_capacity,
+  PagedSpace(Heap* heap,
+             intptr_t max_capacity,
              AllocationSpace id,
              Executability executable);
 
@@ -1341,7 +1318,7 @@ class HistogramInfo: public NumberAndSizeInfo {
 class SemiSpace : public Space {
  public:
   // Constructor.
-  SemiSpace() :Space(NEW_SPACE, NOT_EXECUTABLE) {
+  explicit SemiSpace(Heap* heap) : Space(heap, NEW_SPACE, NOT_EXECUTABLE) {
     start_ = NULL;
     age_mark_ = NULL;
   }
@@ -1508,7 +1485,10 @@ class SemiSpaceIterator : public ObjectIterator {
 class NewSpace : public Space {
  public:
   // Constructor.
-  NewSpace() : Space(NEW_SPACE, NOT_EXECUTABLE) {}
+  explicit NewSpace(Heap* heap)
+    : Space(heap, NEW_SPACE, NOT_EXECUTABLE),
+      to_space_(heap),
+      from_space_(heap) {}
 
   // Sets up the new space using the given chunk.
   bool Setup(Address start, int size);
@@ -1909,10 +1889,11 @@ class OldSpace : public PagedSpace {
  public:
   // Creates an old space object with a given maximum capacity.
   // The constructor does not allocate pages from OS.
-  explicit OldSpace(intptr_t max_capacity,
-                    AllocationSpace id,
-                    Executability executable)
-      : PagedSpace(max_capacity, id, executable), free_list_(id) {
+  OldSpace(Heap* heap,
+           intptr_t max_capacity,
+           AllocationSpace id,
+           Executability executable)
+      : PagedSpace(heap, max_capacity, id, executable), free_list_(id) {
     page_extra_ = 0;
   }
 
@@ -1981,11 +1962,12 @@ class OldSpace : public PagedSpace {
 
 class FixedSpace : public PagedSpace {
  public:
-  FixedSpace(intptr_t max_capacity,
+  FixedSpace(Heap* heap,
+             intptr_t max_capacity,
              AllocationSpace id,
              int object_size_in_bytes,
              const char* name)
-      : PagedSpace(max_capacity, id, NOT_EXECUTABLE),
+      : PagedSpace(heap, max_capacity, id, NOT_EXECUTABLE),
         object_size_in_bytes_(object_size_in_bytes),
         name_(name),
         free_list_(id, object_size_in_bytes) {
@@ -2059,8 +2041,11 @@ class FixedSpace : public PagedSpace {
 class MapSpace : public FixedSpace {
  public:
   // Creates a map space object with a maximum capacity.
-  MapSpace(intptr_t max_capacity, int max_map_space_pages, AllocationSpace id)
-      : FixedSpace(max_capacity, id, Map::kSize, "map"),
+  MapSpace(Heap* heap,
+           intptr_t max_capacity,
+           int max_map_space_pages,
+           AllocationSpace id)
+      : FixedSpace(heap, max_capacity, id, Map::kSize, "map"),
         max_map_space_pages_(max_map_space_pages) {
     ASSERT(max_map_space_pages < kMaxMapPageIndex);
   }
@@ -2170,8 +2155,9 @@ class MapSpace : public FixedSpace {
 class CellSpace : public FixedSpace {
  public:
   // Creates a property cell space object with a maximum capacity.
-  CellSpace(intptr_t max_capacity, AllocationSpace id)
-      : FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell") {}
+  CellSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
+      : FixedSpace(heap, max_capacity, id, JSGlobalPropertyCell::kSize, "cell")
+  {}
 
  protected:
 #ifdef DEBUG
@@ -2246,7 +2232,7 @@ class LargeObjectChunk {
 
 class LargeObjectSpace : public Space {
  public:
-  explicit LargeObjectSpace(AllocationSpace id);
+  LargeObjectSpace(Heap* heap, AllocationSpace id);
   virtual ~LargeObjectSpace() {}
 
   // Initializes internal data structures.
@@ -2263,9 +2249,7 @@ class LargeObjectSpace : public Space {
   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int size_in_bytes);
 
   // Available bytes for objects in this space.
-  intptr_t Available() {
-    return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available());
-  }
+  inline intptr_t Available();
 
   virtual intptr_t Size() {
     return size_;
@@ -2357,6 +2341,22 @@ class LargeObjectIterator: public ObjectIterator {
 };
 
 
+#ifdef DEBUG
+struct CommentStatistic {
+  const char* comment;
+  int size;
+  int count;
+  void Clear() {
+    comment = NULL;
+    size = 0;
+    count = 0;
+  }
+  // Must be small, since an iteration is used for lookup.
+  static const int kMaxComments = 64;
+};
+#endif
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_SPACES_H_
index 56874432f909b21d57fe8f19417fbd5e3339fcb1..3ae68b5d4016f53c9b24f98373512dc2e15dc9ed 100644 (file)
@@ -33,8 +33,9 @@ namespace internal {
 
 // Storage for constants used by string-search.
 
-int StringSearchBase::kBadCharShiftTable[kUC16AlphabetSize];
-int StringSearchBase::kGoodSuffixShiftTable[kBMMaxShift + 1];
-int StringSearchBase::kSuffixTable[kBMMaxShift + 1];
+// Now in Isolate:
+// bad_char_shift_table()
+// good_suffix_shift_table()
+// suffix_table()
 
 }}  // namespace v8::internal
index 5de3c0951e5d37a1f06d399c4d7919efa5d9019c..1223db0f98347c72a274973d87f0cdd6d8fab755 100644 (file)
@@ -44,7 +44,7 @@ class StringSearchBase {
   // limit, we can fix the size of tables. For a needle longer than this limit,
   // search will not be optimal, since we only build tables for a suffix
   // of the string, but it is a safe approximation.
-  static const int kBMMaxShift = 250;
+  static const int kBMMaxShift = Isolate::kBMMaxShift;
 
   // Reduce alphabet to this size.
   // One of the tables used by Boyer-Moore and Boyer-Moore-Horspool has size
@@ -54,7 +54,7 @@ class StringSearchBase {
   // For needles using only characters in the same Unicode 256-code point page,
   // there is no search speed degradation.
   static const int kAsciiAlphabetSize = 128;
-  static const int kUC16AlphabetSize = 256;
+  static const int kUC16AlphabetSize = Isolate::kUC16AlphabetSize;
 
   // Bad-char shift table stored in the state. Its length is the alphabet size.
   // For patterns below this length, the skip length of Boyer-Moore is too short
@@ -69,25 +69,16 @@ class StringSearchBase {
     return String::IsAscii(string.start(), string.length());
   }
 
-  // The following tables are shared by all searches.
-  // TODO(lrn): Introduce a way for a pattern to keep its tables
-  // between searches (e.g., for an Atom RegExp).
-
-  // Store for the BoyerMoore(Horspool) bad char shift table.
-  static int kBadCharShiftTable[kUC16AlphabetSize];
-  // Store for the BoyerMoore good suffix shift table.
-  static int kGoodSuffixShiftTable[kBMMaxShift + 1];
-  // Table used temporarily while building the BoyerMoore good suffix
-  // shift table.
-  static int kSuffixTable[kBMMaxShift + 1];
+  friend class Isolate;
 };
 
 
 template <typename PatternChar, typename SubjectChar>
 class StringSearch : private StringSearchBase {
  public:
-  explicit StringSearch(Vector<const PatternChar> pattern)
-      : pattern_(pattern),
+  StringSearch(Isolate* isolate, Vector<const PatternChar> pattern)
+      : isolate_(isolate),
+        pattern_(pattern),
         start_(Max(0, pattern.length() - kBMMaxShift)) {
     if (sizeof(PatternChar) > sizeof(SubjectChar)) {
       if (!IsAsciiString(pattern_)) {
@@ -175,24 +166,33 @@ class StringSearch : private StringSearchBase {
     return bad_char_occurrence[equiv_class];
   }
 
+  // The following tables are shared by all searches.
+  // TODO(lrn): Introduce a way for a pattern to keep its tables
+  // between searches (e.g., for an Atom RegExp).
+
+  // Store for the BoyerMoore(Horspool) bad char shift table.
   // Return a table covering the last kBMMaxShift+1 positions of
   // pattern.
   int* bad_char_table() {
-    return kBadCharShiftTable;
+    return isolate_->bad_char_shift_table();
   }
 
+  // Store for the BoyerMoore good suffix shift table.
   int* good_suffix_shift_table() {
     // Return biased pointer that maps the range  [start_..pattern_.length()
     // to the kGoodSuffixShiftTable array.
-    return kGoodSuffixShiftTable - start_;
+    return isolate_->good_suffix_shift_table() - start_;
   }
 
+  // Table used temporarily while building the BoyerMoore good suffix
+  // shift table.
   int* suffix_table() {
     // Return biased pointer that maps the range  [start_..pattern_.length()
     // to the kSuffixTable array.
-    return kSuffixTable - start_;
+    return isolate_->suffix_table() - start_;
   }
 
+  Isolate* isolate_;
   // The pattern to search for.
   Vector<const PatternChar> pattern_;
   // Pointer to implementation of the search.
@@ -555,10 +555,11 @@ int StringSearch<PatternChar, SubjectChar>::InitialSearch(
 // object should be constructed once and the Search function then called
 // for each search.
 template <typename SubjectChar, typename PatternChar>
-static int SearchString(Vector<const SubjectChar> subject,
+static int SearchString(Isolate* isolate,
+                        Vector<const SubjectChar> subject,
                         Vector<const PatternChar> pattern,
                         int start_index) {
-  StringSearch<PatternChar, SubjectChar> search(pattern);
+  StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
   return search.Search(subject, start_index);
 }
 
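The string-search.h hunks above move the Boyer-Moore(-Horspool) scratch tables out of static class storage and into the Isolate, so every StringSearch and SearchString caller now supplies the isolate explicitly. As a rough illustration of that ownership change only, the stand-alone sketch below uses invented names (TableOwner, NaiveSearch) and a trivial search; it is not V8 code.

// Sketch only: per-owner scratch tables instead of static class members.
// TableOwner and NaiveSearch are hypothetical stand-ins, not V8 classes.
#include <string>

class TableOwner {  // plays the role of v8::internal::Isolate
 public:
  static const int kUC16AlphabetSize = 256;
  int* bad_char_shift_table() { return bad_char_shift_table_; }
 private:
  int bad_char_shift_table_[kUC16AlphabetSize];
};

class NaiveSearch {
 public:
  // The searcher borrows tables from its owner instead of sharing
  // process-wide statics, so two owners can search concurrently.
  NaiveSearch(TableOwner* owner, const std::string& pattern)
      : owner_(owner), pattern_(pattern) {}

  int Search(const std::string& subject, size_t start) const {
    // A real searcher would populate owner_->bad_char_shift_table() here;
    // std::string::find stands in for the Boyer-Moore machinery.
    owner_->bad_char_shift_table()[0] = 0;  // touch the shared scratch table
    size_t pos = subject.find(pattern_, start);
    return pos == std::string::npos ? -1 : static_cast<int>(pos);
  }

 private:
  TableOwner* owner_;
  std::string pattern_;
};
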
index 7abd1bbe858f534355e2c8af80f662c1386af78f..aea142042bca8a3af557a34a422deae6002d26f4 100644 (file)
@@ -34,9 +34,6 @@ namespace v8 {
 namespace internal {
 
 static const int kMentionedObjectCacheMaxSize = 256;
-static List<HeapObject*, PreallocatedStorage>* debug_object_cache = NULL;
-static Object* current_security_token = NULL;
-
 
 char* HeapStringAllocator::allocate(unsigned bytes) {
   space_ = NewArray<char>(bytes);
@@ -195,6 +192,8 @@ void StringStream::PrintObject(Object* o) {
     return;
   }
   if (o->IsHeapObject()) {
+    DebugObjectCache* debug_object_cache = Isolate::Current()->
+        string_stream_debug_object_cache();
     for (int i = 0; i < debug_object_cache->length(); i++) {
       if ((*debug_object_cache)[i] == o) {
         Add("#%d#", i);
@@ -260,7 +259,7 @@ SmartPointer<const char> StringStream::ToCString() const {
 
 
 void StringStream::Log() {
-  LOG(StringEvent("StackDump", buffer_));
+  LOG(ISOLATE, StringEvent("StackDump", buffer_));
 }
 
 
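The Log() change above is one instance of a pattern repeated throughout this commit: logging and profiling macros (LOG, PROFILE) gain a first argument naming the isolate whose logger should record the event. The macros themselves are defined elsewhere in the tree; the sketch below only shows the general shape of such an isolate-scoped macro under assumed names (MyIsolate, MyLogger, MY_LOG) and should not be read as V8's actual definition.

// Illustrative only: an event-logging macro whose first argument selects
// the logger instance. MyIsolate, MyLogger and MY_LOG are assumptions made
// for this sketch; they are not the macros defined in V8's log.h.
#include <cstdio>

class MyLogger {
 public:
  void StringEvent(const char* tag, const char* text) {
    std::printf("[%s] %s\n", tag, text);
  }
};

class MyIsolate {
 public:
  MyLogger* logger() { return &logger_; }
 private:
  MyLogger logger_;
};

// The macro forwards the call to the given isolate's logger, so events from
// different isolates no longer funnel through one global logger.
#define MY_LOG(isolate, Call) ((isolate)->logger()->Call)

void DumpStack(MyIsolate* isolate, const char* buffer) {
  MY_LOG(isolate, StringEvent("StackDump", buffer));
}
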
@@ -281,22 +280,25 @@ void StringStream::OutputToFile(FILE* out) {
 
 
 Handle<String> StringStream::ToString() {
-  return Factory::NewStringFromUtf8(Vector<const char>(buffer_, length_));
+  return FACTORY->NewStringFromUtf8(Vector<const char>(buffer_, length_));
 }
 
 
 void StringStream::ClearMentionedObjectCache() {
-  current_security_token = NULL;
-  if (debug_object_cache == NULL) {
-    debug_object_cache = new List<HeapObject*, PreallocatedStorage>(0);
+  Isolate* isolate = Isolate::Current();
+  isolate->set_string_stream_current_security_token(NULL);
+  if (isolate->string_stream_debug_object_cache() == NULL) {
+    isolate->set_string_stream_debug_object_cache(
+        new List<HeapObject*, PreallocatedStorage>(0));
   }
-  debug_object_cache->Clear();
+  isolate->string_stream_debug_object_cache()->Clear();
 }
 
 
 #ifdef DEBUG
 bool StringStream::IsMentionedObjectCacheClear() {
-  return (debug_object_cache->length() == 0);
+  return (
+      Isolate::Current()->string_stream_debug_object_cache()->length() == 0);
 }
 #endif
 
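ClearMentionedObjectCache now lazily allocates the debug-object cache on the current isolate instead of relying on the file-static list that the first hunk removed. A minimal sketch of that lazy-initialization pattern follows, with a std::vector standing in for List<HeapObject*, PreallocatedStorage> and invented accessor names.

// Sketch of lazily creating per-isolate mutable state on first use.
// IsolateState and its accessors are illustrative, not V8's API.
#include <vector>

struct HeapObjectStub {};  // stand-in for v8::internal::HeapObject

class IsolateState {
 public:
  std::vector<HeapObjectStub*>* debug_object_cache() { return cache_; }
  void set_debug_object_cache(std::vector<HeapObjectStub*>* cache) {
    cache_ = cache;
  }
 private:
  std::vector<HeapObjectStub*>* cache_ = nullptr;
};

void ClearMentionedObjectCache(IsolateState* isolate) {
  // Allocate the cache the first time any stream needs it, then reuse it.
  if (isolate->debug_object_cache() == nullptr) {
    isolate->set_debug_object_cache(new std::vector<HeapObjectStub*>());
  }
  isolate->debug_object_cache()->clear();
}
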
@@ -338,7 +340,7 @@ void StringStream::PrintName(Object* name) {
 
 void StringStream::PrintUsingMap(JSObject* js_object) {
   Map* map = js_object->map();
-  if (!Heap::Contains(map) ||
+  if (!HEAP->Contains(map) ||
       !map->IsHeapObject() ||
       !map->IsMap()) {
     Add("<Invalid map>\n");
@@ -375,9 +377,10 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
 
 
 void StringStream::PrintFixedArray(FixedArray* array, unsigned int limit) {
+  Heap* heap = HEAP;
   for (unsigned int i = 0; i < 10 && i < limit; i++) {
     Object* element = array->get(i);
-    if (element != Heap::the_hole_value()) {
+    if (element != heap->the_hole_value()) {
       for (int len = 1; len < 18; len++)
         Put(' ');
       Add("%d: %o\n", i, array->get(i));
@@ -412,6 +415,8 @@ void StringStream::PrintByteArray(ByteArray* byte_array) {
 
 
 void StringStream::PrintMentionedObjectCache() {
+  DebugObjectCache* debug_object_cache =
+      Isolate::Current()->string_stream_debug_object_cache();
   Add("==== Key         ============================================\n\n");
   for (int i = 0; i < debug_object_cache->length(); i++) {
     HeapObject* printee = (*debug_object_cache)[i];
@@ -444,12 +449,14 @@ void StringStream::PrintMentionedObjectCache() {
 
 
 void StringStream::PrintSecurityTokenIfChanged(Object* f) {
-  if (!f->IsHeapObject() || !Heap::Contains(HeapObject::cast(f))) {
+  Isolate* isolate = Isolate::Current();
+  Heap* heap = isolate->heap();
+  if (!f->IsHeapObject() || !heap->Contains(HeapObject::cast(f))) {
     return;
   }
   Map* map = HeapObject::cast(f)->map();
   if (!map->IsHeapObject() ||
-      !Heap::Contains(map) ||
+      !heap->Contains(map) ||
       !map->IsMap() ||
       !f->IsJSFunction()) {
     return;
@@ -458,17 +465,17 @@ void StringStream::PrintSecurityTokenIfChanged(Object* f) {
   JSFunction* fun = JSFunction::cast(f);
   Object* perhaps_context = fun->unchecked_context();
   if (perhaps_context->IsHeapObject() &&
-      Heap::Contains(HeapObject::cast(perhaps_context)) &&
+      heap->Contains(HeapObject::cast(perhaps_context)) &&
       perhaps_context->IsContext()) {
     Context* context = fun->context();
-    if (!Heap::Contains(context)) {
+    if (!heap->Contains(context)) {
       Add("(Function context is outside heap)\n");
       return;
     }
     Object* token = context->global_context()->security_token();
-    if (token != current_security_token) {
+    if (token != isolate->string_stream_current_security_token()) {
       Add("Security context: %o\n", token);
-      current_security_token = token;
+      isolate->set_string_stream_current_security_token(token);
     }
   } else {
     Add("(Function context is corrupt)\n");
@@ -478,8 +485,8 @@ void StringStream::PrintSecurityTokenIfChanged(Object* f) {
 
 void StringStream::PrintFunction(Object* f, Object* receiver, Code** code) {
   if (f->IsHeapObject() &&
-      Heap::Contains(HeapObject::cast(f)) &&
-      Heap::Contains(HeapObject::cast(f)->map()) &&
+      HEAP->Contains(HeapObject::cast(f)) &&
+      HEAP->Contains(HeapObject::cast(f)->map()) &&
       HeapObject::cast(f)->map()->IsMap()) {
     if (f->IsJSFunction()) {
       JSFunction* fun = JSFunction::cast(f);
@@ -506,11 +513,11 @@ void StringStream::PrintFunction(Object* f, Object* receiver, Code** code) {
       Add("/* warning: 'function' was not a heap object */ ");
       return;
     }
-    if (!Heap::Contains(HeapObject::cast(f))) {
+    if (!HEAP->Contains(HeapObject::cast(f))) {
       Add("/* warning: 'function' was not on the heap */ ");
       return;
     }
-    if (!Heap::Contains(HeapObject::cast(f)->map())) {
+    if (!HEAP->Contains(HeapObject::cast(f)->map())) {
       Add("/* warning: function's map was not on the heap */ ");
       return;
     }
@@ -526,10 +533,11 @@ void StringStream::PrintFunction(Object* f, Object* receiver, Code** code) {
 void StringStream::PrintPrototype(JSFunction* fun, Object* receiver) {
   Object* name = fun->shared()->name();
   bool print_name = false;
-  for (Object* p = receiver; p != Heap::null_value(); p = p->GetPrototype()) {
+  Heap* heap = HEAP;
+  for (Object* p = receiver; p != heap->null_value(); p = p->GetPrototype()) {
     if (p->IsJSObject()) {
       Object* key = JSObject::cast(p)->SlowReverseLookup(fun);
-      if (key != Heap::undefined_value()) {
+      if (key != heap->undefined_value()) {
         if (!name->IsString() ||
             !key->IsString() ||
             !String::cast(name)->Equals(String::cast(key))) {
index 789dafd89ae1eacded7da8a79e7e8d18ffb2bdda..0e78af46d752b27b888c3ebbab0aaeab9ec3c750 100644 (file)
@@ -41,8 +41,12 @@ namespace internal {
 // StubCache implementation.
 
 
-StubCache::Entry StubCache::primary_[StubCache::kPrimaryTableSize];
-StubCache::Entry StubCache::secondary_[StubCache::kSecondaryTableSize];
+StubCache::StubCache(Isolate* isolate) : isolate_(isolate) {
+  ASSERT(isolate == Isolate::Current());
+  memset(primary_, 0, sizeof(primary_[0]) * StubCache::kPrimaryTableSize);
+  memset(secondary_, 0, sizeof(secondary_[0]) * StubCache::kSecondaryTableSize);
+}
+
 
 void StubCache::Initialize(bool create_heap_objects) {
   ASSERT(IsPowerOf2(kPrimaryTableSize));
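
With this hunk the stub cache stops being a pair of static tables and becomes an object constructed per isolate, with both tables zero-filled in the constructor. The simplified sketch below mirrors that construction pattern; the sizes and type names are placeholders, not the real values from stub-cache.h.

// Simplified sketch: a per-owner two-way cache whose tables are instance
// members zero-initialized in the constructor, rather than file-level statics.
#include <cstring>

class OwnerIsolate;  // hypothetical stand-in for v8::internal::Isolate

class MiniStubCache {
 public:
  struct Entry {
    const char* key;
    void* value;
  };

  static const int kPrimaryTableSize = 2048;    // placeholder size
  static const int kSecondaryTableSize = 512;   // placeholder size

  explicit MiniStubCache(OwnerIsolate* isolate) : isolate_(isolate) {
    // Same idea as the memsets above: start with empty, all-zero tables.
    std::memset(primary_, 0, sizeof(primary_));
    std::memset(secondary_, 0, sizeof(secondary_));
  }

 private:
  OwnerIsolate* isolate_;
  Entry primary_[kPrimaryTableSize];
  Entry secondary_[kSecondaryTableSize];
};
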
@@ -60,7 +64,7 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
 
   // Validate that the name does not move on scavenge, and that we
   // can use identity checks instead of string equality checks.
-  ASSERT(!Heap::InNewSpace(name));
+  ASSERT(!isolate_->heap()->InNewSpace(name));
   ASSERT(name->IsSymbol());
 
   // The state bits are not important to the hash function because
@@ -80,7 +84,7 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
 
   // If the primary entry has useful data in it, we retire it to the
   // secondary cache before overwriting it.
-  if (hit != Builtins::builtin(Builtins::Illegal)) {
+  if (hit != isolate_->builtins()->builtin(Builtins::Illegal)) {
     Code::Flags primary_flags = Code::RemoveTypeFromFlags(hit->flags());
     int secondary_offset =
         SecondaryOffset(primary->key, primary_flags, primary_offset);
@@ -104,10 +108,10 @@ MaybeObject* StubCache::ComputeLoadNonexistent(String* name,
   // there are global objects involved, we need to check global
   // property cells in the stub and therefore the stub will be
   // specific to the name.
-  String* cache_name = Heap::empty_string();
+  String* cache_name = isolate_->heap()->empty_string();
   if (receiver->IsGlobalObject()) cache_name = name;
   JSObject* last = receiver;
-  while (last->GetPrototype() != Heap::null_value()) {
+  while (last->GetPrototype() != isolate_->heap()->null_value()) {
     last = JSObject::cast(last->GetPrototype());
     if (last->IsGlobalObject()) cache_name = name;
   }
@@ -122,7 +126,8 @@ MaybeObject* StubCache::ComputeLoadNonexistent(String* name,
           compiler.CompileLoadNonexistent(cache_name, receiver, last);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, cache_name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -147,7 +152,8 @@ MaybeObject* StubCache::ComputeLoadField(String* name,
           compiler.CompileLoadField(receiver, holder, field_index, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -173,7 +179,8 @@ MaybeObject* StubCache::ComputeLoadCallback(String* name,
           compiler.CompileLoadCallback(name, receiver, holder, callback);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -199,7 +206,8 @@ MaybeObject* StubCache::ComputeLoadConstant(String* name,
           compiler.CompileLoadConstant(receiver, holder, value, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -223,7 +231,8 @@ MaybeObject* StubCache::ComputeLoadInterceptor(String* name,
           compiler.CompileLoadInterceptor(receiver, holder, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -236,7 +245,7 @@ MaybeObject* StubCache::ComputeLoadInterceptor(String* name,
 
 
 MaybeObject* StubCache::ComputeLoadNormal() {
-  return Builtins::builtin(Builtins::LoadIC_Normal);
+  return isolate_->builtins()->builtin(Builtins::LoadIC_Normal);
 }
 
 
@@ -257,7 +266,8 @@ MaybeObject* StubCache::ComputeLoadGlobal(String* name,
                                                            is_dont_delete);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -282,7 +292,8 @@ MaybeObject* StubCache::ComputeKeyedLoadField(String* name,
           compiler.CompileLoadField(name, receiver, holder, field_index);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -308,7 +319,8 @@ MaybeObject* StubCache::ComputeKeyedLoadConstant(String* name,
           compiler.CompileLoadConstant(name, receiver, holder, value);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -333,7 +345,8 @@ MaybeObject* StubCache::ComputeKeyedLoadInterceptor(String* name,
           compiler.CompileLoadInterceptor(receiver, holder, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -359,7 +372,8 @@ MaybeObject* StubCache::ComputeKeyedLoadCallback(String* name,
           compiler.CompileLoadCallback(name, receiver, holder, callback);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -383,7 +397,8 @@ MaybeObject* StubCache::ComputeKeyedLoadArrayLength(String* name,
     { MaybeObject* maybe_code = compiler.CompileLoadArrayLength(name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -406,7 +421,8 @@ MaybeObject* StubCache::ComputeKeyedLoadStringLength(String* name,
     { MaybeObject* maybe_code = compiler.CompileLoadStringLength(name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result = map->UpdateCodeCache(name, Code::cast(code));
@@ -428,7 +444,8 @@ MaybeObject* StubCache::ComputeKeyedLoadFunctionPrototype(
     { MaybeObject* maybe_code = compiler.CompileLoadFunctionPrototype(name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -449,14 +466,15 @@ MaybeObject* StubCache::ComputeKeyedLoadSpecialized(JSObject* receiver) {
   // keyed loads that are not array elements go through a generic builtin stub.
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, NORMAL);
-  String* name = Heap::KeyedLoadSpecialized_symbol();
+  String* name = isolate_->heap()->KeyedLoadSpecialized_symbol();
   Object* code = receiver->map()->FindInCodeCache(name, flags);
   if (code->IsUndefined()) {
     KeyedLoadStubCompiler compiler;
     { MaybeObject* maybe_code = compiler.CompileLoadSpecialized(receiver);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), 0));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), 0));
     Object* result;
     { MaybeObject* maybe_result =
           receiver->UpdateMapCodeCache(name, Code::cast(code));
@@ -482,7 +500,8 @@ MaybeObject* StubCache::ComputeStoreField(String* name,
           compiler.CompileStoreField(receiver, field_index, transition, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -499,14 +518,15 @@ MaybeObject* StubCache::ComputeKeyedStoreSpecialized(
     StrictModeFlag strict_mode) {
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL, strict_mode);
-  String* name = Heap::KeyedStoreSpecialized_symbol();
+  String* name = isolate_->heap()->KeyedStoreSpecialized_symbol();
   Object* code = receiver->map()->FindInCodeCache(name, flags);
   if (code->IsUndefined()) {
     KeyedStoreStubCompiler compiler(strict_mode);
     { MaybeObject* maybe_code = compiler.CompileStoreSpecialized(receiver);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, Code::cast(code), 0));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, Code::cast(code), 0));
     Object* result;
     { MaybeObject* maybe_result =
           receiver->UpdateMapCodeCache(name, Code::cast(code));
@@ -548,21 +568,21 @@ String* ExternalArrayTypeToStubName(ExternalArrayType array_type,
   if (is_store) {
     switch (array_type) {
       case kExternalByteArray:
-        return Heap::KeyedStoreExternalByteArray_symbol();
+        return HEAP->KeyedStoreExternalByteArray_symbol();
       case kExternalUnsignedByteArray:
-        return Heap::KeyedStoreExternalUnsignedByteArray_symbol();
+        return HEAP->KeyedStoreExternalUnsignedByteArray_symbol();
       case kExternalShortArray:
-        return Heap::KeyedStoreExternalShortArray_symbol();
+        return HEAP->KeyedStoreExternalShortArray_symbol();
       case kExternalUnsignedShortArray:
-        return Heap::KeyedStoreExternalUnsignedShortArray_symbol();
+        return HEAP->KeyedStoreExternalUnsignedShortArray_symbol();
       case kExternalIntArray:
-        return Heap::KeyedStoreExternalIntArray_symbol();
+        return HEAP->KeyedStoreExternalIntArray_symbol();
       case kExternalUnsignedIntArray:
-        return Heap::KeyedStoreExternalUnsignedIntArray_symbol();
+        return HEAP->KeyedStoreExternalUnsignedIntArray_symbol();
       case kExternalFloatArray:
-        return Heap::KeyedStoreExternalFloatArray_symbol();
+        return HEAP->KeyedStoreExternalFloatArray_symbol();
       case kExternalPixelArray:
-        return Heap::KeyedStoreExternalPixelArray_symbol();
+        return HEAP->KeyedStoreExternalPixelArray_symbol();
       default:
         UNREACHABLE();
         return NULL;
@@ -570,21 +590,21 @@ String* ExternalArrayTypeToStubName(ExternalArrayType array_type,
   } else {
     switch (array_type) {
       case kExternalByteArray:
-        return Heap::KeyedLoadExternalByteArray_symbol();
+        return HEAP->KeyedLoadExternalByteArray_symbol();
       case kExternalUnsignedByteArray:
-        return Heap::KeyedLoadExternalUnsignedByteArray_symbol();
+        return HEAP->KeyedLoadExternalUnsignedByteArray_symbol();
       case kExternalShortArray:
-        return Heap::KeyedLoadExternalShortArray_symbol();
+        return HEAP->KeyedLoadExternalShortArray_symbol();
       case kExternalUnsignedShortArray:
-        return Heap::KeyedLoadExternalUnsignedShortArray_symbol();
+        return HEAP->KeyedLoadExternalUnsignedShortArray_symbol();
       case kExternalIntArray:
-        return Heap::KeyedLoadExternalIntArray_symbol();
+        return HEAP->KeyedLoadExternalIntArray_symbol();
       case kExternalUnsignedIntArray:
-        return Heap::KeyedLoadExternalUnsignedIntArray_symbol();
+        return HEAP->KeyedLoadExternalUnsignedIntArray_symbol();
       case kExternalFloatArray:
-        return Heap::KeyedLoadExternalFloatArray_symbol();
+        return HEAP->KeyedLoadExternalFloatArray_symbol();
       case kExternalPixelArray:
-        return Heap::KeyedLoadExternalPixelArray_symbol();
+        return HEAP->KeyedLoadExternalPixelArray_symbol();
       default:
         UNREACHABLE();
         return NULL;
@@ -619,11 +639,11 @@ MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray(
     }
     Code::cast(code)->set_external_array_type(array_type);
     if (is_store) {
-      PROFILE(
+      PROFILE(isolate_,
           CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG,
                           Code::cast(code), 0));
     } else {
-      PROFILE(
+      PROFILE(isolate_,
           CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG,
                           Code::cast(code), 0));
     }
@@ -638,7 +658,7 @@ MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray(
 
 
 MaybeObject* StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) {
-  return Builtins::builtin((strict_mode == kStrictMode)
+  return isolate_->builtins()->builtin((strict_mode == kStrictMode)
                             ? Builtins::StoreIC_Normal_Strict
                             : Builtins::StoreIC_Normal);
 }
@@ -657,7 +677,8 @@ MaybeObject* StubCache::ComputeStoreGlobal(String* name,
           compiler.CompileStoreGlobal(receiver, cell, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -684,7 +705,8 @@ MaybeObject* StubCache::ComputeStoreCallback(
           compiler.CompileStoreCallback(receiver, callback, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -709,7 +731,8 @@ MaybeObject* StubCache::ComputeStoreInterceptor(
           compiler.CompileStoreInterceptor(receiver, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -736,8 +759,9 @@ MaybeObject* StubCache::ComputeKeyedStoreField(String* name,
           compiler.CompileStoreField(receiver, field_index, transition, name);
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
-    PROFILE(CodeCreateEvent(
-        Logger::KEYED_STORE_IC_TAG, Code::cast(code), name));
+    PROFILE(isolate_,
+            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
+                            Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, name, Code::cast(code)));
     Object* result;
     { MaybeObject* maybe_result =
@@ -796,7 +820,8 @@ MaybeObject* StubCache::ComputeCallConstant(int argc,
     }
     Code::cast(code)->set_check_type(check);
     ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
+    PROFILE(isolate_,
+            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
                             Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
     Object* result;
@@ -846,7 +871,8 @@ MaybeObject* StubCache::ComputeCallField(int argc,
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
     ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
+    PROFILE(isolate_,
+            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
                             Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
     Object* result;
@@ -891,7 +917,8 @@ MaybeObject* StubCache::ComputeCallInterceptor(int argc,
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
     ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
+    PROFILE(isolate_,
+            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
                             Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
     Object* result;
@@ -948,7 +975,8 @@ MaybeObject* StubCache::ComputeCallGlobal(int argc,
       if (!maybe_code->ToObject(&code)) return maybe_code;
     }
     ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
+    PROFILE(isolate_,
+            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
                             Code::cast(code), name));
     GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
     Object* result;
@@ -961,45 +989,48 @@ MaybeObject* StubCache::ComputeCallGlobal(int argc,
 }
 
 
-static Object* GetProbeValue(Code::Flags flags) {
+static Object* GetProbeValue(Isolate* isolate, Code::Flags flags) {
   // Use raw_unchecked... so we don't get assert failures during GC.
-  NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache();
-  int entry = dictionary->FindEntry(flags);
+  NumberDictionary* dictionary =
+      isolate->heap()->raw_unchecked_non_monomorphic_cache();
+  int entry = dictionary->FindEntry(isolate, flags);
   if (entry != -1) return dictionary->ValueAt(entry);
-  return Heap::raw_unchecked_undefined_value();
+  return isolate->heap()->raw_unchecked_undefined_value();
 }
 
 
-MUST_USE_RESULT static MaybeObject* ProbeCache(Code::Flags flags) {
-  Object* probe = GetProbeValue(flags);
-  if (probe != Heap::undefined_value()) return probe;
+MUST_USE_RESULT static MaybeObject* ProbeCache(Isolate* isolate,
+                                               Code::Flags flags) {
+  Heap* heap = isolate->heap();
+  Object* probe = GetProbeValue(isolate, flags);
+  if (probe != heap->undefined_value()) return probe;
   // Seed the cache with an undefined value to make sure that any
   // generated code object can always be inserted into the cache
   // without causing allocation failures.
   Object* result;
   { MaybeObject* maybe_result =
-        Heap::non_monomorphic_cache()->AtNumberPut(flags,
-                                                   Heap::undefined_value());
+        heap->non_monomorphic_cache()->AtNumberPut(flags,
+                                                   heap->undefined_value());
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result));
+  heap->public_set_non_monomorphic_cache(NumberDictionary::cast(result));
   return probe;
 }
 
 
-static MaybeObject* FillCache(MaybeObject* maybe_code) {
+static MaybeObject* FillCache(Isolate* isolate, MaybeObject* maybe_code) {
   Object* code;
   if (maybe_code->ToObject(&code)) {
     if (code->IsCode()) {
-      int entry =
-          Heap::non_monomorphic_cache()->FindEntry(
-              Code::cast(code)->flags());
+      Heap* heap = isolate->heap();
+      int entry = heap->non_monomorphic_cache()->FindEntry(
+          Code::cast(code)->flags());
       // The entry must be present; see the comment in ProbeCache.
       ASSERT(entry != -1);
-      ASSERT(Heap::non_monomorphic_cache()->ValueAt(entry) ==
-             Heap::undefined_value());
-      Heap::non_monomorphic_cache()->ValueAtPut(entry, code);
-      CHECK(GetProbeValue(Code::cast(code)->flags()) == code);
+      ASSERT(heap->non_monomorphic_cache()->ValueAt(entry) ==
+             heap->undefined_value());
+      heap->non_monomorphic_cache()->ValueAtPut(entry, code);
+      CHECK(GetProbeValue(isolate, Code::cast(code)->flags()) == code);
     }
   }
   return maybe_code;
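
GetProbeValue, ProbeCache and FillCache above now take the isolate explicitly, but keep their original two-phase protocol: probe the non-monomorphic cache, seed the key with undefined so that the later insertion cannot fail on allocation, compile the stub, then overwrite the seeded slot. The sketch below restates that control flow over an ordinary std::map with invented names; it deliberately omits MaybeObject failure propagation and GC concerns.

// Illustrative restatement of the probe-then-fill pattern with a std::map.
// CacheKey, CompiledStub and the kSeeded sentinel are assumptions for this
// sketch, not V8 types.
#include <map>

using CacheKey = int;
struct CompiledStub { int id; };

static CompiledStub* const kSeeded = nullptr;  // plays the role of undefined

CompiledStub* ProbeOrSeed(std::map<CacheKey, CompiledStub*>* cache,
                          CacheKey key) {
  auto it = cache->find(key);
  if (it != cache->end() && it->second != kSeeded) return it->second;
  // Reserve the slot up front so the later insertion cannot fail.
  (*cache)[key] = kSeeded;
  return kSeeded;
}

void FillCache(std::map<CacheKey, CompiledStub*>* cache,
               CacheKey key, CompiledStub* code) {
  // The slot must already exist; see ProbeOrSeed above.
  auto it = cache->find(key);
  if (it != cache->end() && it->second == kSeeded) it->second = code;
}
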
@@ -1015,8 +1046,8 @@ Code* StubCache::FindCallInitialize(int argc,
                                          Code::kNoExtraICState,
                                          NORMAL,
                                          argc);
-  Object* result = ProbeCache(flags)->ToObjectUnchecked();
-  ASSERT(!result->IsUndefined());
+  Object* result = ProbeCache(isolate_, flags)->ToObjectUnchecked();
+  ASSERT(result != isolate_->heap()->undefined_value());
   // This might be called during the marking phase of the collector
   // hence the unchecked cast.
   return reinterpret_cast<Code*>(result);
@@ -1033,12 +1064,12 @@ MaybeObject* StubCache::ComputeCallInitialize(int argc,
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallInitialize(flags));
+  return FillCache(isolate_, compiler.CompileCallInitialize(flags));
 }
 
 
@@ -1051,7 +1082,8 @@ Handle<Code> StubCache::ComputeCallInitialize(int argc, InLoopFlag in_loop) {
     // that it needs so we need to ensure it is generated already.
     ComputeCallInitialize(argc, NOT_IN_LOOP);
   }
-  CALL_HEAP_FUNCTION(ComputeCallInitialize(argc, in_loop, Code::CALL_IC), Code);
+  CALL_HEAP_FUNCTION(isolate_,
+                     ComputeCallInitialize(argc, in_loop, Code::CALL_IC), Code);
 }
 
 
@@ -1066,6 +1098,7 @@ Handle<Code> StubCache::ComputeKeyedCallInitialize(int argc,
     ComputeKeyedCallInitialize(argc, NOT_IN_LOOP);
   }
   CALL_HEAP_FUNCTION(
+      isolate_,
       ComputeCallInitialize(argc, in_loop, Code::KEYED_CALL_IC), Code);
 }
 
@@ -1080,12 +1113,12 @@ MaybeObject* StubCache::ComputeCallPreMonomorphic(int argc,
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallPreMonomorphic(flags));
+  return FillCache(isolate_, compiler.CompileCallPreMonomorphic(flags));
 }
 
 
@@ -1099,12 +1132,12 @@ MaybeObject* StubCache::ComputeCallNormal(int argc,
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallNormal(flags));
+  return FillCache(isolate_, compiler.CompileCallNormal(flags));
 }
 
 
@@ -1118,12 +1151,12 @@ MaybeObject* StubCache::ComputeCallMegamorphic(int argc,
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallMegamorphic(flags));
+  return FillCache(isolate_, compiler.CompileCallMegamorphic(flags));
 }
 
 
@@ -1138,12 +1171,12 @@ MaybeObject* StubCache::ComputeCallMiss(int argc, Code::Kind kind) {
                                          argc,
                                          OWN_MAP);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallMiss(flags));
+  return FillCache(isolate_, compiler.CompileCallMiss(flags));
 }
 
 
@@ -1156,12 +1189,12 @@ MaybeObject* StubCache::ComputeCallDebugBreak(int argc, Code::Kind kind) {
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallDebugBreak(flags));
+  return FillCache(isolate_, compiler.CompileCallDebugBreak(flags));
 }
 
 
@@ -1174,24 +1207,26 @@ MaybeObject* StubCache::ComputeCallDebugPrepareStepIn(int argc,
                                          NORMAL,
                                          argc);
   Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(flags);
+  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
     if (!maybe_probe->ToObject(&probe)) return maybe_probe;
   }
   if (!probe->IsUndefined()) return probe;
   StubCompiler compiler;
-  return FillCache(compiler.CompileCallDebugPrepareStepIn(flags));
+  return FillCache(isolate_, compiler.CompileCallDebugPrepareStepIn(flags));
 }
 #endif
 
 
 void StubCache::Clear() {
   for (int i = 0; i < kPrimaryTableSize; i++) {
-    primary_[i].key = Heap::empty_string();
-    primary_[i].value = Builtins::builtin(Builtins::Illegal);
+    primary_[i].key = isolate_->heap()->empty_string();
+    primary_[i].value = isolate_->builtins()->builtin(
+        Builtins::Illegal);
   }
   for (int j = 0; j < kSecondaryTableSize; j++) {
-    secondary_[j].key = Heap::empty_string();
-    secondary_[j].value = Builtins::builtin(Builtins::Illegal);
+    secondary_[j].key = isolate_->heap()->empty_string();
+    secondary_[j].value = isolate_->builtins()->builtin(
+        Builtins::Illegal);
   }
 }
 
@@ -1242,7 +1277,8 @@ void StubCache::CollectMatchingMaps(ZoneMapList* types,
 // StubCompiler implementation.
 
 
-MaybeObject* LoadCallbackProperty(Arguments args) {
+MaybeObject* LoadCallbackProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args[0]->IsJSObject());
   ASSERT(args[1]->IsJSObject());
   AccessorInfo* callback = AccessorInfo::cast(args[3]);
@@ -1250,21 +1286,22 @@ MaybeObject* LoadCallbackProperty(Arguments args) {
   v8::AccessorGetter fun = FUNCTION_CAST<v8::AccessorGetter>(getter_address);
   ASSERT(fun != NULL);
   v8::AccessorInfo info(&args[0]);
-  HandleScope scope;
+  HandleScope scope(isolate);
   v8::Handle<v8::Value> result;
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
-    ExternalCallbackScope call_scope(getter_address);
+    VMState state(isolate, EXTERNAL);
+    ExternalCallbackScope call_scope(isolate, getter_address);
     result = fun(v8::Utils::ToLocal(args.at<String>(4)), info);
   }
-  RETURN_IF_SCHEDULED_EXCEPTION();
-  if (result.IsEmpty()) return Heap::undefined_value();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
+  if (result.IsEmpty()) return HEAP->undefined_value();
   return *v8::Utils::OpenHandle(*result);
 }
 
 
-MaybeObject* StoreCallbackProperty(Arguments args) {
+MaybeObject* StoreCallbackProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   JSObject* recv = JSObject::cast(args[0]);
   AccessorInfo* callback = AccessorInfo::cast(args[1]);
   Address setter_address = v8::ToCData<Address>(callback->setter());
@@ -1272,17 +1309,17 @@ MaybeObject* StoreCallbackProperty(Arguments args) {
   ASSERT(fun != NULL);
   Handle<String> name = args.at<String>(2);
   Handle<Object> value = args.at<Object>(3);
-  HandleScope scope;
-  LOG(ApiNamedPropertyAccess("store", recv, *name));
-  CustomArguments custom_args(callback->data(), recv, recv);
+  HandleScope scope(isolate);
+  LOG(isolate, ApiNamedPropertyAccess("store", recv, *name));
+  CustomArguments custom_args(isolate, callback->data(), recv, recv);
   v8::AccessorInfo info(custom_args.end());
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
-    ExternalCallbackScope call_scope(setter_address);
+    VMState state(isolate, EXTERNAL);
+    ExternalCallbackScope call_scope(isolate, setter_address);
     fun(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), info);
   }
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return *value;
 }
 
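LoadCallbackProperty and StoreCallbackProperty now receive the isolate through the RUNTIME_CALLING_CONVENTION / RUNTIME_GET_ISOLATE macros introduced by this commit and thread it into HandleScope, VMState and the scheduled-exception check, rather than reading global state. The sketch below shows only the general idea of passing the owning context explicitly; every name in it (Ctx, ScopedState, RunCallback) is invented, and it does not reproduce the actual macro definitions.

// Hypothetical sketch: a runtime entry point that receives its owning
// context explicitly instead of consulting a process-wide global.
struct Ctx {
  bool has_scheduled_exception = false;
};

class ScopedState {  // plays the role of HandleScope / VMState
 public:
  explicit ScopedState(Ctx*) {
    // A real scope would register per-context bookkeeping here.
  }
};

int RunCallback(Ctx* isolate, int (*callback)(int), int arg) {
  ScopedState state(isolate);  // scoped helpers are bound to this context
  int result = callback(arg);  // "leaving JavaScript"
  // Early-out on a pending exception, like RETURN_IF_SCHEDULED_EXCEPTION(isolate).
  if (isolate->has_scheduled_exception) return -1;
  return result;
}
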
@@ -1297,7 +1334,8 @@ static const int kAccessorInfoOffsetInInterceptorArgs = 2;
  * Returns |Heap::no_interceptor_result_sentinel()| if interceptor doesn't
  * provide any value for the given name.
  */
-MaybeObject* LoadPropertyWithInterceptorOnly(Arguments args) {
+MaybeObject* LoadPropertyWithInterceptorOnly(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   Handle<String> name_handle = args.at<String>(0);
   Handle<InterceptorInfo> interceptor_info = args.at<InterceptorInfo>(1);
   ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2);
@@ -1314,20 +1352,20 @@ MaybeObject* LoadPropertyWithInterceptorOnly(Arguments args) {
     // Use the interceptor getter.
     v8::AccessorInfo info(args.arguments() -
                           kAccessorInfoOffsetInInterceptorArgs);
-    HandleScope scope;
+    HandleScope scope(isolate);
     v8::Handle<v8::Value> r;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       r = getter(v8::Utils::ToLocal(name_handle), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!r.IsEmpty()) {
       return *v8::Utils::OpenHandle(*r);
     }
   }
 
-  return Heap::no_interceptor_result_sentinel();
+  return isolate->heap()->no_interceptor_result_sentinel();
 }
 
 
@@ -1335,17 +1373,17 @@ static MaybeObject* ThrowReferenceError(String* name) {
   // If the load is non-contextual, just return the undefined result.
   // Note that both keyed and non-keyed loads may end up here, so we
   // can't use either LoadIC or KeyedLoadIC constructors.
-  IC ic(IC::NO_EXTRA_FRAME);
+  IC ic(IC::NO_EXTRA_FRAME, Isolate::Current());
   ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub());
-  if (!ic.SlowIsContextual()) return Heap::undefined_value();
+  if (!ic.SlowIsContextual()) return HEAP->undefined_value();
 
   // Throw a reference error.
   HandleScope scope;
   Handle<String> name_handle(name);
   Handle<Object> error =
-      Factory::NewReferenceError("not_defined",
+      FACTORY->NewReferenceError("not_defined",
                                   HandleVector(&name_handle, 1));
-  return Top::Throw(*error);
+  return Isolate::Current()->Throw(*error);
 }
 
 
@@ -1358,6 +1396,8 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
   Handle<JSObject> holder_handle = args->at<JSObject>(3);
   ASSERT(args->length() == 5);  // Last arg is data object.
 
+  Isolate* isolate = receiver_handle->GetIsolate();
+
   Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
   v8::NamedPropertyGetter getter =
       FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address);
@@ -1367,14 +1407,14 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
     // Use the interceptor getter.
     v8::AccessorInfo info(args->arguments() -
                           kAccessorInfoOffsetInInterceptorArgs);
-    HandleScope scope;
+    HandleScope scope(isolate);
     v8::Handle<v8::Value> r;
     {
       // Leaving JavaScript.
-      VMState state(EXTERNAL);
+      VMState state(isolate, EXTERNAL);
       r = getter(v8::Utils::ToLocal(name_handle), info);
     }
-    RETURN_IF_SCHEDULED_EXCEPTION();
+    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
     if (!r.IsEmpty()) {
       *attrs = NONE;
       return *v8::Utils::OpenHandle(*r);
@@ -1385,7 +1425,7 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
       *receiver_handle,
       *name_handle,
       attrs);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   return result;
 }
 
@@ -1394,7 +1434,8 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
  * Loads a property with an interceptor performing post interceptor
  * lookup if interceptor failed.
  */
-MaybeObject* LoadPropertyWithInterceptorForLoad(Arguments args) {
+MaybeObject* LoadPropertyWithInterceptorForLoad(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   PropertyAttributes attr = NONE;
   Object* result;
   { MaybeObject* maybe_result = LoadWithInterceptor(&args, &attr);
@@ -1407,10 +1448,11 @@ MaybeObject* LoadPropertyWithInterceptorForLoad(Arguments args) {
 }
 
 
-MaybeObject* LoadPropertyWithInterceptorForCall(Arguments args) {
+MaybeObject* LoadPropertyWithInterceptorForCall(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   PropertyAttributes attr;
   MaybeObject* result = LoadWithInterceptor(&args, &attr);
-  RETURN_IF_SCHEDULED_EXCEPTION();
+  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
   // This is call IC. In this case, we simply return the undefined result which
   // will lead to an exception when trying to invoke the result as a
   // function.
@@ -1418,7 +1460,8 @@ MaybeObject* LoadPropertyWithInterceptorForCall(Arguments args) {
 }
 
 
-MaybeObject* StoreInterceptorProperty(Arguments args) {
+MaybeObject* StoreInterceptorProperty(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   ASSERT(args.length() == 4);
   JSObject* recv = JSObject::cast(args[0]);
   String* name = String::cast(args[1]);
@@ -1434,7 +1477,8 @@ MaybeObject* StoreInterceptorProperty(Arguments args) {
 }
 
 
-MaybeObject* KeyedLoadPropertyWithInterceptor(Arguments args) {
+MaybeObject* KeyedLoadPropertyWithInterceptor(RUNTIME_CALLING_CONVENTION) {
+  RUNTIME_GET_ISOLATE;
   JSObject* receiver = JSObject::cast(args[0]);
   ASSERT(Smi::cast(args[1])->value() >= 0);
   uint32_t index = Smi::cast(args[1])->value();
@@ -1443,7 +1487,7 @@ MaybeObject* KeyedLoadPropertyWithInterceptor(Arguments args) {
 
 
 MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1456,10 +1500,11 @@ MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) {
         GetCodeWithFlags(flags, "CompileCallInitialize");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Counters::call_initialize_stubs.Increment();
+  COUNTERS->call_initialize_stubs()->Increment();
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_INITIALIZE_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_INITIALIZE_TAG),
                           code, code->arguments_count()));
   GDBJIT(AddCode(GDBJITInterface::CALL_INITIALIZE, Code::cast(code)));
   return result;
@@ -1467,7 +1512,7 @@ MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) {
 
 
 MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   // The code of the PreMonomorphic stub is the same as the code
   // of the Initialized stub.  They just differ on the code object flags.
@@ -1482,10 +1527,11 @@ MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
         GetCodeWithFlags(flags, "CompileCallPreMonomorphic");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Counters::call_premonomorphic_stubs.Increment();
+  COUNTERS->call_premonomorphic_stubs()->Increment();
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_PRE_MONOMORPHIC_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_PRE_MONOMORPHIC_TAG),
                           code, code->arguments_count()));
   GDBJIT(AddCode(GDBJITInterface::CALL_PRE_MONOMORPHIC, Code::cast(code)));
   return result;
@@ -1493,7 +1539,7 @@ MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
 
 
 MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1505,10 +1551,11 @@ MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) {
   { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallNormal");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Counters::call_normal_stubs.Increment();
+  COUNTERS->call_normal_stubs()->Increment();
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_NORMAL_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_NORMAL_TAG),
                           code, code->arguments_count()));
   GDBJIT(AddCode(GDBJITInterface::CALL_NORMAL, Code::cast(code)));
   return result;
@@ -1516,7 +1563,7 @@ MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) {
 
 
 MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1524,16 +1571,16 @@ MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
   } else {
     KeyedCallIC::GenerateMegamorphic(masm(), argc);
   }
-
   Object* result;
   { MaybeObject* maybe_result =
         GetCodeWithFlags(flags, "CompileCallMegamorphic");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Counters::call_megamorphic_stubs.Increment();
+  COUNTERS->call_megamorphic_stubs()->Increment();
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG),
                           code, code->arguments_count()));
   GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code)));
   return result;
@@ -1541,7 +1588,7 @@ MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
 
 
 MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1553,10 +1600,11 @@ MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
   { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallMiss");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Counters::call_megamorphic_stubs.Increment();
+  COUNTERS->call_megamorphic_stubs()->Increment();
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MISS_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MISS_TAG),
                           code, code->arguments_count()));
   GDBJIT(AddCode(GDBJITInterface::CALL_MISS, Code::cast(code)));
   return result;
@@ -1565,7 +1613,7 @@ MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 MaybeObject* StubCompiler::CompileCallDebugBreak(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   Debug::GenerateCallICDebugBreak(masm());
   Object* result;
   { MaybeObject* maybe_result =
@@ -1576,14 +1624,15 @@ MaybeObject* StubCompiler::CompileCallDebugBreak(Code::Flags flags) {
   USE(code);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   USE(kind);
-  PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_DEBUG_BREAK_TAG),
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_DEBUG_BREAK_TAG),
                           code, code->arguments_count()));
   return result;
 }
 
 
 MaybeObject* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) {
-  HandleScope scope;
+  HandleScope scope(isolate());
   // Use the same code for the step-in preparations as we do for
   // the miss case.
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
@@ -1600,10 +1649,11 @@ MaybeObject* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) {
   }
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(
-      CALL_LOGGER_TAG(kind, CALL_DEBUG_PREPARE_STEP_IN_TAG),
-      code,
-      code->arguments_count()));
+  PROFILE(isolate(),
+          CodeCreateEvent(
+              CALL_LOGGER_TAG(kind, CALL_DEBUG_PREPARE_STEP_IN_TAG),
+              code,
+              code->arguments_count()));
   return result;
 }
 #endif
@@ -1618,7 +1668,7 @@ MaybeObject* StubCompiler::GetCodeWithFlags(Code::Flags flags,
   // Create code object in the heap.
   CodeDesc desc;
   masm_.GetCode(&desc);
-  MaybeObject* result = Heap::CreateCode(desc, flags, masm_.CodeObject());
+  MaybeObject* result = HEAP->CreateCode(desc, flags, masm_.CodeObject());
 #ifdef ENABLE_DISASSEMBLER
   if (FLAG_print_code_stubs && !result->IsFailure()) {
     Code::cast(result->ToObjectUnchecked())->Disassemble(name);
@@ -1643,7 +1693,7 @@ void StubCompiler::LookupPostInterceptor(JSObject* holder,
   if (!lookup->IsProperty()) {
     lookup->NotFound();
     Object* proto = holder->GetPrototype();
-    if (proto != Heap::null_value()) {
+    if (!proto->IsNull()) {
       proto->Lookup(name, lookup);
     }
   }
@@ -1655,7 +1705,8 @@ MaybeObject* LoadStubCompiler::GetCode(PropertyType type, String* name) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type);
   MaybeObject* result = GetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
-    PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG,
+    PROFILE(isolate(),
+            CodeCreateEvent(Logger::LOAD_IC_TAG,
                             Code::cast(result->ToObjectUnchecked()),
                             name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC,
@@ -1670,7 +1721,8 @@ MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, type);
   MaybeObject* result = GetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
-    PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG,
+    PROFILE(isolate(),
+            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG,
                             Code::cast(result->ToObjectUnchecked()),
                             name));
     GDBJIT(AddCode(GDBJITInterface::LOAD_IC,
@@ -1686,7 +1738,8 @@ MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
       Code::STORE_IC, type, strict_mode_);
   MaybeObject* result = GetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
-    PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG,
+    PROFILE(isolate(),
+            CodeCreateEvent(Logger::STORE_IC_TAG,
                             Code::cast(result->ToObjectUnchecked()),
                             name));
     GDBJIT(AddCode(GDBJITInterface::STORE_IC,
@@ -1702,7 +1755,8 @@ MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, String* name) {
       Code::KEYED_STORE_IC, type, strict_mode_);
   MaybeObject* result = GetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
-    PROFILE(CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
+    PROFILE(isolate(),
+            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
                             Code::cast(result->ToObjectUnchecked()),
                             name));
     GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC,
@@ -1803,7 +1857,7 @@ MaybeObject* ConstructStubCompiler::GetCode() {
   }
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(Logger::STUB_TAG, code, "ConstructStub"));
+  PROFILE(isolate(), CodeCreateEvent(Logger::STUB_TAG, code, "ConstructStub"));
   GDBJIT(AddCode(GDBJITInterface::STUB, "ConstructStub", Code::cast(code)));
   return result;
 }
@@ -1883,7 +1937,8 @@ MaybeObject* ExternalArrayStubCompiler::GetCode(Code::Flags flags) {
   }
   Code* code = Code::cast(result);
   USE(code);
-  PROFILE(CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStub"));
+  PROFILE(isolate(),
+          CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStub"));
   return result;
 }
 
index 99c024211d4e89c3df75ba1b364745ccc575cb9b..19c4bac282d0a038b60965c5ef8061e710ced593 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_STUB_CACHE_H_
 #define V8_STUB_CACHE_H_
 
+#include "arguments.h"
 #include "macro-assembler.h"
 #include "zone-inl.h"
 
@@ -43,50 +44,62 @@ namespace internal {
 // invalidate the cache whenever a prototype map is changed.  The stub
 // validates the map chain as in the mono-morphic case.
 
-class SCTableReference;
+class StubCache;
 
+class SCTableReference {
+ public:
+  Address address() const { return address_; }
+
+ private:
+  explicit SCTableReference(Address address) : address_(address) {}
+
+  Address address_;
+
+  friend class StubCache;
+};
 
-class StubCache : public AllStatic {
+
+class StubCache {
  public:
   struct Entry {
     String* key;
     Code* value;
   };
 
+  void Initialize(bool create_heap_objects);
 
-  static void Initialize(bool create_heap_objects);
 
   // Computes the right stub matching. Inserts the result in the
   // cache before returning.  This might compile a stub if needed.
-  MUST_USE_RESULT static MaybeObject* ComputeLoadNonexistent(
+  MUST_USE_RESULT MaybeObject* ComputeLoadNonexistent(
       String* name,
       JSObject* receiver);
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadField(String* name,
-                                                       JSObject* receiver,
-                                                       JSObject* holder,
-                                                       int field_index);
+  MUST_USE_RESULT MaybeObject* ComputeLoadField(String* name,
+                                                JSObject* receiver,
+                                                JSObject* holder,
+                                                int field_index);
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadCallback(
+  MUST_USE_RESULT MaybeObject* ComputeLoadCallback(
       String* name,
       JSObject* receiver,
       JSObject* holder,
       AccessorInfo* callback);
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadConstant(String* name,
-                                                          JSObject* receiver,
-                                                          JSObject* holder,
-                                                          Object* value);
+  MUST_USE_RESULT MaybeObject* ComputeLoadConstant(String* name,
+                                                   JSObject* receiver,
+                                                   JSObject* holder,
+                                                   Object* value);
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadInterceptor(
+  MUST_USE_RESULT MaybeObject* ComputeLoadInterceptor(
       String* name,
       JSObject* receiver,
       JSObject* holder);
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadNormal();
+  MUST_USE_RESULT MaybeObject* ComputeLoadNormal();
 
 
-  MUST_USE_RESULT static MaybeObject* ComputeLoadGlobal(
+  MUST_USE_RESULT MaybeObject* ComputeLoadGlobal(
       String* name,
       JSObject* receiver,
       GlobalObject* holder,
@@ -96,101 +109,102 @@ class StubCache : public AllStatic {
 
   // ---
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadField(String* name,
-                                                            JSObject* receiver,
-                                                            JSObject* holder,
-                                                            int field_index);
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadField(String* name,
+                                                     JSObject* receiver,
+                                                     JSObject* holder,
+                                                     int field_index);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadCallback(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadCallback(
       String* name,
       JSObject* receiver,
       JSObject* holder,
       AccessorInfo* callback);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadConstant(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadConstant(
       String* name,
       JSObject* receiver,
       JSObject* holder,
       Object* value);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadInterceptor(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadInterceptor(
       String* name,
       JSObject* receiver,
       JSObject* holder);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadArrayLength(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadArrayLength(
       String* name,
       JSArray* receiver);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadStringLength(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadStringLength(
       String* name,
       String* receiver);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadFunctionPrototype(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadFunctionPrototype(
       String* name,
       JSFunction* receiver);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadSpecialized(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadSpecialized(
       JSObject* receiver);
 
   // ---
 
-  MUST_USE_RESULT static MaybeObject* ComputeStoreField(
+  MUST_USE_RESULT MaybeObject* ComputeStoreField(
       String* name,
       JSObject* receiver,
       int field_index,
       Map* transition,
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeStoreNormal(
+  MUST_USE_RESULT MaybeObject* ComputeStoreNormal(
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeStoreGlobal(
+  MUST_USE_RESULT MaybeObject* ComputeStoreGlobal(
       String* name,
       GlobalObject* receiver,
       JSGlobalPropertyCell* cell,
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeStoreCallback(
+  MUST_USE_RESULT MaybeObject* ComputeStoreCallback(
       String* name,
       JSObject* receiver,
       AccessorInfo* callback,
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeStoreInterceptor(
+  MUST_USE_RESULT MaybeObject* ComputeStoreInterceptor(
       String* name,
       JSObject* receiver,
       StrictModeFlag strict_mode);
 
   // ---
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedStoreField(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedStoreField(
       String* name,
       JSObject* receiver,
       int field_index,
       Map* transition,
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedStoreSpecialized(
+  MUST_USE_RESULT MaybeObject* ComputeKeyedStoreSpecialized(
       JSObject* receiver,
       StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadOrStoreExternalArray(
+
+  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreExternalArray(
       JSObject* receiver,
       bool is_store,
       StrictModeFlag strict_mode);
 
   // ---
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallField(int argc,
-                                                       InLoopFlag in_loop,
-                                                       Code::Kind,
-                                                       String* name,
-                                                       Object* object,
-                                                       JSObject* holder,
-                                                       int index);
+  MUST_USE_RESULT MaybeObject* ComputeCallField(int argc,
+                                                InLoopFlag in_loop,
+                                                Code::Kind,
+                                                String* name,
+                                                Object* object,
+                                                JSObject* holder,
+                                                int index);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallConstant(
+  MUST_USE_RESULT MaybeObject* ComputeCallConstant(
       int argc,
       InLoopFlag in_loop,
       Code::Kind,
@@ -200,19 +214,19 @@ class StubCache : public AllStatic {
       JSObject* holder,
       JSFunction* function);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallNormal(int argc,
-                                                        InLoopFlag in_loop,
-                                                        Code::Kind,
-                                                        String* name,
-                                                        JSObject* receiver);
+  MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc,
+                                                 InLoopFlag in_loop,
+                                                 Code::Kind,
+                                                 String* name,
+                                                 JSObject* receiver);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallInterceptor(int argc,
-                                                             Code::Kind,
-                                                             String* name,
-                                                             Object* object,
-                                                             JSObject* holder);
+  MUST_USE_RESULT MaybeObject* ComputeCallInterceptor(int argc,
+                                                      Code::Kind,
+                                                      String* name,
+                                                      Object* object,
+                                                      JSObject* holder);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallGlobal(
+  MUST_USE_RESULT MaybeObject* ComputeCallGlobal(
       int argc,
       InLoopFlag in_loop,
       Code::Kind,
@@ -224,80 +238,103 @@ class StubCache : public AllStatic {
 
   // ---
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallInitialize(int argc,
-                                                            InLoopFlag in_loop,
-                                                            Code::Kind kind);
+  MUST_USE_RESULT MaybeObject* ComputeCallInitialize(int argc,
+                                                     InLoopFlag in_loop,
+                                                     Code::Kind kind);
 
-  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
+  Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
 
-  static Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);
+  Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallPreMonomorphic(
+  MUST_USE_RESULT MaybeObject* ComputeCallPreMonomorphic(
       int argc,
       InLoopFlag in_loop,
       Code::Kind kind);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallNormal(int argc,
-                                                        InLoopFlag in_loop,
-                                                        Code::Kind kind);
-
-  MUST_USE_RESULT static MaybeObject* ComputeCallMegamorphic(int argc,
-                                                             InLoopFlag in_loop,
-                                                             Code::Kind kind);
+  MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc,
+                                                 InLoopFlag in_loop,
+                                                 Code::Kind kind);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallMiss(int argc,
+  MUST_USE_RESULT MaybeObject* ComputeCallMegamorphic(int argc,
+                                                      InLoopFlag in_loop,
                                                       Code::Kind kind);
 
+  MUST_USE_RESULT MaybeObject* ComputeCallMiss(int argc, Code::Kind kind);
+
   // Finds the Code object stored in the Heap::non_monomorphic_cache().
-  MUST_USE_RESULT static Code* FindCallInitialize(int argc,
-                                                  InLoopFlag in_loop,
-                                                  Code::Kind kind);
+  MUST_USE_RESULT Code* FindCallInitialize(int argc,
+                                           InLoopFlag in_loop,
+                                           Code::Kind kind);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  MUST_USE_RESULT static MaybeObject* ComputeCallDebugBreak(int argc,
-                                                            Code::Kind kind);
+  MUST_USE_RESULT MaybeObject* ComputeCallDebugBreak(int argc, Code::Kind kind);
 
-  MUST_USE_RESULT static MaybeObject* ComputeCallDebugPrepareStepIn(
-      int argc,
-      Code::Kind kind);
+  MUST_USE_RESULT MaybeObject* ComputeCallDebugPrepareStepIn(int argc,
+                                                             Code::Kind kind);
 #endif
 
   // Update cache for entry hash(name, map).
-  static Code* Set(String* name, Map* map, Code* code);
+  Code* Set(String* name, Map* map, Code* code);
 
   // Clear the lookup table (@ mark compact collection).
-  static void Clear();
+  void Clear();
 
   // Collect all maps that match the name and flags.
-  static void CollectMatchingMaps(ZoneMapList* types,
-                                  String* name,
-                                  Code::Flags flags);
+  void CollectMatchingMaps(ZoneMapList* types,
+                           String* name,
+                           Code::Flags flags);
 
   // Generate code for probing the stub cache table.
   // Arguments extra and extra2 may be used to pass additional scratch
   // registers. Set to no_reg if not needed.
-  static void GenerateProbe(MacroAssembler* masm,
-                            Code::Flags flags,
-                            Register receiver,
-                            Register name,
-                            Register scratch,
-                            Register extra,
-                            Register extra2 = no_reg);
+  void GenerateProbe(MacroAssembler* masm,
+                     Code::Flags flags,
+                     Register receiver,
+                     Register name,
+                     Register scratch,
+                     Register extra,
+                     Register extra2 = no_reg);
 
   enum Table {
     kPrimary,
     kSecondary
   };
 
+
+  SCTableReference key_reference(StubCache::Table table) {
+    return SCTableReference(
+        reinterpret_cast<Address>(&first_entry(table)->key));
+  }
+
+
+  SCTableReference value_reference(StubCache::Table table) {
+    return SCTableReference(
+        reinterpret_cast<Address>(&first_entry(table)->value));
+  }
+
+
+  StubCache::Entry* first_entry(StubCache::Table table) {
+    switch (table) {
+      case StubCache::kPrimary: return StubCache::primary_;
+      case StubCache::kSecondary: return StubCache::secondary_;
+    }
+    UNREACHABLE();
+    return NULL;
+  }
+
+
  private:
+  explicit StubCache(Isolate* isolate);
+
+  friend class Isolate;
   friend class SCTableReference;
   static const int kPrimaryTableSize = 2048;
   static const int kSecondaryTableSize = 512;
-  static Entry primary_[];
-  static Entry secondary_[];
+  Entry primary_[kPrimaryTableSize];
+  Entry secondary_[kSecondaryTableSize];
 
   // Computes the hashed offsets for primary and secondary caches.
-  static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
+  RLYSTC int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
     // This works well because the heap object tag size and the hash
     // shift are equal.  Shifting down the length field to get the
     // hash code would effectively throw away two bits of the hash
@@ -320,7 +357,7 @@ class StubCache : public AllStatic {
     return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
   }
 
-  static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
+  RLYSTC int SecondaryOffset(String* name, Code::Flags flags, int seed) {
     // Use the seed from the primary cache in the secondary cache.
     uint32_t string_low32bits =
         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
@@ -337,59 +374,33 @@ class StubCache : public AllStatic {
   // ends in String::kHashShift 0s.  Then we shift it so it is a multiple
   // of sizeof(Entry).  This makes it easier to avoid making mistakes
   // in the hashed offset computations.
-  static Entry* entry(Entry* table, int offset) {
+  RLYSTC Entry* entry(Entry* table, int offset) {
     const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
     return reinterpret_cast<Entry*>(
         reinterpret_cast<Address>(table) + (offset << shift_amount));
   }
-};
 
+  Isolate* isolate_;
 
-class SCTableReference {
- public:
-  static SCTableReference keyReference(StubCache::Table table) {
-    return SCTableReference(
-        reinterpret_cast<Address>(&first_entry(table)->key));
-  }
-
-
-  static SCTableReference valueReference(StubCache::Table table) {
-    return SCTableReference(
-        reinterpret_cast<Address>(&first_entry(table)->value));
-  }
-
-  Address address() const { return address_; }
-
- private:
-  explicit SCTableReference(Address address) : address_(address) {}
-
-  static StubCache::Entry* first_entry(StubCache::Table table) {
-    switch (table) {
-      case StubCache::kPrimary: return StubCache::primary_;
-      case StubCache::kSecondary: return StubCache::secondary_;
-    }
-    UNREACHABLE();
-    return NULL;
-  }
-
-  Address address_;
+  DISALLOW_COPY_AND_ASSIGN(StubCache);
 };
 
+
 // ------------------------------------------------------------------------
 
 
 // Support functions for IC stubs for callbacks.
-MaybeObject* LoadCallbackProperty(Arguments args);
-MaybeObject* StoreCallbackProperty(Arguments args);
+MaybeObject* LoadCallbackProperty(RUNTIME_CALLING_CONVENTION);
+MaybeObject* StoreCallbackProperty(RUNTIME_CALLING_CONVENTION);
 
 
 // Support functions for IC stubs for interceptors.
-MaybeObject* LoadPropertyWithInterceptorOnly(Arguments args);
-MaybeObject* LoadPropertyWithInterceptorForLoad(Arguments args);
-MaybeObject* LoadPropertyWithInterceptorForCall(Arguments args);
-MaybeObject* StoreInterceptorProperty(Arguments args);
-MaybeObject* CallInterceptorProperty(Arguments args);
-MaybeObject* KeyedLoadPropertyWithInterceptor(Arguments args);
+MaybeObject* LoadPropertyWithInterceptorOnly(RUNTIME_CALLING_CONVENTION);
+MaybeObject* LoadPropertyWithInterceptorForLoad(RUNTIME_CALLING_CONVENTION);
+MaybeObject* LoadPropertyWithInterceptorForCall(RUNTIME_CALLING_CONVENTION);
+MaybeObject* StoreInterceptorProperty(RUNTIME_CALLING_CONVENTION);
+MaybeObject* CallInterceptorProperty(RUNTIME_CALLING_CONVENTION);
+MaybeObject* KeyedLoadPropertyWithInterceptor(RUNTIME_CALLING_CONVENTION);
 
 
 // The stub compiler compiles stubs for the stub cache.
@@ -547,6 +558,8 @@ class StubCompiler BASE_EMBEDDED {
                                     String* name,
                                     LookupResult* lookup);
 
+  Isolate* isolate() { return scope_.isolate(); }
+
  private:
   HandleScope scope_;
   MacroAssembler masm_;
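
To make the scope of this header change concrete: call sites that used to reach the cache through static class methods now go through the isolate that owns the cache. A hedged before/after sketch of a caller; the stub_cache() accessor on Isolate is implied by the new private constructor and "friend class Isolate" rather than shown in this hunk:

    // Before: one process-wide cache, reached through static methods.
    //   MaybeObject* maybe_code =
    //       StubCache::ComputeLoadField(name, receiver, holder, field_index);

    // After: each isolate owns its own StubCache instance.
    Isolate* isolate = Isolate::Current();
    MaybeObject* maybe_code =
        isolate->stub_cache()->ComputeLoadField(name, receiver, holder,
                                                field_index);
    Object* code;
    if (!maybe_code->ToObject(&code)) return maybe_code;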
index 488e90979f00533aadfa706e92d5ff1d7fd4f7b3..feca7beb9fb39ea091da7772acd0c2c28afc3400 100644 (file)
@@ -32,21 +32,21 @@ namespace v8 {
 namespace internal {
 
 #define T(name, string, precedence) #name,
-const char* Token::name_[NUM_TOKENS] = {
+const char* const Token::name_[NUM_TOKENS] = {
   TOKEN_LIST(T, T, IGNORE_TOKEN)
 };
 #undef T
 
 
 #define T(name, string, precedence) string,
-const char* Token::string_[NUM_TOKENS] = {
+const char* const Token::string_[NUM_TOKENS] = {
   TOKEN_LIST(T, T, IGNORE_TOKEN)
 };
 #undef T
 
 
 #define T(name, string, precedence) precedence,
-int8_t Token::precedence_[NUM_TOKENS] = {
+const int8_t Token::precedence_[NUM_TOKENS] = {
   TOKEN_LIST(T, T, IGNORE_TOKEN)
 };
 #undef T
index 776d9f3bc120d99e36cb0686ffa2065d422cf0a1..a0afbc14f5f19eb82b192bcf3f30f048c32e6d1a 100644 (file)
@@ -277,9 +277,9 @@ class Token {
   }
 
  private:
-  static const char* name_[NUM_TOKENS];
-  static const char* string_[NUM_TOKENS];
-  static int8_t precedence_[NUM_TOKENS];
+  static const char* const name_[NUM_TOKENS];
+  static const char* const string_[NUM_TOKENS];
+  static const int8_t precedence_[NUM_TOKENS];
   static const char token_type[NUM_TOKENS];
 };
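
The const qualifiers added above are possible because the three tables are generated once from the token list macro and never written to afterwards. A small, self-contained sketch of that X-macro pattern; the demo tokens below are made up and do not reflect V8's TOKEN_LIST:

    #include <cstdint>
    #include <cstdio>

    // Tiny stand-in for TOKEN_LIST: name, string, precedence per entry.
    #define DEMO_TOKEN_LIST(T) \
      T(ADD, "+", 12)          \
      T(MUL, "*", 13)          \
      T(EOS, "<eos>", 0)

    enum DemoToken {
    #define T(name, string, precedence) name,
      DEMO_TOKEN_LIST(T)
    #undef T
      NUM_DEMO_TOKENS
    };

    // Each table is expanded from the same list, so both the array and the
    // strings it points to can be const.
    #define T(name, string, precedence) #name,
    static const char* const kNames[NUM_DEMO_TOKENS] = { DEMO_TOKEN_LIST(T) };
    #undef T

    #define T(name, string, precedence) precedence,
    static const int8_t kPrecedence[NUM_DEMO_TOKENS] = { DEMO_TOKEN_LIST(T) };
    #undef T

    int main() {
      std::printf("%s has precedence %d\n", kNames[MUL], kPrecedence[MUL]);
      return 0;
    }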
 
index 78db26a50973975caeed7d114c5e48d5c5b130a6..62850b13e0e742f50d18736c364f3c118390ae73 100644 (file)
 #include "string-stream.h"
 #include "vm-state-inl.h"
 
+// TODO(isolates): move to isolate.cc. This stuff is kept here to
+// simplify merging.
+
 namespace v8 {
 namespace internal {
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
-Semaphore* Top::runtime_profiler_semaphore_ = NULL;
-#endif
-ThreadLocalTop Top::thread_local_;
-Mutex* Top::break_access_ = OS::CreateMutex();
-
-NoAllocationStringAllocator* preallocated_message_space = NULL;
-
-bool capture_stack_trace_for_uncaught_exceptions = false;
-int stack_trace_for_uncaught_exceptions_frame_limit = 0;
-StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options =
-    StackTrace::kOverview;
-
-Address top_addresses[] = {
-#define C(name) reinterpret_cast<Address>(Top::name()),
-    TOP_ADDRESS_LIST(C)
-    TOP_ADDRESS_LIST_PROF(C)
-#undef C
-    NULL
-};
-
-
 v8::TryCatch* ThreadLocalTop::TryCatchHandler() {
   return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address());
 }
@@ -72,9 +53,9 @@ void ThreadLocalTop::Initialize() {
   handler_ = 0;
 #ifdef USE_SIMULATOR
 #ifdef V8_TARGET_ARCH_ARM
-  simulator_ = Simulator::current();
+  simulator_ = Simulator::current(Isolate::Current());
 #elif V8_TARGET_ARCH_MIPS
-  simulator_ = assembler::mips::Simulator::current();
+  simulator_ = Simulator::current(Isolate::Current());
 #endif
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
@@ -83,11 +64,10 @@ void ThreadLocalTop::Initialize() {
 #endif
 #ifdef ENABLE_VMSTATE_TRACKING
   current_vm_state_ = EXTERNAL;
-  runtime_profiler_state_ = Top::PROF_NOT_IN_JS;
 #endif
   try_catch_handler_address_ = NULL;
   context_ = NULL;
-  int id = ThreadManager::CurrentId();
+  int id = Isolate::Current()->thread_manager()->CurrentId();
   thread_id_ = (id == 0) ? ThreadManager::kInvalidId : id;
   external_caught_exception_ = false;
   failed_access_check_callback_ = NULL;
@@ -96,32 +76,32 @@ void ThreadLocalTop::Initialize() {
 }
 
 
-Address Top::get_address_from_id(Top::AddressId id) {
-  return top_addresses[id];
+Address Isolate::get_address_from_id(Isolate::AddressId id) {
+  return isolate_addresses_[id];
 }
 
 
-char* Top::Iterate(ObjectVisitor* v, char* thread_storage) {
+char* Isolate::Iterate(ObjectVisitor* v, char* thread_storage) {
   ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
   Iterate(v, thread);
   return thread_storage + sizeof(ThreadLocalTop);
 }
 
 
-void Top::IterateThread(ThreadVisitor* v) {
-  v->VisitThread(&thread_local_);
+void Isolate::IterateThread(ThreadVisitor* v) {
+  v->VisitThread(thread_local_top());
 }
 
 
-void Top::IterateThread(ThreadVisitor* v, char* t) {
+void Isolate::IterateThread(ThreadVisitor* v, char* t) {
   ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
   v->VisitThread(thread);
 }
 
 
-void Top::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) {
+void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) {
   // Visit the roots from the top for a given thread.
-  Object *pending;
+  Object* pending;
   // The pending exception can sometimes be a failure.  We can't show
   // that to the GC, which only understands objects.
   if (thread->pending_exception_->ToObject(&pending)) {
@@ -151,176 +131,13 @@ void Top::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) {
 }
 
 
-void Top::Iterate(ObjectVisitor* v) {
-  ThreadLocalTop* current_t = &thread_local_;
+void Isolate::Iterate(ObjectVisitor* v) {
+  ThreadLocalTop* current_t = thread_local_top();
   Iterate(v, current_t);
 }
 
 
-void Top::InitializeThreadLocal() {
-  thread_local_.Initialize();
-  clear_pending_exception();
-  clear_pending_message();
-  clear_scheduled_exception();
-}
-
-
-// Create a dummy thread that will wait forever on a semaphore. The only
-// purpose for this thread is to have some stack area to save essential data
-// into for use by a stacks only core dump (aka minidump).
-class PreallocatedMemoryThread: public Thread {
- public:
-  PreallocatedMemoryThread()
-    : Thread("v8:PreallocMem"),
-      keep_running_(true) {
-    wait_for_ever_semaphore_ = OS::CreateSemaphore(0);
-    data_ready_semaphore_ = OS::CreateSemaphore(0);
-  }
-
-  // When the thread starts running it will allocate a fixed number of bytes
-  // on the stack and publish the location of this memory for others to use.
-  void Run() {
-    EmbeddedVector<char, 15 * 1024> local_buffer;
-
-    // Initialize the buffer with a known good value.
-    OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
-                local_buffer.length());
-
-    // Publish the local buffer and signal its availability.
-    data_ = local_buffer.start();
-    length_ = local_buffer.length();
-    data_ready_semaphore_->Signal();
-
-    while (keep_running_) {
-      // This thread will wait here until the end of time.
-      wait_for_ever_semaphore_->Wait();
-    }
-
-    // Make sure we access the buffer after the wait to remove all possibility
-    // of it being optimized away.
-    OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
-                local_buffer.length());
-  }
-
-  static char* data() {
-    if (data_ready_semaphore_ != NULL) {
-      // Initial access is guarded until the data has been published.
-      data_ready_semaphore_->Wait();
-      delete data_ready_semaphore_;
-      data_ready_semaphore_ = NULL;
-    }
-    return data_;
-  }
-
-  static unsigned length() {
-    if (data_ready_semaphore_ != NULL) {
-      // Initial access is guarded until the data has been published.
-      data_ready_semaphore_->Wait();
-      delete data_ready_semaphore_;
-      data_ready_semaphore_ = NULL;
-    }
-    return length_;
-  }
-
-  static void StartThread() {
-    if (the_thread_ != NULL) return;
-
-    the_thread_ = new PreallocatedMemoryThread();
-    the_thread_->Start();
-  }
-
-  // Stop the PreallocatedMemoryThread and release its resources.
-  static void StopThread() {
-    if (the_thread_ == NULL) return;
-
-    the_thread_->keep_running_ = false;
-    wait_for_ever_semaphore_->Signal();
-
-    // Wait for the thread to terminate.
-    the_thread_->Join();
-
-    if (data_ready_semaphore_ != NULL) {
-      delete data_ready_semaphore_;
-      data_ready_semaphore_ = NULL;
-    }
-
-    delete wait_for_ever_semaphore_;
-    wait_for_ever_semaphore_ = NULL;
-
-    // Done with the thread entirely.
-    delete the_thread_;
-    the_thread_ = NULL;
-  }
-
- private:
-  // Used to make sure that the thread keeps looping even for spurious wakeups.
-  bool keep_running_;
-
-  // The preallocated memory thread singleton.
-  static PreallocatedMemoryThread* the_thread_;
-  // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
-  static Semaphore* wait_for_ever_semaphore_;
-  // Semaphore to signal that the data has been initialized.
-  static Semaphore* data_ready_semaphore_;
-
-  // Location and size of the preallocated memory block.
-  static char* data_;
-  static unsigned length_;
-
-  DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
-};
-
-PreallocatedMemoryThread* PreallocatedMemoryThread::the_thread_ = NULL;
-Semaphore* PreallocatedMemoryThread::wait_for_ever_semaphore_ = NULL;
-Semaphore* PreallocatedMemoryThread::data_ready_semaphore_ = NULL;
-char* PreallocatedMemoryThread::data_ = NULL;
-unsigned PreallocatedMemoryThread::length_ = 0;
-
-static bool initialized = false;
-
-void Top::Initialize() {
-  CHECK(!initialized);
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  ASSERT(runtime_profiler_semaphore_ == NULL);
-  runtime_profiler_semaphore_ = OS::CreateSemaphore(0);
-#endif
-
-  InitializeThreadLocal();
-
-  // Only preallocate on the first initialization.
-  if (FLAG_preallocate_message_memory && (preallocated_message_space == NULL)) {
-    // Start the thread which will set aside some memory.
-    PreallocatedMemoryThread::StartThread();
-    preallocated_message_space =
-        new NoAllocationStringAllocator(PreallocatedMemoryThread::data(),
-                                        PreallocatedMemoryThread::length());
-    PreallocatedStorage::Init(PreallocatedMemoryThread::length() / 4);
-  }
-  initialized = true;
-}
-
-
-void Top::TearDown() {
-  if (initialized) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
-    delete runtime_profiler_semaphore_;
-    runtime_profiler_semaphore_ = NULL;
-#endif
-
-    // Remove the external reference to the preallocated stack memory.
-    if (preallocated_message_space != NULL) {
-      delete preallocated_message_space;
-      preallocated_message_space = NULL;
-    }
-
-    PreallocatedMemoryThread::StopThread();
-    initialized = false;
-  }
-}
-
-
-void Top::RegisterTryCatchHandler(v8::TryCatch* that) {
+void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
   // The ARM simulator has a separate JS stack.  We therefore register
   // the C++ try catch handler with the simulator and get back an
   // address that can be used for comparisons with addresses into the
@@ -328,68 +145,64 @@ void Top::RegisterTryCatchHandler(v8::TryCatch* that) {
   // returned will be the address of the C++ try catch handler itself.
   Address address = reinterpret_cast<Address>(
       SimulatorStack::RegisterCTryCatch(reinterpret_cast<uintptr_t>(that)));
-  thread_local_.set_try_catch_handler_address(address);
+  thread_local_top()->set_try_catch_handler_address(address);
 }
 
 
-void Top::UnregisterTryCatchHandler(v8::TryCatch* that) {
-  ASSERT(try_catch_handler() == that);
-  thread_local_.set_try_catch_handler_address(
+void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
+  ASSERT(thread_local_top()->TryCatchHandler() == that);
+  thread_local_top()->set_try_catch_handler_address(
       reinterpret_cast<Address>(that->next_));
-  thread_local_.catcher_ = NULL;
+  thread_local_top()->catcher_ = NULL;
   SimulatorStack::UnregisterCTryCatch();
 }
 
 
-
-static int stack_trace_nesting_level = 0;
-static StringStream* incomplete_message = NULL;
-
-
-Handle<String> Top::StackTraceString() {
-  if (stack_trace_nesting_level == 0) {
-    stack_trace_nesting_level++;
+Handle<String> Isolate::StackTraceString() {
+  if (stack_trace_nesting_level_ == 0) {
+    stack_trace_nesting_level_++;
     HeapStringAllocator allocator;
     StringStream::ClearMentionedObjectCache();
     StringStream accumulator(&allocator);
-    incomplete_message = &accumulator;
+    incomplete_message_ = &accumulator;
     PrintStack(&accumulator);
     Handle<String> stack_trace = accumulator.ToString();
-    incomplete_message = NULL;
-    stack_trace_nesting_level = 0;
+    incomplete_message_ = NULL;
+    stack_trace_nesting_level_ = 0;
     return stack_trace;
-  } else if (stack_trace_nesting_level == 1) {
-    stack_trace_nesting_level++;
+  } else if (stack_trace_nesting_level_ == 1) {
+    stack_trace_nesting_level_++;
     OS::PrintError(
       "\n\nAttempt to print stack while printing stack (double fault)\n");
     OS::PrintError(
       "If you are lucky you may find a partial stack dump on stdout.\n\n");
-    incomplete_message->OutputToStdOut();
-    return Factory::empty_symbol();
+    incomplete_message_->OutputToStdOut();
+    return factory()->empty_symbol();
   } else {
     OS::Abort();
     // Unreachable
-    return Factory::empty_symbol();
+    return factory()->empty_symbol();
   }
 }
 
 
-Handle<JSArray> Top::CaptureCurrentStackTrace(
+Handle<JSArray> Isolate::CaptureCurrentStackTrace(
     int frame_limit, StackTrace::StackTraceOptions options) {
   // Ensure no negative values.
   int limit = Max(frame_limit, 0);
-  Handle<JSArray> stack_trace = Factory::NewJSArray(frame_limit);
+  Handle<JSArray> stack_trace = factory()->NewJSArray(frame_limit);
 
-  Handle<String> column_key = Factory::LookupAsciiSymbol("column");
-  Handle<String> line_key = Factory::LookupAsciiSymbol("lineNumber");
-  Handle<String> script_key = Factory::LookupAsciiSymbol("scriptName");
+  Handle<String> column_key = factory()->LookupAsciiSymbol("column");
+  Handle<String> line_key = factory()->LookupAsciiSymbol("lineNumber");
+  Handle<String> script_key = factory()->LookupAsciiSymbol("scriptName");
   Handle<String> name_or_source_url_key =
-      Factory::LookupAsciiSymbol("nameOrSourceURL");
+      factory()->LookupAsciiSymbol("nameOrSourceURL");
   Handle<String> script_name_or_source_url_key =
-      Factory::LookupAsciiSymbol("scriptNameOrSourceURL");
-  Handle<String> function_key = Factory::LookupAsciiSymbol("functionName");
-  Handle<String> eval_key = Factory::LookupAsciiSymbol("isEval");
-  Handle<String> constructor_key = Factory::LookupAsciiSymbol("isConstructor");
+      factory()->LookupAsciiSymbol("scriptNameOrSourceURL");
+  Handle<String> function_key = factory()->LookupAsciiSymbol("functionName");
+  Handle<String> eval_key = factory()->LookupAsciiSymbol("isEval");
+  Handle<String> constructor_key =
+      factory()->LookupAsciiSymbol("isConstructor");
 
   StackTraceFrameIterator it;
   int frames_seen = 0;
@@ -400,7 +213,7 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
     frame->Summarize(&frames);
     for (int i = frames.length() - 1; i >= 0 && frames_seen < limit; i--) {
       // Create a JSObject to hold the information for the StackFrame.
-      Handle<JSObject> stackFrame = Factory::NewJSObject(object_function());
+      Handle<JSObject> stackFrame = factory()->NewJSObject(object_function());
 
       Handle<JSFunction> fun = frames[i].function();
       Handle<Script> script(Script::cast(fun->shared()->script()));
@@ -429,12 +242,12 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
       }
 
       if (options & StackTrace::kScriptName) {
-        Handle<Object> script_name(script->name());
+        Handle<Object> script_name(script->name(), this);
         SetLocalPropertyNoThrow(stackFrame, script_key, script_name);
       }
 
       if (options & StackTrace::kScriptNameOrSourceURL) {
-        Handle<Object> script_name(script->name());
+        Handle<Object> script_name(script->name(), this);
         Handle<JSValue> script_wrapper = GetScriptWrapper(script);
         Handle<Object> property = GetProperty(script_wrapper,
                                               name_or_source_url_key);
@@ -444,16 +257,16 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
         Handle<Object> result = Execution::TryCall(method, script_wrapper, 0,
                                                    NULL, &caught_exception);
         if (caught_exception) {
-          result = Factory::undefined_value();
+          result = factory()->undefined_value();
         }
         SetLocalPropertyNoThrow(stackFrame, script_name_or_source_url_key,
                                 result);
       }
 
       if (options & StackTrace::kFunctionName) {
-        Handle<Object> fun_name(fun->shared()->name());
+        Handle<Object> fun_name(fun->shared()->name(), this);
         if (fun_name->ToBoolean()->IsFalse()) {
-          fun_name = Handle<Object>(fun->shared()->inferred_name());
+          fun_name = Handle<Object>(fun->shared()->inferred_name(), this);
         }
         SetLocalPropertyNoThrow(stackFrame, function_key, fun_name);
       }
@@ -461,13 +274,13 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
       if (options & StackTrace::kIsEval) {
         int type = Smi::cast(script->compilation_type())->value();
         Handle<Object> is_eval = (type == Script::COMPILATION_TYPE_EVAL) ?
-            Factory::true_value() : Factory::false_value();
+            factory()->true_value() : factory()->false_value();
         SetLocalPropertyNoThrow(stackFrame, eval_key, is_eval);
       }
 
       if (options & StackTrace::kIsConstructor) {
         Handle<Object> is_constructor = (frames[i].is_constructor()) ?
-            Factory::true_value() : Factory::false_value();
+            factory()->true_value() : factory()->false_value();
         SetLocalPropertyNoThrow(stackFrame, constructor_key, is_constructor);
       }
 
@@ -482,15 +295,15 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
 }
 
 
-void Top::PrintStack() {
-  if (stack_trace_nesting_level == 0) {
-    stack_trace_nesting_level++;
+void Isolate::PrintStack() {
+  if (stack_trace_nesting_level_ == 0) {
+    stack_trace_nesting_level_++;
 
     StringAllocator* allocator;
-    if (preallocated_message_space == NULL) {
+    if (preallocated_message_space_ == NULL) {
       allocator = new HeapStringAllocator();
     } else {
-      allocator = preallocated_message_space;
+      allocator = preallocated_message_space_;
     }
 
     NativeAllocationChecker allocation_checker(
@@ -500,23 +313,23 @@ void Top::PrintStack() {
 
     StringStream::ClearMentionedObjectCache();
     StringStream accumulator(allocator);
-    incomplete_message = &accumulator;
+    incomplete_message_ = &accumulator;
     PrintStack(&accumulator);
     accumulator.OutputToStdOut();
     accumulator.Log();
-    incomplete_message = NULL;
-    stack_trace_nesting_level = 0;
-    if (preallocated_message_space == NULL) {
+    incomplete_message_ = NULL;
+    stack_trace_nesting_level_ = 0;
+    if (preallocated_message_space_ == NULL) {
       // Remove the HeapStringAllocator created above.
       delete allocator;
     }
-  } else if (stack_trace_nesting_level == 1) {
-    stack_trace_nesting_level++;
+  } else if (stack_trace_nesting_level_ == 1) {
+    stack_trace_nesting_level_++;
     OS::PrintError(
       "\n\nAttempt to print stack while printing stack (double fault)\n");
     OS::PrintError(
       "If you are lucky you may find a partial stack dump on stdout.\n\n");
-    incomplete_message->OutputToStdOut();
+    incomplete_message_->OutputToStdOut();
   }
 }
 
@@ -530,13 +343,20 @@ static void PrintFrames(StringStream* accumulator,
 }
 
 
-void Top::PrintStack(StringStream* accumulator) {
+void Isolate::PrintStack(StringStream* accumulator) {
+  if (!IsInitialized()) {
+    accumulator->Add(
+        "\n==== Stack trace is not available ==========================\n\n");
+    accumulator->Add(
+        "\n==== Isolate for the thread is not initialized =============\n\n");
+    return;
+  }
   // The MentionedObjectCache is not GC-proof at the moment.
   AssertNoAllocation nogc;
   ASSERT(StringStream::IsMentionedObjectCacheClear());
 
   // Avoid printing anything if there are no frames.
-  if (c_entry_fp(GetCurrentThread()) == 0) return;
+  if (c_entry_fp(thread_local_top()) == 0) return;
 
   accumulator->Add(
       "\n==== Stack trace ============================================\n\n");
@@ -551,28 +371,29 @@ void Top::PrintStack(StringStream* accumulator) {
 }
 
 
-void Top::SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback) {
-  thread_local_.failed_access_check_callback_ = callback;
+void Isolate::SetFailedAccessCheckCallback(
+    v8::FailedAccessCheckCallback callback) {
+  thread_local_top()->failed_access_check_callback_ = callback;
 }
 
 
-void Top::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) {
-  if (!thread_local_.failed_access_check_callback_) return;
+void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) {
+  if (!thread_local_top()->failed_access_check_callback_) return;
 
   ASSERT(receiver->IsAccessCheckNeeded());
-  ASSERT(Top::context());
+  ASSERT(context());
 
   // Get the data object from access check info.
   JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
   if (!constructor->shared()->IsApiFunction()) return;
   Object* data_obj =
       constructor->shared()->get_api_func_data()->access_check_info();
-  if (data_obj == Heap::undefined_value()) return;
+  if (data_obj == heap_.undefined_value()) return;
 
   HandleScope scope;
   Handle<JSObject> receiver_handle(receiver);
   Handle<Object> data(AccessCheckInfo::cast(data_obj)->data());
-  thread_local_.failed_access_check_callback_(
+  thread_local_top()->failed_access_check_callback_(
     v8::Utils::ToLocal(receiver_handle),
     type,
     v8::Utils::ToLocal(data));
@@ -584,18 +405,19 @@ enum MayAccessDecision {
 };
 
 
-static MayAccessDecision MayAccessPreCheck(JSObject* receiver,
+static MayAccessDecision MayAccessPreCheck(Isolate* isolate,
+                                           JSObject* receiver,
                                            v8::AccessType type) {
   // During bootstrapping, callback functions are not enabled yet.
-  if (Bootstrapper::IsActive()) return YES;
+  if (isolate->bootstrapper()->IsActive()) return YES;
 
   if (receiver->IsJSGlobalProxy()) {
     Object* receiver_context = JSGlobalProxy::cast(receiver)->context();
     if (!receiver_context->IsContext()) return NO;
 
     // Get the global context of current top context.
-    // avoid using Top::global_context() because it uses Handle.
-    Context* global_context = Top::context()->global()->global_context();
+    // avoid using Isolate::global_context() because it uses Handle.
+    Context* global_context = isolate->context()->global()->global_context();
     if (receiver_context == global_context) return YES;
 
     if (Context::cast(receiver_context)->security_token() ==
@@ -607,7 +429,8 @@ static MayAccessDecision MayAccessPreCheck(JSObject* receiver,
 }
 
 
-bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
+bool Isolate::MayNamedAccess(JSObject* receiver, Object* key,
+                             v8::AccessType type) {
   ASSERT(receiver->IsAccessCheckNeeded());
 
   // The callers of this method are not expecting a GC.
@@ -615,13 +438,13 @@ bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
 
   // Skip checks for hidden properties access.  Note, we do not
   // require existence of a context in this case.
-  if (key == Heap::hidden_symbol()) return true;
+  if (key == heap_.hidden_symbol()) return true;
 
   // Check for compatibility between the security tokens in the
   // current lexical context and the accessed object.
-  ASSERT(Top::context());
+  ASSERT(context());
 
-  MayAccessDecision decision = MayAccessPreCheck(receiver, type);
+  MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
   if (decision != UNKNOWN) return decision == YES;
 
   // Get named access check callback
@@ -630,7 +453,7 @@ bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
 
   Object* data_obj =
      constructor->shared()->get_api_func_data()->access_check_info();
-  if (data_obj == Heap::undefined_value()) return false;
+  if (data_obj == heap_.undefined_value()) return false;
 
   Object* fun_obj = AccessCheckInfo::cast(data_obj)->named_callback();
   v8::NamedSecurityCallback callback =
@@ -638,15 +461,15 @@ bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
 
   if (!callback) return false;
 
-  HandleScope scope;
-  Handle<JSObject> receiver_handle(receiver);
-  Handle<Object> key_handle(key);
-  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data());
-  LOG(ApiNamedSecurityCheck(key));
+  HandleScope scope(this);
+  Handle<JSObject> receiver_handle(receiver, this);
+  Handle<Object> key_handle(key, this);
+  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
+  LOG(this, ApiNamedSecurityCheck(key));
   bool result = false;
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
+    VMState state(this, EXTERNAL);
     result = callback(v8::Utils::ToLocal(receiver_handle),
                       v8::Utils::ToLocal(key_handle),
                       type,
@@ -656,17 +479,15 @@ bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
 }
 
 
-bool Top::MayIndexedAccess(JSObject* receiver,
-                           uint32_t index,
-                           v8::AccessType type) {
+bool Isolate::MayIndexedAccess(JSObject* receiver,
+                               uint32_t index,
+                               v8::AccessType type) {
   ASSERT(receiver->IsAccessCheckNeeded());
   // Check for compatibility between the security tokens in the
   // current lexical context and the accessed object.
-  ASSERT(Top::context());
-  // The callers of this method are not expecting a GC.
-  AssertNoAllocation no_gc;
+  ASSERT(context());
 
-  MayAccessDecision decision = MayAccessPreCheck(receiver, type);
+  MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
   if (decision != UNKNOWN) return decision == YES;
 
   // Get indexed access check callback
@@ -675,7 +496,7 @@ bool Top::MayIndexedAccess(JSObject* receiver,
 
   Object* data_obj =
       constructor->shared()->get_api_func_data()->access_check_info();
-  if (data_obj == Heap::undefined_value()) return false;
+  if (data_obj == heap_.undefined_value()) return false;
 
   Object* fun_obj = AccessCheckInfo::cast(data_obj)->indexed_callback();
   v8::IndexedSecurityCallback callback =
@@ -683,14 +504,14 @@ bool Top::MayIndexedAccess(JSObject* receiver,
 
   if (!callback) return false;
 
-  HandleScope scope;
-  Handle<JSObject> receiver_handle(receiver);
-  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data());
-  LOG(ApiIndexedSecurityCheck(index));
+  HandleScope scope(this);
+  Handle<JSObject> receiver_handle(receiver, this);
+  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
+  LOG(this, ApiIndexedSecurityCheck(index));
   bool result = false;
   {
     // Leaving JavaScript.
-    VMState state(EXTERNAL);
+    VMState state(this, EXTERNAL);
     result = callback(v8::Utils::ToLocal(receiver_handle),
                       index,
                       type,
@@ -700,15 +521,15 @@ bool Top::MayIndexedAccess(JSObject* receiver,
 }
 
 
-const char* Top::kStackOverflowMessage =
+const char* const Isolate::kStackOverflowMessage =
   "Uncaught RangeError: Maximum call stack size exceeded";
 
 
-Failure* Top::StackOverflow() {
+Failure* Isolate::StackOverflow() {
   HandleScope scope;
-  Handle<String> key = Factory::stack_overflow_symbol();
+  Handle<String> key = factory()->stack_overflow_symbol();
   Handle<JSObject> boilerplate =
-      Handle<JSObject>::cast(GetProperty(Top::builtins(), key));
+      Handle<JSObject>::cast(GetProperty(js_builtins_object(), key));
   Handle<Object> exception = Copy(boilerplate);
   // TODO(1240995): To avoid having to call JavaScript code to compute
   // the message for stack overflow exceptions which is very likely to
@@ -719,23 +540,23 @@ Failure* Top::StackOverflow() {
 }
 
 
-Failure* Top::TerminateExecution() {
-  DoThrow(Heap::termination_exception(), NULL, NULL);
+Failure* Isolate::TerminateExecution() {
+  DoThrow(heap_.termination_exception(), NULL, NULL);
   return Failure::Exception();
 }
 
 
-Failure* Top::Throw(Object* exception, MessageLocation* location) {
+Failure* Isolate::Throw(Object* exception, MessageLocation* location) {
   DoThrow(exception, location, NULL);
   return Failure::Exception();
 }
 
 
-Failure* Top::ReThrow(MaybeObject* exception, MessageLocation* location) {
+Failure* Isolate::ReThrow(MaybeObject* exception, MessageLocation* location) {
   bool can_be_caught_externally = false;
   ShouldReportException(&can_be_caught_externally,
                         is_catchable_by_javascript(exception));
-  thread_local_.catcher_ = can_be_caught_externally ?
+  thread_local_top()->catcher_ = can_be_caught_externally ?
       try_catch_handler() : NULL;
 
   // Set the exception being re-thrown.
@@ -744,22 +565,22 @@ Failure* Top::ReThrow(MaybeObject* exception, MessageLocation* location) {
 }
 
 
-Failure* Top::ThrowIllegalOperation() {
-  return Throw(Heap::illegal_access_symbol());
+Failure* Isolate::ThrowIllegalOperation() {
+  return Throw(heap_.illegal_access_symbol());
 }
 
 
-void Top::ScheduleThrow(Object* exception) {
+void Isolate::ScheduleThrow(Object* exception) {
   // When scheduling a throw we first throw the exception to get the
   // error reporting if it is uncaught before rescheduling it.
   Throw(exception);
-  thread_local_.scheduled_exception_ = pending_exception();
-  thread_local_.external_caught_exception_ = false;
+  thread_local_top()->scheduled_exception_ = pending_exception();
+  thread_local_top()->external_caught_exception_ = false;
   clear_pending_exception();
 }
 
 
-Failure* Top::PromoteScheduledException() {
+Failure* Isolate::PromoteScheduledException() {
   MaybeObject* thrown = scheduled_exception();
   clear_scheduled_exception();
   // Re-throw the exception to avoid getting repeated error reporting.
@@ -767,13 +588,13 @@ Failure* Top::PromoteScheduledException() {
 }
 
 
-void Top::PrintCurrentStackTrace(FILE* out) {
+void Isolate::PrintCurrentStackTrace(FILE* out) {
   StackTraceFrameIterator it;
   while (!it.done()) {
     HandleScope scope;
     // Find code position if recorded in relocation info.
     JavaScriptFrame* frame = it.frame();
-    int pos = frame->code()->SourcePosition(frame->pc());
+    int pos = frame->LookupCode(this)->SourcePosition(frame->pc());
     Handle<Object> pos_obj(Smi::FromInt(pos));
     // Fetch function and receiver.
     Handle<JSFunction> fun(JSFunction::cast(frame->function()));
@@ -782,8 +603,8 @@ void Top::PrintCurrentStackTrace(FILE* out) {
     // current frame is the top-level frame.
     it.Advance();
     Handle<Object> is_top_level = it.done()
-        ? Factory::true_value()
-        : Factory::false_value();
+        ? factory()->true_value()
+        : factory()->false_value();
     // Generate and print stack trace line.
     Handle<String> line =
         Execution::GetStackTraceLine(recv, fun, pos_obj, is_top_level);
@@ -795,8 +616,8 @@ void Top::PrintCurrentStackTrace(FILE* out) {
 }
 
 
-void Top::ComputeLocation(MessageLocation* target) {
-  *target = MessageLocation(Handle<Script>(Heap::empty_script()), -1, -1);
+void Isolate::ComputeLocation(MessageLocation* target) {
+  *target = MessageLocation(Handle<Script>(heap_.empty_script()), -1, -1);
   StackTraceFrameIterator it;
   if (!it.done()) {
     JavaScriptFrame* frame = it.frame();
@@ -804,7 +625,7 @@ void Top::ComputeLocation(MessageLocation* target) {
     Object* script = fun->shared()->script();
     if (script->IsScript() &&
         !(Script::cast(script)->source()->IsUndefined())) {
-      int pos = frame->code()->SourcePosition(frame->pc());
+      int pos = frame->LookupCode(this)->SourcePosition(frame->pc());
       // Compute the location from the function and the reloc info.
       Handle<Script> casted_script(Script::cast(script));
       *target = MessageLocation(casted_script, pos, pos + 1);
@@ -813,18 +634,19 @@ void Top::ComputeLocation(MessageLocation* target) {
 }
 
 
-bool Top::ShouldReportException(bool* can_be_caught_externally,
-                                bool catchable_by_javascript) {
+bool Isolate::ShouldReportException(bool* can_be_caught_externally,
+                                    bool catchable_by_javascript) {
   // Find the top-most try-catch handler.
   StackHandler* handler =
-      StackHandler::FromAddress(Top::handler(Top::GetCurrentThread()));
+      StackHandler::FromAddress(Isolate::handler(thread_local_top()));
   while (handler != NULL && !handler->is_try_catch()) {
     handler = handler->next();
   }
 
   // Get the address of the external handler so we can compare the address to
   // determine which one is closer to the top of the stack.
-  Address external_handler_address = thread_local_.try_catch_handler_address();
+  Address external_handler_address =
+      thread_local_top()->try_catch_handler_address();
 
   // The exception has been externally caught if and only if there is
   // an external handler which is on top of the top-most try-catch
@@ -843,9 +665,9 @@ bool Top::ShouldReportException(bool* can_be_caught_externally,
 }
 
 
-void Top::DoThrow(MaybeObject* exception,
-                  MessageLocation* location,
-                  const char* message) {
+void Isolate::DoThrow(MaybeObject* exception,
+                      MessageLocation* location,
+                      const char* message) {
   ASSERT(!has_pending_exception());
 
   HandleScope scope;
@@ -865,7 +687,7 @@ void Top::DoThrow(MaybeObject* exception,
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Notify debugger of exception.
   if (catchable_by_javascript) {
-    Debugger::OnException(exception_handle, report_exception);
+    debugger_->OnException(exception_handle, report_exception);
   }
 #endif
 
@@ -881,17 +703,17 @@ void Top::DoThrow(MaybeObject* exception,
       ComputeLocation(&potential_computed_location);
       location = &potential_computed_location;
     }
-    if (!Bootstrapper::IsActive()) {
+    if (!bootstrapper()->IsActive()) {
       // It's not safe to try to make message objects or collect stack
       // traces while the bootstrapper is active since the infrastructure
       // may not have been properly initialized.
       Handle<String> stack_trace;
       if (FLAG_trace_exception) stack_trace = StackTraceString();
       Handle<JSArray> stack_trace_object;
-      if (report_exception && capture_stack_trace_for_uncaught_exceptions) {
-          stack_trace_object = Top::CaptureCurrentStackTrace(
-              stack_trace_for_uncaught_exceptions_frame_limit,
-              stack_trace_for_uncaught_exceptions_options);
+      if (report_exception && capture_stack_trace_for_uncaught_exceptions_) {
+          stack_trace_object = CaptureCurrentStackTrace(
+              stack_trace_for_uncaught_exceptions_frame_limit_,
+              stack_trace_for_uncaught_exceptions_options_);
       }
       ASSERT(is_object);  // Can't use the handle unless there's a real object.
       message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
@@ -901,20 +723,20 @@ void Top::DoThrow(MaybeObject* exception,
   }
 
   // Save the message for reporting if the exception remains uncaught.
-  thread_local_.has_pending_message_ = report_exception;
-  thread_local_.pending_message_ = message;
+  thread_local_top()->has_pending_message_ = report_exception;
+  thread_local_top()->pending_message_ = message;
   if (!message_obj.is_null()) {
-    thread_local_.pending_message_obj_ = *message_obj;
+    thread_local_top()->pending_message_obj_ = *message_obj;
     if (location != NULL) {
-      thread_local_.pending_message_script_ = *location->script();
-      thread_local_.pending_message_start_pos_ = location->start_pos();
-      thread_local_.pending_message_end_pos_ = location->end_pos();
+      thread_local_top()->pending_message_script_ = *location->script();
+      thread_local_top()->pending_message_start_pos_ = location->start_pos();
+      thread_local_top()->pending_message_end_pos_ = location->end_pos();
     }
   }
 
   // Do not forget to clean catcher_ if currently thrown exception cannot
   // be caught.  If necessary, ReThrow will update the catcher.
-  thread_local_.catcher_ = can_be_caught_externally ?
+  thread_local_top()->catcher_ = can_be_caught_externally ?
       try_catch_handler() : NULL;
 
   // NOTE: Notifying the debugger or generating the message
@@ -930,11 +752,11 @@ void Top::DoThrow(MaybeObject* exception,
 }
 
 
-bool Top::IsExternallyCaught() {
+bool Isolate::IsExternallyCaught() {
   ASSERT(has_pending_exception());
 
-  if ((thread_local_.catcher_ == NULL) ||
-      (try_catch_handler() != thread_local_.catcher_)) {
+  if ((thread_local_top()->catcher_ == NULL) ||
+      (try_catch_handler() != thread_local_top()->catcher_)) {
     // When throwing the exception, we found no v8::TryCatch
     // which should care about this exception.
     return false;
@@ -946,7 +768,8 @@ bool Top::IsExternallyCaught() {
 
   // Get the address of the external handler so we can compare the address to
   // determine which one is closer to the top of the stack.
-  Address external_handler_address = thread_local_.try_catch_handler_address();
+  Address external_handler_address =
+      thread_local_top()->try_catch_handler_address();
   ASSERT(external_handler_address != NULL);
 
   // The exception has been externally caught if and only if there is
@@ -959,7 +782,7 @@ bool Top::IsExternallyCaught() {
   // aborted by jumps in control flow like return, break, etc. and we'll
   // have another chance to set a proper v8::TryCatch.
   StackHandler* handler =
-      StackHandler::FromAddress(Top::handler(Top::GetCurrentThread()));
+      StackHandler::FromAddress(Isolate::handler(thread_local_top()));
   while (handler != NULL && handler->address() < external_handler_address) {
     ASSERT(!handler->is_try_catch());
     if (handler->is_try_finally()) return false;
@@ -971,46 +794,48 @@ bool Top::IsExternallyCaught() {
 }
 
 
-void Top::ReportPendingMessages() {
+void Isolate::ReportPendingMessages() {
   ASSERT(has_pending_exception());
   // If the pending exception is OutOfMemoryException set out_of_memory in
   // the global context.  Note: We have to mark the global context here
   // since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
   // set it.
   bool external_caught = IsExternallyCaught();
-  thread_local_.external_caught_exception_ = external_caught;
-  HandleScope scope;
-  if (thread_local_.pending_exception_ == Failure::OutOfMemoryException()) {
+  thread_local_top()->external_caught_exception_ = external_caught;
+  HandleScope scope(this);
+  if (thread_local_top()->pending_exception_ ==
+      Failure::OutOfMemoryException()) {
     context()->mark_out_of_memory();
-  } else if (thread_local_.pending_exception_ ==
-             Heap::termination_exception()) {
+  } else if (thread_local_top()->pending_exception_ ==
+             heap_.termination_exception()) {
     if (external_caught) {
       try_catch_handler()->can_continue_ = false;
-      try_catch_handler()->exception_ = Heap::null_value();
+      try_catch_handler()->exception_ = heap_.null_value();
     }
   } else {
     // At this point all non-object (failure) exceptions have
     // been dealt with so this shouldn't fail.
     Object* pending_exception_object = pending_exception()->ToObjectUnchecked();
     Handle<Object> exception(pending_exception_object);
-    thread_local_.external_caught_exception_ = false;
+    thread_local_top()->external_caught_exception_ = false;
     if (external_caught) {
       try_catch_handler()->can_continue_ = true;
-      try_catch_handler()->exception_ = thread_local_.pending_exception_;
-      if (!thread_local_.pending_message_obj_->IsTheHole()) {
-        try_catch_handler()->message_ = thread_local_.pending_message_obj_;
+      try_catch_handler()->exception_ = thread_local_top()->pending_exception_;
+      if (!thread_local_top()->pending_message_obj_->IsTheHole()) {
+        try_catch_handler()->message_ =
+            thread_local_top()->pending_message_obj_;
       }
     }
-    if (thread_local_.has_pending_message_) {
-      thread_local_.has_pending_message_ = false;
-      if (thread_local_.pending_message_ != NULL) {
-        MessageHandler::ReportMessage(thread_local_.pending_message_);
-      } else if (!thread_local_.pending_message_obj_->IsTheHole()) {
-        Handle<Object> message_obj(thread_local_.pending_message_obj_);
-        if (thread_local_.pending_message_script_ != NULL) {
-          Handle<Script> script(thread_local_.pending_message_script_);
-          int start_pos = thread_local_.pending_message_start_pos_;
-          int end_pos = thread_local_.pending_message_end_pos_;
+    if (thread_local_top()->has_pending_message_) {
+      thread_local_top()->has_pending_message_ = false;
+      if (thread_local_top()->pending_message_ != NULL) {
+        MessageHandler::ReportMessage(thread_local_top()->pending_message_);
+      } else if (!thread_local_top()->pending_message_obj_->IsTheHole()) {
+        Handle<Object> message_obj(thread_local_top()->pending_message_obj_);
+        if (thread_local_top()->pending_message_script_ != NULL) {
+          Handle<Script> script(thread_local_top()->pending_message_script_);
+          int start_pos = thread_local_top()->pending_message_start_pos_;
+          int end_pos = thread_local_top()->pending_message_end_pos_;
           MessageLocation location(script, start_pos, end_pos);
           MessageHandler::ReportMessage(&location, message_obj);
         } else {
@@ -1018,40 +843,40 @@ void Top::ReportPendingMessages() {
         }
       }
     }
-    thread_local_.external_caught_exception_ = external_caught;
+    thread_local_top()->external_caught_exception_ = external_caught;
     set_pending_exception(*exception);
   }
   clear_pending_message();
 }
 
 
-void Top::TraceException(bool flag) {
-  FLAG_trace_exception = flag;
+void Isolate::TraceException(bool flag) {
+  FLAG_trace_exception = flag;  // TODO(isolates): This is an unfortunate use.
 }
 
 
-bool Top::OptionalRescheduleException(bool is_bottom_call) {
+bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
   // Always reschedule out-of-memory exceptions.
   if (!is_out_of_memory()) {
     bool is_termination_exception =
-        pending_exception() == Heap::termination_exception();
+        pending_exception() == heap_.termination_exception();
 
     // Do not reschedule the exception if this is the bottom call.
     bool clear_exception = is_bottom_call;
 
     if (is_termination_exception) {
       if (is_bottom_call) {
-        thread_local_.external_caught_exception_ = false;
+        thread_local_top()->external_caught_exception_ = false;
         clear_pending_exception();
         return false;
       }
-    } else if (thread_local_.external_caught_exception_) {
+    } else if (thread_local_top()->external_caught_exception_) {
       // If the exception is externally caught, clear it if there are no
       // JavaScript frames on the way to the C++ frame that has the
       // external handler.
-      ASSERT(thread_local_.try_catch_handler_address() != NULL);
+      ASSERT(thread_local_top()->try_catch_handler_address() != NULL);
       Address external_handler_address =
-          thread_local_.try_catch_handler_address();
+          thread_local_top()->try_catch_handler_address();
       JavaScriptFrameIterator it;
       if (it.done() || (it.frame()->sp() > external_handler_address)) {
         clear_exception = true;
@@ -1060,30 +885,30 @@ bool Top::OptionalRescheduleException(bool is_bottom_call) {
 
     // Clear the exception if needed.
     if (clear_exception) {
-      thread_local_.external_caught_exception_ = false;
+      thread_local_top()->external_caught_exception_ = false;
       clear_pending_exception();
       return false;
     }
   }
 
   // Reschedule the exception.
-  thread_local_.scheduled_exception_ = pending_exception();
+  thread_local_top()->scheduled_exception_ = pending_exception();
   clear_pending_exception();
   return true;
 }
 
 
-void Top::SetCaptureStackTraceForUncaughtExceptions(
+void Isolate::SetCaptureStackTraceForUncaughtExceptions(
       bool capture,
       int frame_limit,
       StackTrace::StackTraceOptions options) {
-  capture_stack_trace_for_uncaught_exceptions = capture;
-  stack_trace_for_uncaught_exceptions_frame_limit = frame_limit;
-  stack_trace_for_uncaught_exceptions_options = options;
+  capture_stack_trace_for_uncaught_exceptions_ = capture;
+  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
+  stack_trace_for_uncaught_exceptions_options_ = options;
 }
 
 
-bool Top::is_out_of_memory() {
+bool Isolate::is_out_of_memory() {
   if (has_pending_exception()) {
     MaybeObject* e = pending_exception();
     if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
@@ -1100,20 +925,20 @@ bool Top::is_out_of_memory() {
 }
 
 
-Handle<Context> Top::global_context() {
-  GlobalObject* global = thread_local_.context_->global();
+Handle<Context> Isolate::global_context() {
+  GlobalObject* global = thread_local_top()->context_->global();
   return Handle<Context>(global->global_context());
 }
 
 
-Handle<Context> Top::GetCallingGlobalContext() {
+Handle<Context> Isolate::GetCallingGlobalContext() {
   JavaScriptFrameIterator it;
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (Debug::InDebugger()) {
+  if (debug_->InDebugger()) {
     while (!it.done()) {
       JavaScriptFrame* frame = it.frame();
       Context* context = Context::cast(frame->context());
-      if (context->global_context() == *Debug::debug_context()) {
+      if (context->global_context() == *debug_->debug_context()) {
         it.Advance();
       } else {
         break;
@@ -1128,25 +953,33 @@ Handle<Context> Top::GetCallingGlobalContext() {
 }
 
 
-char* Top::ArchiveThread(char* to) {
-  memcpy(to, reinterpret_cast<char*>(&thread_local_), sizeof(thread_local_));
+char* Isolate::ArchiveThread(char* to) {
+  if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
+    RuntimeProfiler::IsolateExitedJS(this);
+  }
+  memcpy(to, reinterpret_cast<char*>(thread_local_top()),
+         sizeof(ThreadLocalTop));
   InitializeThreadLocal();
-  return to + sizeof(thread_local_);
+  return to + sizeof(ThreadLocalTop);
 }
 
 
-char* Top::RestoreThread(char* from) {
-  memcpy(reinterpret_cast<char*>(&thread_local_), from, sizeof(thread_local_));
+char* Isolate::RestoreThread(char* from) {
+  memcpy(reinterpret_cast<char*>(thread_local_top()), from,
+         sizeof(ThreadLocalTop));
   // This might be just paranoia, but it seems to be needed in case a
   // thread_local_ is restored on a separate OS thread.
 #ifdef USE_SIMULATOR
 #ifdef V8_TARGET_ARCH_ARM
-  thread_local_.simulator_ = Simulator::current();
+  thread_local_top()->simulator_ = Simulator::current(this);
 #elif V8_TARGET_ARCH_MIPS
-  thread_local_.simulator_ = assembler::mips::Simulator::current();
+  thread_local_top()->simulator_ = Simulator::current(this);
 #endif
 #endif
-  return from + sizeof(thread_local_);
+  if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
+    RuntimeProfiler::IsolateEnteredJS(this);
+  }
+  return from + sizeof(ThreadLocalTop);
 }
 
 } }  // namespace v8::internal
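
The hunks above apply one pattern throughout: Top's process-wide static state becomes per-Isolate instance state, with per-thread fields reached through thread_local_top() and former Heap::/Factory:: statics reached through the owning isolate. A minimal, self-contained C++ sketch of that refactoring follows; the class and member names are purely illustrative, not V8 code.

// Illustrative sketch only: static-singleton state vs. per-instance state.
#include <cassert>
#include <cstdio>

// Before: state lives in static members, so only one "VM" can exist per process.
class TopStyle {
 public:
  static void set_thread_id(int id) { thread_id_ = id; }
  static int thread_id() { return thread_id_; }
 private:
  static int thread_id_;
};
int TopStyle::thread_id_ = 0;

// After: the same state hangs off an instance, so each isolate owns its own copy.
class IsolateStyle {
 public:
  void set_thread_id(int id) { thread_id_ = id; }
  int thread_id() const { return thread_id_; }
 private:
  int thread_id_ = 0;
};

int main() {
  IsolateStyle a, b;          // two isolates, independent state
  a.set_thread_id(1);
  b.set_thread_id(2);
  assert(a.thread_id() != b.thread_id());
  std::printf("a=%d b=%d\n", a.thread_id(), b.thread_id());
  return 0;
}
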
diff --git a/src/top.h b/src/top.h
deleted file mode 100644 (file)
index 26ae542..0000000
--- a/src/top.h
+++ /dev/null
@@ -1,608 +0,0 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_TOP_H_
-#define V8_TOP_H_
-
-#include "atomicops.h"
-#include "compilation-cache.h"
-#include "frames-inl.h"
-#include "runtime-profiler.h"
-
-namespace v8 {
-namespace internal {
-
-class Simulator;
-
-#define RETURN_IF_SCHEDULED_EXCEPTION() \
-  if (Top::has_scheduled_exception()) return Top::PromoteScheduledException()
-
-#define RETURN_IF_EMPTY_HANDLE_VALUE(call, value) \
-  if (call.is_null()) {                           \
-    ASSERT(Top::has_pending_exception());         \
-    return value;                                 \
-  }
-
-#define RETURN_IF_EMPTY_HANDLE(call)      \
-  RETURN_IF_EMPTY_HANDLE_VALUE(call, Failure::Exception())
-
-// Top has static variables used for JavaScript execution.
-
-class SaveContext;  // Forward declaration.
-class ThreadVisitor;  // Defined in v8threads.h
-class VMState;  // Defined in vm-state.h
-
-class ThreadLocalTop BASE_EMBEDDED {
- public:
-  // Initialize the thread data.
-  void Initialize();
-
-  // Get the top C++ try catch handler or NULL if none are registered.
-  //
-  // This method is not guarenteed to return an address that can be
-  // used for comparison with addresses into the JS stack.  If such an
-  // address is needed, use try_catch_handler_address.
-  v8::TryCatch* TryCatchHandler();
-
-  // Get the address of the top C++ try catch handler or NULL if
-  // none are registered.
-  //
-  // This method always returns an address that can be compared to
-  // pointers into the JavaScript stack.  When running on actual
-  // hardware, try_catch_handler_address and TryCatchHandler return
-  // the same pointer.  When running on a simulator with a separate JS
-  // stack, try_catch_handler_address returns a JS stack address that
-  // corresponds to the place on the JS stack where the C++ handler
-  // would have been if the stack were not separate.
-  inline Address try_catch_handler_address() {
-    return try_catch_handler_address_;
-  }
-
-  // Set the address of the top C++ try catch handler.
-  inline void set_try_catch_handler_address(Address address) {
-    try_catch_handler_address_ = address;
-  }
-
-  void Free() {
-    ASSERT(!has_pending_message_);
-    ASSERT(!external_caught_exception_);
-    ASSERT(try_catch_handler_address_ == NULL);
-  }
-
-  // The context where the current execution method is created and for variable
-  // lookups.
-  Context* context_;
-  int thread_id_;
-  MaybeObject* pending_exception_;
-  bool has_pending_message_;
-  const char* pending_message_;
-  Object* pending_message_obj_;
-  Script* pending_message_script_;
-  int pending_message_start_pos_;
-  int pending_message_end_pos_;
-  // Use a separate value for scheduled exceptions to preserve the
-  // invariants that hold about pending_exception.  We may want to
-  // unify them later.
-  MaybeObject* scheduled_exception_;
-  bool external_caught_exception_;
-  SaveContext* save_context_;
-  v8::TryCatch* catcher_;
-
-  // Stack.
-  Address c_entry_fp_;  // the frame pointer of the top c entry frame
-  Address handler_;   // try-blocks are chained through the stack
-
-#ifdef USE_SIMULATOR
-#ifdef V8_TARGET_ARCH_ARM
-  Simulator* simulator_;
-#elif V8_TARGET_ARCH_MIPS
-  assembler::mips::Simulator* simulator_;
-#endif
-#endif  // USE_SIMULATOR
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  Address js_entry_sp_;  // the stack pointer of the bottom js entry frame
-  Address external_callback_;  // the external callback we're currently in
-#endif
-
-#ifdef ENABLE_VMSTATE_TRACKING
-  StateTag current_vm_state_;
-
-  // Used for communication with the runtime profiler thread.
-  // Possible values are specified in RuntimeProfilerState.
-  Atomic32 runtime_profiler_state_;
-#endif
-
-  // Generated code scratch locations.
-  int32_t formal_count_;
-
-  // Call back function to report unsafe JS accesses.
-  v8::FailedAccessCheckCallback failed_access_check_callback_;
-
- private:
-  Address try_catch_handler_address_;
-};
-
-#define TOP_ADDRESS_LIST(C)            \
-  C(handler_address)                   \
-  C(c_entry_fp_address)                \
-  C(context_address)                   \
-  C(pending_exception_address)         \
-  C(external_caught_exception_address)
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-#define TOP_ADDRESS_LIST_PROF(C)       \
-  C(js_entry_sp_address)
-#else
-#define TOP_ADDRESS_LIST_PROF(C)
-#endif
-
-
-class Top {
- public:
-  enum AddressId {
-#define C(name) k_##name,
-    TOP_ADDRESS_LIST(C)
-    TOP_ADDRESS_LIST_PROF(C)
-#undef C
-    k_top_address_count
-  };
-
-  static Address get_address_from_id(AddressId id);
-
-  // Access to top context (where the current function object was created).
-  static Context* context() { return thread_local_.context_; }
-  static void set_context(Context* context) {
-    thread_local_.context_ = context;
-  }
-  static Context** context_address() { return &thread_local_.context_; }
-
-  static SaveContext* save_context() {return thread_local_.save_context_; }
-  static void set_save_context(SaveContext* save) {
-    thread_local_.save_context_ = save;
-  }
-
-  // Access to current thread id.
-  static int thread_id() { return thread_local_.thread_id_; }
-  static void set_thread_id(int id) { thread_local_.thread_id_ = id; }
-
-  // Interface to pending exception.
-  static MaybeObject* pending_exception() {
-    ASSERT(has_pending_exception());
-    return thread_local_.pending_exception_;
-  }
-  static bool external_caught_exception() {
-    return thread_local_.external_caught_exception_;
-  }
-  static void set_pending_exception(MaybeObject* exception) {
-    thread_local_.pending_exception_ = exception;
-  }
-  static void clear_pending_exception() {
-    thread_local_.pending_exception_ = Heap::the_hole_value();
-  }
-
-  static MaybeObject** pending_exception_address() {
-    return &thread_local_.pending_exception_;
-  }
-  static bool has_pending_exception() {
-    return !thread_local_.pending_exception_->IsTheHole();
-  }
-  static void clear_pending_message() {
-    thread_local_.has_pending_message_ = false;
-    thread_local_.pending_message_ = NULL;
-    thread_local_.pending_message_obj_ = Heap::the_hole_value();
-    thread_local_.pending_message_script_ = NULL;
-  }
-  static v8::TryCatch* try_catch_handler() {
-    return thread_local_.TryCatchHandler();
-  }
-  static Address try_catch_handler_address() {
-    return thread_local_.try_catch_handler_address();
-  }
-  // This method is called by the api after operations that may throw
-  // exceptions.  If an exception was thrown and not handled by an external
-  // handler the exception is scheduled to be rethrown when we return to running
-  // JavaScript code.  If an exception is scheduled true is returned.
-  static bool OptionalRescheduleException(bool is_bottom_call);
-
-
-  static bool* external_caught_exception_address() {
-    return &thread_local_.external_caught_exception_;
-  }
-
-  static MaybeObject** scheduled_exception_address() {
-    return &thread_local_.scheduled_exception_;
-  }
-
-  static MaybeObject* scheduled_exception() {
-    ASSERT(has_scheduled_exception());
-    return thread_local_.scheduled_exception_;
-  }
-  static bool has_scheduled_exception() {
-    return !thread_local_.scheduled_exception_->IsTheHole();
-  }
-  static void clear_scheduled_exception() {
-    thread_local_.scheduled_exception_ = Heap::the_hole_value();
-  }
-
-  static bool IsExternallyCaught();
-
-  static void SetCaptureStackTraceForUncaughtExceptions(
-      bool capture,
-      int frame_limit,
-      StackTrace::StackTraceOptions options);
-
-  // Tells whether the current context has experienced an out of memory
-  // exception.
-  static bool is_out_of_memory();
-
-  static bool is_catchable_by_javascript(MaybeObject* exception) {
-    return (exception != Failure::OutOfMemoryException()) &&
-        (exception != Heap::termination_exception());
-  }
-
-  // JS execution stack (see frames.h).
-  static Address c_entry_fp(ThreadLocalTop* thread) {
-    return thread->c_entry_fp_;
-  }
-  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
-
-  static inline Address* c_entry_fp_address() {
-    return &thread_local_.c_entry_fp_;
-  }
-  static inline Address* handler_address() { return &thread_local_.handler_; }
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  // Bottom JS entry (see StackTracer::Trace in log.cc).
-  static Address js_entry_sp(ThreadLocalTop* thread) {
-    return thread->js_entry_sp_;
-  }
-  static inline Address* js_entry_sp_address() {
-    return &thread_local_.js_entry_sp_;
-  }
-
-  static Address external_callback() {
-    return thread_local_.external_callback_;
-  }
-  static void set_external_callback(Address callback) {
-    thread_local_.external_callback_ = callback;
-  }
-#endif
-
-#ifdef ENABLE_VMSTATE_TRACKING
-  static StateTag current_vm_state() {
-    return thread_local_.current_vm_state_;
-  }
-
-  static void SetCurrentVMState(StateTag state) {
-    if (RuntimeProfiler::IsEnabled()) {
-      if (state == JS) {
-        // JS or non-JS -> JS transition.
-        RuntimeProfilerState old_state = SwapRuntimeProfilerState(PROF_IN_JS);
-        if (old_state == PROF_NOT_IN_JS_WAITING_FOR_JS) {
-          // If the runtime profiler was waiting, we reset the eager
-          // optimizing data in the compilation cache to get a fresh
-          // start after not running JavaScript code for a while and
-          // signal the runtime profiler so it can resume.
-          CompilationCache::ResetEagerOptimizingData();
-          runtime_profiler_semaphore_->Signal();
-        }
-      } else if (thread_local_.current_vm_state_ == JS) {
-        // JS -> non-JS transition. Update the runtime profiler state.
-        ASSERT(IsInJSState());
-        SetRuntimeProfilerState(PROF_NOT_IN_JS);
-      }
-    }
-    thread_local_.current_vm_state_ = state;
-  }
-
-  // Called in the runtime profiler thread.
-  // Returns whether the current VM state is set to JS.
-  static bool IsInJSState() {
-    ASSERT(RuntimeProfiler::IsEnabled());
-    return static_cast<RuntimeProfilerState>(
-        NoBarrier_Load(&thread_local_.runtime_profiler_state_)) == PROF_IN_JS;
-  }
-
-  // Called in the runtime profiler thread.
-  // Waits for the VM state to transtion from non-JS to JS. Returns
-  // true when notified of the transition, false when the current
-  // state is not the expected non-JS state.
-  static bool WaitForJSState() {
-    ASSERT(RuntimeProfiler::IsEnabled());
-    // Try to switch to waiting state.
-    RuntimeProfilerState old_state = CompareAndSwapRuntimeProfilerState(
-        PROF_NOT_IN_JS, PROF_NOT_IN_JS_WAITING_FOR_JS);
-    if (old_state == PROF_NOT_IN_JS) {
-      runtime_profiler_semaphore_->Wait();
-      return true;
-    }
-    return false;
-  }
-
-  // When shutting down we join the profiler thread. Doing so while
-  // it's waiting on a semaphore will cause a deadlock, so we have to
-  // wake it up first.
-  static void WakeUpRuntimeProfilerThreadBeforeShutdown() {
-    runtime_profiler_semaphore_->Signal();
-  }
-#endif
-
-  // Generated code scratch locations.
-  static void* formal_count_address() { return &thread_local_.formal_count_; }
-
-  static void PrintCurrentStackTrace(FILE* out);
-  static void PrintStackTrace(FILE* out, char* thread_data);
-  static void PrintStack(StringStream* accumulator);
-  static void PrintStack();
-  static Handle<String> StackTraceString();
-  static Handle<JSArray> CaptureCurrentStackTrace(
-      int frame_limit,
-      StackTrace::StackTraceOptions options);
-
-  // Returns if the top context may access the given global object. If
-  // the result is false, the pending exception is guaranteed to be
-  // set.
-  static bool MayNamedAccess(JSObject* receiver,
-                             Object* key,
-                             v8::AccessType type);
-  static bool MayIndexedAccess(JSObject* receiver,
-                               uint32_t index,
-                               v8::AccessType type);
-
-  static void SetFailedAccessCheckCallback(
-      v8::FailedAccessCheckCallback callback);
-  static void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
-
-  // Exception throwing support. The caller should use the result
-  // of Throw() as its return value.
-  static Failure* Throw(Object* exception, MessageLocation* location = NULL);
-  // Re-throw an exception.  This involves no error reporting since
-  // error reporting was handled when the exception was thrown
-  // originally.
-  static Failure* ReThrow(MaybeObject* exception,
-                          MessageLocation* location = NULL);
-  static void ScheduleThrow(Object* exception);
-  static void ReportPendingMessages();
-  static Failure* ThrowIllegalOperation();
-
-  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
-  static Failure* PromoteScheduledException();
-  static void DoThrow(MaybeObject* exception,
-                      MessageLocation* location,
-                      const char* message);
-  // Checks if exception should be reported and finds out if it's
-  // caught externally.
-  static bool ShouldReportException(bool* can_be_caught_externally,
-                                    bool catchable_by_javascript);
-
-  // Attempts to compute the current source location, storing the
-  // result in the target out parameter.
-  static void ComputeLocation(MessageLocation* target);
-
-  // Override command line flag.
-  static void TraceException(bool flag);
-
-  // Out of resource exception helpers.
-  static Failure* StackOverflow();
-  static Failure* TerminateExecution();
-
-  // Administration
-  static void Initialize();
-  static void TearDown();
-  static void Iterate(ObjectVisitor* v);
-  static void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
-  static char* Iterate(ObjectVisitor* v, char* t);
-  static void IterateThread(ThreadVisitor* v);
-  static void IterateThread(ThreadVisitor* v, char* t);
-
-  // Returns the global object of the current context. It could be
-  // a builtin object, or a js global object.
-  static Handle<GlobalObject> global() {
-    return Handle<GlobalObject>(context()->global());
-  }
-
-  // Returns the global proxy object of the current context.
-  static Object* global_proxy() {
-    return context()->global_proxy();
-  }
-
-  // Returns the current global context.
-  static Handle<Context> global_context();
-
-  // Returns the global context of the calling JavaScript code.  That
-  // is, the global context of the top-most JavaScript frame.
-  static Handle<Context> GetCallingGlobalContext();
-
-  static Handle<JSBuiltinsObject> builtins() {
-    return Handle<JSBuiltinsObject>(thread_local_.context_->builtins());
-  }
-
-  static void RegisterTryCatchHandler(v8::TryCatch* that);
-  static void UnregisterTryCatchHandler(v8::TryCatch* that);
-
-#define TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name)  \
-  static Handle<type> name() {                                \
-    return Handle<type>(context()->global_context()->name()); \
-  }
-  GLOBAL_CONTEXT_FIELDS(TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR)
-#undef TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR
-
-  static inline ThreadLocalTop* GetCurrentThread() { return &thread_local_; }
-  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
-  static char* ArchiveThread(char* to);
-  static char* RestoreThread(char* from);
-  static void FreeThreadResources() { thread_local_.Free(); }
-
-  static const char* kStackOverflowMessage;
-
- private:
-#ifdef ENABLE_VMSTATE_TRACKING
-  // Set of states used when communicating with the runtime profiler.
-  //
-  // The set of possible transitions is divided between the VM and the
-  // profiler threads.
-  //
-  // The VM thread can perform these transitions:
-  //   o IN_JS -> NOT_IN_JS
-  //   o NOT_IN_JS -> IN_JS
-  //   o NOT_IN_JS_WAITING_FOR_JS -> IN_JS notifying the profiler thread
-  //     using the semaphore.
-  // All the above transitions are caused by VM state changes.
-  //
-  // The profiler thread can only perform a single transition
-  // NOT_IN_JS -> NOT_IN_JS_WAITING_FOR_JS before it starts waiting on
-  // the semaphore.
-  enum RuntimeProfilerState {
-    PROF_NOT_IN_JS,
-    PROF_NOT_IN_JS_WAITING_FOR_JS,
-    PROF_IN_JS
-  };
-
-  static void SetRuntimeProfilerState(RuntimeProfilerState state) {
-    NoBarrier_Store(&thread_local_.runtime_profiler_state_, state);
-  }
-
-  static RuntimeProfilerState SwapRuntimeProfilerState(
-      RuntimeProfilerState state) {
-    return static_cast<RuntimeProfilerState>(
-        NoBarrier_AtomicExchange(&thread_local_.runtime_profiler_state_,
-                                 state));
-  }
-
-  static RuntimeProfilerState CompareAndSwapRuntimeProfilerState(
-      RuntimeProfilerState old_state,
-      RuntimeProfilerState state) {
-    return static_cast<RuntimeProfilerState>(
-        NoBarrier_CompareAndSwap(&thread_local_.runtime_profiler_state_,
-                                 old_state,
-                                 state));
-  }
-
-  static Semaphore* runtime_profiler_semaphore_;
-#endif  // ENABLE_VMSTATE_TRACKING
-
-  // The context that initiated this JS execution.
-  static ThreadLocalTop thread_local_;
-  static void InitializeThreadLocal();
-  static void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
-  static void MarkCompactPrologue(bool is_compacting,
-                                  ThreadLocalTop* archived_thread_data);
-  static void MarkCompactEpilogue(bool is_compacting,
-                                  ThreadLocalTop* archived_thread_data);
-
-  // Debug.
-  // Mutex for serializing access to break control structures.
-  static Mutex* break_access_;
-
-  friend class SaveContext;
-  friend class AssertNoContextChange;
-  friend class ExecutionAccess;
-  friend class ThreadLocalTop;
-
-  static void FillCache();
-};
-
-
-// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
-// class as a work around for a bug in the generated code found with these
-// versions of GCC. See V8 issue 122 for details.
-class SaveContext BASE_EMBEDDED {
- public:
-  SaveContext()
-      : context_(Top::context()),
-#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
-        dummy_(Top::context()),
-#endif
-        prev_(Top::save_context()) {
-    Top::set_save_context(this);
-
-    // If there is no JS frame under the current C frame, use the value 0.
-    JavaScriptFrameIterator it;
-    js_sp_ = it.done() ? 0 : it.frame()->sp();
-  }
-
-  ~SaveContext() {
-    Top::set_context(*context_);
-    Top::set_save_context(prev_);
-  }
-
-  Handle<Context> context() { return context_; }
-  SaveContext* prev() { return prev_; }
-
-  // Returns true if this save context is below a given JavaScript frame.
-  bool below(JavaScriptFrame* frame) {
-    return (js_sp_ == 0) || (frame->sp() < js_sp_);
-  }
-
- private:
-  Handle<Context> context_;
-#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
-  Handle<Context> dummy_;
-#endif
-  SaveContext* prev_;
-  Address js_sp_;  // The top JS frame's sp when saving context.
-};
-
-
-class AssertNoContextChange BASE_EMBEDDED {
-#ifdef DEBUG
- public:
-  AssertNoContextChange() :
-      context_(Top::context()) {
-  }
-
-  ~AssertNoContextChange() {
-    ASSERT(Top::context() == *context_);
-  }
-
- private:
-  HandleScope scope_;
-  Handle<Context> context_;
-#else
- public:
-  AssertNoContextChange() { }
-#endif
-};
-
-
-class ExecutionAccess BASE_EMBEDDED {
- public:
-  ExecutionAccess() { Lock(); }
-  ~ExecutionAccess() { Unlock(); }
-
-  static void Lock() { Top::break_access_->Lock(); }
-  static void Unlock() { Top::break_access_->Unlock(); }
-
-  static bool TryLock() {
-    return Top::break_access_->TryLock();
-  }
-};
-
-} }  // namespace v8::internal
-
-#endif  // V8_TOP_H_
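
The header deleted above documented SaveContext, the RAII guard that snapshots the current context and the previously active guard when constructed, and restores both when it goes out of scope (the same idea presumably lives on keyed off an Isolate rather than Top). A self-contained sketch of that save/restore chaining, with hypothetical names, is:

// Illustrative sketch only: RAII context save/restore with guard chaining.
#include <cassert>

struct SaveContext;

struct VM {
  int current_context = 0;
  SaveContext* save_context = nullptr;
};

struct SaveContext {
  explicit SaveContext(VM* vm)
      : vm_(vm), context_(vm->current_context), prev_(vm->save_context) {
    vm_->save_context = this;          // push this guard onto the chain
  }
  ~SaveContext() {
    vm_->current_context = context_;   // restore the snapshotted context
    vm_->save_context = prev_;         // pop this guard off the chain
  }
  VM* vm_;
  int context_;
  SaveContext* prev_;
};

int main() {
  VM vm;
  vm.current_context = 1;
  {
    SaveContext guard(&vm);
    vm.current_context = 2;            // temporarily switch contexts
  }
  assert(vm.current_context == 1);     // restored on scope exit
  return 0;
}
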
index 3a59c06388d8d06c303df974097a19c69148d44a..73673907b1d00efa037773a7109f8162887bc405 100644 (file)
@@ -73,7 +73,7 @@ Handle<Object> TypeFeedbackOracle::GetInfo(int pos) {
   int entry = dictionary_->FindEntry(pos);
   return entry != NumberDictionary::kNotFound
       ? Handle<Object>(dictionary_->ValueAt(entry))
-      : Factory::undefined_value();
+      : Isolate::Current()->factory()->undefined_value();
 }
 
 
@@ -207,7 +207,8 @@ Handle<JSObject> TypeFeedbackOracle::GetPrototypeForPrimitiveCheck(
 
 
 bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
-  return *GetInfo(expr->position()) == Builtins::builtin(id);
+  return *GetInfo(expr->position()) ==
+      Isolate::Current()->builtins()->builtin(id);
 }
 
 
@@ -330,10 +331,11 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
 ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
                                                       Handle<String> name,
                                                       Code::Flags flags) {
+  Isolate* isolate = Isolate::Current();
   Handle<Object> object = GetInfo(position);
   if (object->IsUndefined() || object->IsSmi()) return NULL;
 
-  if (*object == Builtins::builtin(Builtins::StoreIC_GlobalProxy)) {
+  if (*object == isolate->builtins()->builtin(Builtins::StoreIC_GlobalProxy)) {
     // TODO(fschneider): We could collect the maps and signal that
     // we need a generic store (or load) here.
     ASSERT(Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC);
@@ -345,7 +347,7 @@ ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
   } else if (Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
     ZoneMapList* types = new ZoneMapList(4);
     ASSERT(object->IsCode());
-    StubCache::CollectMatchingMaps(types, *name, flags);
+    isolate->stub_cache()->CollectMatchingMaps(types, *name, flags);
     return types->length() > 0 ? types : NULL;
   } else {
     return NULL;
@@ -354,7 +356,8 @@ ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
 
 
 void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
-  HandleScope scope;
+  Isolate* isolate = Isolate::Current();
+  HandleScope scope(isolate);
 
   const int kInitialCapacity = 16;
   List<int> code_positions(kInitialCapacity);
@@ -362,12 +365,13 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
   CollectPositions(*code, &code_positions, &source_positions);
 
   ASSERT(dictionary_.is_null());  // Only initialize once.
-  dictionary_ = Factory::NewNumberDictionary(code_positions.length());
+  dictionary_ = isolate->factory()->NewNumberDictionary(
+      code_positions.length());
 
   int length = code_positions.length();
   ASSERT(source_positions.length() == length);
   for (int i = 0; i < length; i++) {
-    HandleScope loop_scope;
+    HandleScope loop_scope(isolate);
     RelocInfo info(code->instruction_start() + code_positions[i],
                    RelocInfo::CODE_TARGET, 0);
     Handle<Code> target(Code::GetCodeFromTargetAddress(info.target_address()));
@@ -389,13 +393,13 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
       if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC ||
           kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) {
         value = target;
-      } else if (kind != Code::CALL_IC ||
-                 target->check_type() == RECEIVER_MAP_CHECK) {
-        Handle<Map> map = Handle<Map>(target->FindFirstMap());
-        if (*map == NULL) {
+      } else if (target->kind() != Code::CALL_IC ||
+          target->check_type() == RECEIVER_MAP_CHECK) {
+        Map* map = target->FindFirstMap();
+        if (map == NULL) {
           value = target;
         } else {
-          value = map;
+          value = Handle<Map>(map);
         }
       } else {
         ASSERT(target->kind() == Code::CALL_IC);
@@ -409,7 +413,8 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
 
     if (!value.is_null()) {
       Handle<NumberDictionary> new_dict =
-          Factory::DictionaryAtNumberPut(dictionary_, position, value);
+          isolate->factory()->DictionaryAtNumberPut(
+              dictionary_, position, value);
       dictionary_ = loop_scope.CloseAndEscape(new_dict);
     }
   }
index 346f673fd518f542f57285ee56ea5cf12b55c628..6e0ac1a3570d93a988588e9d5b57cb3cc81f1bb3 100644 (file)
@@ -1572,7 +1572,7 @@ int CanonicalizationRange::Convert(uchar c,
 }
 
 
-uchar UnicodeData::kMaxCodePoint = 65533;
+const uchar UnicodeData::kMaxCodePoint = 65533;
 
 int UnicodeData::GetByteCount() {
   return kUppercaseTable0Size * sizeof(int32_t)  // NOLINT
index 9d1d683231e7ad233e70f5c58a34d10133cc4534..39fc349687c2cd7a758532c567db7d008e0a7c56 100644 (file)
@@ -97,7 +97,7 @@ class UnicodeData {
  private:
   friend class Test;
   static int GetByteCount();
-  static uchar kMaxCodePoint;
+  static const uchar kMaxCodePoint;
 };
 
 // --- U t f   8 ---
index de2ce66954badd55cf6a79d01451173f1ad8a958..c6aa9cb7f8a3ed8bce78dd03628f9af25c353c70 100644 (file)
 namespace v8 {
 namespace internal {
 
+Counters::Counters() {
 #define HT(name, caption) \
-  HistogramTimer Counters::name = { #caption, NULL, false, 0, 0 }; \
-
-  HISTOGRAM_TIMER_LIST(HT)
-#undef SR
+    HistogramTimer name = { #caption, NULL, false, 0, 0 }; \
+    name##_ = name;
+    HISTOGRAM_TIMER_LIST(HT)
+#undef HT
 
 #define SC(name, caption) \
-  StatsCounter Counters::name = { "c:" #caption, NULL, false };
+    StatsCounter name = { "c:" #caption, NULL, false };\
+    name##_ = name;
 
-  STATS_COUNTER_LIST_1(SC)
-  STATS_COUNTER_LIST_2(SC)
+    STATS_COUNTER_LIST_1(SC)
+    STATS_COUNTER_LIST_2(SC)
 #undef SC
 
-StatsCounter Counters::state_counters[] = {
+  StatsCounter state_counters[] = {
 #define COUNTER_NAME(name) \
-  { "c:V8.State" #name, NULL, false },
-  STATE_TAG_LIST(COUNTER_NAME)
+    { "c:V8.State" #name, NULL, false },
+    STATE_TAG_LIST(COUNTER_NAME)
 #undef COUNTER_NAME
-};
+  };
+
+  for (int i = 0; i < kSlidingStateWindowCounterCount; ++i) {
+    state_counters_[i] = state_counters[i];
+  }
+}
 
 } }  // namespace v8::internal
index c71daa1dc30428235ad015410fa61b694c9da62b..27c2a96f33d528f516571b9bf12b683e616018e3 100644 (file)
@@ -254,15 +254,15 @@ namespace internal {
 
 
 // This file contains all the v8 counters that are in use.
-class Counters : AllStatic {
+class Counters {
  public:
 #define HT(name, caption) \
-  static HistogramTimer name;
+  HistogramTimer* name() { return &name##_; }
   HISTOGRAM_TIMER_LIST(HT)
 #undef HT
 
 #define SC(name, caption) \
-  static StatsCounter name;
+  StatsCounter* name() { return &name##_; }
   STATS_COUNTER_LIST_1(SC)
   STATS_COUNTER_LIST_2(SC)
 #undef SC
@@ -272,19 +272,47 @@ class Counters : AllStatic {
     HISTOGRAM_TIMER_LIST(RATE_ID)
 #undef RATE_ID
 #define COUNTER_ID(name, caption) k_##name,
-  STATS_COUNTER_LIST_1(COUNTER_ID)
-  STATS_COUNTER_LIST_2(COUNTER_ID)
+    STATS_COUNTER_LIST_1(COUNTER_ID)
+    STATS_COUNTER_LIST_2(COUNTER_ID)
 #undef COUNTER_ID
 #define COUNTER_ID(name) k_##name,
-  STATE_TAG_LIST(COUNTER_ID)
+    STATE_TAG_LIST(COUNTER_ID)
 #undef COUNTER_ID
     stats_counter_count
   };
 
+  StatsCounter* state_counters(StateTag state) {
+    return &state_counters_[state];
+  }
+
+ private:
+#define HT(name, caption) \
+  HistogramTimer name##_;
+  HISTOGRAM_TIMER_LIST(HT)
+#undef HT
+
+#define SC(name, caption) \
+  StatsCounter name##_;
+  STATS_COUNTER_LIST_1(SC)
+  STATS_COUNTER_LIST_2(SC)
+#undef SC
+
+  enum {
+#define COUNTER_ID(name) __##name,
+    STATE_TAG_LIST(COUNTER_ID)
+#undef COUNTER_ID
+    kSlidingStateWindowCounterCount
+  };
+
   // Sliding state window counters.
-  static StatsCounter state_counters[];
+  StatsCounter state_counters_[kSlidingStateWindowCounterCount];
+  friend class Isolate;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
 };
 
+#define COUNTERS Isolate::Current()->counters()
+
 } }  // namespace v8::internal
 
 #endif  // V8_V8_COUNTERS_H_
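
The counters change above turns the all-static Counters collection into a per-isolate object: the same X-macro lists now expand once into private fields and once into pointer-returning accessors, and call sites reach them through COUNTERS, i.e. Isolate::Current()->counters(). A small, self-contained sketch of that accessor pattern, using made-up counter names, is:

// Illustrative sketch only: X-macro counter list expanded into accessors and fields.
#include <cstdio>

struct StatsCounter {
  const char* name;
  int value;
  void Increment() { ++value; }
};

#define STATS_COUNTER_LIST(SC)          \
  SC(compile_calls, "V8.CompileCalls")  \
  SC(gc_scavenges, "V8.GcScavenges")

class Counters {
 public:
#define SC(name, caption) StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST(SC)
#undef SC
 private:
#define SC(name, caption) StatsCounter name##_ = { caption, 0 };
  STATS_COUNTER_LIST(SC)
#undef SC
};

int main() {
  Counters counters;                       // one instance per isolate
  counters.compile_calls()->Increment();   // was: Counters::compile_calls.Increment()
  std::printf("%s = %d\n",
              counters.compile_calls()->name,
              counters.compile_calls()->value);
  return 0;
}
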
index 27648b144a44fd6b3cfa2ded0a23e451aa3d88c0..8153372fbbafb1dde15f8cfc687da6a117f00bad 100644 (file)
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -27,6 +27,7 @@
 
 #include "v8.h"
 
+#include "isolate.h"
 #include "bootstrapper.h"
 #include "debug.h"
 #include "deoptimizer.h"
@@ -36,8 +37,6 @@
 #include "log.h"
 #include "runtime-profiler.h"
 #include "serialize.h"
-#include "simulator.h"
-#include "stub-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -50,9 +49,24 @@ bool V8::use_crankshaft_ = true;
 
 
 bool V8::Initialize(Deserializer* des) {
-  bool create_heap_objects = des == NULL;
-  if (has_been_disposed_ || has_fatal_error_) return false;
-  if (IsRunning()) return true;
+  // The current thread may not yet have entered an isolate to run.
+  // Note that Isolate::Current() may be non-null because, for various
+  // initialization purposes, an initializing thread may be assigned an
+  // isolate but not actually have entered it.
+  if (i::Isolate::CurrentPerIsolateThreadData() == NULL) {
+    i::Isolate::EnterDefaultIsolate();
+  }
+
+  ASSERT(i::Isolate::CurrentPerIsolateThreadData() != NULL);
+  ASSERT(i::Isolate::CurrentPerIsolateThreadData()->thread_id() ==
+         i::Thread::GetThreadLocalInt(i::Isolate::thread_id_key()));
+  ASSERT(i::Isolate::CurrentPerIsolateThreadData()->isolate() ==
+         i::Isolate::Current());
+
+  if (IsDead()) return false;
+
+  Isolate* isolate = Isolate::Current();
+  if (isolate->IsInitialized()) return true;
 
 #if defined(V8_TARGET_ARCH_ARM) && !defined(USE_ARM_EABI)
   use_crankshaft_ = false;
@@ -62,90 +76,13 @@ bool V8::Initialize(Deserializer* des) {
 
   // Peephole optimization might interfere with deoptimization.
   FLAG_peephole_optimization = !use_crankshaft_;
+
   is_running_ = true;
   has_been_setup_ = true;
   has_fatal_error_ = false;
   has_been_disposed_ = false;
-#ifdef DEBUG
-  // The initialization process does not handle memory exhaustion.
-  DisallowAllocationFailure disallow_allocation_failure;
-#endif
-
-  // Enable logging before setting up the heap
-  Logger::Setup();
-
-  CpuProfiler::Setup();
-  HeapProfiler::Setup();
-
-  // Setup the platform OS support.
-  OS::Setup();
 
-  // Initialize other runtime facilities
-#if defined(USE_SIMULATOR)
-#if defined(V8_TARGET_ARCH_ARM)
-  Simulator::Initialize();
-#elif defined(V8_TARGET_ARCH_MIPS)
-  ::assembler::mips::Simulator::Initialize();
-#endif
-#endif
-
-  { // NOLINT
-    // Ensure that the thread has a valid stack guard.  The v8::Locker object
-    // will ensure this too, but we don't have to use lockers if we are only
-    // using one thread.
-    ExecutionAccess lock;
-    StackGuard::InitThread(lock);
-  }
-
-  // Setup the object heap
-  ASSERT(!Heap::HasBeenSetup());
-  if (!Heap::Setup(create_heap_objects)) {
-    SetFatalError();
-    return false;
-  }
-
-  Bootstrapper::Initialize(create_heap_objects);
-  Builtins::Setup(create_heap_objects);
-  Top::Initialize();
-
-  if (FLAG_preemption) {
-    v8::Locker locker;
-    v8::Locker::StartPreemption(100);
-  }
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  Debug::Setup(create_heap_objects);
-#endif
-  StubCache::Initialize(create_heap_objects);
-
-  // If we are deserializing, read the state into the now-empty heap.
-  if (des != NULL) {
-    des->Deserialize();
-    StubCache::Clear();
-  }
-
-  // Deserializing may put strange things in the root array's copy of the
-  // stack guard.
-  Heap::SetStackLimits();
-
-  // Setup the CPU support. Must be done after heap setup and after
-  // any deserialization because we have to have the initial heap
-  // objects in place for creating the code object used for probing.
-  CPU::Setup();
-
-  Deoptimizer::Setup();
-  LAllocator::Setup();
-  RuntimeProfiler::Setup();
-
-  // If we are deserializing, log non-function code objects and compiled
-  // functions found in the snapshot.
-  if (des != NULL && FLAG_log_code) {
-    HandleScope scope;
-    LOG(LogCodeObjects());
-    LOG(LogCompiledFunctions());
-  }
-
-  return true;
+  return isolate->Init(des);
 }
 
 
@@ -156,31 +93,11 @@ void V8::SetFatalError() {
 
 
 void V8::TearDown() {
-  if (!has_been_setup_ || has_been_disposed_) return;
-
-  if (FLAG_hydrogen_stats) HStatistics::Instance()->Print();
+  Isolate* isolate = Isolate::Current();
+  ASSERT(isolate->IsDefaultIsolate());
 
-  // We must stop the logger before we tear down other components.
-  Logger::EnsureTickerStopped();
-
-  Deoptimizer::TearDown();
-
-  if (FLAG_preemption) {
-    v8::Locker locker;
-    v8::Locker::StopPreemption();
-  }
-
-  Builtins::TearDown();
-  Bootstrapper::TearDown();
-
-  Top::TearDown();
-
-  HeapProfiler::TearDown();
-  CpuProfiler::TearDown();
-  RuntimeProfiler::TearDown();
-
-  Logger::TearDown();
-  Heap::TearDown();
+  if (!has_been_setup_ || has_been_disposed_) return;
+  isolate->TearDown();
 
   is_running_ = false;
   has_been_disposed_ = true;
@@ -218,7 +135,9 @@ static uint32_t random_base(random_state *state) {
 
 
 // Used by JavaScript APIs
-uint32_t V8::Random() {
+uint32_t V8::Random(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  // TODO(isolates): move lo and hi to isolate
   static random_state state = {0, 0};
   return random_base(&state);
 }
@@ -227,7 +146,9 @@ uint32_t V8::Random() {
 // Used internally by the JIT and memory allocator for security
 // purposes. So, we keep a different state to prevent information
 // leaks that could be used in an exploit.
-uint32_t V8::RandomPrivate() {
+uint32_t V8::RandomPrivate(Isolate* isolate) {
+  ASSERT(isolate == Isolate::Current());
+  // TODO(isolates): move lo and hi to isolate
   static random_state state = {0, 0};
   return random_base(&state);
 }
@@ -239,7 +160,7 @@ bool V8::IdleNotification() {
   if (!FLAG_use_idle_notification) return true;
 
   // Tell the heap that it may want to adjust.
-  return Heap::IdleNotification();
+  return HEAP->IdleNotification();
 }
 
 
@@ -251,7 +172,7 @@ typedef union {
 
 
 Object* V8::FillHeapNumberWithRandom(Object* heap_number) {
-  uint64_t random_bits = Random();
+  uint64_t random_bits = Random(Isolate::Current());
   // Make a double* from address (heap_number + sizeof(double)).
   double_int_union* r = reinterpret_cast<double_int_union*>(
       reinterpret_cast<char*>(heap_number) +
index cc1673e13fe826d164615993f48152c044e16849..e7ca0d2e32c6bf9e63eaa420a3b57103f18ee755 100644 (file)
--- a/src/v8.h
+++ b/src/v8.h
@@ -86,6 +86,7 @@ class V8 : public AllStatic {
   static bool UseCrankshaft() { return use_crankshaft_; }
   static void DisableCrankshaft() { use_crankshaft_ = false; }
   // To be dead you have to have lived
+  // TODO(isolates): move IsDead to Isolate.
   static bool IsDead() { return has_fatal_error_ || has_been_disposed_; }
   static void SetFatalError();
 
@@ -94,12 +95,12 @@ class V8 : public AllStatic {
                                       bool take_snapshot = false);
 
   // Random number generation support. Not cryptographically safe.
-  static uint32_t Random();
+  static uint32_t Random(Isolate* isolate);
   // We use random numbers internally in memory allocation and in the
   // compilers for security. In order to prevent information leaks we
   // use a separate random state for internal random number
   // generation.
-  static uint32_t RandomPrivate();
+  static uint32_t RandomPrivate(Isolate* isolate);
   static Object* FillHeapNumberWithRandom(Object* heap_number);
 
   // Idle notification directly from the API.
index e44b9e742ee95fff420ac3957867aba48cf26039..0933acffdae137cff857ab388fb10e7c9d0c5a95 100644 (file)
@@ -443,11 +443,11 @@ enum StateTag {
 #define TRACK_MEMORY(name) \
   void* operator new(size_t size) { \
     void* result = ::operator new(size); \
-    Logger::NewEvent(name, result, size); \
+    Logger::NewEventStatic(name, result, size); \
     return result; \
   } \
   void operator delete(void* object) { \
-    Logger::DeleteEvent(name, object); \
+    Logger::DeleteEventStatic(name, object); \
     ::operator delete(object); \
   }
 #else
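
The TRACK_MEMORY hunk above only reroutes the logging calls to Logger::NewEventStatic and Logger::DeleteEventStatic; the macro itself still works by injecting class-level operator new/delete overloads that report each allocation. A self-contained sketch of that mechanism, with a stub logger standing in for the real Logger, is:

// Illustrative sketch only: per-class allocation tracking via operator new/delete.
#include <cstddef>
#include <cstdio>

static void LogNew(const char* name, void* p, std::size_t size) {
  std::printf("new    %-10s %p (%zu bytes)\n", name, p, size);
}
static void LogDelete(const char* name, void* p) {
  std::printf("delete %-10s %p\n", name, p);
}

#define TRACK_MEMORY(name)                    \
  void* operator new(std::size_t size) {      \
    void* result = ::operator new(size);      \
    LogNew(name, result, size);               \
    return result;                            \
  }                                           \
  void operator delete(void* object) {        \
    LogDelete(name, object);                  \
    ::operator delete(object);                \
  }

class Snapshot {
 public:
  TRACK_MEMORY("Snapshot")
 private:
  int payload_[4];
};

int main() {
  Snapshot* s = new Snapshot();   // goes through the class-level operator new
  delete s;                       // goes through the class-level operator delete
  return 0;
}
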
index 8a5fe6902dbbdcd76dd0eb6bd73005cca630efb6..cecafaa60c9c620de29d37ec31b9a252afb69901 100644 (file)
 
 namespace v8 {
 
-static internal::Thread::LocalStorageKey thread_state_key =
-    internal::Thread::CreateThreadLocalKey();
-static internal::Thread::LocalStorageKey thread_id_key =
-    internal::Thread::CreateThreadLocalKey();
-
 
 // Track whether this V8 instance has ever called v8::Locker. This allows the
 // API code to verify that the lock is always held when V8 is being entered.
@@ -50,64 +45,88 @@ bool Locker::active_ = false;
 // Constructor for the Locker object.  Once the Locker is constructed the
 // current thread will be guaranteed to have the big V8 lock.
 Locker::Locker() : has_lock_(false), top_level_(true) {
+  // TODO(isolates): When Locker has an Isolate parameter and it is provided,
+  // grab that one instead of using the current one.
+  // We fall back to the default isolate for a Locker constructed without a
+  // parameter.  A thread should not enter an isolate before acquiring a lock
+  // in scenarios that mandate using Lockers, so taking the lock is the first
+  // thing threads do when multiple threads share an isolate.  Hence we need
+  // access to the 'locking isolate' before we can actually enter the default
+  // isolate.
+  internal::Isolate* isolate = internal::Isolate::GetDefaultIsolateForLocking();
+  ASSERT(isolate != NULL);
+
   // Record that the Locker has been used at least once.
   active_ = true;
   // Get the big lock if necessary.
-  if (!internal::ThreadManager::IsLockedByCurrentThread()) {
-    internal::ThreadManager::Lock();
+  if (!isolate->thread_manager()->IsLockedByCurrentThread()) {
+    isolate->thread_manager()->Lock();
     has_lock_ = true;
+
+    if (isolate->IsDefaultIsolate()) {
+      // This only enters if not yet entered.
+      internal::Isolate::EnterDefaultIsolate();
+    }
+
+    ASSERT(internal::Thread::HasThreadLocal(
+        internal::Isolate::thread_id_key()));
+
     // Make sure that V8 is initialized.  Archiving of threads interferes
     // with deserialization by adding additional root pointers, so we must
     // initialize here, before anyone can call ~Locker() or Unlocker().
-    if (!internal::V8::IsRunning()) {
+    if (!isolate->IsInitialized()) {
       V8::Initialize();
     }
     // This may be a locker within an unlocker in which case we have to
     // get the saved state for this thread and restore it.
-    if (internal::ThreadManager::RestoreThread()) {
+    if (isolate->thread_manager()->RestoreThread()) {
       top_level_ = false;
     } else {
-      internal::ExecutionAccess access;
-      internal::StackGuard::ClearThread(access);
-      internal::StackGuard::InitThread(access);
+      internal::ExecutionAccess access(isolate);
+      isolate->stack_guard()->ClearThread(access);
+      isolate->stack_guard()->InitThread(access);
     }
   }
-  ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
-
-  // Make sure this thread is assigned a thread id.
-  internal::ThreadManager::AssignId();
+  ASSERT(isolate->thread_manager()->IsLockedByCurrentThread());
 }
 
 
 bool Locker::IsLocked() {
-  return internal::ThreadManager::IsLockedByCurrentThread();
+  return internal::Isolate::Current()->thread_manager()->
+      IsLockedByCurrentThread();
 }
 
 
 Locker::~Locker() {
-  ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
+  // TODO(isolate): this should use a field storing the isolate it
+  // locked instead.
+  internal::Isolate* isolate = internal::Isolate::Current();
+  ASSERT(isolate->thread_manager()->IsLockedByCurrentThread());
   if (has_lock_) {
     if (top_level_) {
-      internal::ThreadManager::FreeThreadResources();
+      isolate->thread_manager()->FreeThreadResources();
     } else {
-      internal::ThreadManager::ArchiveThread();
+      isolate->thread_manager()->ArchiveThread();
     }
-    internal::ThreadManager::Unlock();
+    isolate->thread_manager()->Unlock();
   }
 }
 
 
 Unlocker::Unlocker() {
-  ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
-  internal::ThreadManager::ArchiveThread();
-  internal::ThreadManager::Unlock();
+  internal::Isolate* isolate = internal::Isolate::Current();
+  ASSERT(isolate->thread_manager()->IsLockedByCurrentThread());
+  isolate->thread_manager()->ArchiveThread();
+  isolate->thread_manager()->Unlock();
 }
 
 
 Unlocker::~Unlocker() {
-  ASSERT(!internal::ThreadManager::IsLockedByCurrentThread());
-  internal::ThreadManager::Lock();
-  internal::ThreadManager::RestoreThread();
+  // TODO(isolates): check it's the isolate we unlocked.
+  internal::Isolate* isolate = internal::Isolate::Current();
+  ASSERT(!isolate->thread_manager()->IsLockedByCurrentThread());
+  isolate->thread_manager()->Lock();
+  isolate->thread_manager()->RestoreThread();
 }
 
 
@@ -130,44 +149,45 @@ bool ThreadManager::RestoreThread() {
   // had prepared back in the free list, since we didn't need it after all.
   if (lazily_archived_thread_.IsSelf()) {
     lazily_archived_thread_.Initialize(ThreadHandle::INVALID);
-    ASSERT(Thread::GetThreadLocal(thread_state_key) ==
+    ASSERT(Isolate::CurrentPerIsolateThreadData()->thread_state() ==
            lazily_archived_thread_state_);
     lazily_archived_thread_state_->set_id(kInvalidId);
     lazily_archived_thread_state_->LinkInto(ThreadState::FREE_LIST);
     lazily_archived_thread_state_ = NULL;
-    Thread::SetThreadLocal(thread_state_key, NULL);
+    Isolate::CurrentPerIsolateThreadData()->set_thread_state(NULL);
     return true;
   }
 
   // Make sure that the preemption thread cannot modify the thread state while
   // it is being archived or restored.
-  ExecutionAccess access;
+  ExecutionAccess access(isolate_);
 
   // If there is another thread that was lazily archived then we have to really
   // archive it now.
   if (lazily_archived_thread_.IsValid()) {
     EagerlyArchiveThread();
   }
-  ThreadState* state =
-      reinterpret_cast<ThreadState*>(Thread::GetThreadLocal(thread_state_key));
-  if (state == NULL) {
+  Isolate::PerIsolateThreadData* per_thread =
+      Isolate::CurrentPerIsolateThreadData();
+  if (per_thread == NULL || per_thread->thread_state() == NULL) {
     // This is a new thread.
-    StackGuard::InitThread(access);
+    isolate_->stack_guard()->InitThread(access);
     return false;
   }
+  ThreadState* state = per_thread->thread_state();
   char* from = state->data();
-  from = HandleScopeImplementer::RestoreThread(from);
-  from = Top::RestoreThread(from);
+  from = isolate_->handle_scope_implementer()->RestoreThread(from);
+  from = isolate_->RestoreThread(from);
   from = Relocatable::RestoreState(from);
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  from = Debug::RestoreDebug(from);
+  from = isolate_->debug()->RestoreDebug(from);
 #endif
-  from = StackGuard::RestoreStackGuard(from);
-  from = RegExpStack::RestoreStack(from);
-  from = Bootstrapper::RestoreState(from);
-  Thread::SetThreadLocal(thread_state_key, NULL);
+  from = isolate_->stack_guard()->RestoreStackGuard(from);
+  from = isolate_->regexp_stack()->RestoreStack(from);
+  from = isolate_->bootstrapper()->RestoreState(from);
+  per_thread->set_thread_state(NULL);
   if (state->terminate_on_restore()) {
-    StackGuard::TerminateExecution();
+    isolate_->stack_guard()->TerminateExecution();
     state->set_terminate_on_restore(false);
   }
   state->set_id(kInvalidId);
@@ -192,7 +212,7 @@ void ThreadManager::Unlock() {
 
 static int ArchiveSpacePerThread() {
   return HandleScopeImplementer::ArchiveSpacePerThread() +
-                            Top::ArchiveSpacePerThread() +
+                        Isolate::ArchiveSpacePerThread() +
 #ifdef ENABLE_DEBUGGER_SUPPORT
                           Debug::ArchiveSpacePerThread() +
 #endif
@@ -203,13 +223,12 @@ static int ArchiveSpacePerThread() {
 }
 
 
-ThreadState* ThreadState::free_anchor_ = new ThreadState();
-ThreadState* ThreadState::in_use_anchor_ = new ThreadState();
-
-
-ThreadState::ThreadState() : id_(ThreadManager::kInvalidId),
-                             terminate_on_restore_(false),
-                             next_(this), previous_(this) {
+ThreadState::ThreadState(ThreadManager* thread_manager)
+    : id_(ThreadManager::kInvalidId),
+      terminate_on_restore_(false),
+      next_(this),
+      previous_(this),
+      thread_manager_(thread_manager) {
 }
 
 
@@ -226,7 +245,8 @@ void ThreadState::Unlink() {
 
 void ThreadState::LinkInto(List list) {
   ThreadState* flying_anchor =
-      list == FREE_LIST ? free_anchor_ : in_use_anchor_;
+      list == FREE_LIST ? thread_manager_->free_anchor_
+                        : thread_manager_->in_use_anchor_;
   next_ = flying_anchor->next_;
   previous_ = flying_anchor;
   flying_anchor->next_ = this;
@@ -234,10 +254,10 @@ void ThreadState::LinkInto(List list) {
 }
 
 
-ThreadState* ThreadState::GetFree() {
+ThreadState* ThreadManager::GetFreeThreadState() {
   ThreadState* gotten = free_anchor_->next_;
   if (gotten == free_anchor_) {
-    ThreadState* new_thread_state = new ThreadState();
+    ThreadState* new_thread_state = new ThreadState(this);
     new_thread_state->AllocateSpace();
     return new_thread_state;
   }
@@ -246,13 +266,13 @@ ThreadState* ThreadState::GetFree() {
 
 
 // Gets the first in the list of archived threads.
-ThreadState* ThreadState::FirstInUse() {
+ThreadState* ThreadManager::FirstThreadStateInUse() {
   return in_use_anchor_->Next();
 }
 
 
 ThreadState* ThreadState::Next() {
-  if (next_ == in_use_anchor_) return NULL;
+  if (next_ == thread_manager_->in_use_anchor_) return NULL;
   return next_;
 }
 
@@ -260,19 +280,29 @@ ThreadState* ThreadState::Next() {
 // Thread ids must start with 1, because in TLS having thread id 0 can't
 // be distinguished from not having a thread id at all (since NULL is
 // defined as 0.)
-int ThreadManager::last_id_ = 0;
-Mutex* ThreadManager::mutex_ = OS::CreateMutex();
-ThreadHandle ThreadManager::mutex_owner_(ThreadHandle::INVALID);
-ThreadHandle ThreadManager::lazily_archived_thread_(ThreadHandle::INVALID);
-ThreadState* ThreadManager::lazily_archived_thread_state_ = NULL;
+ThreadManager::ThreadManager()
+    : mutex_(OS::CreateMutex()),
+      mutex_owner_(ThreadHandle::INVALID),
+      lazily_archived_thread_(ThreadHandle::INVALID),
+      lazily_archived_thread_state_(NULL),
+      free_anchor_(NULL),
+      in_use_anchor_(NULL) {
+  free_anchor_ = new ThreadState(this);
+  in_use_anchor_ = new ThreadState(this);
+}
+
+
+ThreadManager::~ThreadManager() {
+  // TODO(isolates): Destroy mutexes.
+}
 
 
 void ThreadManager::ArchiveThread() {
   ASSERT(!lazily_archived_thread_.IsValid());
   ASSERT(!IsArchived());
-  ThreadState* state = ThreadState::GetFree();
+  ThreadState* state = GetFreeThreadState();
   state->Unlink();
-  Thread::SetThreadLocal(thread_state_key, reinterpret_cast<void*>(state));
+  Isolate::CurrentPerIsolateThreadData()->set_thread_state(state);
   lazily_archived_thread_.Initialize(ThreadHandle::SELF);
   lazily_archived_thread_state_ = state;
   ASSERT(state->id() == kInvalidId);
@@ -287,84 +317,69 @@ void ThreadManager::EagerlyArchiveThread() {
   char* to = state->data();
   // Ensure that data containing GC roots are archived first, and handle them
   // in ThreadManager::Iterate(ObjectVisitor*).
-  to = HandleScopeImplementer::ArchiveThread(to);
-  to = Top::ArchiveThread(to);
+  to = isolate_->handle_scope_implementer()->ArchiveThread(to);
+  to = isolate_->ArchiveThread(to);
   to = Relocatable::ArchiveState(to);
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  to = Debug::ArchiveDebug(to);
+  to = isolate_->debug()->ArchiveDebug(to);
 #endif
-  to = StackGuard::ArchiveStackGuard(to);
-  to = RegExpStack::ArchiveStack(to);
-  to = Bootstrapper::ArchiveState(to);
+  to = isolate_->stack_guard()->ArchiveStackGuard(to);
+  to = isolate_->regexp_stack()->ArchiveStack(to);
+  to = isolate_->bootstrapper()->ArchiveState(to);
   lazily_archived_thread_.Initialize(ThreadHandle::INVALID);
   lazily_archived_thread_state_ = NULL;
 }
 
 
 void ThreadManager::FreeThreadResources() {
-  HandleScopeImplementer::FreeThreadResources();
-  Top::FreeThreadResources();
+  isolate_->handle_scope_implementer()->FreeThreadResources();
+  isolate_->FreeThreadResources();
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debug::FreeThreadResources();
+  isolate_->debug()->FreeThreadResources();
 #endif
-  StackGuard::FreeThreadResources();
-  RegExpStack::FreeThreadResources();
-  Bootstrapper::FreeThreadResources();
+  isolate_->stack_guard()->FreeThreadResources();
+  isolate_->regexp_stack()->FreeThreadResources();
+  isolate_->bootstrapper()->FreeThreadResources();
 }
 
 
 bool ThreadManager::IsArchived() {
-  return Thread::HasThreadLocal(thread_state_key);
+  Isolate::PerIsolateThreadData* data = Isolate::CurrentPerIsolateThreadData();
+  return data != NULL && data->thread_state() != NULL;
 }
 
 
 void ThreadManager::Iterate(ObjectVisitor* v) {
   // Expecting no threads during serialization/deserialization
-  for (ThreadState* state = ThreadState::FirstInUse();
+  for (ThreadState* state = FirstThreadStateInUse();
        state != NULL;
        state = state->Next()) {
     char* data = state->data();
     data = HandleScopeImplementer::Iterate(v, data);
-    data = Top::Iterate(v, data);
+    data = isolate_->Iterate(v, data);
     data = Relocatable::Iterate(v, data);
   }
 }
 
 
 void ThreadManager::IterateArchivedThreads(ThreadVisitor* v) {
-  for (ThreadState* state = ThreadState::FirstInUse();
+  for (ThreadState* state = FirstThreadStateInUse();
        state != NULL;
        state = state->Next()) {
     char* data = state->data();
     data += HandleScopeImplementer::ArchiveSpacePerThread();
-    Top::IterateThread(v, data);
+    isolate_->IterateThread(v, data);
   }
 }
 
 
 int ThreadManager::CurrentId() {
-  return Thread::GetThreadLocalInt(thread_id_key);
-}
-
-
-void ThreadManager::AssignId() {
-  if (!HasId()) {
-    ASSERT(Locker::IsLocked());
-    int thread_id = ++last_id_;
-    ASSERT(thread_id > 0);  // see the comment near last_id_ definition.
-    Thread::SetThreadLocalInt(thread_id_key, thread_id);
-    Top::set_thread_id(thread_id);
-  }
-}
-
-
-bool ThreadManager::HasId() {
-  return Thread::HasThreadLocal(thread_id_key);
+  return Thread::GetThreadLocalInt(Isolate::thread_id_key());
 }
 
 
 void ThreadManager::TerminateExecution(int thread_id) {
-  for (ThreadState* state = ThreadState::FirstInUse();
+  for (ThreadState* state = FirstThreadStateInUse();
        state != NULL;
        state = state->Next()) {
     if (thread_id == state->id()) {
@@ -374,13 +389,8 @@ void ThreadManager::TerminateExecution(int thread_id) {
 }
 
 
-// This is the ContextSwitcher singleton. There is at most a single thread
-// running which delivers preemption events to V8 threads.
-ContextSwitcher* ContextSwitcher::singleton_ = NULL;
-
-
-ContextSwitcher::ContextSwitcher(int every_n_ms)
-  : Thread("v8:CtxtSwitcher"),
+ContextSwitcher::ContextSwitcher(Isolate* isolate, int every_n_ms)
+  : Thread(isolate, "v8:CtxtSwitcher"),
     keep_going_(true),
     sleep_ms_(every_n_ms) {
 }
@@ -389,15 +399,16 @@ ContextSwitcher::ContextSwitcher(int every_n_ms)
 // Set the scheduling interval of V8 threads. This function starts the
 // ContextSwitcher thread if needed.
 void ContextSwitcher::StartPreemption(int every_n_ms) {
+  Isolate* isolate = Isolate::Current();
   ASSERT(Locker::IsLocked());
-  if (singleton_ == NULL) {
+  if (isolate->context_switcher() == NULL) {
     // If the ContextSwitcher thread is not running at the moment start it now.
-    singleton_ = new ContextSwitcher(every_n_ms);
-    singleton_->Start();
+    isolate->set_context_switcher(new ContextSwitcher(isolate, every_n_ms));
+    isolate->context_switcher()->Start();
   } else {
     // ContextSwitcher thread is already running, so we just change the
     // scheduling interval.
-    singleton_->sleep_ms_ = every_n_ms;
+    isolate->context_switcher()->sleep_ms_ = every_n_ms;
   }
 }
 
@@ -405,15 +416,17 @@ void ContextSwitcher::StartPreemption(int every_n_ms) {
 // Disable preemption of V8 threads. If multiple threads want to use V8 they
 // must cooperatively schedule amongst them from this point on.
 void ContextSwitcher::StopPreemption() {
+  Isolate* isolate = Isolate::Current();
   ASSERT(Locker::IsLocked());
-  if (singleton_ != NULL) {
+  if (isolate->context_switcher() != NULL) {
     // The ContextSwitcher thread is running. We need to stop it and release
     // its resources.
-    singleton_->keep_going_ = false;
-    singleton_->Join();  // Wait for the ContextSwitcher thread to exit.
+    isolate->context_switcher()->keep_going_ = false;
+    // Wait for the ContextSwitcher thread to exit.
+    isolate->context_switcher()->Join();
     // Thread has exited, now we can delete it.
-    delete(singleton_);
-    singleton_ = NULL;
+    delete(isolate->context_switcher());
+    isolate->set_context_switcher(NULL);
   }
 }
 
@@ -423,7 +436,7 @@ void ContextSwitcher::StopPreemption() {
 void ContextSwitcher::Run() {
   while (keep_going_) {
     OS::Sleep(sleep_ms_);
-    StackGuard::Preempt();
+    isolate()->stack_guard()->Preempt();
   }
 }
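
For readers tracing the archiving flow in this file: EagerlyArchiveThread lays each component's per-thread state into one contiguous buffer, advancing a char* cursor component by component, with the GC-visible data first so that ThreadManager::Iterate(ObjectVisitor*) only has to walk that prefix. A simplified sketch of the cursor convention follows; FooThreadState and ArchiveFoo are hypothetical names, not part of this change.

#include <cstring>

// Illustrative only: each component copies its state into the shared buffer
// and returns the next free position, so archived blocks sit back to back.
struct FooThreadState { int nesting_depth; void* stack_limit; };

char* ArchiveFoo(const FooThreadState& state, char* to) {
  std::memcpy(to, &state, sizeof(state));
  return to + sizeof(state);  // The caller hands this cursor to the next component.
}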
 
index da56d0525c64b3a6bb100731188430c7d2f10570..f1992ad765b1717416ee57211135a6f5da2ffa08 100644 (file)
@@ -34,8 +34,6 @@ namespace internal {
 
 class ThreadState {
  public:
-  // Iterate over in-use states.
-  static ThreadState* FirstInUse();
   // Returns NULL after the last one.
   ThreadState* Next();
 
@@ -44,8 +42,6 @@ class ThreadState {
   void LinkInto(List list);
   void Unlink();
 
-  static ThreadState* GetFree();
-
   // Id of thread.
   void set_id(int id) { id_ = id; }
   int id() { return id_; }
@@ -59,7 +55,7 @@ class ThreadState {
   // Get data area for archiving a thread.
   char* data() { return data_; }
  private:
-  ThreadState();
+  explicit ThreadState(ThreadManager* thread_manager);
 
   void AllocateSpace();
 
@@ -69,13 +65,9 @@ class ThreadState {
   ThreadState* next_;
   ThreadState* previous_;
 
-  // In the following two lists there is always at least one object on the list.
-  // The first object is a flying anchor that is only there to simplify linking
-  // and unlinking.
-  // Head of linked list of free states.
-  static ThreadState* free_anchor_;
-  // Head of linked list of states in use.
-  static ThreadState* in_use_anchor_;
+  ThreadManager* thread_manager_;
+
+  friend class ThreadManager;
 };
 
 
@@ -93,35 +85,52 @@ class ThreadVisitor {
 };
 
 
-class ThreadManager : public AllStatic {
+class ThreadManager {
  public:
-  static void Lock();
-  static void Unlock();
+  void Lock();
+  void Unlock();
 
-  static void ArchiveThread();
-  static bool RestoreThread();
-  static void FreeThreadResources();
-  static bool IsArchived();
+  void ArchiveThread();
+  bool RestoreThread();
+  void FreeThreadResources();
+  bool IsArchived();
 
-  static void Iterate(ObjectVisitor* v);
-  static void IterateArchivedThreads(ThreadVisitor* v);
-  static bool IsLockedByCurrentThread() { return mutex_owner_.IsSelf(); }
+  void Iterate(ObjectVisitor* v);
+  void IterateArchivedThreads(ThreadVisitor* v);
+  bool IsLockedByCurrentThread() { return mutex_owner_.IsSelf(); }
 
-  static int CurrentId();
-  static void AssignId();
-  static bool HasId();
+  int CurrentId();
 
-  static void TerminateExecution(int thread_id);
+  void TerminateExecution(int thread_id);
+
+  // Iterate over in-use states.
+  ThreadState* FirstThreadStateInUse();
+  ThreadState* GetFreeThreadState();
 
   static const int kInvalidId = -1;
  private:
-  static void EagerlyArchiveThread();
+  ThreadManager();
+  ~ThreadManager();
 
-  static int last_id_;  // V8 threads are identified through an integer.
-  static Mutex* mutex_;
-  static ThreadHandle mutex_owner_;
-  static ThreadHandle lazily_archived_thread_;
-  static ThreadState* lazily_archived_thread_state_;
+  void EagerlyArchiveThread();
+
+  Mutex* mutex_;
+  ThreadHandle mutex_owner_;
+  ThreadHandle lazily_archived_thread_;
+  ThreadState* lazily_archived_thread_state_;
+
+  // In the following two lists there is always at least one object on the list.
+  // The first object is a flying anchor that is only there to simplify linking
+  // and unlinking.
+  // Head of linked list of free states.
+  ThreadState* free_anchor_;
+  // Head of linked list of states in use.
+  ThreadState* in_use_anchor_;
+
+  Isolate* isolate_;
+
+  friend class Isolate;
+  friend class ThreadState;
 };
 
 
@@ -142,14 +151,12 @@ class ContextSwitcher: public Thread {
   static void PreemptionReceived();
 
  private:
-  explicit ContextSwitcher(int every_n_ms);
+  explicit ContextSwitcher(Isolate* isolate, int every_n_ms);
 
   void Run();
 
   bool keep_going_;
   int sleep_ms_;
-
-  static ContextSwitcher* singleton_;
 };
 
 } }  // namespace v8::internal
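
The free_anchor_/in_use_anchor_ members declared above are permanent sentinel nodes, so LinkInto and Unlink never need an empty-list special case. A minimal standalone sketch of that pattern, assuming a circular doubly-linked list (Node is illustrative, not the actual ThreadState layout):

// Sketch of a list with a permanent anchor: the anchor links to itself when
// the list is empty, so insertion and removal are branch-free.
struct Node {
  Node* next_;
  Node* previous_;
  Node() : next_(this), previous_(this) {}

  void LinkInto(Node* anchor) {
    next_ = anchor->next_;
    previous_ = anchor;
    anchor->next_->previous_ = this;
    anchor->next_ = this;
  }

  void Unlink() {
    next_->previous_ = previous_;
    previous_->next_ = next_;
    next_ = this;
    previous_ = this;
  }
};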
index 5d27a02d5375ec70b84934e637b0a18a0e621f41..67e1a185427381fdf24fbf3f6fb53a6ba553afee 100644 (file)
@@ -165,7 +165,7 @@ class Variable: public ZoneObject {
 
   // True if the variable is named eval and not known to be shadowed.
   bool is_possibly_eval() const {
-    return IsVariable(Factory::eval_symbol()) &&
+    return IsVariable(FACTORY->eval_symbol()) &&
         (mode_ == DYNAMIC || mode_ == DYNAMIC_GLOBAL);
   }
 
index dd69442511fe518100d8171c5f5316ee030d7a42..ccbca94c346ecbd215ccb6e0e01c68769544bbdd 100644 (file)
 #define MINOR_VERSION     2
 #define BUILD_NUMBER      4
 #define PATCH_LEVEL       0
-#define CANDIDATE_VERSION true
+// Use 1 for candidates and 0 otherwise.
+// (Boolean macro values are not supported by all preprocessors.)
+#define IS_CANDIDATE_VERSION 0
 
 // Define SONAME to have the SCons build put a specific SONAME into the
 // shared library instead of the generic SONAME generated from the V8 version
 // number. This define is mainly used by the SCons build script.
 #define SONAME            ""
 
+#if IS_CANDIDATE_VERSION
+#define CANDIDATE_STRING " (candidate)"
+#else
+#define CANDIDATE_STRING ""
+#endif
+
+#define SX(x) #x
+#define S(x) SX(x)
+
+#if PATCH_LEVEL > 0
+#define VERSION_STRING                                                         \
+    S(MAJOR_VERSION) "." S(MINOR_VERSION) "." S(BUILD_NUMBER) "."              \
+        S(PATCH_LEVEL) CANDIDATE_STRING
+#else
+#define VERSION_STRING                                                         \
+    S(MAJOR_VERSION) "." S(MINOR_VERSION) "." S(BUILD_NUMBER)                  \
+        CANDIDATE_STRING
+#endif
+
 namespace v8 {
 namespace internal {
 
@@ -50,9 +71,9 @@ int Version::major_ = MAJOR_VERSION;
 int Version::minor_ = MINOR_VERSION;
 int Version::build_ = BUILD_NUMBER;
 int Version::patch_ = PATCH_LEVEL;
-bool Version::candidate_ = CANDIDATE_VERSION;
+bool Version::candidate_ = (IS_CANDIDATE_VERSION != 0);
 const char* Version::soname_ = SONAME;
-
+const char* Version::version_string_ = VERSION_STRING;
 
 // Calculate the V8 version string.
 void Version::GetString(Vector<char> str) {
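
The S/SX pair introduced in version.cc is the standard two-level stringification idiom: SX stringizes its argument as written, and the extra S level forces macros such as MAJOR_VERSION to expand first. A quick illustration with a hypothetical value (not the actual version number):

#define MAJOR 3
#define SX(x) #x
#define S(x) SX(x)
// SX(MAJOR) yields "MAJOR"  -- the token is stringized without expansion.
// S(MAJOR)  yields "3"      -- MAJOR expands before SX stringizes it.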
index c322a2fc02ab98232ef78079ae44cf26b0904bad..4b3e7e2bde3d30928f4f1eae47f2bc3b90cbdd38 100644 (file)
@@ -46,13 +46,17 @@ class Version {
   // Calculate the SONAME for the V8 shared library.
   static void GetSONAME(Vector<char> str);
 
+  static const char* GetVersion() { return version_string_; }
+
  private:
+  // NOTE: can't make these really const because of test-version.cc.
   static int major_;
   static int minor_;
   static int build_;
   static int patch_;
   static bool candidate_;
   static const char* soname_;
+  static const char* version_string_;
 
   // In test-version.cc.
   friend void SetVersion(int major, int minor, int build, int patch,
index 19520a6349d63dfce500276da7edba2ceec989ba..681f93fb4e8a1fa86c46650055bd90b9f0b6d1b7 100644 (file)
@@ -83,8 +83,9 @@ void VirtualFrame::PrepareForReturn() {
 VirtualFrame::RegisterAllocationScope::RegisterAllocationScope(
     CodeGenerator* cgen)
   : cgen_(cgen),
-    old_is_spilled_(SpilledScope::is_spilled_) {
-  SpilledScope::is_spilled_ = false;
+    old_is_spilled_(
+        Isolate::Current()->is_virtual_frame_in_spilled_scope()) {
+  Isolate::Current()->set_is_virtual_frame_in_spilled_scope(false);
   if (old_is_spilled_) {
     VirtualFrame* frame = cgen->frame();
     if (frame != NULL) {
@@ -95,7 +96,7 @@ VirtualFrame::RegisterAllocationScope::RegisterAllocationScope(
 
 
 VirtualFrame::RegisterAllocationScope::~RegisterAllocationScope() {
-  SpilledScope::is_spilled_ = old_is_spilled_;
+  Isolate::Current()->set_is_virtual_frame_in_spilled_scope(old_is_spilled_);
   if (old_is_spilled_) {
     VirtualFrame* frame = cgen_->frame();
     if (frame != NULL) {
@@ -106,7 +107,7 @@ VirtualFrame::RegisterAllocationScope::~RegisterAllocationScope() {
 
 
 CodeGenerator* VirtualFrame::cgen() const {
-  return CodeGeneratorScope::Current();
+  return CodeGeneratorScope::Current(Isolate::Current());
 }
 
 
index da912b74698af9eff87092c05a181bf27cc27dcd..1f363de626d0dd55f3cb8e3260b85c6dacff58dd 100644 (file)
@@ -58,25 +58,27 @@ inline const char* StateToString(StateTag state) {
   }
 }
 
-VMState::VMState(StateTag tag) : previous_tag_(Top::current_vm_state()) {
+
+VMState::VMState(Isolate* isolate, StateTag tag)
+    : isolate_(isolate), previous_tag_(isolate->current_vm_state()) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (FLAG_log_state_changes) {
-    LOG(UncheckedStringEvent("Entering", StateToString(tag)));
-    LOG(UncheckedStringEvent("From", StateToString(previous_tag_)));
+    LOG(isolate, UncheckedStringEvent("Entering", StateToString(tag)));
+    LOG(isolate, UncheckedStringEvent("From", StateToString(previous_tag_)));
   }
 #endif
 
-  Top::SetCurrentVMState(tag);
+  isolate_->SetCurrentVMState(tag);
 
 #ifdef ENABLE_HEAP_PROTECTION
   if (FLAG_protect_heap) {
     if (tag == EXTERNAL) {
       // We are leaving V8.
       ASSERT(previous_tag_ != EXTERNAL);
-      Heap::Protect();
+      isolate_->heap()->Protect();
     } else if (previous_tag_ == EXTERNAL) {
       // We are entering V8.
-      Heap::Unprotect();
+      isolate_->heap()->Unprotect();
     }
   }
 #endif
@@ -86,27 +88,29 @@ VMState::VMState(StateTag tag) : previous_tag_(Top::current_vm_state()) {
 VMState::~VMState() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (FLAG_log_state_changes) {
-    LOG(UncheckedStringEvent("Leaving",
-                             StateToString(Top::current_vm_state())));
-    LOG(UncheckedStringEvent("To", StateToString(previous_tag_)));
+    LOG(isolate_,
+        UncheckedStringEvent("Leaving",
+                              StateToString(isolate_->current_vm_state())));
+    LOG(isolate_,
+        UncheckedStringEvent("To", StateToString(previous_tag_)));
   }
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 #ifdef ENABLE_HEAP_PROTECTION
-  StateTag tag = Top::current_vm_state();
+  StateTag tag = isolate_->current_vm_state();
 #endif
 
-  Top::SetCurrentVMState(previous_tag_);
+  isolate_->SetCurrentVMState(previous_tag_);
 
 #ifdef ENABLE_HEAP_PROTECTION
   if (FLAG_protect_heap) {
     if (tag == EXTERNAL) {
       // We are reentering V8.
       ASSERT(previous_tag_ != EXTERNAL);
-      Heap::Unprotect();
+      isolate_->heap()->Unprotect();
     } else if (previous_tag_ == EXTERNAL) {
       // We are leaving V8.
-      Heap::Protect();
+      isolate_->heap()->Protect();
     }
   }
 #endif  // ENABLE_HEAP_PROTECTION
@@ -117,13 +121,13 @@ VMState::~VMState() {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
-ExternalCallbackScope::ExternalCallbackScope(Address callback)
-    : previous_callback_(Top::external_callback()) {
-  Top::set_external_callback(callback);
+ExternalCallbackScope::ExternalCallbackScope(Isolate* isolate, Address callback)
+    : isolate_(isolate), previous_callback_(isolate->external_callback()) {
+  isolate_->set_external_callback(callback);
 }
 
 ExternalCallbackScope::~ExternalCallbackScope() {
-  Top::set_external_callback(previous_callback_);
+  isolate_->set_external_callback(previous_callback_);
 }
 
 #endif  // ENABLE_LOGGING_AND_PROFILING
index df7fb30ac57d2949799592474e2e1dad3be8b666..11fc6d67ef5981e0003ae52e6d52f2bbca41ed2a 100644 (file)
@@ -28,7 +28,7 @@
 #ifndef V8_VM_STATE_H_
 #define V8_VM_STATE_H_
 
-#include "top.h"
+#include "isolate.h"
 
 namespace v8 {
 namespace internal {
@@ -36,15 +36,16 @@ namespace internal {
 class VMState BASE_EMBEDDED {
 #ifdef ENABLE_VMSTATE_TRACKING
  public:
-  inline explicit VMState(StateTag tag);
+  inline VMState(Isolate* isolate, StateTag tag);
   inline ~VMState();
 
  private:
+  Isolate* isolate_;
   StateTag previous_tag_;
 
 #else
  public:
-  explicit VMState(StateTag state) {}
+  VMState(Isolate* isolate, StateTag state) {}
 #endif
 };
 
@@ -52,13 +53,14 @@ class VMState BASE_EMBEDDED {
 class ExternalCallbackScope BASE_EMBEDDED {
 #ifdef ENABLE_LOGGING_AND_PROFILING
  public:
-  inline explicit ExternalCallbackScope(Address callback);
+  inline ExternalCallbackScope(Isolate* isolate, Address callback);
   inline ~ExternalCallbackScope();
  private:
+  Isolate* isolate_;
   Address previous_callback_;
 #else
  public:
-  explicit ExternalCallbackScope(Address callback) {}
+  ExternalCallbackScope(Isolate* isolate, Address callback) {}
 #endif
 };
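
Both classes above are RAII guards that now carry an explicit isolate: the constructor records the previous value on that isolate and installs the new one, and the destructor restores it, so nested scopes unwind correctly. A hedged sketch of a typical call site (the function and its body are illustrative, not code from this change):

// Illustrative use of the isolate-aware guards.
void InvokeEmbedderCallback(Isolate* isolate, Address callback) {
  VMState state(isolate, EXTERNAL);               // Tag this isolate as EXTERNAL.
  ExternalCallbackScope scope(isolate, callback);  // Record the callback address.
  // ... call out to the embedder here ...
}                                                  // Destructors restore the
                                                   // previous tag and callback.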
 
index b082624f44916630b46478e32f688540dc98f42b..af9f7cf029ea8bc265d2f021bba06e3a6020317e 100644 (file)
@@ -372,11 +372,12 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
     visitor->VisitExternalReference(target_reference_address());
     CPU::FlushICache(pc_, sizeof(Address));
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
-             ((RelocInfo::IsJSReturn(mode) &&
+  // TODO(isolates): Get a cached isolate below.
+  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
-              IsPatchedDebugBreakSlotSequence()))) {
+              IsPatchedDebugBreakSlotSequence())) &&
+             Isolate::Current()->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (mode == RelocInfo::RUNTIME_ENTRY) {
@@ -386,10 +387,10 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
 
 
 template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
-    StaticVisitor::VisitPointer(target_object_address());
+    StaticVisitor::VisitPointer(heap, target_object_address());
     CPU::FlushICache(pc_, sizeof(Address));
   } else if (RelocInfo::IsCodeTarget(mode)) {
     StaticVisitor::VisitCodeTarget(this);
@@ -399,7 +400,7 @@ void RelocInfo::Visit() {
     StaticVisitor::VisitExternalReference(target_reference_address());
     CPU::FlushICache(pc_, sizeof(Address));
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  } else if (Debug::has_break_points() &&
+  } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
index 7eaac91d1e8ac46accfad751b034b8bbe70aadf9..26966e666f80c03f81eaa285858205ffa499ebec 100644 (file)
@@ -38,14 +38,15 @@ namespace internal {
 // -----------------------------------------------------------------------------
 // Implementation of CpuFeatures
 
-// The required user mode extensions in X64 are (from AMD64 ABI Table A.1):
-//   fpu, tsc, cx8, cmov, mmx, sse, sse2, fxsr, syscall
-uint64_t CpuFeatures::supported_ = kDefaultCpuFeatures;
-uint64_t CpuFeatures::enabled_ = 0;
-uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
+CpuFeatures::CpuFeatures()
+    : supported_(kDefaultCpuFeatures),
+      enabled_(0),
+      found_by_runtime_probing_(0) {
+}
+
 
 void CpuFeatures::Probe(bool portable)  {
-  ASSERT(Heap::HasBeenSetup());
+  ASSERT(HEAP->HasBeenSetup());
   supported_ = kDefaultCpuFeatures;
   if (portable && Serializer::enabled()) {
     supported_ |= OS::CpuFeaturesImpliedByPlatform();
@@ -118,13 +119,16 @@ void CpuFeatures::Probe(bool portable)  {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  MaybeObject* maybe_code = Heap::CreateCode(desc,
-                                             Code::ComputeFlags(Code::STUB),
-                                             Handle<Object>());
+  Isolate* isolate = Isolate::Current();
+  MaybeObject* maybe_code =
+      isolate->heap()->CreateCode(desc,
+                                  Code::ComputeFlags(Code::STUB),
+                                  Handle<Object>());
   Object* code;
   if (!maybe_code->ToObject(&code)) return;
   if (!code->IsCode()) return;
-  PROFILE(CodeCreateEvent(Logger::BUILTIN_TAG,
+  PROFILE(isolate,
+          CodeCreateEvent(Logger::BUILTIN_TAG,
                           Code::cast(code), "CpuFeatures::Probe"));
   typedef uint64_t (*F0)();
   F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry());
@@ -335,20 +339,19 @@ bool Operand::AddressUsesRegister(Register reg) const {
 static void InitCoverageLog();
 #endif
 
-byte* Assembler::spare_buffer_ = NULL;
-
 Assembler::Assembler(void* buffer, int buffer_size)
     : code_targets_(100),
       positions_recorder_(this),
       emit_debug_code_(FLAG_debug_code) {
+  Isolate* isolate = Isolate::Current();
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
       buffer_size = kMinimalBufferSize;
 
-      if (spare_buffer_ != NULL) {
-        buffer = spare_buffer_;
-        spare_buffer_ = NULL;
+      if (isolate->assembler_spare_buffer() != NULL) {
+        buffer = isolate->assembler_spare_buffer();
+        isolate->set_assembler_spare_buffer(NULL);
       }
     }
     if (buffer == NULL) {
@@ -389,9 +392,11 @@ Assembler::Assembler(void* buffer, int buffer_size)
 
 
 Assembler::~Assembler() {
+  Isolate* isolate = Isolate::Current();
   if (own_buffer_) {
-    if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
-      spare_buffer_ = buffer_;
+    if (isolate->assembler_spare_buffer() == NULL &&
+        buffer_size_ == kMinimalBufferSize) {
+      isolate->set_assembler_spare_buffer(buffer_);
     } else {
       DeleteArray(buffer_);
     }
@@ -412,7 +417,7 @@ void Assembler::GetCode(CodeDesc* desc) {
       static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
   desc->origin = this;
 
-  Counters::reloc_info_size.Increment(desc->reloc_size);
+  COUNTERS->reloc_info_size()->Increment(desc->reloc_size);
 }
 
 
@@ -476,6 +481,7 @@ void Assembler::bind(NearLabel* L) {
 
 
 void Assembler::GrowBuffer() {
+  Isolate* isolate = Isolate::Current();
   ASSERT(buffer_overflow());
   if (!own_buffer_) FATAL("external code buffer is too small");
 
@@ -489,7 +495,7 @@ void Assembler::GrowBuffer() {
   // Some internal data structures overflow for very large buffers,
   // so they must ensure that kMaximalBufferSize is not too large.
   if ((desc.buffer_size > kMaximalBufferSize) ||
-      (desc.buffer_size > Heap::MaxOldGenerationSize())) {
+      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
     V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
   }
 
@@ -514,8 +520,9 @@ void Assembler::GrowBuffer() {
           reloc_info_writer.pos(), desc.reloc_size);
 
   // Switch buffers.
-  if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
-    spare_buffer_ = buffer_;
+  if (isolate->assembler_spare_buffer() == NULL &&
+      buffer_size_ == kMinimalBufferSize) {
+    isolate->set_assembler_spare_buffer(buffer_);
   } else {
     DeleteArray(buffer_);
   }
@@ -1028,7 +1035,7 @@ void Assembler::cmpb_al(Immediate imm8) {
 
 
 void Assembler::cpuid() {
-  ASSERT(CpuFeatures::IsEnabled(CPUID));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x0F);
@@ -1675,7 +1682,7 @@ void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
     EnsureSpace ensure_space(this);
     last_pc_ = pc_;
     ASSERT(value->IsHeapObject());
-    ASSERT(!Heap::InNewSpace(*value));
+    ASSERT(!HEAP->InNewSpace(*value));
     emit_rex_64(dst);
     emit(0xB8 | dst.low_bits());
     emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
@@ -2379,7 +2386,7 @@ void Assembler::fistp_s(const Operand& adr) {
 
 
 void Assembler::fisttp_s(const Operand& adr) {
-  ASSERT(CpuFeatures::IsEnabled(SSE3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(adr);
@@ -2389,7 +2396,7 @@ void Assembler::fisttp_s(const Operand& adr) {
 
 
 void Assembler::fisttp_d(const Operand& adr) {
-  ASSERT(CpuFeatures::IsEnabled(SSE3));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(adr);
@@ -2707,7 +2714,7 @@ void Assembler::movq(Register dst, XMMRegister src) {
 
 
 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x66);
@@ -2719,7 +2726,7 @@ void Assembler::movdqa(const Operand& dst, XMMRegister src) {
 
 
 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
-  ASSERT(CpuFeatures::IsEnabled(SSE2));
+  ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x66);
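
The spare-buffer changes above turn a former static cache into a one-slot cache hanging off the isolate: when an Assembler with a minimal buffer is destroyed (or grows), its buffer is parked on the isolate for the next Assembler to reuse, and anything larger is freed. A reduced sketch of that reuse policy; AcquireBuffer and ReleaseBuffer are made-up helper names for illustration:

// One-slot buffer cache keyed on the current isolate (illustrative only).
byte* AcquireBuffer(Isolate* isolate, int size) {
  if (size <= kMinimalBufferSize && isolate->assembler_spare_buffer() != NULL) {
    byte* buffer = isolate->assembler_spare_buffer();
    isolate->set_assembler_spare_buffer(NULL);
    return buffer;
  }
  return NewArray<byte>(size);
}

void ReleaseBuffer(Isolate* isolate, byte* buffer, int size) {
  if (isolate->assembler_spare_buffer() == NULL && size == kMinimalBufferSize) {
    isolate->set_assembler_spare_buffer(buffer);  // Keep one minimal buffer around.
  } else {
    DeleteArray(buffer);
  }
}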
index 9fcb95c96e805d5494dfd5c37582ade38e07bb5a..a7450f0792c8815f7d25dea82ba18a08d6ad3c71 100644 (file)
@@ -434,13 +434,14 @@ class Operand BASE_EMBEDDED {
 //   } else {
 //     // Generate standard x87 or SSE2 floating point code.
 //   }
-class CpuFeatures : public AllStatic {
+class CpuFeatures {
  public:
   // Detect features of the target CPU. Set safe defaults if the serializer
   // is enabled (snapshots must be portable).
-  static void Probe(bool portable);
+  void Probe(bool portable);
+
   // Check whether a feature is supported by the target CPU.
-  static bool IsSupported(CpuFeature f) {
+  bool IsSupported(CpuFeature f) const {
     if (f == SSE2 && !FLAG_enable_sse2) return false;
     if (f == SSE3 && !FLAG_enable_sse3) return false;
     if (f == CMOV && !FLAG_enable_cmov) return false;
@@ -449,35 +450,52 @@ class CpuFeatures : public AllStatic {
     return (supported_ & (V8_UINT64_C(1) << f)) != 0;
   }
   // Check whether a feature is currently enabled.
-  static bool IsEnabled(CpuFeature f) {
+  bool IsEnabled(CpuFeature f) const {
     return (enabled_ & (V8_UINT64_C(1) << f)) != 0;
   }
   // Enable a specified feature within a scope.
   class Scope BASE_EMBEDDED {
 #ifdef DEBUG
    public:
-    explicit Scope(CpuFeature f) {
+    explicit Scope(CpuFeature f)
+        : cpu_features_(Isolate::Current()->cpu_features()),
+          isolate_(Isolate::Current()) {
       uint64_t mask = (V8_UINT64_C(1) << f);
-      ASSERT(CpuFeatures::IsSupported(f));
-      ASSERT(!Serializer::enabled() || (found_by_runtime_probing_ & mask) == 0);
-      old_enabled_ = CpuFeatures::enabled_;
-      CpuFeatures::enabled_ |= mask;
+      ASSERT(cpu_features_->IsSupported(f));
+      ASSERT(!Serializer::enabled() ||
+          (cpu_features_->found_by_runtime_probing_ & mask) == 0);
+      old_enabled_ = cpu_features_->enabled_;
+      cpu_features_->enabled_ |= mask;
+    }
+    ~Scope() {
+      ASSERT_EQ(Isolate::Current(), isolate_);
+      cpu_features_->enabled_ = old_enabled_;
     }
-    ~Scope() { CpuFeatures::enabled_ = old_enabled_; }
    private:
     uint64_t old_enabled_;
+    CpuFeatures* cpu_features_;
+    Isolate* isolate_;
 #else
    public:
     explicit Scope(CpuFeature f) {}
 #endif
   };
  private:
+  CpuFeatures();
+
   // Safe defaults include SSE2 and CMOV for X64. They are always available, if
   // anyone checks, but they shouldn't need to check.
+  // The required user mode extensions in X64 are (from AMD64 ABI Table A.1):
+  //   fpu, tsc, cx8, cmov, mmx, sse, sse2, fxsr, syscall
   static const uint64_t kDefaultCpuFeatures = (1 << SSE2 | 1 << CMOV);
-  static uint64_t supported_;
-  static uint64_t enabled_;
-  static uint64_t found_by_runtime_probing_;
+
+  uint64_t supported_;
+  uint64_t enabled_;
+  uint64_t found_by_runtime_probing_;
+
+  friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(CpuFeatures);
 };
 
 
@@ -1549,8 +1567,6 @@ class Assembler : public Malloced {
   int buffer_size_;
   // True if the assembler owns the buffer, false if buffer is external.
   bool own_buffer_;
-  // A previously allocated buffer of kMinimalBufferSize bytes, or NULL.
-  static byte* spare_buffer_;
 
   // code generation
   byte* pc_;  // the program counter; moves forward
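
With CpuFeatures now owned by the isolate, the usage pattern from the class comment is unchanged; only the lookup goes through Isolate::Current(). A sketch of that pattern, assuming a caller that wants the SSE3 form of a truncating store (the surrounding function is illustrative):

// Illustrative: check support, then enable the feature for the code that needs it.
void GenerateTruncateToInt(MacroAssembler* masm) {
  if (Isolate::Current()->cpu_features()->IsSupported(SSE3)) {
    CpuFeatures::Scope enable(SSE3);   // Asserts support; enables it in debug mode.
    masm->fisttp_s(Operand(rsp, 0));   // SSE3-only truncating store.
  } else {
    masm->fistp_s(Operand(rsp, 0));    // x87 fallback (uses current rounding mode).
  }
}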
index b545876e21cda738e2ad0bbdf517f144734793c9..8f782a8d25e05e7742c409d74008fab10aa7a4f9 100644 (file)
@@ -98,8 +98,8 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
   // Set expected number of arguments to zero (not changing rax).
   __ movq(rbx, Immediate(0));
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
-  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-          RelocInfo::CODE_TARGET);
+  __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
 }
 
 
@@ -339,8 +339,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   // Call the function.
   if (is_api_function) {
     __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-    Handle<Code> code = Handle<Code>(
-        Builtins::builtin(Builtins::HandleApiCallConstruct));
+    Handle<Code> code = Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::HandleApiCallConstruct));
     ParameterCount expected(0);
     __ InvokeCode(code, expected, expected,
                   RelocInfo::CODE_TARGET, CALL_FUNCTION);
@@ -379,7 +379,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
   __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
   __ push(rcx);
-  __ IncrementCounter(&Counters::constructed_objects, 1);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1);
   __ ret(0);
 }
 
@@ -492,8 +492,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Invoke the code.
   if (is_construct) {
     // Expects rdi to hold function pointer.
-    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-            RelocInfo::CODE_TARGET);
+    __ Call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(rax);
     // Function must be in rdi.
@@ -630,7 +630,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ testq(rax, rax);
     __ j(not_zero, &done);
     __ pop(rbx);
-    __ Push(Factory::undefined_value());
+    __ Push(FACTORY->undefined_value());
     __ push(rbx);
     __ incq(rax);
     __ bind(&done);
@@ -733,8 +733,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ j(not_zero, &function);
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
-    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-            RelocInfo::CODE_TARGET);
+    __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+        ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
     __ bind(&function);
   }
 
@@ -748,8 +748,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   __ cmpq(rax, rbx);
   __ j(not_equal,
-       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
-       RelocInfo::CODE_TARGET);
+       Handle<Code>(Isolate::Current()->builtins()->builtin(
+           ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
 
   ParameterCount expected(0);
   __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
@@ -863,7 +863,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments
 
   // Use inline caching to speed up access to arguments.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   __ Call(ic, RelocInfo::CODE_TARGET);
   // It is important that we do not have a test instruction after the
   // call.  A test instruction after the call is used to indicate that
@@ -935,7 +936,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch2: start of next object
   __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
   __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
-          Factory::empty_fixed_array());
+          FACTORY->empty_fixed_array());
   // Field JSArray::kElementsOffset is initialized later.
   __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
 
@@ -943,7 +944,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // fixed array.
   if (initial_capacity == 0) {
     __ Move(FieldOperand(result, JSArray::kElementsOffset),
-            Factory::empty_fixed_array());
+            FACTORY->empty_fixed_array());
     return;
   }
 
@@ -960,7 +961,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // scratch1: elements array
   // scratch2: start of next object
   __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
-          Factory::fixed_array_map());
+          FACTORY->fixed_array_map());
   __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
           Smi::FromInt(initial_capacity));
 
@@ -968,7 +969,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
   static const int kLoopUnfoldLimit = 4;
   ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
-  __ Move(scratch3, Factory::the_hole_value());
+  __ Move(scratch3, FACTORY->the_hole_value());
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
@@ -1052,7 +1053,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // array_size: size of array (smi)
   __ bind(&allocated);
   __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
-  __ Move(elements_array, Factory::empty_fixed_array());
+  __ Move(elements_array, FACTORY->empty_fixed_array());
   __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
   // Field JSArray::kElementsOffset is initialized later.
   __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
@@ -1071,7 +1072,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // elements_array_end: start of next object
   // array_size: size of array (smi)
   __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
-          Factory::fixed_array_map());
+          FACTORY->fixed_array_map());
   Label not_empty_2, fill_array;
   __ SmiTest(array_size);
   __ j(not_zero, &not_empty_2);
@@ -1092,7 +1093,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   __ bind(&fill_array);
   if (fill_with_hole) {
     Label loop, entry;
-    __ Move(scratch, Factory::the_hole_value());
+    __ Move(scratch, FACTORY->the_hole_value());
     __ lea(elements_array, Operand(elements_array,
                                    FixedArray::kHeaderSize - kHeapObjectTag));
     __ jmp(&entry);
@@ -1137,7 +1138,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                        r8,
                        kPreallocatedArrayElements,
                        call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
   __ movq(rax, rbx);
   __ ret(kPointerSize);
 
@@ -1168,7 +1169,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r9,
                   true,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
   __ movq(rax, rbx);
   __ ret(2 * kPointerSize);
 
@@ -1190,7 +1191,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
                   r9,
                   false,
                   call_generic_code);
-  __ IncrementCounter(&Counters::array_function_native, 1);
+  __ IncrementCounter(COUNTERS->array_function_native(), 1);
 
   // rax: argc
   // rbx: JSArray
@@ -1264,7 +1265,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // Jump to the generic array code in case the specialized code cannot handle
   // the construction.
   __ bind(&generic_array_code);
-  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::ArrayCodeGeneric);
   Handle<Code> array_code(code);
   __ Jump(array_code, RelocInfo::CODE_TARGET);
 }
@@ -1298,7 +1300,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // Jump to the generic construct code in case the specialized code cannot
   // handle the construction.
   __ bind(&generic_constructor);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
 }
@@ -1353,7 +1356,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // -----------------------------------
 
   Label invoke, dont_adapt_arguments;
-  __ IncrementCounter(&Counters::arguments_adaptors, 1);
+  __ IncrementCounter(COUNTERS->arguments_adaptors(), 1);
 
   Label enough, too_few;
   __ cmpq(rax, rbx);
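
The FACTORY, HEAP and COUNTERS shorthands used throughout these builtins are defined outside this excerpt; presumably they resolve to accessors on the current isolate, roughly as below (an assumption about their shape, not verbatim definitions from this change):

// Assumed shape of the shorthand macros used above.
#define HEAP     (v8::internal::Isolate::Current()->heap())
#define FACTORY  (v8::internal::Isolate::Current()->factory())
#define COUNTERS (v8::internal::Isolate::Current()->counters())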
index 20d625272ef7f9095da22d02eec979cae09b7db4..1d353617b3a9ce98efa348adbcadda010b9d358f 100644 (file)
@@ -284,7 +284,8 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
 const char* GenericBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -358,7 +359,7 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -394,7 +395,7 @@ void GenericBinaryOpStub::GenerateCall(
 
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -429,7 +430,7 @@ void GenericBinaryOpStub::GenerateCall(
     }
     // Update flags to indicate that arguments are in registers.
     SetArgsInRegisters();
-    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
+    __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
   }
 
   // Call the stub.
@@ -1072,7 +1073,8 @@ void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
 const char* TypeRecordingBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
@@ -1582,15 +1584,16 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ xorl(rcx, rdx);
   __ xorl(rax, rdi);
   __ xorl(rcx, rax);
-  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
-  __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));
+  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
+  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
 
   // ST[0] == double value.
   // rbx = bits of double value.
   // rcx = TranscendentalCache::hash(double value).
   __ movq(rax, ExternalReference::transcendental_cache_array_address());
   // rax points to cache array.
-  __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
+  __ movq(rax, Operand(rax, type_ * sizeof(
+      Isolate::Current()->transcendental_cache()->caches_[0])));
   // rax points to the cache for the type type_.
   // If NULL, the cache hasn't been initialized yet, so go through runtime.
   __ testq(rax, rax);
@@ -1598,7 +1601,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
 #ifdef DEBUG
   // Check that the layout of cache elements match expectations.
   {  // NOLINT - doesn't like a single brace on a line.
-    TranscendentalCache::Element test_elem[2];
+    TranscendentalCache::SubCache::Element test_elem[2];
     char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
     char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
     char* elem_in0  = reinterpret_cast<char*>(&(test_elem[0].in[0]));
@@ -2592,15 +2595,23 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // rcx: encoding of subject string (1 if ascii 0 if two_byte);
   // r11: code
   // All checks done. Now push arguments for native regexp code.
-  __ IncrementCounter(&Counters::regexp_entry_native, 1);
+  __ IncrementCounter(COUNTERS->regexp_entry_native(), 1);
 
-  static const int kRegExpExecuteArguments = 7;
+  // Isolates: note we add an additional parameter here (isolate pointer).
+  static const int kRegExpExecuteArguments = 8;
   int argument_slots_on_stack =
       masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
   __ EnterApiExitFrame(argument_slots_on_stack);
 
-  // Argument 7: Indicate that this is a direct call from JavaScript.
+  // Argument 8: Pass current isolate address.
+  // __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
+  //     Immediate(ExternalReference::isolate_address()));
+  __ movq(kScratchRegister, ExternalReference::isolate_address());
   __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
+          kScratchRegister);
+
+  // Argument 7: Indicate that this is a direct call from JavaScript.
+  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
           Immediate(1));
 
   // Argument 6: Start (high end) of backtracking stack memory area.
@@ -2610,14 +2621,14 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ addq(r9, Operand(kScratchRegister, 0));
   // Argument 6 passed in r9 on Linux and on the stack on Windows.
 #ifdef _WIN64
-  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
+  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
 #endif
 
   // Argument 5: static offsets vector buffer.
   __ movq(r8, ExternalReference::address_of_static_offsets_vector());
   // Argument 5 passed in r8 on Linux and on the stack on Windows.
 #ifdef _WIN64
-  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
+  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
 #endif
 
   // First four arguments are passed in registers on both Linux and Windows.
@@ -2750,7 +2761,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
-  ExternalReference pending_exception_address(Top::k_pending_exception_address);
+  ExternalReference pending_exception_address(
+      Isolate::k_pending_exception_address);
   __ movq(rbx, pending_exception_address);
   __ movq(rax, Operand(rbx, 0));
   __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
@@ -2887,7 +2899,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
   Label load_result_from_cache;
   if (!object_is_smi) {
     __ JumpIfSmi(object, &is_smi);
-    __ CheckMap(object, Factory::heap_number_map(), not_found, true);
+    __ CheckMap(object, FACTORY->heap_number_map(), not_found, true);
 
     STATIC_ASSERT(8 == kDoubleSize);
     __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
@@ -2902,7 +2914,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                          times_1,
                          FixedArray::kHeaderSize));
     __ JumpIfSmi(probe, not_found);
-    ASSERT(CpuFeatures::IsSupported(SSE2));
+    ASSERT(Isolate::Current()->cpu_features()->IsSupported(SSE2));
     CpuFeatures::Scope fscope(SSE2);
     __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
     __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
@@ -2932,7 +2944,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                        index,
                        times_1,
                        FixedArray::kHeaderSize + kPointerSize));
-  __ IncrementCounter(&Counters::number_to_string_native, 1);
+  __ IncrementCounter(COUNTERS->number_to_string_native(), 1);
 }
 
 
@@ -3017,7 +3029,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
       __ bind(&check_for_nan);
     }
 
-    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
+    // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
     // so we do the second best thing - test it ourselves.
     // Note: if cc_ != equal, never_nan_nan_ is not used.
     // We cannot set rax to EQUAL until just before return because
@@ -3030,7 +3042,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
       NearLabel heap_number;
       // If it's not a heap number, then return equal for (in)equality operator.
       __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
-             Factory::heap_number_map());
+             FACTORY->heap_number_map());
       __ j(equal, &heap_number);
       if (cc_ != equal) {
         // Call runtime on identical JSObjects.  Otherwise return equal.
@@ -3075,7 +3087,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
 
         // Check if the non-smi operand is a heap number.
         __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-               Factory::heap_number_map());
+               FACTORY->heap_number_map());
         // If heap number, handle it in the slow case.
         __ j(equal, &slow);
         // Return non-equal.  ebx (the lower half of rbx) is not zero.
@@ -3308,7 +3320,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ Set(rax, argc_);
   __ Set(rbx, 0);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
-  Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+  Handle<Code> adaptor(Isolate::Current()->builtins()->builtin(
+      Builtins::ArgumentsAdaptorTrampoline));
   __ Jump(adaptor, RelocInfo::CODE_TARGET);
 }
 
@@ -3381,18 +3394,21 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
     // Pass a pointer to the Arguments object as the first argument.
     // Return result in single register (rax).
     __ lea(rcx, StackSpaceOperand(0));
+    __ movq(rdx, ExternalReference::isolate_address());
   } else {
     ASSERT_EQ(2, result_size_);
     // Pass a pointer to the result location as the first argument.
     __ lea(rcx, StackSpaceOperand(2));
     // Pass a pointer to the Arguments object as the second argument.
     __ lea(rdx, StackSpaceOperand(0));
+    __ movq(r8, ExternalReference::isolate_address());
   }
 
 #else  // _WIN64
   // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
   __ movq(rdi, r14);  // argc.
   __ movq(rsi, r15);  // argv.
+  __ movq(rdx, ExternalReference::isolate_address());
 #endif
   __ call(rbx);
   // Result is in rax - do not destroy this register!
@@ -3440,7 +3456,8 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   __ j(equal, throw_out_of_memory_exception);
 
   // Retrieve the pending exception and clear the variable.
-  ExternalReference pending_exception_address(Top::k_pending_exception_address);
+  ExternalReference pending_exception_address(
+      Isolate::k_pending_exception_address);
   __ movq(kScratchRegister, pending_exception_address);
   __ movq(rax, Operand(kScratchRegister, 0));
   __ movq(rdx, ExternalReference::the_hole_value_location());
@@ -3573,7 +3590,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // callee save as well.
 
   // Save copies of the top frame descriptor on the stack.
-  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
+  ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address);
   __ load_rax(c_entry_fp);
   __ push(rax);
 
@@ -3584,7 +3601,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   // If this is the outermost JS call, set js_entry_sp value.
-  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
+  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
   __ load_rax(js_entry_sp);
   __ testq(rax, rax);
   __ j(not_zero, &not_outermost_js);
@@ -3598,7 +3615,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Caught exception: Store result (exception) in the pending
   // exception field in the JSEnv and return a failure sentinel.
-  ExternalReference pending_exception(Top::k_pending_exception_address);
+  ExternalReference pending_exception(Isolate::k_pending_exception_address);
   __ store_rax(pending_exception);
   __ movq(rax, Failure::Exception(), RelocInfo::NONE);
   __ jmp(&exit);
@@ -3630,7 +3647,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ call(kScratchRegister);
 
   // Unlink this frame from the handler chain.
-  __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
+  __ movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
   __ pop(Operand(kScratchRegister, 0));
   // Pop next_sp.
   __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
@@ -3647,7 +3664,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Restore the top frame descriptor from the stack.
   __ bind(&exit);
-  __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
+  __ movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address));
   __ pop(Operand(kScratchRegister, 0));
 
   // Restore callee-saved registers (X64 conventions).
@@ -3857,7 +3874,8 @@ const char* CompareStub::GetName() {
 
   if (name_ != NULL) return name_;
   const int kMaxNameLength = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
   if (name_ == NULL) return "OOM";
 
   const char* cc_name;
@@ -3991,7 +4009,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
-  __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
+  __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
   call_helper.BeforeCall(masm);
   __ push(object_);
   __ push(index_);
@@ -4136,7 +4154,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ SmiTest(rcx);
   __ j(not_zero, &second_not_zero_length);
   // Second string is empty, result is first string which is already in rax.
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
   __ bind(&second_not_zero_length);
   __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
@@ -4144,7 +4162,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ j(not_zero, &both_not_zero_length);
   // First string is empty, result is second string which is in rdx.
   __ movq(rax, rdx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Both strings are non-empty.
@@ -4188,7 +4206,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   Label make_two_character_string, make_flat_ascii_string;
   StringHelper::GenerateTwoCharacterSymbolTableProbe(
       masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   __ bind(&make_two_character_string);
@@ -4228,7 +4246,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
   __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
   __ movq(rax, rcx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
   __ bind(&non_ascii);
   // At least one of the strings is two-byte. Check whether it happens
@@ -4302,7 +4320,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   // rdi: length of second argument
   StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
   __ movq(rax, rbx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Handle creating a flat two byte result.
@@ -4339,7 +4357,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   // rdi: length of second argument
   StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
   __ movq(rax, rbx);
-  __ IncrementCounter(&Counters::string_add_native, 1);
+  __ IncrementCounter(COUNTERS->string_add_native(), 1);
   __ ret(2 * kPointerSize);
 
   // Just jump to runtime to add the two strings.
@@ -4754,7 +4772,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   // rsi: character of sub string start
   StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
   __ movq(rsi, rdx);  // Restore rsi.
-  __ IncrementCounter(&Counters::sub_string_native, 1);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1);
   __ ret(kArgumentsSize);
 
   __ bind(&non_ascii_flat);
@@ -4791,7 +4809,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ movq(rsi, rdx);  // Restore rsi.
 
   __ bind(&return_rax);
-  __ IncrementCounter(&Counters::sub_string_native, 1);
+  __ IncrementCounter(COUNTERS->sub_string_native(), 1);
   __ ret(kArgumentsSize);
 
   // Just jump to runtime to create the sub string.
@@ -4905,7 +4923,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   __ cmpq(rdx, rax);
   __ j(not_equal, &not_same);
   __ Move(rax, Smi::FromInt(EQUAL));
-  __ IncrementCounter(&Counters::string_compare_native, 1);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1);
   __ ret(2 * kPointerSize);
 
   __ bind(&not_same);
@@ -4914,7 +4932,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
 
   // Inline comparison of ascii strings.
-  __ IncrementCounter(&Counters::string_compare_native, 1);
+  __ IncrementCounter(COUNTERS->string_compare_native(), 1);
   // Drop arguments from the stack
   __ pop(rcx);
   __ addq(rsp, Immediate(2 * kPointerSize));
index 031cc960d23d833768e851973b186a170a8bc6fe..40d65638f80d27a05cd08bb4ea8c7ed8e1770ff7 100644 (file)
@@ -180,7 +180,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
   ASSERT_EQ(0, loop_nesting_);
   loop_nesting_ = info->is_in_loop() ? 1 : 0;
 
-  JumpTarget::set_compiling_deferred_code(false);
+  Isolate::Current()->set_jump_target_compiling_deferred_code(false);
 
   {
     CodeGenState state(this);
@@ -281,7 +281,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
 
     // Initialize ThisFunction reference if present.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      frame_->Push(Factory::the_hole_value());
+      frame_->Push(FACTORY->the_hole_value());
       StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
     }
 
@@ -316,7 +316,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
     if (!scope()->HasIllegalRedeclaration()) {
       Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-      bool is_builtin = Bootstrapper::IsActive();
+      bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
       bool should_trace =
           is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
       if (should_trace) {
@@ -333,7 +333,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
         ASSERT(!function_return_is_shadowed_);
         CodeForReturnPosition(info->function());
         frame_->PrepareForReturn();
-        Result undefined(Factory::undefined_value());
+        Result undefined(FACTORY->undefined_value());
         if (function_return_.is_bound()) {
           function_return_.Jump(&undefined);
         } else {
@@ -365,9 +365,9 @@ void CodeGenerator::Generate(CompilationInfo* info) {
 
   // Process any deferred code using the register allocator.
   if (!HasStackOverflow()) {
-    JumpTarget::set_compiling_deferred_code(true);
+    info->isolate()->set_jump_target_compiling_deferred_code(true);
     ProcessDeferred();
-    JumpTarget::set_compiling_deferred_code(false);
+    info->isolate()->set_jump_target_compiling_deferred_code(false);
   }
 
   // There is no need to delete the register allocator, it is a
@@ -516,12 +516,12 @@ void CodeGenerator::Load(Expression* expr) {
   if (dest.false_was_fall_through()) {
     // The false target was just bound.
     JumpTarget loaded;
-    frame_->Push(Factory::false_value());
+    frame_->Push(FACTORY->false_value());
     // There may be dangling jumps to the true target.
     if (true_target.is_linked()) {
       loaded.Jump();
       true_target.Bind();
-      frame_->Push(Factory::true_value());
+      frame_->Push(FACTORY->true_value());
       loaded.Bind();
     }
 
@@ -529,11 +529,11 @@ void CodeGenerator::Load(Expression* expr) {
     // There is true, and possibly false, control flow (with true as
     // the fall through).
     JumpTarget loaded;
-    frame_->Push(Factory::true_value());
+    frame_->Push(FACTORY->true_value());
     if (false_target.is_linked()) {
       loaded.Jump();
       false_target.Bind();
-      frame_->Push(Factory::false_value());
+      frame_->Push(FACTORY->false_value());
       loaded.Bind();
     }
 
@@ -548,14 +548,14 @@ void CodeGenerator::Load(Expression* expr) {
       loaded.Jump();  // Don't lose the current TOS.
       if (true_target.is_linked()) {
         true_target.Bind();
-        frame_->Push(Factory::true_value());
+        frame_->Push(FACTORY->true_value());
         if (false_target.is_linked()) {
           loaded.Jump();
         }
       }
       if (false_target.is_linked()) {
         false_target.Bind();
-        frame_->Push(Factory::false_value());
+        frame_->Push(FACTORY->false_value());
       }
       loaded.Bind();
     }
@@ -632,7 +632,7 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
     // When using lazy arguments allocation, we store the arguments marker value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    frame_->Push(Factory::arguments_marker());
+    frame_->Push(FACTORY->arguments_marker());
   } else {
     ArgumentsAccessStub stub(is_strict_mode()
         ? ArgumentsAccessStub::NEW_STRICT
@@ -1062,7 +1062,7 @@ void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
 
 
 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
-  Object* answer_object = Heap::undefined_value();
+  Object* answer_object = HEAP->undefined_value();
   switch (op) {
     case Token::ADD:
       // Use intptr_t to detect overflow of 32-bit int.
@@ -1136,7 +1136,7 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
       UNREACHABLE();
       break;
   }
-  if (answer_object == Heap::undefined_value()) {
+  if (answer_object->IsUndefined()) {
     return false;
   }
   frame_->Push(Handle<Object>(answer_object));
@@ -1371,7 +1371,7 @@ Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
         if (!left_type_info.IsNumber()) {
           // Branch if not a heapnumber.
           __ Cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset),
-                 Factory::heap_number_map());
+                 FACTORY->heap_number_map());
           deferred->Branch(not_equal);
         }
         // Load integer value into answer register using truncation.
@@ -2333,7 +2333,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
         // not to be a smi.
         JumpTarget not_number;
         __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
-               Factory::heap_number_map());
+               FACTORY->heap_number_map());
         not_number.Branch(not_equal, left_side);
         __ movsd(xmm1,
                  FieldOperand(left_reg, HeapNumber::kValueOffset));
@@ -2493,7 +2493,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   // give us a megamorphic load site. Not super, but it works.
   Load(applicand);
   frame()->Dup();
-  Handle<String> name = Factory::LookupAsciiSymbol("apply");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
   frame()->Push(name);
   Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
   __ nop();
@@ -2561,7 +2561,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
       __ j(not_equal, &build_args);
       __ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset));
       __ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+      Handle<Code> apply_code(Isolate::Current()->builtins()->builtin(
+          Builtins::FunctionApply));
       __ Cmp(rcx, apply_code);
       __ j(not_equal, &build_args);
 
@@ -2804,7 +2805,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
   // If we have a function or a constant, we need to initialize the variable.
   Expression* val = NULL;
   if (node->mode() == Variable::CONST) {
-    val = new Literal(Factory::the_hole_value());
+    val = new Literal(FACTORY->the_hole_value());
   } else {
     val = node->fun();  // NULL if we don't have a function
   }
@@ -3980,7 +3981,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // Make sure that there's nothing left on the stack above the
   // handler structure.
@@ -4109,7 +4110,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from the try
   // chain and set the state on the frame to FALLING.
@@ -4276,8 +4277,8 @@ void CodeGenerator::InstantiateFunction(
     frame_->EmitPush(rsi);
     frame_->EmitPush(function_info);
     frame_->EmitPush(pretenure
-                     ? Factory::true_value()
-                     : Factory::false_value());
+                     ? FACTORY->true_value()
+                     : FACTORY->false_value());
     Result result = frame_->CallRuntime(Runtime::kNewClosure, 3);
     frame_->Push(&result);
   }
@@ -4762,7 +4763,7 @@ class DeferredAllocateInNewSpace: public DeferredCode {
                              Register target,
                              int registers_to_save = 0)
     : size_(size), target_(target), registers_to_save_(registers_to_save) {
-    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+    ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
     set_comment("[ DeferredAllocateInNewSpace");
   }
   void Generate();
@@ -4977,11 +4978,11 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
   frame_->Push(node->constant_elements());
   int length = node->values()->length();
   Result clone;
-  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     clone = frame_->CallStub(&stub, 3);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+    __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
   } else if (node->depth() > 1) {
     clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -5379,7 +5380,7 @@ void CodeGenerator::VisitCall(Call* node) {
     Load(function);
 
     // Allocate a frame slot for the receiver.
-    frame_->Push(Factory::undefined_value());
+    frame_->Push(FACTORY->undefined_value());
 
     // Load the arguments.
     int arg_count = args->length();
@@ -5411,7 +5412,7 @@ void CodeGenerator::VisitCall(Call* node) {
       if (arg_count > 0) {
         frame_->PushElementAt(arg_count);
       } else {
-        frame_->Push(Factory::undefined_value());
+        frame_->Push(FACTORY->undefined_value());
       }
       frame_->PushParameterAt(-1);
 
@@ -5433,7 +5434,7 @@ void CodeGenerator::VisitCall(Call* node) {
     if (arg_count > 0) {
       frame_->PushElementAt(arg_count);
     } else {
-      frame_->Push(Factory::undefined_value());
+      frame_->Push(FACTORY->undefined_value());
     }
     frame_->PushParameterAt(-1);
 
@@ -5722,7 +5723,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
   }
 #endif
   // Finally, we're expected to leave a value on the top of the stack.
-  frame_->Push(Factory::undefined_value());
+  frame_->Push(FACTORY->undefined_value());
 }
 
 
@@ -5985,7 +5986,7 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
   Condition is_smi = masm_->CheckSmi(obj.reg());
   destination()->false_target()->Branch(is_smi);
 
-  __ Move(kScratchRegister, Factory::null_value());
+  __ Move(kScratchRegister, FACTORY->null_value());
   __ cmpq(obj.reg(), kScratchRegister);
   destination()->true_target()->Branch(equal);
 
@@ -6077,7 +6078,7 @@ class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
     __ jmp(&entry);
     __ bind(&loop);
     __ movq(scratch2_, FieldOperand(map_result_, 0));
-    __ Cmp(scratch2_, Factory::value_of_symbol());
+    __ Cmp(scratch2_, FACTORY->value_of_symbol());
     __ j(equal, &false_result);
     __ addq(map_result_, Immediate(kPointerSize));
     __ bind(&entry);
@@ -6289,17 +6290,17 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
 
   // Functions have class 'Function'.
   function.Bind();
-  frame_->Push(Factory::function_class_symbol());
+  frame_->Push(FACTORY->function_class_symbol());
   leave.Jump();
 
   // Objects with a non-function constructor have class 'Object'.
   non_function_constructor.Bind();
-  frame_->Push(Factory::Object_symbol());
+  frame_->Push(FACTORY->Object_symbol());
   leave.Jump();
 
   // Non-JS objects have class null.
   null.Bind();
-  frame_->Push(Factory::null_value());
+  frame_->Push(FACTORY->null_value());
 
   // All done.
   leave.Bind();
@@ -6668,10 +6669,10 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      Isolate::Current()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
-    frame_->Push(Factory::undefined_value());
+    frame_->Push(FACTORY->undefined_value());
     return;
   }
 
@@ -6837,7 +6838,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
   __ bind(&done);
 
   deferred->BindExit();
-  frame_->Push(Factory::undefined_value());
+  frame_->Push(FACTORY->undefined_value());
 }
 
 
@@ -7181,7 +7182,7 @@ void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
 
 
 void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
-  frame_->Push(Factory::undefined_value());
+  frame_->Push(FACTORY->undefined_value());
 }
 
 
@@ -7192,7 +7193,7 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
 
   ZoneList<Expression*>* args = node->arguments();
   Comment cmnt(masm_, "[ CallRuntime");
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
 
   if (function == NULL) {
     // Push the builtins object found in the current global object.
@@ -7276,12 +7277,12 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
       } else {
         // Default: Result of deleting non-global, not dynamically
         // introduced variables is false.
-        frame_->Push(Factory::false_value());
+        frame_->Push(FACTORY->false_value());
       }
     } else {
       // Default: Result of deleting expressions is true.
       Load(node->expression());  // may have side-effects
-      frame_->SetElementAt(0, Factory::true_value());
+      frame_->SetElementAt(0, FACTORY->true_value());
     }
 
   } else if (op == Token::TYPEOF) {
@@ -7302,10 +7303,10 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
         expression->AsLiteral()->IsNull())) {
       // Omit evaluating the value of the primitive literal.
       // It will be discarded anyway, and can have no side effect.
-      frame_->Push(Factory::undefined_value());
+      frame_->Push(FACTORY->undefined_value());
     } else {
       Load(node->expression());
-      frame_->SetElementAt(0, Factory::undefined_value());
+      frame_->SetElementAt(0, FACTORY->undefined_value());
     }
 
   } else {
@@ -7777,7 +7778,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
     Result answer = frame_->Pop();
     answer.ToRegister();
 
-    if (check->Equals(Heap::number_symbol())) {
+    if (check->Equals(HEAP->number_symbol())) {
       Condition is_smi = masm_->CheckSmi(answer.reg());
       destination()->true_target()->Branch(is_smi);
       frame_->Spill(answer.reg());
@@ -7786,7 +7787,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(equal);
 
-    } else if (check->Equals(Heap::string_symbol())) {
+    } else if (check->Equals(HEAP->string_symbol())) {
       Condition is_smi = masm_->CheckSmi(answer.reg());
       destination()->false_target()->Branch(is_smi);
 
@@ -7800,14 +7801,14 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(below);  // Unsigned byte comparison needed.
 
-    } else if (check->Equals(Heap::boolean_symbol())) {
+    } else if (check->Equals(HEAP->boolean_symbol())) {
       __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
       destination()->true_target()->Branch(equal);
       __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
       answer.Unuse();
       destination()->Split(equal);
 
-    } else if (check->Equals(Heap::undefined_symbol())) {
+    } else if (check->Equals(HEAP->undefined_symbol())) {
       __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
       destination()->true_target()->Branch(equal);
 
@@ -7822,7 +7823,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(not_zero);
 
-    } else if (check->Equals(Heap::function_symbol())) {
+    } else if (check->Equals(HEAP->function_symbol())) {
       Condition is_smi = masm_->CheckSmi(answer.reg());
       destination()->false_target()->Branch(is_smi);
       frame_->Spill(answer.reg());
@@ -7833,7 +7834,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       answer.Unuse();
       destination()->Split(equal);
 
-    } else if (check->Equals(Heap::object_symbol())) {
+    } else if (check->Equals(HEAP->object_symbol())) {
       Condition is_smi = masm_->CheckSmi(answer.reg());
       destination()->false_target()->Branch(is_smi);
       __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
@@ -7997,7 +7998,8 @@ void DeferredReferenceGetNamedValue::Generate() {
     __ movq(rax, receiver_);
   }
   __ Move(rcx, name_);
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   __ Call(ic, RelocInfo::CODE_TARGET);
   // The call must be followed by a test rax instruction to indicate
   // that the inobject property case was inlined.
@@ -8009,7 +8011,7 @@ void DeferredReferenceGetNamedValue::Generate() {
   // Here we use masm_-> instead of the __ macro because this is the
   // instruction that gets patched and coverage code gets in the way.
   masm_->testl(rax, Immediate(-delta_to_patch_site));
-  __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);
 
   if (!dst_.is(rax)) __ movq(dst_, rax);
 }
@@ -8062,7 +8064,8 @@ void DeferredReferenceGetKeyedValue::Generate() {
   // it in the IC initialization code and patch the movq instruction.
   // This means that we cannot allow test instructions after calls to
   // KeyedLoadIC stubs in other places.
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   __ Call(ic, RelocInfo::CODE_TARGET);
   // The delta from the start of the map-compare instruction to the
   // test instruction.  We use masm_-> directly here instead of the __
@@ -8076,7 +8079,7 @@ void DeferredReferenceGetKeyedValue::Generate() {
   // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
   // be generated normally.
   masm_->testl(rax, Immediate(-delta_to_patch_site));
-  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);
 
   if (!dst_.is(rax)) __ movq(dst_, rax);
 }
@@ -8109,7 +8112,7 @@ class DeferredReferenceSetKeyedValue: public DeferredCode {
 
 
 void DeferredReferenceSetKeyedValue::Generate() {
-  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
   // Move value, receiver, and key to registers rax, rdx, and rcx, as
   // the IC stub expects.
   // Move value to rax, using xchg if the receiver or key is in rax.
@@ -8156,7 +8159,7 @@ void DeferredReferenceSetKeyedValue::Generate() {
   }
 
   // Call the IC stub.
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                     : Builtins::KeyedStoreIC_Initialize));
   __ Call(ic, RelocInfo::CODE_TARGET);
@@ -8225,7 +8228,7 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     // This is the map check instruction that will be patched (so we can't
     // use the double underscore macro that may insert instructions).
     // Initially use an invalid map to force a failure.
-    masm()->movq(kScratchRegister, Factory::null_value(),
+    masm()->movq(kScratchRegister, FACTORY->null_value(),
                  RelocInfo::EMBEDDED_OBJECT);
     masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                  kScratchRegister);
@@ -8244,7 +8247,7 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
     int offset = kMaxInt;
     masm()->movq(result.reg(), FieldOperand(receiver.reg(), offset));
 
-    __ IncrementCounter(&Counters::named_load_inline, 1);
+    __ IncrementCounter(COUNTERS->named_load_inline(), 1);
     deferred->BindExit();
   }
   ASSERT(frame()->height() == original_height - 1);
@@ -8302,7 +8305,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
     // the __ macro for the following two instructions because it
     // might introduce extra instructions.
     __ bind(&patch_site);
-    masm()->movq(kScratchRegister, Factory::null_value(),
+    masm()->movq(kScratchRegister, FACTORY->null_value(),
                  RelocInfo::EMBEDDED_OBJECT);
     masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                  kScratchRegister);
@@ -8416,7 +8419,7 @@ Result CodeGenerator::EmitKeyedLoad() {
     // coverage code can interfere with the patching.  Do not use a load
     // from the root array to load null_value, since the load must be patched
     // with the expected receiver map, which is not in the root array.
-    masm_->movq(kScratchRegister, Factory::null_value(),
+    masm_->movq(kScratchRegister, FACTORY->null_value(),
                 RelocInfo::EMBEDDED_OBJECT);
     masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                 kScratchRegister);
@@ -8451,7 +8454,7 @@ Result CodeGenerator::EmitKeyedLoad() {
     result = elements;
     __ CompareRoot(result.reg(), Heap::kTheHoleValueRootIndex);
     deferred->Branch(equal);
-    __ IncrementCounter(&Counters::keyed_load_inline, 1);
+    __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);
 
     deferred->BindExit();
   } else {
@@ -8540,7 +8543,7 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
     __ bind(deferred->patch_site());
     // Avoid using __ to ensure the distance from patch_site
     // to the map address is always the same.
-    masm()->movq(kScratchRegister, Factory::fixed_array_map(),
+    masm()->movq(kScratchRegister, FACTORY->fixed_array_map(),
                RelocInfo::EMBEDDED_OBJECT);
     __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
             kScratchRegister);
@@ -8562,7 +8565,7 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
                          index.scale,
                          FixedArray::kHeaderSize),
             result.reg());
-    __ IncrementCounter(&Counters::keyed_store_inline, 1);
+    __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);
 
     deferred->BindExit();
   } else {
index 118aa379266182ee708859d1df7d33dcf0b776eb..9a70907cc83513281f635166973e180b93f43b46 100644 (file)
@@ -733,6 +733,7 @@ class CodeGenerator: public AstVisitor {
   bool in_spilled_code_;
 
   friend class VirtualFrame;
+  friend class Isolate;
   friend class JumpTarget;
   friend class Reference;
   friend class Result;
@@ -741,6 +742,7 @@ class CodeGenerator: public AstVisitor {
   friend class FullCodeGenSyntaxChecker;
 
   friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc
+  friend class InlineRuntimeFunctionsTable;
 
   DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
 };
index 3ff292e82b111e790f2300732e1830c761c5d7eb..b49fb1c4f0fcd02d93e57dfc18d9413db4eec0e9 100644 (file)
@@ -42,7 +42,7 @@ namespace v8 {
 namespace internal {
 
 void CPU::Setup() {
-  CpuFeatures::Probe(true);
+  Isolate::Current()->cpu_features()->Probe(true);
   if (Serializer::enabled()) {
     V8::DisableCrankshaft();
   }
index 2c50ddd14efbff6bf381ff1c845f066fd6231872..19175bcedfa11ae5387ce5d6ca211c4a4b4dc1a0 100644 (file)
@@ -49,7 +49,8 @@ bool BreakLocationIterator::IsDebugBreakAtReturn()  {
 void BreakLocationIterator::SetDebugBreakAtReturn()  {
   ASSERT(Assembler::kJSReturnSequenceLength >=
          Assembler::kCallInstructionLength);
-  rinfo()->PatchCodeWithCall(Debug::debug_break_return()->entry(),
+  rinfo()->PatchCodeWithCall(
+      Isolate::Current()->debug()->debug_break_return()->entry(),
       Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
 }
 
@@ -79,7 +80,7 @@ bool BreakLocationIterator::IsDebugBreakAtSlot() {
 void BreakLocationIterator::SetDebugBreakAtSlot() {
   ASSERT(IsDebugBreakSlot());
   rinfo()->PatchCodeWithCall(
-      Debug::debug_break_slot()->entry(),
+      Isolate::Current()->debug()->debug_break_slot()->entry(),
       Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
 }
 
index 177157000bc05dcfafdf001a785334acb5e49a40..257b7eab465c33ee2004b0492f27b1af94b92d48 100644 (file)
@@ -187,8 +187,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 
   // Add the deoptimizing code to the list.
   DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
-  node->set_next(deoptimizing_code_list_);
-  deoptimizing_code_list_ = node;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  node->set_next(data->deoptimizing_code_list_);
+  data->deoptimizing_code_list_ = node;
 
   // Set the code for the function to non-optimized version.
   function->ReplaceCode(function->shared()->code());
@@ -388,7 +389,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
         optimized_code_->entry() + pc_offset);
     output_[0]->SetPc(pc);
   }
-  Code* continuation = Builtins::builtin(Builtins::NotifyOSR);
+  Code* continuation = Isolate::Current()->builtins()->builtin(
+      Builtins::NotifyOSR);
   output_[0]->SetContinuation(
       reinterpret_cast<intptr_t>(continuation->entry()));
 
@@ -560,8 +562,9 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
   // Set the continuation for the topmost frame.
   if (is_topmost) {
     Code* continuation = (bailout_type_ == EAGER)
-        ? Builtins::builtin(Builtins::NotifyDeoptimized)
-        : Builtins::builtin(Builtins::NotifyLazyDeoptimized);
+        ? Isolate::Current()->builtins()->builtin(Builtins::NotifyDeoptimized)
+        : Isolate::Current()->builtins()->builtin(
+              Builtins::NotifyLazyDeoptimized);
     output_frame->SetContinuation(
         reinterpret_cast<intptr_t>(continuation->entry()));
   }
index 80dbfe12415919c6c66feade0467054dcf092f59..189ee42cea457d5569b29bf9d8a69ae14dbb95a5 100644 (file)
@@ -269,6 +269,7 @@ void InstructionTable::AddJumpConditionalShort() {
 
 static InstructionTable instruction_table;
 
+
 static InstructionDesc cmov_instructions[16] = {
   {"cmovo", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
   {"cmovno", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
@@ -1667,9 +1668,8 @@ static const char* xmm_regs[16] = {
 
 
 const char* NameConverter::NameOfAddress(byte* addr) const {
-  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
-  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
-  return tmp_buffer.start();
+  v8::internal::OS::SNPrintF(tmp_buffer_, "%p", addr);
+  return tmp_buffer_.start();
 }
 
 
index 81be8191962eaa9ab617fccd0effac3163d8cb58..6a605d8b06d8bce0d7c9ba759f7e7e98fb410ad1 100644 (file)
@@ -28,6 +28,8 @@
 #ifndef V8_X64_FRAMES_X64_H_
 #define V8_X64_FRAMES_X64_H_
 
+#include "memory.h"
+
 namespace v8 {
 namespace internal {
 
index a3cf47d489fd97e56d0ff3089bfe3c82daeba3a4..7713dfb99a247dd156921a952ea0fe9e4b384e37 100644 (file)
@@ -481,10 +481,10 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
     Label* materialize_false) const {
   NearLabel done;
   __ bind(materialize_true);
-  __ Move(result_register(), Factory::true_value());
+  __ Move(result_register(), isolate()->factory()->true_value());
   __ jmp(&done);
   __ bind(materialize_false);
-  __ Move(result_register(), Factory::false_value());
+  __ Move(result_register(), isolate()->factory()->false_value());
   __ bind(&done);
 }
 
@@ -494,10 +494,10 @@ void FullCodeGenerator::StackValueContext::Plug(
     Label* materialize_false) const {
   NearLabel done;
   __ bind(materialize_true);
-  __ Push(Factory::true_value());
+  __ Push(isolate()->factory()->true_value());
   __ jmp(&done);
   __ bind(materialize_false);
-  __ Push(Factory::false_value());
+  __ Push(isolate()->factory()->false_value());
   __ bind(&done);
 }
 
@@ -740,9 +740,9 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
              prop->key()->AsLiteral()->handle()->IsSmi());
       __ Move(rcx, prop->key()->AsLiteral()->handle());
 
-      Handle<Code> ic(Builtins::builtin(
-          is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
-                           : Builtins::KeyedStoreIC_Initialize));
+      Handle<Code> ic(isolate()->builtins()->builtin(is_strict_mode()
+          ? Builtins::KeyedStoreIC_Initialize_Strict
+          : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
     }
   }
@@ -1050,7 +1050,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   } else {
     __ push(rsi);
     __ Push(info);
-    __ Push(pretenure ? Factory::true_value() : Factory::false_value());
+    __ Push(pretenure
+            ? isolate()->factory()->true_value()
+            : isolate()->factory()->false_value());
     __ CallRuntime(Runtime::kNewClosure, 3);
   }
   context()->Plug(rax);
@@ -1119,7 +1121,8 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
   // load IC call.
   __ movq(rax, GlobalObjectOperand());
   __ Move(rcx, slot->var()->name());
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
@@ -1202,7 +1205,8 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
                   ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                     slow));
           __ Move(rax, key_literal->handle());
-          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+          Handle<Code> ic(isolate()->builtins()->builtin(
+              Builtins::KeyedLoadIC_Initialize));
           EmitCallIC(ic, RelocInfo::CODE_TARGET);
           __ jmp(done);
         }
@@ -1225,7 +1229,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     // object on the stack.
     __ Move(rcx, var->name());
     __ movq(rax, GlobalObjectOperand());
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
     context()->Plug(rax);
 
@@ -1288,7 +1293,8 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     __ Move(rax, key_literal->handle());
 
     // Do a keyed property load.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     context()->Plug(rax);
   }
@@ -1393,7 +1399,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
           __ Move(rcx, key->handle());
           __ movq(rdx, Operand(rsp, 0));
           if (property->emit_store()) {
-            Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+            Handle<Code> ic(isolate()->builtins()->builtin(
+                Builtins::StoreIC_Initialize));
             EmitCallIC(ic, RelocInfo::CODE_TARGET);
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           }
@@ -1442,11 +1449,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(expr->literal_index()));
   __ Push(expr->constant_elements());
-  if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (expr->constant_elements()->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     __ CallStub(&stub);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
   } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1630,14 +1638,16 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
   __ Move(rcx, key->handle());
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
 
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
 }
 
@@ -1743,7 +1753,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
       __ movq(rdx, rax);
       __ pop(rax);  // Restore value.
       __ Move(rcx, prop->key()->AsLiteral()->handle());
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1766,7 +1776,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
         __ pop(rdx);
       }
       __ pop(rax);  // Restore value.
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1792,9 +1802,9 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
     // rcx, and the global object on the stack.
     __ Move(rcx, var->name());
     __ movq(rdx, GlobalObjectOperand());
-    Handle<Code> ic(Builtins::builtin(
-        is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
-                         : Builtins::StoreIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(is_strict_mode()
+        ? Builtins::StoreIC_Initialize_Strict
+        : Builtins::StoreIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
 
   } else if (op == Token::INIT_CONST) {
@@ -1895,7 +1905,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   } else {
     __ pop(rdx);
   }
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                        : Builtins::StoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1935,7 +1945,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   }
   // Record source code position before IC call.
   SetSourcePosition(expr->position());
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                        : Builtins::KeyedStoreIC_Initialize));
   EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1988,7 +1998,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
   SetSourcePosition(expr->position());
   // Call the IC initialization code.
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
   // Restore context register.
@@ -2021,7 +2032,8 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   SetSourcePosition(expr->position());
   // Call the IC initialization code.
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
   EmitCallIC(ic, mode);
   RecordJSReturnSite(expr);
@@ -2214,7 +2226,8 @@ void FullCodeGenerator::VisitCall(Call* expr) {
         // Record source code position for IC call.
         SetSourcePosition(prop->position());
 
-        Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+        Handle<Code> ic(isolate()->builtins()->builtin(
+            Builtins::KeyedLoadIC_Initialize));
         EmitCallIC(ic, RelocInfo::CODE_TARGET);
         // Push result (function).
         __ push(rax);
@@ -2235,7 +2248,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     // also use the full code generator.
     FunctionLiteral* lit = fun->AsFunctionLiteral();
     if (lit != NULL &&
-        lit->name()->Equals(Heap::empty_string()) &&
+        lit->name()->Equals(isolate()->heap()->empty_string()) &&
         loop_depth() == 0) {
       lit->set_try_full_codegen(true);
     }
@@ -2282,7 +2295,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Set(rax, arg_count);
   __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
 
-  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> construct_builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
   context()->Plug(rax);
 }
@@ -2615,12 +2629,12 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
 
   // Functions have class 'Function'.
   __ bind(&function);
-  __ Move(rax, Factory::function_class_symbol());
+  __ Move(rax, isolate()->factory()->function_class_symbol());
   __ jmp(&done);
 
   // Objects with a non-function constructor have class 'Object'.
   __ bind(&non_function_constructor);
-  __ Move(rax, Factory::Object_symbol());
+  __ Move(rax, isolate()->factory()->Object_symbol());
   __ jmp(&done);
 
   // Non-JS objects have class null.
@@ -3079,7 +3093,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      isolate()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
@@ -3156,10 +3170,10 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
   __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
   __ j(equal, &ok);
   __ bind(&fail);
-  __ Move(rax, Factory::false_value());
+  __ Move(rax, isolate()->factory()->false_value());
   __ jmp(&done);
   __ bind(&ok);
-  __ Move(rax, Factory::true_value());
+  __ Move(rax, isolate()->factory()->true_value());
   __ bind(&done);
 
   context()->Plug(rax);
@@ -3236,7 +3250,8 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
     // Call the JS runtime function using a call IC.
     __ Move(rcx, expr->name());
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> ic =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
     // Restore context register.
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -3548,7 +3563,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case NAMED_PROPERTY: {
       __ Move(rcx, prop->key()->AsLiteral()->handle());
       __ pop(rdx);
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::StoreIC_Initialize_Strict
                            : Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3565,7 +3580,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case KEYED_PROPERTY: {
       __ pop(rcx);
       __ pop(rdx);
-      Handle<Code> ic(Builtins::builtin(
+      Handle<Code> ic(isolate()->builtins()->builtin(
           is_strict_mode() ? Builtins::KeyedStoreIC_Initialize_Strict
                            : Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3592,7 +3607,8 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
     Comment cmnt(masm_, "Global variable");
     __ Move(rcx, proxy->name());
     __ movq(rax, GlobalObjectOperand());
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(isolate()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -3645,12 +3661,12 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
   }
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
 
-  if (check->Equals(Heap::number_symbol())) {
+  if (check->Equals(isolate()->heap()->number_symbol())) {
     __ JumpIfSmi(rax, if_true);
     __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
     __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
     Split(equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::string_symbol())) {
+  } else if (check->Equals(isolate()->heap()->string_symbol())) {
     __ JumpIfSmi(rax, if_false);
     // Check for undetectable objects => false.
     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
@@ -3658,12 +3674,12 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
     Split(zero, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::boolean_symbol())) {
+  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
     __ j(equal, if_true);
     __ CompareRoot(rax, Heap::kFalseValueRootIndex);
     Split(equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::undefined_symbol())) {
+  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
     __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
     __ j(equal, if_true);
     __ JumpIfSmi(rax, if_false);
@@ -3672,11 +3688,11 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
     Split(not_zero, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::function_symbol())) {
+  } else if (check->Equals(isolate()->heap()->function_symbol())) {
     __ JumpIfSmi(rax, if_false);
     __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
     Split(above_equal, if_true, if_false, fall_through);
-  } else if (check->Equals(Heap::object_symbol())) {
+  } else if (check->Equals(isolate()->heap()->object_symbol())) {
     __ JumpIfSmi(rax, if_false);
     __ CompareRoot(rax, Heap::kNullValueRootIndex);
     __ j(equal, if_true);
@@ -3858,16 +3874,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1);
+      __ IncrementCounter(COUNTERS->named_load_full(), 1);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1);
+      __ IncrementCounter(COUNTERS->named_store_full(), 1);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
     default:
       break;
   }
@@ -3901,16 +3917,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
   switch (ic->kind()) {
     case Code::LOAD_IC:
-      __ IncrementCounter(&Counters::named_load_full, 1);
+      __ IncrementCounter(COUNTERS->named_load_full(), 1);
       break;
     case Code::KEYED_LOAD_IC:
-      __ IncrementCounter(&Counters::keyed_load_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
       break;
     case Code::STORE_IC:
-      __ IncrementCounter(&Counters::named_store_full, 1);
+      __ IncrementCounter(COUNTERS->named_store_full(), 1);
       break;
     case Code::KEYED_STORE_IC:
-      __ IncrementCounter(&Counters::keyed_store_full, 1);
+      __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
     default:
       break;
   }
index f5b2fef40b8366160d648ad98f0d9945602f2579..2774fbeebe9c35113dbc2b1a079e03871808b3af 100644 (file)
@@ -579,7 +579,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
                         rax,
                         NULL,
                         &slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1);
   __ ret(0);
 
   __ bind(&check_number_dictionary);
@@ -601,7 +601,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // Slow case: Jump to runtime.
   // rdx: receiver
   // rax: key
-  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1);
   GenerateRuntimeGetProperty(masm);
 
   __ bind(&check_string);
@@ -652,7 +652,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
   __ addq(rcx, rdi);
   __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);
 
   // Load property array property.
@@ -660,7 +660,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
   __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
                             FixedArray::kHeaderSize));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);
 
   // Do a quick inline probe of the receiver's dictionary, if it
@@ -675,7 +675,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
 
   GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
-  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1);
   __ ret(0);
 
   __ bind(&index_string);
@@ -880,7 +880,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                          Code::kNoExtraICState,
                                          NORMAL,
                                          argc);
-  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, rax);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
+                                                  rax);
 
   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
@@ -916,7 +917,8 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
 
   // Probe the stub cache for the value object.
   __ bind(&probe);
-  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
+                                                  no_reg);
 
   __ bind(&miss);
 }
@@ -986,9 +988,9 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
   // -----------------------------------
 
   if (id == IC::kCallIC_Miss) {
-    __ IncrementCounter(&Counters::call_miss, 1);
+    __ IncrementCounter(COUNTERS->call_miss(), 1);
   } else {
-    __ IncrementCounter(&Counters::keyed_call_miss, 1);
+    __ IncrementCounter(COUNTERS->keyed_call_miss(), 1);
   }
 
   // Get the receiver of the function from the stack; 1 ~ return address.
@@ -1114,7 +1116,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
 
   GenerateFastArrayLoad(
       masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1);
 
   __ bind(&do_call);
   // receiver in rdx is not used after this point.
@@ -1132,13 +1134,13 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ SmiToInteger32(rbx, rcx);
   // rbx: untagged index
   GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1);
   __ jmp(&do_call);
 
   __ bind(&slow_load);
   // This branch is taken when calling KeyedCallIC_Miss is neither required
   // nor beneficial.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1);
   __ EnterInternalFrame();
   __ push(rcx);  // save the key
   __ push(rdx);  // pass the receiver
@@ -1165,11 +1167,11 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ j(not_equal, &lookup_monomorphic_cache);
 
   GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1);
   __ jmp(&do_call);
 
   __ bind(&lookup_monomorphic_cache);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1);
   GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
   // Fall through on miss.
 
@@ -1180,7 +1182,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // - the value loaded is not a function,
   // - there is hope that the runtime will create a monomorphic call stub
   //   that will get fetched next time.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1);
   GenerateMiss(masm, argc);
 
   __ bind(&index_string);
@@ -1238,7 +1240,8 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC);
-  StubCache::GenerateProbe(masm, flags, rax, rcx, rbx, rdx);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
+                                                  rdx);
 
   // Cache miss: Jump to runtime.
   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
@@ -1273,7 +1276,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- rsp[0] : return address
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::load_miss, 1);
+  __ IncrementCounter(COUNTERS->load_miss(), 1);
 
   __ pop(rbx);
   __ push(rax);  // receiver
@@ -1356,7 +1359,7 @@ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
   // (-1) or we should be clearing the inlined version.
   ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
          *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == Heap::null_value()));
+         (offset == 0 && map == HEAP->null_value()));
   *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
 
   // Patch the offset in the write-barrier code. The offset is the
@@ -1366,7 +1369,7 @@ bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
   // (-1) or we should be clearing the inlined version.
   ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
          *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == Heap::null_value()));
+         (offset == 0 && map == HEAP->null_value()));
   *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
 
   return true;
@@ -1417,7 +1420,7 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   //  -- rsp[0]  : return address
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::keyed_load_miss, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_miss(), 1);
 
   __ pop(rbx);
   __ push(rdx);  // receiver
@@ -1461,7 +1464,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC,
                                          strict_mode);
-  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
+  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
+                                                  no_reg);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1558,11 +1562,11 @@ void StoreIC::GenerateNormal(MacroAssembler* masm) {
   GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);
 
   GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
-  __ IncrementCounter(&Counters::store_normal_hit, 1);
+  __ IncrementCounter(COUNTERS->store_normal_hit(), 1);
   __ ret(0);
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::store_normal_miss, 1);
+  __ IncrementCounter(COUNTERS->store_normal_miss(), 1);
   GenerateMiss(masm);
 }
 
index aaad7c5fb2627094cbf23a1b5a8845e15b99b89c..6885961162de635eb7f1e6d580488c958a2382a1 100644 (file)
@@ -465,7 +465,7 @@ void LCodeGen::CallCode(Handle<Code> code,
 }
 
 
-void LCodeGen::CallRuntime(Runtime::Function* function,
+void LCodeGen::CallRuntime(const Runtime::Function* function,
                            int num_arguments,
                            LInstruction* instr) {
   ASSERT(instr != NULL);
@@ -553,14 +553,14 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   if (length == 0) return;
   ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
-      Factory::NewDeoptimizationInputData(length, TENURED);
+      factory()->NewDeoptimizationInputData(length, TENURED);
 
   Handle<ByteArray> translations = translations_.CreateByteArray();
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
   Handle<FixedArray> literals =
-      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
+      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
   for (int i = 0; i < deoptimization_literals_.length(); i++) {
     literals->set(i, *deoptimization_literals_[i]);
   }
@@ -1910,7 +1910,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   Register map = ToRegister(instr->TempAt(0));
   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ Move(kScratchRegister, Factory::the_hole_value());
+  __ Move(kScratchRegister, factory()->the_hole_value());
   __ cmpq(map, kScratchRegister);  // Patched to cached map.
   __ j(not_equal, &cache_miss);
   // Patched to load either true or false.
@@ -2099,7 +2099,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
 
   __ Move(rcx, instr->name());
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(Builtins::LoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2225,7 +2225,8 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(rdx));
   ASSERT(ToRegister(instr->key()).is(rax));
 
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(isolate()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2724,7 +2725,8 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
+      arity, NOT_IN_LOOP);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
 }
@@ -2734,7 +2736,8 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
 
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+      arity, NOT_IN_LOOP);
   __ Move(rcx, instr->name());
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2755,7 +2758,8 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
   int arity = instr->arity();
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
+      arity, NOT_IN_LOOP);
   __ Move(rcx, instr->name());
   CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2773,7 +2777,8 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
   ASSERT(ToRegister(instr->result()).is(rax));
 
-  Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> builtin(isolate()->builtins()->builtin(
+      Builtins::JSConstructCall));
   __ Set(rax, instr->arity());
   CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
 }
@@ -2819,7 +2824,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
   ASSERT(ToRegister(instr->value()).is(rax));
 
   __ Move(rcx, instr->hydrogen()->name());
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -2890,7 +2895,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->key()).is(rcx));
   ASSERT(ToRegister(instr->value()).is(rax));
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(isolate()->builtins()->builtin(
       info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
                          : Builtins::KeyedStoreIC_Initialize));
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3399,9 +3404,9 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
 
 
 void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
-  if (Heap::InNewSpace(*object)) {
+  if (heap()->InNewSpace(*object)) {
     Handle<JSGlobalPropertyCell> cell =
-        Factory::NewJSGlobalPropertyCell(object);
+        factory()->NewJSGlobalPropertyCell(object);
     __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
     __ movq(result, Operand(result, 0));
   } else {
@@ -3630,14 +3635,14 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                  Register input,
                                  Handle<String> type_name) {
   Condition final_branch_condition = no_condition;
-  if (type_name->Equals(Heap::number_symbol())) {
+  if (type_name->Equals(heap()->number_symbol())) {
     __ JumpIfSmi(input, true_label);
     __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
                    Heap::kHeapNumberMapRootIndex);
 
     final_branch_condition = equal;
 
-  } else if (type_name->Equals(Heap::string_symbol())) {
+  } else if (type_name->Equals(heap()->string_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
     __ j(above_equal, false_label);
@@ -3645,13 +3650,13 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
              Immediate(1 << Map::kIsUndetectable));
     final_branch_condition = zero;
 
-  } else if (type_name->Equals(Heap::boolean_symbol())) {
+  } else if (type_name->Equals(heap()->boolean_symbol())) {
     __ CompareRoot(input, Heap::kTrueValueRootIndex);
     __ j(equal, true_label);
     __ CompareRoot(input, Heap::kFalseValueRootIndex);
     final_branch_condition = equal;
 
-  } else if (type_name->Equals(Heap::undefined_symbol())) {
+  } else if (type_name->Equals(heap()->undefined_symbol())) {
     __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
     __ j(equal, true_label);
     __ JumpIfSmi(input, false_label);
@@ -3661,12 +3666,12 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
              Immediate(1 << Map::kIsUndetectable));
     final_branch_condition = not_zero;
 
-  } else if (type_name->Equals(Heap::function_symbol())) {
+  } else if (type_name->Equals(heap()->function_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
     final_branch_condition = above_equal;
 
-  } else if (type_name->Equals(Heap::object_symbol())) {
+  } else if (type_name->Equals(heap()->object_symbol())) {
     __ JumpIfSmi(input, false_label);
     __ CompareRoot(input, Heap::kNullValueRootIndex);
     __ j(equal, true_label);
index 6b00335241a93dbf7990198a8f2544fbe4e19514..fcd0a70a5563502f6a25ad43dbadd0e2f7d4f95d 100644 (file)
@@ -67,6 +67,9 @@ class LCodeGen BASE_EMBEDDED {
   // Simple accessors.
   MacroAssembler* masm() const { return masm_; }
   CompilationInfo* info() const { return info_; }
+  Isolate* isolate() const { return info_->isolate(); }
+  Factory* factory() const { return isolate()->factory(); }
+  Heap* heap() const { return isolate()->heap(); }
 
   // Support for converting LOperands to assembler types.
   Register ToRegister(LOperand* op) const;
@@ -156,13 +159,13 @@ class LCodeGen BASE_EMBEDDED {
   void CallCode(Handle<Code> code,
                 RelocInfo::Mode mode,
                 LInstruction* instr);
-  void CallRuntime(Runtime::Function* function,
+  void CallRuntime(const Runtime::Function* function,
                    int num_arguments,
                    LInstruction* instr);
   void CallRuntime(Runtime::FunctionId id,
                    int num_arguments,
                    LInstruction* instr) {
-    Runtime::Function* function = Runtime::FunctionForId(id);
+    const Runtime::Function* function = Runtime::FunctionForId(id);
     CallRuntime(function, num_arguments, instr);
   }
 
index 0e922c444547627c7c890a421165d6d73c256b55..6832ba430c464ba6134715767e3d37fd81495353 100644 (file)
@@ -1614,7 +1614,8 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
       bool needs_check = !instr->value()->type().IsSmi();
       if (needs_check) {
         LOperand* xmm_temp =
-            (instr->CanTruncateToInt32() && CpuFeatures::IsSupported(SSE3))
+            (instr->CanTruncateToInt32() &&
+             Isolate::Current()->cpu_features()->IsSupported(SSE3))
             ? NULL
             : FixedTemp(xmm1);
         LTaggedToI* res = new LTaggedToI(value, xmm_temp);
index 2d1b94c622673297778ed1e7900ab94cf1ec7307..7fbaba3a8fd71838ff9beff0f415f922250dac8a 100644 (file)
@@ -1423,7 +1423,7 @@ class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
   DECLARE_HYDROGEN_ACCESSOR(CallRuntime)
 
-  Runtime::Function* function() const { return hydrogen()->function(); }
+  const Runtime::Function* function() const { return hydrogen()->function(); }
   int arity() const { return hydrogen()->argument_count(); }
 };
 
index c5cddf7770d60b754181b5ee155eaee1943219c0..b4d4d9d1ae384fbce698de4ad210557d5aea997e 100644 (file)
@@ -44,7 +44,7 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
     : Assembler(buffer, size),
       generating_stub_(false),
       allow_stub_calls_(true),
-      code_object_(Heap::undefined_value()) {
+      code_object_(HEAP->undefined_value()) {
 }
 
 
@@ -391,7 +391,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
 
 
 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
-  Runtime::Function* function = Runtime::FunctionForId(id);
+  const Runtime::Function* function = Runtime::FunctionForId(id);
   Set(rax, function->nargs);
   movq(rbx, ExternalReference(function));
   CEntryStub ces(1);
@@ -406,7 +406,8 @@ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
 }
 
 
-void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
+void MacroAssembler::CallRuntime(const Runtime::Function* f,
+                                 int num_arguments) {
   // If the expected number of arguments of the runtime function is
   // constant, we check that the actual number of arguments match the
   // expectation.
@@ -426,13 +427,13 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
 }
 
 
-MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
+MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                             int num_arguments) {
   if (f->nargs >= 0 && f->nargs != num_arguments) {
     IllegalOperation(num_arguments);
     // Since we did not call the stub, there was no allocation failure.
     // Return some non-failure object.
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }
 
   // TODO(1236192): Most runtime routines don't need the number of
@@ -587,7 +588,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
 
   // Check if the function scheduled an exception.
   movq(rsi, scheduled_exception_address);
-  Cmp(Operand(rsi, 0), Factory::the_hole_value());
+  Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
   j(not_equal, &promote_scheduled_exception);
 
   LeaveApiExitFrame();
@@ -602,13 +603,18 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
 
   bind(&empty_result);
   // It was zero; the result is undefined.
-  Move(rax, Factory::undefined_value());
+  Move(rax, FACTORY->undefined_value());
   jmp(&prologue);
 
   // HandleScope limit has changed. Delete allocated extensions.
   bind(&delete_allocated_handles);
   movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
   movq(prev_limit_reg, rax);
+#ifdef _WIN64
+  movq(rcx, ExternalReference::isolate_address());
+#else
+  movq(rdi, ExternalReference::isolate_address());
+#endif
   movq(rax, ExternalReference::delete_handle_scope_extensions());
   call(rax);
   movq(rax, prev_limit_reg);
@@ -1609,7 +1615,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
     push(Immediate(0));  // NULL frame pointer.
   }
   // Save the current handler.
-  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
+  movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
   push(Operand(kScratchRegister, 0));
   // Link this handler.
   movq(Operand(kScratchRegister, 0), rsp);
@@ -1619,7 +1625,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
 void MacroAssembler::PopTryHandler() {
   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
   // Unlink this handler.
-  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
+  movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
   pop(Operand(kScratchRegister, 0));
   // Remove the remaining fields.
   addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
@@ -1638,7 +1644,7 @@ void MacroAssembler::Throw(Register value) {
     movq(rax, value);
   }
 
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
   movq(kScratchRegister, handler_address);
   movq(rsp, Operand(kScratchRegister, 0));
   // get next in chain
@@ -1666,7 +1672,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
     movq(rax, value);
   }
   // Fetch top stack handler.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
   movq(kScratchRegister, handler_address);
   movq(rsp, Operand(kScratchRegister, 0));
 
@@ -1689,12 +1695,13 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
 
   if (type == OUT_OF_MEMORY) {
     // Set external caught exception to false.
-    ExternalReference external_caught(Top::k_external_caught_exception_address);
+    ExternalReference external_caught(
+        Isolate::k_external_caught_exception_address);
     movq(rax, Immediate(false));
     store_rax(external_caught);
 
     // Set pending exception and rax to out of memory exception.
-    ExternalReference pending_exception(Top::k_pending_exception_address);
+    ExternalReference pending_exception(Isolate::k_pending_exception_address);
     movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
     store_rax(pending_exception);
   }
@@ -1770,7 +1777,7 @@ void MacroAssembler::AbortIfNotNumber(Register object) {
   Condition is_smi = CheckSmi(object);
   j(is_smi, &ok);
   Cmp(FieldOperand(object, HeapObject::kMapOffset),
-      Factory::heap_number_map());
+      FACTORY->heap_number_map());
   Assert(equal, "Operand not a number");
   bind(&ok);
 }
@@ -2029,7 +2036,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
   push(kScratchRegister);
   if (emit_debug_code()) {
     movq(kScratchRegister,
-         Factory::undefined_value(),
+         FACTORY->undefined_value(),
          RelocInfo::EMBEDDED_OBJECT);
     cmpq(Operand(rsp, 0), kScratchRegister);
     Check(not_equal, "code object not properly patched");
@@ -2068,10 +2075,10 @@ void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
     movq(r14, rax);  // Backup rax in callee-save register.
   }
 
-  movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
+  movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address));
   movq(Operand(kScratchRegister, 0), rbp);
 
-  movq(kScratchRegister, ExternalReference(Top::k_context_address));
+  movq(kScratchRegister, ExternalReference(Isolate::k_context_address));
   movq(Operand(kScratchRegister, 0), rsi);
 }
 
@@ -2098,7 +2105,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
   }
 
   // Get the required frame alignment for the OS.
-  static const int kFrameAlignment = OS::ActivationFrameAlignment();
+  const int kFrameAlignment = OS::ActivationFrameAlignment();
   if (kFrameAlignment > 0) {
     ASSERT(IsPowerOf2(kFrameAlignment));
     movq(kScratchRegister, Immediate(-kFrameAlignment));
@@ -2163,7 +2170,7 @@ void MacroAssembler::LeaveApiExitFrame() {
 
 void MacroAssembler::LeaveExitFrameEpilogue() {
   // Restore current context from top and clear it in debug mode.
-  ExternalReference context_address(Top::k_context_address);
+  ExternalReference context_address(Isolate::k_context_address);
   movq(kScratchRegister, context_address);
   movq(rsi, Operand(kScratchRegister, 0));
 #ifdef DEBUG
@@ -2171,7 +2178,7 @@ void MacroAssembler::LeaveExitFrameEpilogue() {
 #endif
 
   // Clear the top frame.
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
+  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
   movq(kScratchRegister, c_entry_fp_address);
   movq(Operand(kScratchRegister, 0), Immediate(0));
 }
@@ -2200,7 +2207,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   // Check the context is a global context.
   if (emit_debug_code()) {
     Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
-        Factory::global_context_map());
+        FACTORY->global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
   }
 
@@ -2625,6 +2632,11 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   }
 }
 
+#ifdef _WIN64
+static const int kRegisterPassedArguments = 4;
+#else
+static const int kRegisterPassedArguments = 6;
+#endif
 
 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   // Load the global or builtins object from the current context.
@@ -2642,7 +2654,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   if (emit_debug_code()) {
     Label ok, fail;
-    CheckMap(map, Factory::meta_map(), &fail, false);
+    CheckMap(map, FACTORY->meta_map(), &fail, false);
     jmp(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
@@ -2660,11 +2672,10 @@ int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
   // and the caller does not reserve stack slots for them.
   ASSERT(num_arguments >= 0);
 #ifdef _WIN64
-  static const int kMinimumStackSlots = 4;
+  const int kMinimumStackSlots = kRegisterPassedArguments;
   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
   return num_arguments;
 #else
-  static const int kRegisterPassedArguments = 6;
   if (num_arguments < kRegisterPassedArguments) return 0;
   return num_arguments - kRegisterPassedArguments;
 #endif
@@ -2675,6 +2686,10 @@ void MacroAssembler::PrepareCallCFunction(int num_arguments) {
   int frame_alignment = OS::ActivationFrameAlignment();
   ASSERT(frame_alignment != 0);
   ASSERT(num_arguments >= 0);
+
+  // Reserve space for the Isolate address, always passed as the last argument.
+  num_arguments += 1;
+
   // Make stack end at alignment and allocate space for arguments and old rsp.
   movq(kScratchRegister, rsp);
   ASSERT(IsPowerOf2(frame_alignment));
@@ -2694,6 +2709,26 @@ void MacroAssembler::CallCFunction(ExternalReference function,
 
 
 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
+  // Pass current isolate address as additional parameter.
+  if (num_arguments < kRegisterPassedArguments) {
+#ifdef _WIN64
+    // First four arguments are passed in registers on Windows.
+    Register arg_to_reg[] = {rcx, rdx, r8, r9};
+#else
+    // First six arguments are passed in registers on other platforms.
+    Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
+#endif
+    Register reg = arg_to_reg[num_arguments];
+    movq(reg, ExternalReference::isolate_address());
+  } else {
+    // Push Isolate pointer after all parameters.
+    int argument_slots_on_stack =
+        ArgumentStackSlotsForCFunctionCall(num_arguments);
+    movq(kScratchRegister, ExternalReference::isolate_address());
+    movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
+         kScratchRegister);
+  }
+
   // Check stack alignment.
   if (emit_debug_code()) {
     CheckStackAlignment();
@@ -2702,6 +2737,7 @@ void MacroAssembler::CallCFunction(Register function, int num_arguments) {
   call(function);
   ASSERT(OS::ActivationFrameAlignment() != 0);
   ASSERT(num_arguments >= 0);
+  num_arguments += 1;
   int argument_slots_on_stack =
       ArgumentStackSlotsForCFunctionCall(num_arguments);
   movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
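Taken together, the PrepareCallCFunction and CallCFunction hunks above establish a convention: one extra slot is always reserved, and ExternalReference::isolate_address() is loaded into the first unused argument register (or the first free stack slot) after the declared arguments, so every C function reached this way receives the current Isolate* as an implicit trailing parameter. A hedged sketch of what a callee looks like under that convention (the function name and body are illustrative, not part of this patch):

    // The generated code appends Isolate* after the declared arguments, so a
    // callee simply declares it as its final parameter and avoids any
    // process-global lookup.
    extern "C" int64_t HypotheticalRuntimeHelper(int64_t left,
                                                 int64_t right,
                                                 v8::internal::Isolate* isolate) {
      // Use isolate-> accessors here instead of any process-global state.
      (void) isolate;
      return left + right;
    }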
index 376a5978c4c8eb6abe4a5a824e2f53ed89d3f1b3..b7bd0392bbd0fe251132bf6df84734ac8ba129bb 100644 (file)
@@ -888,7 +888,7 @@ class MacroAssembler: public Assembler {
   void StubReturn(int argc);
 
   // Call a runtime routine.
-  void CallRuntime(Runtime::Function* f, int num_arguments);
+  void CallRuntime(const Runtime::Function* f, int num_arguments);
 
   // Call a runtime function and save the value of XMM registers.
   void CallRuntimeSaveDoubles(Runtime::FunctionId id);
@@ -896,7 +896,7 @@ class MacroAssembler: public Assembler {
   // Call a runtime function, returning the CodeStub object called.
   // Try to generate the stub code if necessary.  Do not perform a GC
   // but instead return a retry after GC failure.
-  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
+  MUST_USE_RESULT MaybeObject* TryCallRuntime(const Runtime::Function* f,
                                               int num_arguments);
 
   // Convenience function: Same as above, but takes the fid instead.
@@ -1807,16 +1807,16 @@ void MacroAssembler::InNewSpace(Register object,
     cmpq(scratch, kScratchRegister);
     j(cc, branch);
   } else {
-    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
+    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
     intptr_t new_space_start =
-        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
+        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
     movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
     if (scratch.is(object)) {
       addq(scratch, kScratchRegister);
     } else {
       lea(scratch, Operand(object, kScratchRegister, times_1, 0));
     }
-    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
+    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
     j(cc, branch);
   }
 }
@@ -1870,7 +1870,8 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
 
   if (!definitely_matches) {
     Handle<Code> adaptor =
-        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
+        Handle<Code>(Isolate::Current()->builtins()->builtin(
+            Builtins::ArgumentsAdaptorTrampoline));
     if (!code_constant.is_null()) {
       movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
       addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
index cd3bfbd42c4517c762aef2831c0444ba06870e81..37d93cc53d5bb69840c534a86e2ed6fd11f5c291 100644 (file)
@@ -68,6 +68,7 @@ namespace internal {
  *
  * The stack will have the following content, in some order, indexable from the
  * frame pointer (see, e.g., kStackHighEnd):
+ *    - Isolate* isolate     (Address of the current isolate)
  *    - direct_call          (if 1, direct call from JavaScript code, if 0 call
  *                            through the runtime system)
  *    - stack_area_base      (High end of the memory area to use as
@@ -954,10 +955,11 @@ Handle<Object> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
 
   CodeDesc code_desc;
   masm_->GetCode(&code_desc);
-  Handle<Code> code = Factory::NewCode(code_desc,
-                                       Code::ComputeFlags(Code::REGEXP),
-                                       masm_->CodeObject());
-  PROFILE(RegExpCodeCreateEvent(*code, *source));
+  Isolate* isolate = ISOLATE;
+  Handle<Code> code = isolate->factory()->NewCode(
+      code_desc, Code::ComputeFlags(Code::REGEXP),
+      masm_->CodeObject());
+  PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
   return Handle<Object>::cast(code);
 }
 
@@ -1141,8 +1143,10 @@ static T& frame_entry(Address re_frame, int frame_offset) {
 int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address,
                                                   Code* re_code,
                                                   Address re_frame) {
-  if (StackGuard::IsStackOverflow()) {
-    Top::StackOverflow();
+  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
+  ASSERT(isolate == Isolate::Current());
+  if (isolate->stack_guard()->IsStackOverflow()) {
+    isolate->StackOverflow();
     return EXCEPTION;
   }
 
index 421a229447bbb5249478af51c97d486b35ae6e1e..864441ae139ce973ff74152ae53589553c0389e6 100644 (file)
@@ -104,7 +104,8 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
                       Handle<String> subject,
                       int* offsets_vector,
                       int offsets_vector_length,
-                      int previous_index);
+                      int previous_index,
+                      Isolate* isolate);
 
   static Result Execute(Code* code,
                         String* input,
@@ -142,6 +143,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
   static const int kStackHighEnd = kRegisterOutput + kPointerSize;
   // DirectCall is passed as 32 bit int (values 0 or 1).
   static const int kDirectCall = kStackHighEnd + kPointerSize;
+  static const int kIsolate = kDirectCall + kPointerSize;
 #else
   // In AMD64 ABI Calling Convention, the first six integer parameters
   // are passed as registers, and caller must allocate space on the stack
@@ -153,6 +155,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
   static const int kRegisterOutput = kInputEnd - kPointerSize;
   static const int kStackHighEnd = kRegisterOutput - kPointerSize;
   static const int kDirectCall = kFrameAlign;
+  static const int kIsolate = kDirectCall + kPointerSize;
 #endif
 
 #ifdef _WIN64
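Since the SysV AMD64 convention passes the first six integer parameters in registers, direct_call (parameter seven) is the first value that actually lands on the stack and the new Isolate* (parameter eight) is the second; that is exactly what the #else branch above encodes. A quick check of the arithmetic, assuming kPointerSize is 8 on x64:

    kDirectCall = kFrameAlign       // 7th argument: first stack-passed slot
    kIsolate    = kDirectCall + 8   // 8th argument: the Isolate* added here

On Win64 only four parameters travel in registers, so the offsets in the _WIN64 branch chain upward one pointer at a time in the same way, again leaving kIsolate one slot above kDirectCall.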
index 1f5467e13031abc07a1fb13bdac5846f53ce0f68..65189f57b7ec407624c6d84127b8effaebbacb86 100644 (file)
@@ -42,9 +42,11 @@ namespace internal {
 void Result::ToRegister() {
   ASSERT(is_valid());
   if (is_constant()) {
-    Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate();
+    CodeGenerator* code_generator =
+        CodeGeneratorScope::Current(Isolate::Current());
+    Result fresh = code_generator->allocator()->Allocate();
     ASSERT(fresh.is_valid());
-    CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
+    code_generator->masm()->Move(fresh.reg(), handle());
     // This result becomes a copy of the fresh one.
     fresh.set_type_info(type_info());
     *this = fresh;
@@ -55,21 +57,23 @@ void Result::ToRegister() {
 
 void Result::ToRegister(Register target) {
   ASSERT(is_valid());
+  CodeGenerator* code_generator =
+      CodeGeneratorScope::Current(Isolate::Current());
   if (!is_register() || !reg().is(target)) {
-    Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate(target);
+    Result fresh = code_generator->allocator()->Allocate(target);
     ASSERT(fresh.is_valid());
     if (is_register()) {
-      CodeGeneratorScope::Current()->masm()->movq(fresh.reg(), reg());
+      code_generator->masm()->movq(fresh.reg(), reg());
     } else {
       ASSERT(is_constant());
-      CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
+      code_generator->masm()->Move(fresh.reg(), handle());
     }
     fresh.set_type_info(type_info());
     *this = fresh;
   } else if (is_register() && reg().is(target)) {
-    ASSERT(CodeGeneratorScope::Current()->has_valid_frame());
-    CodeGeneratorScope::Current()->frame()->Spill(target);
-    ASSERT(CodeGeneratorScope::Current()->allocator()->count(target) == 1);
+    ASSERT(code_generator->has_valid_frame());
+    code_generator->frame()->Spill(target);
+    ASSERT(code_generator->allocator()->count(target) == 1);
   }
   ASSERT(is_register());
   ASSERT(reg().is(target));
index aa2994f26762c6f13318b27f9139990096e73fac..cfaa5b8c3a332128fb8eb56b8612e710d4a4625d 100644 (file)
@@ -40,12 +40,12 @@ namespace internal {
   (entry(p0, p1, p2, p3, p4))
 
 typedef int (*regexp_matcher)(String*, int, const byte*,
-                              const byte*, int*, Address, int);
+                              const byte*, int*, Address, int, Isolate*);
 
 // Call the generated regexp code directly. The code at the entry address should
-// expect seven int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
-  (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
+// expect eight int/pointer sized arguments and return an int.
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+  (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
 
 #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
   (reinterpret_cast<TryCatch*>(try_catch_address))
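For callers of the macro, the only visible change is the new trailing argument matching the extra Isolate* in the regexp_matcher typedef. A hypothetical call site (variable names are illustrative, chosen to mirror the parameter order of the typedef):

    // String*, int, const byte*, const byte*, int*, Address, int, Isolate*
    int result = CALL_GENERATED_REGEXP_CODE(code_entry,
                                            subject_ptr,       // String*
                                            start_index,       // int
                                            input_start,       // const byte*
                                            input_end,         // const byte*
                                            output_registers,  // int*
                                            stack_base,        // Address
                                            direct_call,       // int
                                            Isolate::Current());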
index bc8effe6c151a28f931e5d12a378fef616480a97..620f2098d1a2a1f741f110a1712230fa354f0947 100644 (file)
@@ -39,7 +39,8 @@ namespace internal {
 #define __ ACCESS_MASM(masm)
 
 
-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
@@ -48,7 +49,7 @@ static void ProbeTable(MacroAssembler* masm,
   ASSERT_EQ(16, sizeof(StubCache::Entry));
   // The offset register holds the entry offset times four (due to masking
   // and shifting optimizations).
-  ExternalReference key_offset(SCTableReference::keyReference(table));
+  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
   Label miss;
 
   __ movq(kScratchRegister, key_offset);
@@ -88,8 +89,8 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                              Register r0,
                                              Register r1) {
   ASSERT(name->IsSymbol());
-  __ IncrementCounter(&Counters::negative_lookups, 1);
-  __ IncrementCounter(&Counters::negative_lookups_miss, 1);
+  __ IncrementCounter(COUNTERS->negative_lookups(), 1);
+  __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1);
 
   Label done;
   __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
@@ -151,7 +152,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
     ASSERT_EQ(kSmiTagSize, 1);
     __ movq(entity_name, Operand(properties, index, times_pointer_size,
                                  kElementsStartOffset - kHeapObjectTag));
-    __ Cmp(entity_name, Factory::undefined_value());
+    __ Cmp(entity_name, FACTORY->undefined_value());
     // __ jmp(miss_label);
     if (i != kProbes - 1) {
       __ j(equal, &done);
@@ -172,7 +173,7 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
   }
 
   __ bind(&done);
-  __ DecrementCounter(&Counters::negative_lookups_miss, 1);
+  __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
 }
 
 
@@ -183,6 +184,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
                               Register scratch,
                               Register extra,
                               Register extra2) {
+  Isolate* isolate = Isolate::Current();
   Label miss;
   USE(extra);   // The register extra is not used on the X64 platform.
   USE(extra2);  // The register extra2 is not used on the X64 platform.
@@ -212,7 +214,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
   __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
 
   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
 
   // Primary miss: Compute hash for secondary probe.
   __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -224,7 +226,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
   __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
 
   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
 
   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
@@ -254,12 +256,13 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
     MacroAssembler* masm, int index, Register prototype, Label* miss) {
   // Check we're still in the same context.
-  __ Move(prototype, Top::global());
+  __ Move(prototype, Isolate::Current()->global());
   __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
           prototype);
   __ j(not_equal, miss);
   // Get the global function with the given index.
-  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
+  JSFunction* function = JSFunction::cast(
+      Isolate::Current()->global_context()->get(index));
   // Load its initial map. The global functions all have initial maps.
   __ Move(prototype, Handle<Map>(function->initial_map()));
   // Load the prototype from the initial map.
@@ -375,7 +378,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
                                      JSObject* holder_obj) {
   __ push(name);
   InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
-  ASSERT(!Heap::InNewSpace(interceptor));
+  ASSERT(!HEAP->InNewSpace(interceptor));
   __ Move(kScratchRegister, Handle<Object>(interceptor));
   __ push(kScratchRegister);
   __ push(receiver);
@@ -466,7 +469,7 @@ static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
   __ movq(Operand(rsp, 2 * kPointerSize), rdi);
   Object* call_data = optimization.api_call_info()->data();
   Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
-  if (Heap::InNewSpace(call_data)) {
+  if (HEAP->InNewSpace(call_data)) {
     __ Move(rcx, api_call_info_handle);
     __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
     __ movq(Operand(rsp, 3 * kPointerSize), rbx);
@@ -561,7 +564,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                      name,
                      holder,
                      miss);
-      return Heap::undefined_value();  // Success.
+      return HEAP->undefined_value();  // Success.
     }
   }
 
@@ -597,10 +600,10 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                              (depth2 != kInvalidProtoDepth);
     }
 
-    __ IncrementCounter(&Counters::call_const_interceptor, 1);
+    __ IncrementCounter(COUNTERS->call_const_interceptor(), 1);
 
     if (can_do_fast_api_call) {
-      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1);
+      __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1);
       ReserveSpaceForFastApiCall(masm, scratch1);
     }
 
@@ -660,7 +663,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
       FreeSpaceForFastApiCall(masm, scratch1);
     }
 
-    return Heap::undefined_value();  // Success.
+    return HEAP->undefined_value();  // Success.
   }
 
   void CompileRegular(MacroAssembler* masm,
@@ -729,9 +732,9 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
   Code* code = NULL;
   if (kind == Code::LOAD_IC) {
-    code = Builtins::builtin(Builtins::LoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
   } else {
-    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
+    code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
   }
 
   Handle<Code> ic(code);
@@ -836,7 +839,7 @@ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
   ASSERT(cell->value()->IsTheHole());
   __ Move(scratch, Handle<Object>(cell));
   __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
-         Factory::the_hole_value());
+         FACTORY->the_hole_value());
   __ j(not_equal, miss);
   return cell;
 }
@@ -885,7 +888,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
         !current->IsJSGlobalObject() &&
         !current->IsJSGlobalProxy()) {
       if (!name->IsSymbol()) {
-        MaybeObject* lookup_result = Heap::LookupSymbol(name);
+        MaybeObject* lookup_result = HEAP->LookupSymbol(name);
         if (lookup_result->IsFailure()) {
           set_failure(Failure::cast(lookup_result));
           return reg;
@@ -905,7 +908,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
       __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
       reg = holder_reg;  // from now the object is in holder_reg
       __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
-    } else if (Heap::InNewSpace(prototype)) {
+    } else if (HEAP->InNewSpace(prototype)) {
       // Get the map of the current object.
       __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
       __ Cmp(scratch1, Handle<Map>(current->map()));
@@ -956,7 +959,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
   __ j(not_equal, miss);
 
   // Log the check depth.
-  LOG(IntEvent("check-maps-depth", depth + 1));
+  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
 
   // Perform security check for access to the global object and return
   // the holder register.
@@ -1039,7 +1042,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
 
   __ push(receiver);  // receiver
   __ push(reg);  // holder
-  if (Heap::InNewSpace(callback_handle->data())) {
+  if (HEAP->InNewSpace(callback_handle->data())) {
     __ Move(scratch1, callback_handle);
     __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
   } else {
@@ -1291,7 +1294,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
   __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
 
   // Check that the cell contains the same function.
-  if (Heap::InNewSpace(function)) {
+  if (HEAP->InNewSpace(function)) {
     // We can't embed a pointer to a function in new space so we have
     // to verify that the shared function info is unchanged. This has
     // the nice side effect that multiple closures based on the same
@@ -1313,8 +1316,8 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
 
 
 MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
-                                                      kind_);
+  MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss(
+      arguments().immediate(), kind_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1393,7 +1396,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss;
 
@@ -1427,7 +1430,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
 
     // Check that the elements are in fast mode and writable.
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           Factory::fixed_array_map());
+           FACTORY->fixed_array_map());
     __ j(not_equal, &call_builtin);
 
     if (argc == 1) {  // Otherwise fall through to call builtin.
@@ -1556,7 +1559,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
 
   Label miss, return_undefined, call_builtin;
 
@@ -1641,7 +1644,7 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1725,7 +1728,7 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
   // -----------------------------------
 
   // If object is not a string, bail out to regular call.
-  if (!object->IsString() || cell != NULL) return Heap::undefined_value();
+  if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
 
   const int argc = arguments().immediate();
 
@@ -1814,7 +1817,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1873,7 +1876,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                     JSFunction* function,
                                                     String* name) {
   // TODO(872): implement this.
-  return Heap::undefined_value();
+  return HEAP->undefined_value();
 }
 
 
@@ -1894,7 +1897,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
-  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
+  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1943,7 +1946,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
 
   // Check if the argument is a heap number and load its value.
   __ bind(&not_smi);
-  __ CheckMap(rax, Factory::heap_number_map(), &slow, true);
+  __ CheckMap(rax, FACTORY->heap_number_map(), &slow, true);
   __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
 
   // Check the sign of the argument. If the argument is positive,
@@ -1992,11 +1995,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   ASSERT(optimization.is_simple_api_call());
   // Bail out if object is a global object as we don't want to
   // repatch it to global receiver.
-  if (object->IsGlobalObject()) return Heap::undefined_value();
-  if (cell != NULL) return Heap::undefined_value();
+  if (object->IsGlobalObject()) return HEAP->undefined_value();
+  if (cell != NULL) return HEAP->undefined_value();
   int depth = optimization.GetPrototypeDepthOfExpectedType(
             JSObject::cast(object), holder);
-  if (depth == kInvalidProtoDepth) return Heap::undefined_value();
+  if (depth == kInvalidProtoDepth) return HEAP->undefined_value();
 
   Label miss, miss_before_stack_reserved;
 
@@ -2009,8 +2012,8 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss_before_stack_reserved);
 
-  __ IncrementCounter(&Counters::call_const, 1);
-  __ IncrementCounter(&Counters::call_const_fast_api, 1);
+  __ IncrementCounter(COUNTERS->call_const(), 1);
+  __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
 
   // Allocate space for v8::Arguments implicit values. Must be initialized
   // before calling any runtime function.
@@ -2085,7 +2088,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
   SharedFunctionInfo* function_info = function->shared();
   switch (check) {
     case RECEIVER_MAP_CHECK:
-      __ IncrementCounter(&Counters::call_const, 1);
+      __ IncrementCounter(COUNTERS->call_const(), 1);
 
       // Check that the maps haven't changed.
       CheckPrototypes(JSObject::cast(object), rdx, holder,
@@ -2291,7 +2294,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
   __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
   // Jump to the cached code (tail call).
-  __ IncrementCounter(&Counters::call_global_inline, 1);
+  __ IncrementCounter(COUNTERS->call_global_inline(), 1);
   ASSERT(function->is_compiled());
   ParameterCount expected(function->shared()->formal_parameter_count());
   if (V8::UseCrankshaft()) {
@@ -2307,7 +2310,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
   }
   // Handle call cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::call_global_inline_miss, 1);
+  __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1);
   Object* obj;
   { MaybeObject* maybe_obj = GenerateMissBranch();
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -2340,7 +2343,8 @@ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2390,7 +2394,8 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2439,7 +2444,8 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2476,13 +2482,14 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
   __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);
 
   // Return the value (register rax).
-  __ IncrementCounter(&Counters::named_store_global_inline, 1);
+  __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
   __ ret(0);
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
-  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+  __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::StoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2502,7 +2509,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_store_field, 1);
+  __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rcx, Handle<String>(name));
@@ -2518,8 +2525,9 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_store_field, 1);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedStoreIC_Miss));
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2551,7 +2559,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
   // Get the elements array and make sure it is a fast element array, not 'cow'.
   __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
   __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
-         Factory::fixed_array_map());
+         FACTORY->fixed_array_map());
   __ j(not_equal, &miss);
 
   // Check that the key is within bounds.
@@ -2576,7 +2584,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
 
   // Handle store cache miss.
   __ bind(&miss);
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedStoreIC_Miss));
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
@@ -2625,7 +2634,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, Heap::empty_string());
+  return GetCode(NONEXISTENT, HEAP->empty_string());
 }
 
 
@@ -2764,12 +2773,12 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
     __ Check(not_equal, "DontDelete cells can't contain the hole");
   }
 
-  __ IncrementCounter(&Counters::named_load_global_stub, 1);
+  __ IncrementCounter(COUNTERS->named_load_global_stub(), 1);
   __ movq(rax, rbx);
   __ ret(0);
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
+  __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
@@ -2788,7 +2797,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_field, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2797,7 +2806,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
   GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
 
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_field, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2817,7 +2826,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_callback, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2832,7 +2841,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
 
   __ bind(&miss);
 
-  __ DecrementCounter(&Counters::keyed_load_callback, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2851,7 +2860,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_constant_function(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2860,7 +2869,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
                        value, name, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2878,7 +2887,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_interceptor(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2897,7 +2906,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                           name,
                           &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2913,7 +2922,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_array_length, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2921,7 +2930,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
 
   GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_array_length, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2937,7 +2946,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_string_length, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2945,7 +2954,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
 
   GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_string_length, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -2961,7 +2970,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
 
   // Check that the name has not changed.
   __ Cmp(rax, Handle<String>(name));
@@ -2969,7 +2978,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
 
   GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
   __ bind(&miss);
-  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
+  __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
@@ -3035,7 +3044,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   Label generic_stub_call;
 
   // Use r8 for holding undefined which is used in several places below.
-  __ Move(r8, Factory::undefined_value());
+  __ Move(r8, FACTORY->undefined_value());
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Check to see whether there are any break points in the function code. If
@@ -3079,7 +3088,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   // rbx: initial map
   // rdx: JSObject (untagged)
   __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
-  __ Move(rbx, Factory::empty_fixed_array());
+  __ Move(rbx, FACTORY->empty_fixed_array());
   __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
   __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
 
@@ -3138,14 +3147,15 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
   __ pop(rcx);
   __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
   __ push(rcx);
-  __ IncrementCounter(&Counters::constructed_objects, 1);
-  __ IncrementCounter(&Counters::constructed_objects_stub, 1);
+  __ IncrementCounter(COUNTERS->constructed_objects(), 1);
+  __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1);
   __ ret(0);
 
   // Jump to the generic stub in case the specialized code cannot handle the
   // construction.
   __ bind(&generic_stub_call);
-  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  Code* code = Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructStubGeneric);
   Handle<Code> generic_construct_stub(code);
   __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
 
@@ -3260,7 +3270,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
 
   // Slow case: Jump to runtime.
   __ bind(&slow);
-  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
+  __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1);
 
   // ----------- S t a t e -------------
   //  -- rax    : key
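The HEAP, FACTORY and COUNTERS spellings that replace the old static Heap::, Factory:: and &Counters:: uses throughout this file are per-isolate lookups, not new globals. A sketch of the kind of definitions they stand for (illustrative; the real macros live in the isolate header, which is not part of this hunk):

    // Assumed shape of the convenience macros introduced by the isolates work.
    #define HEAP     (v8::internal::Isolate::Current()->heap())
    #define FACTORY  (v8::internal::Isolate::Current()->factory())
    #define COUNTERS (v8::internal::Isolate::Current()->counters())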
index c4d7e65663e76a197df1ca2c280ac7ae9229e8d1..6bd79653f669c5799aaf695d88b7d057bed68ad9 100644 (file)
@@ -113,7 +113,7 @@ void VirtualFrame::AllocateStackSlots() {
     // them later.  First sync everything above the stack pointer so we can
     // use pushes to allocate and initialize the locals.
     SyncRange(stack_pointer_ + 1, element_count() - 1);
-    Handle<Object> undefined = Factory::undefined_value();
+    Handle<Object> undefined = FACTORY->undefined_value();
     FrameElement initial_value =
         FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
     if (count < kLocalVarBound) {
@@ -1019,7 +1019,7 @@ void VirtualFrame::SyncRange(int begin, int end) {
 //------------------------------------------------------------------------------
 // Virtual frame stub and IC calling functions.
 
-Result VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
+Result VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
   PrepareForCall(arg_count, arg_count);
   ASSERT(cgen()->HasValidEntryRegisters());
   __ CallRuntime(f, arg_count);
@@ -1115,7 +1115,8 @@ void VirtualFrame::MoveResultsToRegisters(Result* a,
 Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
   // Name and receiver are on the top of the frame.  Both are dropped.
   // The IC expects name in rcx and receiver in rax.
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   Result name = Pop();
   Result receiver = Pop();
   PrepareForCall(0, 0);
@@ -1132,7 +1133,8 @@ Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
   PrepareForCall(0, 0);
   MoveResultsToRegisters(&key, &receiver, rax, rdx);
 
-  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::KeyedLoadIC_Initialize));
   return RawCallCodeObject(ic, mode);
 }
 
@@ -1142,7 +1144,7 @@ Result VirtualFrame::CallStoreIC(Handle<String> name,
                                  StrictModeFlag strict_mode) {
   // Value and (if not contextual) receiver are on top of the frame.
   // The IC expects name in rcx, value in rax, and receiver in rdx.
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                    : Builtins::StoreIC_Initialize));
   Result value = Pop();
@@ -1208,7 +1210,7 @@ Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
     receiver.Unuse();
   }
 
-  Handle<Code> ic(Builtins::builtin(
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
       (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                    : Builtins::KeyedStoreIC_Initialize));
   return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
@@ -1222,7 +1224,8 @@ Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
   // and dropped by the call.  The IC expects the name in rcx and the rest
   // on the stack, and drops them all.
   InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
-  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+  Handle<Code> ic =
+      ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
   Result name = Pop();
   // Spill args, receiver, and function.  The call will drop args and
   // receiver.
@@ -1241,7 +1244,7 @@ Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
   // on the stack, and drops them all.
   InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic =
-      StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+      ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   Result name = Pop();
   // Spill args, receiver, and function.  The call will drop args and
   // receiver.
@@ -1256,7 +1259,8 @@ Result VirtualFrame::CallConstructor(int arg_count) {
   // Arguments, receiver, and function are on top of the frame.  The
   // IC expects arg count in rax, function in rdi, and the arguments
   // and receiver on the stack.
-  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructCall));
   // Duplicate the function before preparing the frame.
   PushElementAt(arg_count);
   Result function = Pop();
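
The repeated Handle<Code> ic(Isolate::Current()->builtins()->builtin(...)) rewrites in this file all express one ownership change: the builtins code table stops being a global array and becomes a member of the isolate. A loose sketch of that shape, with invented names (FakeIsolate, BuiltinTable), might look like this:

// Illustrative only; the real Builtins class is far richer than this.
struct Code {};  // stand-in for v8::internal::Code

class BuiltinTable {
 public:
  enum Name { kLoadIC_Initialize, kKeyedLoadIC_Initialize, kCount };
  Code* builtin(Name name) { return &entries_[name]; }
 private:
  Code entries_[kCount];  // one table per isolate, not one per process
};

class FakeIsolate {
 public:
  BuiltinTable* builtins() { return &builtins_; }
 private:
  BuiltinTable builtins_;
};

// Old shape:  Code* c = Builtins::builtin(Builtins::LoadIC_Initialize);
// New shape:  Code* c = isolate->builtins()->builtin(BuiltinTable::kLoadIC_Initialize);
Code* LookupLoadIC(FakeIsolate* isolate) {
  return isolate->builtins()->builtin(BuiltinTable::kLoadIC_Initialize);
}
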
index 7396db17f6c7cd94a1a30da721ae940d1009f4f4..aac9864343b450d479e6da7e75448200775c37c8 100644 (file)
@@ -67,7 +67,9 @@ class VirtualFrame : public ZoneObject {
    private:
     bool previous_state_;
 
-    CodeGenerator* cgen() { return CodeGeneratorScope::Current(); }
+    CodeGenerator* cgen() {
+      return CodeGeneratorScope::Current(Isolate::Current());
+    }
   };
 
   // An illegal index into the virtual frame.
@@ -79,7 +81,10 @@ class VirtualFrame : public ZoneObject {
   // Construct a virtual frame as a clone of an existing one.
   explicit inline VirtualFrame(VirtualFrame* original);
 
-  CodeGenerator* cgen() { return CodeGeneratorScope::Current(); }
+  CodeGenerator* cgen() {
+    return CodeGeneratorScope::Current(Isolate::Current());
+  }
+
   MacroAssembler* masm() { return cgen()->masm(); }
 
   // Create a duplicate of an existing valid frame element.
@@ -315,7 +320,7 @@ class VirtualFrame : public ZoneObject {
 
   // Call runtime given the number of arguments expected on (and
   // removed from) the stack.
-  Result CallRuntime(Runtime::Function* f, int arg_count);
+  Result CallRuntime(const Runtime::Function* f, int arg_count);
   Result CallRuntime(Runtime::FunctionId id, int arg_count);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -342,7 +347,6 @@ class VirtualFrame : public ZoneObject {
                      StrictModeFlag strict_mode);
 
   // Call keyed store IC.  Value, key, and receiver are found on top
-  // of the frame.  All three are dropped.
   Result CallKeyedStoreIC(StrictModeFlag strict_mode);
 
   // Call call IC.  Function name, arguments, and receiver are found on top
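
One easy-to-miss change in this header is that CallRuntime now takes a const Runtime::Function*. As far as the diff shows, the idea is that genuinely immutable tables can stay process-global after the isolate split, and const-qualifying the pointer documents that; only mutable state has to move into the isolate. A toy illustration, with invented names (FunctionDescriptor, kRuntimeFunctions):

// Immutable metadata is safe to share between isolates, so it can remain a
// static const table and be handed around as a pointer-to-const.
struct FunctionDescriptor {
  const char* name;
  int arg_count;
};

static const FunctionDescriptor kRuntimeFunctions[] = {
  {"AddNumbers", 2},
  {"ToString", 1},
};

// Mirrors: Result CallRuntime(const Runtime::Function* f, int arg_count);
int ExpectedArgs(const FunctionDescriptor* f) { return f->arg_count; }
int AddNumbersArity() { return ExpectedArgs(&kRuntimeFunctions[0]); }
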
index 467296039c2cc0d013abcdd0ea889a3827a65542..516fc4aac5896e86a9e7e728e4800fe66ec4162d 100644 (file)
@@ -28,6 +28,7 @@
 #ifndef V8_ZONE_INL_H_
 #define V8_ZONE_INL_H_
 
+#include "isolate.h"
 #include "zone.h"
 #include "v8-counters.h"
 
@@ -35,8 +36,19 @@ namespace v8 {
 namespace internal {
 
 
+AssertNoZoneAllocation::AssertNoZoneAllocation()
+    : prev_(Isolate::Current()->zone_allow_allocation()) {
+  Isolate::Current()->set_zone_allow_allocation(false);
+}
+
+
+AssertNoZoneAllocation::~AssertNoZoneAllocation() {
+  Isolate::Current()->set_zone_allow_allocation(prev_);
+}
+
+
 inline void* Zone::New(int size) {
-  ASSERT(AssertNoZoneAllocation::allow_allocation());
+  ASSERT(Isolate::Current()->zone_allow_allocation());
   ASSERT(ZoneScope::nesting() > 0);
   // Round up the requested size to fit the alignment.
   size = RoundUp(size, kAlignment);
@@ -54,7 +66,7 @@ inline void* Zone::New(int size) {
 
 template <typename T>
 T* Zone::NewArray(int length) {
-  return static_cast<T*>(Zone::New(length * sizeof(T)));
+  return static_cast<T*>(New(length * sizeof(T)));
 }
 
 
@@ -65,7 +77,7 @@ bool Zone::excess_allocation() {
 
 void Zone::adjust_segment_bytes_allocated(int delta) {
   segment_bytes_allocated_ += delta;
-  Counters::zone_segment_bytes.Set(segment_bytes_allocated_);
+  isolate_->counters()->zone_segment_bytes()->Set(segment_bytes_allocated_);
 }
 
 
@@ -78,6 +90,36 @@ ZoneSplayTree<Config>::~ZoneSplayTree() {
 }
 
 
+// TODO(isolates): for performance reasons, this should be replaced with a new
+//                 operator that takes the zone in which the object should be
+//                 allocated.
+void* ZoneObject::operator new(size_t size) {
+  return ZONE->New(static_cast<int>(size));
+}
+
+
+inline void* ZoneListAllocationPolicy::New(int size) {
+  return ZONE->New(size);
+}
+
+
+ZoneScope::ZoneScope(ZoneScopeMode mode)
+    : isolate_(Isolate::Current()),
+      mode_(mode) {
+  isolate_->zone()->scope_nesting_++;
+}
+
+
+bool ZoneScope::ShouldDeleteOnExit() {
+  return isolate_->zone()->scope_nesting_ == 1 && mode_ == DELETE_ON_EXIT;
+}
+
+
+int ZoneScope::nesting() {
+  return Isolate::Current()->zone()->scope_nesting_;
+}
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_ZONE_INL_H_
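
AssertNoZoneAllocation above turns into a save-and-restore guard over a flag that now lives on the isolate rather than in a static. The pattern itself is a plain RAII scope; here is a self-contained sketch (Arena and NoArenaAllocationScope are invented, and the arena is passed explicitly where the code above reaches for Isolate::Current()):

#include <cassert>
#include <cstddef>
#include <vector>

class Arena {
 public:
  void* New(size_t size) {
    assert(allow_allocation_);  // mirrors the ASSERT at the top of Zone::New
    storage_.emplace_back(size);
    return storage_.back().data();
  }
  bool allow_allocation() const { return allow_allocation_; }
  void set_allow_allocation(bool allow) { allow_allocation_ = allow; }
 private:
  bool allow_allocation_ = true;
  std::vector<std::vector<char>> storage_;
};

// Saves the previous flag in prev_ and restores it on destruction, just like
// the AssertNoZoneAllocation constructor/destructor pair above.
class NoArenaAllocationScope {
 public:
  explicit NoArenaAllocationScope(Arena* arena)
      : arena_(arena), prev_(arena->allow_allocation()) {
    arena_->set_allow_allocation(false);
  }
  ~NoArenaAllocationScope() { arena_->set_allow_allocation(prev_); }
 private:
  Arena* arena_;
  bool prev_;
};
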
index f8dbaabc7888b0316eb58c0f04dfc57481ac0a46..42ce8c5cb7c2d72392c3f80cbbc92f42ac418cf4 100644 (file)
@@ -34,20 +34,28 @@ namespace v8 {
 namespace internal {
 
 
-Address Zone::position_ = 0;
-Address Zone::limit_ = 0;
-int Zone::zone_excess_limit_ = 256 * MB;
-int Zone::segment_bytes_allocated_ = 0;
+Zone::Zone()
+    : zone_excess_limit_(256 * MB),
+      segment_bytes_allocated_(0),
+      position_(0),
+      limit_(0),
+      scope_nesting_(0),
+      segment_head_(NULL) {
+}
 unsigned Zone::allocation_size_ = 0;
 
-bool AssertNoZoneAllocation::allow_allocation_ = true;
 
-int ZoneScope::nesting_ = 0;
+ZoneScope::~ZoneScope() {
+  ASSERT_EQ(Isolate::Current(), isolate_);
+  if (ShouldDeleteOnExit()) isolate_->zone()->DeleteAll();
+  isolate_->zone()->scope_nesting_--;
+}
+
 
 // Segments represent chunks of memory: They have starting address
 // (encoded in the this pointer) and a size in bytes. Segments are
 // chained together forming a LIFO structure with the newest segment
-// available as Segment::head(). Segments are allocated using malloc()
+// available as segment_head_. Segments are allocated using malloc()
 // and de-allocated using free().
 
 class Segment {
@@ -61,45 +69,38 @@ class Segment {
   Address start() const { return address(sizeof(Segment)); }
   Address end() const { return address(size_); }
 
-  static Segment* head() { return head_; }
-  static void set_head(Segment* head) { head_ = head; }
-
-  // Creates a new segment, sets it size, and pushes it to the front
-  // of the segment chain. Returns the new segment.
-  static Segment* New(int size) {
-    Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
-    Zone::adjust_segment_bytes_allocated(size);
-    if (result != NULL) {
-      result->next_ = head_;
-      result->size_ = size;
-      head_ = result;
-    }
-    return result;
-  }
-
-  // Deletes the given segment. Does not touch the segment chain.
-  static void Delete(Segment* segment, int size) {
-    Zone::adjust_segment_bytes_allocated(-size);
-    Malloced::Delete(segment);
-  }
-
-  static int bytes_allocated() { return bytes_allocated_; }
-
  private:
   // Computes the address of the nth byte in this segment.
   Address address(int n) const {
     return Address(this) + n;
   }
 
-  static Segment* head_;
-  static int bytes_allocated_;
   Segment* next_;
   int size_;
+
+  friend class Zone;
 };
 
 
-Segment* Segment::head_ = NULL;
-int Segment::bytes_allocated_ = 0;
+// Creates a new segment, sets its size, and pushes it to the front
+// of the segment chain. Returns the new segment.
+Segment* Zone::NewSegment(int size) {
+  Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
+  adjust_segment_bytes_allocated(size);
+  if (result != NULL) {
+    result->next_ = segment_head_;
+    result->size_ = size;
+    segment_head_ = result;
+  }
+  return result;
+}
+
+
+// Deletes the given segment. Does not touch the segment chain.
+void Zone::DeleteSegment(Segment* segment, int size) {
+  adjust_segment_bytes_allocated(-size);
+  Malloced::Delete(segment);
+}
 
 
 void Zone::DeleteAll() {
@@ -109,14 +110,14 @@ void Zone::DeleteAll() {
 #endif
 
   // Find a segment with a suitable size to keep around.
-  Segment* keep = Segment::head();
+  Segment* keep = segment_head_;
   while (keep != NULL && keep->size() > kMaximumKeptSegmentSize) {
     keep = keep->next();
   }
 
   // Traverse the chained list of segments, zapping (in debug mode)
   // and freeing every segment except the one we wish to keep.
-  Segment* current = Segment::head();
+  Segment* current = segment_head_;
   while (current != NULL) {
     Segment* next = current->next();
     if (current == keep) {
@@ -128,7 +129,7 @@ void Zone::DeleteAll() {
       // Zap the entire current segment (including the header).
       memset(current, kZapDeadByte, size);
 #endif
-      Segment::Delete(current, size);
+      DeleteSegment(current, size);
     }
     current = next;
   }
@@ -150,7 +151,7 @@ void Zone::DeleteAll() {
   }
 
   // Update the head segment to be the kept segment (if any).
-  Segment::set_head(keep);
+  segment_head_ = keep;
 }
 
 
@@ -164,7 +165,7 @@ Address Zone::NewExpand(int size) {
   // strategy, where we increase the segment size every time we expand
   // except that we employ a maximum segment size when we delete. This
   // is to avoid excessive malloc() and free() overhead.
-  Segment* head = Segment::head();
+  Segment* head = segment_head_;
   int old_size = (head == NULL) ? 0 : head->size();
   static const int kSegmentOverhead = sizeof(Segment) + kAlignment;
   int new_size = kSegmentOverhead + size + (old_size << 1);
@@ -177,7 +178,7 @@ Address Zone::NewExpand(int size) {
     // requested size.
     new_size = Max(kSegmentOverhead + size, kMaximumSegmentSize);
   }
-  Segment* segment = Segment::New(new_size);
+  Segment* segment = NewSegment(new_size);
   if (segment == NULL) {
     V8::FatalProcessOutOfMemory("Zone");
     return NULL;
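
The comments in this file already describe the allocator: segments are malloc'ed chunks chained LIFO from segment_head_, and NewExpand roughly doubles the newest segment while always leaving room for the current request. For readers who want the whole shape in one place, here is a minimal stand-alone sketch (MiniZone is an invented name; V8's Zone additionally caps segment growth and keeps one small segment alive across DeleteAll):

#include <cstddef>
#include <cstdlib>

struct Segment {
  Segment* next;
  size_t size;  // total bytes, including this header
};

class MiniZone {
 public:
  ~MiniZone() { DeleteAll(); }

  void* New(size_t size) {
    size = (size + 7) & ~size_t(7);  // keep every result 8-byte aligned
    if (size > available_) Expand(size);
    void* result = position_;
    position_ += size;
    available_ -= size;
    return result;
  }

  void DeleteAll() {
    while (head_ != nullptr) {
      Segment* next = head_->next;
      std::free(head_);
      head_ = next;
    }
    position_ = nullptr;
    available_ = 0;
  }

 private:
  void Expand(size_t size) {
    // Roughly double the newest segment each time, but always leave room for
    // the current request; this is the same growth shape as Zone::NewExpand.
    size_t old_size = (head_ == nullptr) ? 0 : head_->size;
    size_t new_size = sizeof(Segment) + size;
    if (old_size * 2 > new_size) new_size = old_size * 2;
    Segment* segment = static_cast<Segment*>(std::malloc(new_size));
    // A real allocator must handle malloc failure; omitted in this sketch.
    segment->next = head_;
    segment->size = new_size;
    head_ = segment;  // LIFO chain: the newest segment sits at the front
    position_ = reinterpret_cast<char*>(segment + 1);
    available_ = new_size - sizeof(Segment);
  }

  Segment* head_ = nullptr;
  char* position_ = nullptr;
  size_t available_ = 0;
};
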
index e299f158a8f03d341a04f8854be87ea07f1edef4..13b55c4c7f3a11d1223f2cd7c0c900392f137147 100644 (file)
@@ -39,6 +39,7 @@ enum ZoneScopeMode {
   DONT_DELETE_ON_EXIT
 };
 
+class Segment;
 
 // The Zone supports very fast allocation of small chunks of
 // memory. The chunks cannot be deallocated individually, but instead
@@ -57,23 +58,25 @@ class Zone {
  public:
   // Allocate 'size' bytes of memory in the Zone; expands the Zone by
   // allocating new segments of memory on demand using malloc().
-  static inline void* New(int size);
+  inline void* New(int size);
 
   template <typename T>
-  static inline T* NewArray(int length);
+  inline T* NewArray(int length);
 
   // Delete all objects and free all memory allocated in the Zone.
-  static void DeleteAll();
+  void DeleteAll();
 
   // Returns true if more memory has been allocated in zones than
   // the limit allows.
-  static inline bool excess_allocation();
+  inline bool excess_allocation();
 
-  static inline void adjust_segment_bytes_allocated(int delta);
+  inline void adjust_segment_bytes_allocated(int delta);
 
   static unsigned allocation_size_;
 
  private:
+  friend class Isolate;
+  friend class ZoneScope;
 
   // All pointers returned from New() have this alignment.
   static const int kAlignment = kPointerSize;
@@ -88,30 +91,39 @@ class Zone {
   static const int kMaximumKeptSegmentSize = 64 * KB;
 
   // Report zone excess when allocation exceeds this limit.
-  static int zone_excess_limit_;
+  int zone_excess_limit_;
 
   // The number of bytes allocated in segments.  Note that this number
   // includes memory allocated from the OS but not yet allocated from
   // the zone.
-  static int segment_bytes_allocated_;
-
-  // The Zone is intentionally a singleton; you should not try to
-  // allocate instances of the class.
-  Zone() { UNREACHABLE(); }
+  int segment_bytes_allocated_;
 
+  // Each isolate gets its own zone.
+  Zone();
 
   // Expand the Zone to hold at least 'size' more bytes and allocate
   // the bytes. Returns the address of the newly allocated chunk of
   // memory in the Zone. Should only be called if there isn't enough
   // room in the Zone already.
-  static Address NewExpand(int size);
+  Address NewExpand(int size);
+
+  // Creates a new segment, sets its size, and pushes it to the front
+  // of the segment chain. Returns the new segment.
+  Segment* NewSegment(int size);
 
+  // Deletes the given segment. Does not touch the segment chain.
+  void DeleteSegment(Segment* segment, int size);
 
   // The free region in the current (front) segment is represented as
   // the half-open interval [position, limit). The 'position' variable
   // is guaranteed to be aligned as dictated by kAlignment.
-  static Address position_;
-  static Address limit_;
+  Address position_;
+  Address limit_;
+
+  int scope_nesting_;
+
+  Segment* segment_head_;
+  Isolate* isolate_;
 };
 
 
@@ -120,7 +132,7 @@ class Zone {
 class ZoneObject {
  public:
   // Allocate a new ZoneObject of 'size' bytes in the Zone.
-  void* operator new(size_t size) { return Zone::New(static_cast<int>(size)); }
+  inline void* operator new(size_t size);
 
   // Ideally, the delete operator should be private instead of
   // public, but unfortunately the compiler sometimes synthesizes
@@ -136,14 +148,10 @@ class ZoneObject {
 
 class AssertNoZoneAllocation {
  public:
-  AssertNoZoneAllocation() : prev_(allow_allocation_) {
-    allow_allocation_ = false;
-  }
-  ~AssertNoZoneAllocation() { allow_allocation_ = prev_; }
-  static bool allow_allocation() { return allow_allocation_; }
+  inline AssertNoZoneAllocation();
+  inline ~AssertNoZoneAllocation();
  private:
   bool prev_;
-  static bool allow_allocation_;
 };
 
 
@@ -153,7 +161,7 @@ class AssertNoZoneAllocation {
 class ZoneListAllocationPolicy {
  public:
   // Allocate 'size' bytes of memory in the zone.
-  static void* New(int size) {  return Zone::New(size); }
+  static inline void* New(int size);
 
   // De-allocation attempts are silently ignored.
   static void Delete(void* p) { }
@@ -189,18 +197,12 @@ typedef ZoneList<Handle<Map> > ZoneMapList;
 // outer-most scope.
 class ZoneScope BASE_EMBEDDED {
  public:
-  explicit ZoneScope(ZoneScopeMode mode) : mode_(mode) {
-    nesting_++;
-  }
+  // TODO(isolates): pass isolate pointer here.
+  inline explicit ZoneScope(ZoneScopeMode mode);
 
-  virtual ~ZoneScope() {
-    if (ShouldDeleteOnExit()) Zone::DeleteAll();
-    --nesting_;
-  }
+  virtual ~ZoneScope();
 
-  bool ShouldDeleteOnExit() {
-    return nesting_ == 1 && mode_ == DELETE_ON_EXIT;
-  }
+  inline bool ShouldDeleteOnExit();
 
   // For ZoneScopes that do not delete on exit by default, call this
   // method to request deletion on exit.
@@ -208,11 +210,11 @@ class ZoneScope BASE_EMBEDDED {
     mode_ = DELETE_ON_EXIT;
   }
 
-  static int nesting() { return nesting_; }
+  inline static int nesting();
 
  private:
+  Isolate* isolate_;
   ZoneScopeMode mode_;
-  static int nesting_;
 };
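
The ZoneScope contract in this header is that zone memory is released only when the outermost DELETE_ON_EXIT scope unwinds, which is exactly what the nesting == 1 test in ShouldDeleteOnExit() encodes. A small invented sketch of just that nesting logic:

#include <cstdio>

enum SketchMode { kDeleteOnExit, kKeepOnExit };

struct ArenaState {
  int nesting = 0;
  void DeleteAll() { std::puts("arena wiped"); }  // stand-in for Zone::DeleteAll
};

class ArenaScope {
 public:
  ArenaScope(ArenaState* arena, SketchMode mode) : arena_(arena), mode_(mode) {
    ++arena_->nesting;
  }
  ~ArenaScope() {
    // Only the outermost deleting scope actually frees the arena.
    if (arena_->nesting == 1 && mode_ == kDeleteOnExit) arena_->DeleteAll();
    --arena_->nesting;
  }
 private:
  ArenaState* arena_;
  SketchMode mode_;
};

int main() {
  ArenaState arena;
  {
    ArenaScope outer(&arena, kDeleteOnExit);
    {
      ArenaScope inner(&arena, kDeleteOnExit);  // nesting == 2: no wipe here
    }
  }  // nesting == 1 here: "arena wiped" is printed exactly once
  return 0;
}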
 
 
index 404b692b27480b62c3e7313b4b63a10f52edb7ae..94b3becea03b1201d520e7b465cfb31b96ae768a 100644 (file)
@@ -87,8 +87,9 @@ class CcTest {
 class ApiTestFuzzer: public v8::internal::Thread {
  public:
   void CallTest();
-  explicit ApiTestFuzzer(int num)
-      : test_number_(num),
+  explicit ApiTestFuzzer(v8::internal::Isolate* isolate, int num)
+      : Thread(isolate),
+        test_number_(num),
         gate_(v8::internal::OS::CreateSemaphore(0)),
         active_(true) {
   }
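
The only change to this test header is that ApiTestFuzzer now forwards an Isolate* to its Thread base class. The general shape, a thread object that carries the isolate it should operate on into the new thread instead of relying on a process-wide global, could be sketched like this (WorkerThread and FakeIsolate are invented; V8's own Thread class is different):

#include <thread>

class FakeIsolate {};

class WorkerThread {
 public:
  explicit WorkerThread(FakeIsolate* isolate) : isolate_(isolate) {}
  virtual ~WorkerThread() {}
  void Start() { thread_ = std::thread([this] { Run(); }); }
  void Join() { if (thread_.joinable()) thread_.join(); }  // call before destruction
  virtual void Run() = 0;
 protected:
  FakeIsolate* isolate_;  // carried into the spawned thread, not a global
 private:
  std::thread thread_;
};

class GCThread : public WorkerThread {
 public:
  using WorkerThread::WorkerThread;
  virtual void Run() { /* e.g. collect garbage in isolate_'s heap */ }
};
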
index 25f5c39518ddb84666be7e75b9cccc979b01b25b..b74f166f65d2e6d524c7879c831f815c1c9b8fd7 100644 (file)
@@ -243,7 +243,7 @@ static v8::Handle<Value> CheckAccessorArgsCorrect(Local<String> name,
   ApiTestFuzzer::Fuzz();
   CHECK(info.This() == info.Holder());
   CHECK(info.Data()->Equals(v8::String::New("data")));
-  i::Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
   CHECK(info.This() == info.Holder());
   CHECK(info.Data()->Equals(v8::String::New("data")));
   return v8::Integer::New(17);
@@ -397,9 +397,9 @@ static v8::Handle<Value> StackCheck(Local<String> name,
   for (int i = 0; !iter.done(); i++) {
     i::StackFrame* frame = iter.frame();
     CHECK(i != 0 || (frame->type() == i::StackFrame::EXIT));
-    CHECK(frame->code()->IsCode());
+    i::Code* code = frame->LookupCode(i::Isolate::Current());
+    CHECK(code->IsCode());
     i::Address pc = frame->pc();
-    i::Code* code = frame->code();
     CHECK(code->contains(pc));
     iter.Advance();
   }
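
The loop above walks the stack and asserts that each frame's pc lies inside the code object, now obtained through frame->LookupCode(isolate) instead of the old frame->code(). The containment check itself is just an address-range test; a minimal invented stand-in would be:

#include <cstdint>

typedef const uint8_t* Address;

// Invented stand-in for "does this code object contain this pc?".
struct InstructionRange {
  Address start;
  Address end;  // one past the last instruction byte
  bool contains(Address pc) const { return start <= pc && pc < end; }
};
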
index d2a28d7f4b9fed1caec431a0a8717a37be3f6185..00ff86621e4e0f0d715719bd9b944aa6743eb997 100644 (file)
@@ -27,7 +27,6 @@
 
 #include "v8.h"
 #include "accessors.h"
-#include "top.h"
 
 #include "cctest.h"
 
@@ -38,13 +37,14 @@ using namespace v8::internal;
 static MaybeObject* AllocateAfterFailures() {
   static int attempts = 0;
   if (++attempts < 3) return Failure::RetryAfterGC();
+  Heap* heap = Isolate::Current()->heap();
 
   // New space.
-  NewSpace* new_space = Heap::new_space();
+  NewSpace* new_space = heap->new_space();
   static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
   while (new_space->Available() > kNewSpaceFillerSize) {
     int available_before = static_cast<int>(new_space->Available());
-    CHECK(!Heap::AllocateByteArray(0)->IsFailure());
+    CHECK(!heap->AllocateByteArray(0)->IsFailure());
     if (available_before == new_space->Available()) {
       // It seems that we are avoiding new space allocations when
       // allocation is forced, so no need to fill up new space
@@ -52,45 +52,46 @@ static MaybeObject* AllocateAfterFailures() {
       break;
     }
   }
-  CHECK(!Heap::AllocateByteArray(100)->IsFailure());
-  CHECK(!Heap::AllocateFixedArray(100, NOT_TENURED)->IsFailure());
+  CHECK(!heap->AllocateByteArray(100)->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());
 
   // Make sure we can allocate through optimized allocation functions
   // for specific kinds.
-  CHECK(!Heap::AllocateFixedArray(100)->IsFailure());
-  CHECK(!Heap::AllocateHeapNumber(0.42)->IsFailure());
-  CHECK(!Heap::AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
-  Object* object =
-      Heap::AllocateJSObject(*Top::object_function())->ToObjectChecked();
-  CHECK(!Heap::CopyJSObject(JSObject::cast(object))->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100)->IsFailure());
+  CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure());
+  CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
+  Object* object = heap->AllocateJSObject(
+      *Isolate::Current()->object_function())->ToObjectChecked();
+  CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());
 
   // Old data space.
-  OldSpace* old_data_space = Heap::old_data_space();
+  OldSpace* old_data_space = heap->old_data_space();
   static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
   while (old_data_space->Available() > kOldDataSpaceFillerSize) {
-    CHECK(!Heap::AllocateByteArray(0, TENURED)->IsFailure());
+    CHECK(!heap->AllocateByteArray(0, TENURED)->IsFailure());
   }
-  CHECK(!Heap::AllocateRawAsciiString(100, TENURED)->IsFailure());
+  CHECK(!heap->AllocateRawAsciiString(100, TENURED)->IsFailure());
 
   // Large object space.
-  while (!Heap::OldGenerationAllocationLimitReached()) {
-    CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
+  while (!heap->OldGenerationAllocationLimitReached()) {
+    CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
   }
-  CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
+  CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
 
   // Map space.
-  MapSpace* map_space = Heap::map_space();
+  MapSpace* map_space = heap->map_space();
   static const int kMapSpaceFillerSize = Map::kSize;
   InstanceType instance_type = JS_OBJECT_TYPE;
   int instance_size = JSObject::kHeaderSize;
   while (map_space->Available() > kMapSpaceFillerSize) {
-    CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
+    CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
   }
-  CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
+  CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
 
   // Test that we can allocate in old pointer space and code space.
-  CHECK(!Heap::AllocateFixedArray(100, TENURED)->IsFailure());
-  CHECK(!Heap::CopyCode(Builtins::builtin(Builtins::Illegal))->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
+  CHECK(!heap->CopyCode(Isolate::Current()->builtins()->builtin(
+      Builtins::Illegal))->IsFailure());
 
   // Return success.
   return Smi::FromInt(42);
@@ -98,7 +99,7 @@ static MaybeObject* AllocateAfterFailures() {
 
 
 static Handle<Object> Test() {
-  CALL_HEAP_FUNCTION(AllocateAfterFailures(), Object);
+  CALL_HEAP_FUNCTION(ISOLATE, AllocateAfterFailures(), Object);
 }
 
 
@@ -129,18 +130,19 @@ TEST(StressJS) {
   v8::HandleScope scope;
   env->Enter();
   Handle<JSFunction> function =
-      Factory::NewFunction(Factory::function_symbol(), Factory::null_value());
+      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
   // Force the creation of an initial map and set the code to
   // something empty.
-  Factory::NewJSObject(function);
-  function->ReplaceCode(Builtins::builtin(Builtins::EmptyFunction));
+  FACTORY->NewJSObject(function);
+  function->ReplaceCode(Isolate::Current()->builtins()->builtin(
+      Builtins::EmptyFunction));
   // Patch the map to have an accessor for "get".
   Handle<Map> map(function->initial_map());
   Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
-  Handle<Proxy> proxy = Factory::NewProxy(&kDescriptor);
-  instance_descriptors = Factory::CopyAppendProxyDescriptor(
+  Handle<Proxy> proxy = FACTORY->NewProxy(&kDescriptor);
+  instance_descriptors = FACTORY->CopyAppendProxyDescriptor(
       instance_descriptors,
-      Factory::NewStringFromAscii(Vector<const char>("get", 3)),
+      FACTORY->NewStringFromAscii(Vector<const char>("get", 3)),
       proxy,
       static_cast<PropertyAttributes>(0));
   map->set_instance_descriptors(*instance_descriptors);
@@ -183,7 +185,8 @@ class Block {
 
 TEST(CodeRange) {
   const int code_range_size = 16*MB;
-  CodeRange::Setup(code_range_size);
+  OS::Setup();
+  Isolate::Current()->code_range()->Setup(code_range_size);
   int current_allocated = 0;
   int total_allocated = 0;
   List<Block> blocks(1000);
@@ -195,14 +198,16 @@ TEST(CodeRange) {
       size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
            Pseudorandom() % 5000 + 1;
       size_t allocated = 0;
-      void* base = CodeRange::AllocateRawMemory(requested, &allocated);
+      void* base = Isolate::Current()->code_range()->
+          AllocateRawMemory(requested, &allocated);
       blocks.Add(Block(base, static_cast<int>(allocated)));
       current_allocated += static_cast<int>(allocated);
       total_allocated += static_cast<int>(allocated);
     } else {
       // Free a block.
       int index = Pseudorandom() % blocks.length();
-      CodeRange::FreeRawMemory(blocks[index].base, blocks[index].size);
+      Isolate::Current()->code_range()->FreeRawMemory(
+          blocks[index].base, blocks[index].size);
       current_allocated -= blocks[index].size;
       if (index < blocks.length() - 1) {
         blocks[index] = blocks.RemoveLast();
@@ -212,5 +217,5 @@ TEST(CodeRange) {
     }
   }
 
-  CodeRange::TearDown();
+  Isolate::Current()->code_range()->TearDown();
 }
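
AllocateAfterFailures above deliberately returns Failure::RetryAfterGC() on its first attempts, and the CALL_HEAP_FUNCTION(ISOLATE, ...) macro is the retry loop that keeps calling it until an object comes back. Stripped of V8 specifics, the protocol is bounded retry with a collection in between; a hypothetical sketch (AllocResult and RetryingCall are invented names):

#include <functional>
#include <optional>

struct AllocResult {
  bool retry_after_gc = false;
  int value = 0;  // stands in for the allocated object
};

// Retries the allocation a bounded number of times, running a collection
// between attempts, and gives up (returns nullopt) if it keeps failing.
std::optional<int> RetryingCall(const std::function<AllocResult()>& allocate,
                                const std::function<void()>& collect_garbage,
                                int max_attempts = 3) {
  for (int attempt = 0; attempt < max_attempts; ++attempt) {
    AllocResult result = allocate();
    if (!result.retry_after_gc) return result.value;
    collect_garbage();  // free some space, then try again
  }
  return std::nullopt;
}
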
index 755293057a3dd3d74a26d4b239ede916b8ba2aa8..fbe5d8bd09d35b704808c6d9a42b62467eafdc26 100644 (file)
@@ -34,7 +34,6 @@
 #include "execution.h"
 #include "snapshot.h"
 #include "platform.h"
-#include "top.h"
 #include "utils.h"
 #include "cctest.h"
 #include "parser.h"
@@ -398,11 +397,11 @@ THREADED_TEST(ScriptUsingStringResource) {
     CHECK(source->IsExternal());
     CHECK_EQ(resource,
              static_cast<TestResource*>(source->GetExternalStringResource()));
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  v8::internal::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestResource::dispose_count);
 }
 
@@ -419,11 +418,11 @@ THREADED_TEST(ScriptUsingAsciiStringResource) {
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestAsciiResource::dispose_count);
 }
 
@@ -436,19 +435,19 @@ THREADED_TEST(ScriptMakingExternalString) {
     LocalContext env;
     Local<String> source = String::New(two_byte_source);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
     bool success = source->MakeExternal(new TestResource(two_byte_source));
     CHECK(success);
     Local<Script> script = Script::Compile(source);
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestResource::dispose_count);
 }
 
@@ -461,8 +460,8 @@ THREADED_TEST(ScriptMakingExternalAsciiString) {
     LocalContext env;
     Local<String> source = v8_str(c_source);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
     bool success = source->MakeExternal(
         new TestAsciiResource(i::StrDup(c_source)));
     CHECK(success);
@@ -470,11 +469,11 @@ THREADED_TEST(ScriptMakingExternalAsciiString) {
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestAsciiResource::dispose_count);
 }
 
@@ -484,8 +483,8 @@ TEST(MakingExternalStringConditions) {
   LocalContext env;
 
   // Free some space in the new space so that we can check freshness.
-  i::Heap::CollectGarbage(i::NEW_SPACE);
-  i::Heap::CollectGarbage(i::NEW_SPACE);
+  HEAP->CollectGarbage(i::NEW_SPACE);
+  HEAP->CollectGarbage(i::NEW_SPACE);
 
   uint16_t* two_byte_string = AsciiToTwoByteString("small");
   Local<String> small_string = String::New(two_byte_string);
@@ -494,8 +493,8 @@ TEST(MakingExternalStringConditions) {
   // We should refuse to externalize newly created small string.
   CHECK(!small_string->CanMakeExternal());
   // Trigger GCs so that the newly allocated string moves to old gen.
-  i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-  i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
+  HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
   // Old space strings should be accepted.
   CHECK(small_string->CanMakeExternal());
 
@@ -530,15 +529,15 @@ TEST(MakingExternalAsciiStringConditions) {
   LocalContext env;
 
   // Free some space in the new space so that we can check freshness.
-  i::Heap::CollectGarbage(i::NEW_SPACE);
-  i::Heap::CollectGarbage(i::NEW_SPACE);
+  HEAP->CollectGarbage(i::NEW_SPACE);
+  HEAP->CollectGarbage(i::NEW_SPACE);
 
   Local<String> small_string = String::New("small");
   // We should refuse to externalize newly created small string.
   CHECK(!small_string->CanMakeExternal());
   // Trigger GCs so that the newly allocated string moves to old gen.
-  i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-  i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
+  HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
   // Old space strings should be accepted.
   CHECK(small_string->CanMakeExternal());
 
@@ -570,13 +569,13 @@ THREADED_TEST(UsingExternalString) {
         String::NewExternal(new TestResource(two_byte_string));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
-    i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
+    i::Handle<i::String> isymbol = FACTORY->SymbolFromString(istring);
     CHECK(isymbol->IsSymbol());
   }
-  i::Heap::CollectAllGarbage(false);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 
@@ -588,13 +587,13 @@ THREADED_TEST(UsingExternalAsciiString) {
         new TestAsciiResource(i::StrDup(one_byte_string)));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     // Trigger GCs so that the newly allocated string moves to old gen.
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in survivor space now
-    i::Heap::CollectGarbage(i::NEW_SPACE);  // in old gen now
-    i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+    HEAP->CollectGarbage(i::NEW_SPACE);  // in old gen now
+    i::Handle<i::String> isymbol = FACTORY->SymbolFromString(istring);
     CHECK(isymbol->IsSymbol());
   }
-  i::Heap::CollectAllGarbage(false);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 
@@ -607,12 +606,12 @@ THREADED_TEST(ScavengeExternalString) {
     Local<String> string =
         String::NewExternal(new TestResource(two_byte_string));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
-    i::Heap::CollectGarbage(i::NEW_SPACE);
-    in_new_space = i::Heap::InNewSpace(*istring);
-    CHECK(in_new_space || i::Heap::old_data_space()->Contains(*istring));
+    HEAP->CollectGarbage(i::NEW_SPACE);
+    in_new_space = HEAP->InNewSpace(*istring);
+    CHECK(in_new_space || HEAP->old_data_space()->Contains(*istring));
     CHECK_EQ(0, TestResource::dispose_count);
   }
-  i::Heap::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+  HEAP->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
   CHECK_EQ(1, TestResource::dispose_count);
 }
 
@@ -626,12 +625,12 @@ THREADED_TEST(ScavengeExternalAsciiString) {
     Local<String> string = String::NewExternal(
         new TestAsciiResource(i::StrDup(one_byte_string)));
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
-    i::Heap::CollectGarbage(i::NEW_SPACE);
-    in_new_space = i::Heap::InNewSpace(*istring);
-    CHECK(in_new_space || i::Heap::old_data_space()->Contains(*istring));
+    HEAP->CollectGarbage(i::NEW_SPACE);
+    in_new_space = HEAP->InNewSpace(*istring);
+    CHECK(in_new_space || HEAP->old_data_space()->Contains(*istring));
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  i::Heap::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+  HEAP->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
   CHECK_EQ(1, TestAsciiResource::dispose_count);
 }
 
@@ -671,11 +670,11 @@ TEST(ExternalStringWithDisposeHandling) {
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestAsciiResourceWithDisposeControl::dispose_calls);
   CHECK_EQ(0, TestAsciiResource::dispose_count);
 
@@ -692,11 +691,11 @@ TEST(ExternalStringWithDisposeHandling) {
     Local<Value> value = script->Run();
     CHECK(value->IsNumber());
     CHECK_EQ(7, value->Int32Value());
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     CHECK_EQ(0, TestAsciiResource::dispose_count);
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, TestAsciiResourceWithDisposeControl::dispose_calls);
   CHECK_EQ(1, TestAsciiResource::dispose_count);
 }
@@ -742,9 +741,9 @@ THREADED_TEST(StringConcat) {
     CHECK(value->IsNumber());
     CHECK_EQ(68, value->Int32Value());
   }
-  i::CompilationCache::Clear();
-  i::Heap::CollectAllGarbage(false);
-  i::Heap::CollectAllGarbage(false);
+  i::Isolate::Current()->compilation_cache()->Clear();
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 
@@ -1579,12 +1578,12 @@ THREADED_TEST(InternalFieldsNativePointers) {
 
   // Check reading and writing aligned pointers.
   obj->SetPointerInInternalField(0, aligned);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
 
   // Check reading and writing unaligned pointers.
   obj->SetPointerInInternalField(0, unaligned);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
 
   delete[] data;
@@ -1610,19 +1609,19 @@ THREADED_TEST(InternalFieldsNativePointersAndExternal) {
   CHECK_EQ(1, static_cast<int>(reinterpret_cast<uintptr_t>(unaligned) & 0x1));
 
   obj->SetPointerInInternalField(0, aligned);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(aligned, v8::External::Unwrap(obj->GetInternalField(0)));
 
   obj->SetPointerInInternalField(0, unaligned);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(unaligned, v8::External::Unwrap(obj->GetInternalField(0)));
 
   obj->SetInternalField(0, v8::External::Wrap(aligned));
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
 
   obj->SetInternalField(0, v8::External::Wrap(unaligned));
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
 
   delete[] data;
@@ -1635,7 +1634,7 @@ THREADED_TEST(IdentityHash) {
 
   // Ensure that the test starts with an fresh heap to test whether the hash
   // code is based on the address.
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   Local<v8::Object> obj = v8::Object::New();
   int hash = obj->GetIdentityHash();
   int hash1 = obj->GetIdentityHash();
@@ -1645,7 +1644,7 @@ THREADED_TEST(IdentityHash) {
   // objects should not be assigned the same hash code. If the test below fails
   // the random number generator should be evaluated.
   CHECK_NE(hash, hash2);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   int hash3 = v8::Object::New()->GetIdentityHash();
   // Make sure that the identity hash is not based on the initial address of
   // the object alone. If the test below fails the random number generator
@@ -1682,7 +1681,7 @@ THREADED_TEST(HiddenProperties) {
   v8::Local<v8::String> empty = v8_str("");
   v8::Local<v8::String> prop_name = v8_str("prop_name");
 
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   // Make sure delete of a non-existent hidden value works
   CHECK(obj->DeleteHiddenValue(key));
@@ -1692,7 +1691,7 @@ THREADED_TEST(HiddenProperties) {
   CHECK(obj->SetHiddenValue(key, v8::Integer::New(2002)));
   CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
 
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   // Make sure we do not find the hidden property.
   CHECK(!obj->Has(empty));
@@ -1703,7 +1702,7 @@ THREADED_TEST(HiddenProperties) {
   CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
   CHECK_EQ(2003, obj->Get(empty)->Int32Value());
 
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   // Add another property and delete it afterwards to force the object in
   // slow case.
@@ -1714,7 +1713,7 @@ THREADED_TEST(HiddenProperties) {
   CHECK(obj->Delete(prop_name));
   CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
 
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   CHECK(obj->DeleteHiddenValue(key));
   CHECK(obj->GetHiddenValue(key).IsEmpty());
@@ -1847,7 +1846,7 @@ THREADED_TEST(ApiObjectGroups) {
     V8::AddImplicitReferences(g2s2, g2_children, 1);
   }
   // Do a full GC
-  i::Heap::CollectGarbage(i::OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(i::OLD_POINTER_SPACE);
 
   // All object should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
@@ -1871,7 +1870,7 @@ THREADED_TEST(ApiObjectGroups) {
     V8::AddImplicitReferences(g2s2, g2_children, 1);
   }
 
-  i::Heap::CollectGarbage(i::OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(i::OLD_POINTER_SPACE);
 
   // All objects should be gone. 5 global handles in total.
   CHECK_EQ(5, NumberOfWeakCalls);
@@ -1880,7 +1879,7 @@ THREADED_TEST(ApiObjectGroups) {
   g1c1.MakeWeak(reinterpret_cast<void*>(1234), &WeakPointerCallback);
   g2c1.MakeWeak(reinterpret_cast<void*>(1234), &WeakPointerCallback);
 
-  i::Heap::CollectGarbage(i::OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(i::OLD_POINTER_SPACE);
   CHECK_EQ(7, NumberOfWeakCalls);
 }
 
@@ -1936,7 +1935,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
     V8::AddImplicitReferences(g3s1, g3_children, 1);
   }
   // Do a full GC
-  i::Heap::CollectGarbage(i::OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(i::OLD_POINTER_SPACE);
 
   // All object should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
@@ -1960,7 +1959,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
     V8::AddImplicitReferences(g3s1, g3_children, 1);
   }
 
-  i::Heap::CollectGarbage(i::OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(i::OLD_POINTER_SPACE);
 
   // All objects should be gone. 7 global handles in total.
   CHECK_EQ(7, NumberOfWeakCalls);
@@ -2264,8 +2263,6 @@ TEST(OutOfMemoryNested) {
 TEST(HugeConsStringOutOfMemory) {
   // It's not possible to read a snapshot into a heap with different dimensions.
   if (i::Snapshot::IsEnabled()) return;
-  v8::HandleScope scope;
-  LocalContext context;
   // Set heap limits.
   static const int K = 1024;
   v8::ResourceConstraints constraints;
@@ -2276,6 +2273,9 @@ TEST(HugeConsStringOutOfMemory) {
   // Execute a script that causes out of memory.
   v8::V8::IgnoreOutOfMemoryException();
 
+  v8::HandleScope scope;
+  LocalContext context;
+
   // Build huge string. This should fail with out of memory exception.
   Local<Value> result = CompileRun(
     "var str = Array.prototype.join.call({length: 513}, \"A\").toUpperCase();"
@@ -2695,7 +2695,7 @@ v8::Handle<Value> CThrowCountDown(const v8::Arguments& args) {
       if (try_catch.HasCaught()) {
         CHECK_EQ(expected, count);
         CHECK(result.IsEmpty());
-        CHECK(!i::Top::has_scheduled_exception());
+        CHECK(!i::Isolate::Current()->has_scheduled_exception());
       } else {
         CHECK_NE(expected, count);
       }
@@ -4387,7 +4387,7 @@ static void ForceScavenge(v8::Persistent<v8::Value> obj, void* data) {
   obj.Dispose();
   obj.Clear();
   in_scavenge = true;
-  i::Heap::PerformScavenge();
+  HEAP->PerformScavenge();
   in_scavenge = false;
   *(reinterpret_cast<bool*>(data)) = true;
 }
@@ -4424,7 +4424,7 @@ THREADED_TEST(NoWeakRefCallbacksInScavenge) {
   object_b.MakeWeak(&released_in_scavenge, &CheckIsNotInvokedInScavenge);
 
   while (!object_a_disposed) {
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
   }
   CHECK(!released_in_scavenge);
 }
@@ -4442,7 +4442,7 @@ static v8::Handle<Value> ArgumentsTestCallback(const v8::Arguments& args) {
   CHECK_EQ(v8::Integer::New(3), args[2]);
   CHECK_EQ(v8::Undefined(), args[3]);
   v8::HandleScope scope;
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   return v8::Undefined();
 }
 
@@ -6683,7 +6683,7 @@ THREADED_TEST(SetPrototypeThrows) {
   v8::TryCatch try_catch;
   CHECK(!o1->SetPrototype(o0));
   CHECK(!try_catch.HasCaught());
-  ASSERT(!i::Top::has_pending_exception());
+  ASSERT(!i::Isolate::Current()->has_pending_exception());
 
   CHECK_EQ(42, CompileRun("function f() { return 42; }; f()")->Int32Value());
 }
@@ -7065,7 +7065,7 @@ static v8::Handle<Value> InterceptorHasOwnPropertyGetterGC(
     Local<String> name,
     const AccessorInfo& info) {
   ApiTestFuzzer::Fuzz();
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   return v8::Handle<Value>();
 }
 
@@ -7795,7 +7795,7 @@ static v8::Handle<Value> InterceptorCallICFastApi(Local<String> name,
   int* call_count = reinterpret_cast<int*>(v8::External::Unwrap(info.Data()));
   ++(*call_count);
   if ((*call_count) % 20 == 0) {
-    i::Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
   }
   return v8::Handle<Value>();
 }
@@ -7833,7 +7833,7 @@ static void GenerateSomeGarbage() {
 v8::Handle<v8::Value> DirectApiCallback(const v8::Arguments& args) {
   static int count = 0;
   if (count++ % 3 == 0) {
-    i::Heap::CollectAllGarbage(true);  // This should move the stub
+    HEAP->CollectAllGarbage(true);  // This should move the stub
     GenerateSomeGarbage();  // This should ensure the old stub memory is flushed
   }
   return v8::Handle<v8::Value>();
@@ -7888,7 +7888,7 @@ THREADED_TEST(CallICFastApi_DirectCall_Throw) {
 v8::Handle<v8::Value> DirectGetterCallback(Local<String> name,
                                            const v8::AccessorInfo& info) {
   if (++p_getter_count % 3 == 0) {
-    i::Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     GenerateSomeGarbage();
   }
   return v8::Handle<v8::Value>();
@@ -8803,7 +8803,8 @@ void ApiTestFuzzer::Setup(PartOfTest part) {
       : RegisterThreadedTest::count();
   active_tests_ = tests_being_run_ = end - start;
   for (int i = 0; i < tests_being_run_; i++) {
-    RegisterThreadedTest::nth(i)->fuzzer_ = new ApiTestFuzzer(i + start);
+    RegisterThreadedTest::nth(i)->fuzzer_ = new ApiTestFuzzer(
+        i::Isolate::Current(), i + start);
   }
   for (int i = 0; i < active_tests_; i++) {
     RegisterThreadedTest::nth(i)->fuzzer_->Start();
@@ -9025,11 +9026,11 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
   // the first garbage collection but some of the maps have already
   // been marked at that point.  Therefore some of the maps are not
   // collected until the second garbage collection.
-  i::Heap::CollectAllGarbage(false);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   int count = GetGlobalObjectsCount();
 #ifdef DEBUG
-  if (count != expected) i::Heap::TracePathToGlobal();
+  if (count != expected) HEAP->TracePathToGlobal();
 #endif
   CHECK_EQ(expected, count);
 }
@@ -9095,7 +9096,7 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
   // weak callback of the first handle would be able to 'reallocate' it.
   handle1.MakeWeak(NULL, NewPersistentHandleCallback);
   handle2.Dispose();
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 
@@ -9103,7 +9104,7 @@ v8::Persistent<v8::Object> to_be_disposed;
 
 void DisposeAndForceGcCallback(v8::Persistent<v8::Value> handle, void*) {
   to_be_disposed.Dispose();
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   handle.Dispose();
 }
 
@@ -9119,7 +9120,7 @@ THREADED_TEST(DoNotUseDeletedNodesInSecondLevelGc) {
   }
   handle1.MakeWeak(NULL, DisposeAndForceGcCallback);
   to_be_disposed = handle2;
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 void DisposingCallback(v8::Persistent<v8::Value> handle, void*) {
@@ -9145,7 +9146,7 @@ THREADED_TEST(NoGlobalHandlesOrphaningDueToWeakCallback) {
   }
   handle2.MakeWeak(NULL, DisposingCallback);
   handle3.MakeWeak(NULL, HandleCreatingCallback);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 }
 
 
@@ -9391,6 +9392,31 @@ THREADED_TEST(PropertyEnumeration) {
   CheckProperties(elms->Get(v8::Integer::New(3)), elmc3, elmv3);
 }
 
+THREADED_TEST(PropertyEnumeration2) {
+  v8::HandleScope scope;
+  LocalContext context;
+  v8::Handle<v8::Value> obj = v8::Script::Compile(v8::String::New(
+      "var result = [];"
+      "result[0] = {};"
+      "result[1] = {a: 1, b: 2};"
+      "result[2] = [1, 2, 3];"
+      "var proto = {x: 1, y: 2, z: 3};"
+      "var x = { __proto__: proto, w: 0, z: 1 };"
+      "result[3] = x;"
+      "result;"))->Run();
+  v8::Handle<v8::Array> elms = obj.As<v8::Array>();
+  CHECK_EQ(4, elms->Length());
+  int elmc0 = 0;
+  const char** elmv0 = NULL;
+  CheckProperties(elms->Get(v8::Integer::New(0)), elmc0, elmv0);
+
+  v8::Handle<v8::Value> val = elms->Get(v8::Integer::New(0));
+  v8::Handle<v8::Array> props = val.As<v8::Object>()->GetPropertyNames();
+  CHECK_EQ(0, props->Length());
+  for (uint32_t i = 0; i < props->Length(); i++) {
+    printf("p[%d]\n", i);
+  }
+}
 
 static bool NamedSetAccessBlocker(Local<v8::Object> obj,
                                   Local<Value> name,
@@ -9883,7 +9909,7 @@ class RegExpInterruptTest {
     gc_during_regexp_ = 0;
     regexp_success_ = false;
     gc_success_ = false;
-    GCThread gc_thread(this);
+    GCThread gc_thread(i::Isolate::Current(), this);
     gc_thread.Start();
     v8::Locker::StartPreemption(1);
 
@@ -9902,8 +9928,8 @@ class RegExpInterruptTest {
 
   class GCThread : public i::Thread {
    public:
-    explicit GCThread(RegExpInterruptTest* test)
-        : test_(test) {}
+    explicit GCThread(i::Isolate* isolate, RegExpInterruptTest* test)
+        : Thread(isolate), test_(test) {}
     virtual void Run() {
       test_->CollectGarbage();
     }
@@ -9917,7 +9943,7 @@ class RegExpInterruptTest {
       {
         v8::Locker lock;
         // TODO(lrn): Perhaps create some garbage before collecting.
-        i::Heap::CollectAllGarbage(false);
+        HEAP->CollectAllGarbage(false);
         gc_count_++;
       }
       i::OS::Sleep(1);
@@ -10005,7 +10031,7 @@ class ApplyInterruptTest {
     gc_during_apply_ = 0;
     apply_success_ = false;
     gc_success_ = false;
-    GCThread gc_thread(this);
+    GCThread gc_thread(i::Isolate::Current(), this);
     gc_thread.Start();
     v8::Locker::StartPreemption(1);
 
@@ -10024,8 +10050,8 @@ class ApplyInterruptTest {
 
   class GCThread : public i::Thread {
    public:
-    explicit GCThread(ApplyInterruptTest* test)
-        : test_(test) {}
+    explicit GCThread(i::Isolate* isolate, ApplyInterruptTest* test)
+        : Thread(isolate), test_(test) {}
     virtual void Run() {
       test_->CollectGarbage();
     }
@@ -10038,7 +10064,7 @@ class ApplyInterruptTest {
     while (gc_during_apply_ < kRequiredGCs) {
       {
         v8::Locker lock;
-        i::Heap::CollectAllGarbage(false);
+        HEAP->CollectAllGarbage(false);
         gc_count_++;
       }
       i::OS::Sleep(1);
@@ -10167,17 +10193,17 @@ static void MorphAString(i::String* string,
   CHECK(i::StringShape(string).IsExternal());
   if (string->IsAsciiRepresentation()) {
     // Check old map is not symbol or long.
-    CHECK(string->map() == i::Heap::external_ascii_string_map());
+    CHECK(string->map() == HEAP->external_ascii_string_map());
     // Morph external string to be TwoByte string.
-    string->set_map(i::Heap::external_string_map());
+    string->set_map(HEAP->external_string_map());
     i::ExternalTwoByteString* morphed =
          i::ExternalTwoByteString::cast(string);
     morphed->set_resource(uc16_resource);
   } else {
     // Check old map is not symbol or long.
-    CHECK(string->map() == i::Heap::external_string_map());
+    CHECK(string->map() == HEAP->external_string_map());
     // Morph external string to be ASCII string.
-    string->set_map(i::Heap::external_ascii_string_map());
+    string->set_map(HEAP->external_ascii_string_map());
     i::ExternalAsciiString* morphed =
          i::ExternalAsciiString::cast(string);
     morphed->set_resource(ascii_resource);
@@ -10201,9 +10227,9 @@ THREADED_TEST(MorphCompositeStringTest) {
                                   i::StrLength(c_string)));
 
     Local<String> lhs(v8::Utils::ToLocal(
-        i::Factory::NewExternalStringFromAscii(&ascii_resource)));
+        FACTORY->NewExternalStringFromAscii(&ascii_resource)));
     Local<String> rhs(v8::Utils::ToLocal(
-        i::Factory::NewExternalStringFromAscii(&ascii_resource)));
+        FACTORY->NewExternalStringFromAscii(&ascii_resource)));
 
     env->Global()->Set(v8_str("lhs"), lhs);
     env->Global()->Set(v8_str("rhs"), rhs);
@@ -10288,18 +10314,18 @@ class RegExpStringModificationTest {
 
     // Create the input string for the regexp - the one we are going to change
     // properties of.
-    input_ = i::Factory::NewExternalStringFromAscii(&ascii_resource_);
+    input_ = FACTORY->NewExternalStringFromAscii(&ascii_resource_);
 
     // Inject the input as a global variable.
     i::Handle<i::String> input_name =
-        i::Factory::NewStringFromAscii(i::Vector<const char>("input", 5));
-    i::Top::global_context()->global()->SetProperty(
+        FACTORY->NewStringFromAscii(i::Vector<const char>("input", 5));
+    i::Isolate::Current()->global_context()->global()->SetProperty(
         *input_name,
         *input_,
         NONE,
         i::kNonStrictMode)->ToObjectChecked();
 
-    MorphThread morph_thread(this);
+    MorphThread morph_thread(i::Isolate::Current(), this);
     morph_thread.Start();
     v8::Locker::StartPreemption(1);
     LongRunningRegExp();
@@ -10319,8 +10345,9 @@ class RegExpStringModificationTest {
 
   class MorphThread : public i::Thread {
    public:
-    explicit MorphThread(RegExpStringModificationTest* test)
-        : test_(test) {}
+    explicit MorphThread(i::Isolate* isolate,
+                         RegExpStringModificationTest* test)
+        : Thread(isolate), test_(test) {}
     virtual void Run() {
       test_->MorphString();
     }
@@ -10753,14 +10780,14 @@ THREADED_TEST(PixelArray) {
   uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount));
   i::Handle<i::ExternalPixelArray> pixels =
       i::Handle<i::ExternalPixelArray>::cast(
-          i::Factory::NewExternalArray(kElementCount,
+          FACTORY->NewExternalArray(kElementCount,
                                        v8::kExternalPixelArray,
                                        pixel_data));
-  i::Heap::CollectAllGarbage(false);  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(false);  // Force GC to trigger verification.
   for (int i = 0; i < kElementCount; i++) {
     pixels->set(i, i % 256);
   }
-  i::Heap::CollectAllGarbage(false);  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(false);  // Force GC to trigger verification.
   for (int i = 0; i < kElementCount; i++) {
     CHECK_EQ(i % 256, pixels->get(i));
     CHECK_EQ(i % 256, pixel_data[i]);
@@ -11162,9 +11189,9 @@ THREADED_TEST(PixelArrayWithInterceptor) {
   uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount));
   i::Handle<i::ExternalPixelArray> pixels =
       i::Handle<i::ExternalPixelArray>::cast(
-          i::Factory::NewExternalArray(kElementCount,
-                                       v8::kExternalPixelArray,
-                                       pixel_data));
+          FACTORY->NewExternalArray(kElementCount,
+                                    v8::kExternalPixelArray,
+                                    pixel_data));
   for (int i = 0; i < kElementCount; i++) {
     pixels->set(i, i % 256);
   }
@@ -11225,12 +11252,12 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
       static_cast<ElementType*>(malloc(kElementCount * element_size));
   i::Handle<ExternalArrayClass> array =
       i::Handle<ExternalArrayClass>::cast(
-          i::Factory::NewExternalArray(kElementCount, array_type, array_data));
-  i::Heap::CollectAllGarbage(false);  // Force GC to trigger verification.
+          FACTORY->NewExternalArray(kElementCount, array_type, array_data));
+  HEAP->CollectAllGarbage(false);  // Force GC to trigger verification.
   for (int i = 0; i < kElementCount; i++) {
     array->set(i, static_cast<ElementType>(i));
   }
-  i::Heap::CollectAllGarbage(false);  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(false);  // Force GC to trigger verification.
   for (int i = 0; i < kElementCount; i++) {
     CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array->get(i)));
     CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
@@ -11347,7 +11374,7 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
                       "  }"
                       "}"
                       "sum;");
-  i::Heap::CollectAllGarbage(false);  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(false);  // Force GC to trigger verification.
   CHECK_EQ(28, result->Int32Value());
 
   // Make sure out-of-range loads do not throw.
@@ -11504,7 +11531,7 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
         static_cast<ElementType*>(malloc(kLargeElementCount * element_size));
     i::Handle<ExternalArrayClass> large_array =
         i::Handle<ExternalArrayClass>::cast(
-            i::Factory::NewExternalArray(kLargeElementCount,
+            FACTORY->NewExternalArray(kLargeElementCount,
                                          array_type,
                                          array_data));
     v8::Handle<v8::Object> large_obj = v8::Object::New();
@@ -11940,7 +11967,8 @@ THREADED_TEST(IdleNotification) {
 static uint32_t* stack_limit;
 
 static v8::Handle<Value> GetStackLimitCallback(const v8::Arguments& args) {
-  stack_limit = reinterpret_cast<uint32_t*>(i::StackGuard::real_climit());
+  stack_limit = reinterpret_cast<uint32_t*>(
+      i::Isolate::Current()->stack_guard()->real_climit());
   return v8::Undefined();
 }
 
@@ -12206,7 +12234,7 @@ TEST(Regress528) {
     other_context->Enter();
     CompileRun(source_simple);
     other_context->Exit();
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     if (GetGlobalObjectsCount() == 1) break;
   }
   CHECK_GE(2, gc_count);
@@ -12228,7 +12256,7 @@ TEST(Regress528) {
     other_context->Enter();
     CompileRun(source_eval);
     other_context->Exit();
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     if (GetGlobalObjectsCount() == 1) break;
   }
   CHECK_GE(2, gc_count);
@@ -12255,7 +12283,7 @@ TEST(Regress528) {
     other_context->Enter();
     CompileRun(source_exception);
     other_context->Exit();
-    i::Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
     if (GetGlobalObjectsCount() == 1) break;
   }
   CHECK_GE(2, gc_count);
@@ -12473,26 +12501,26 @@ TEST(GCCallbacks) {
   v8::V8::AddGCEpilogueCallback(EpilogueCallback);
   CHECK_EQ(0, prologue_call_count);
   CHECK_EQ(0, epilogue_call_count);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(1, prologue_call_count);
   CHECK_EQ(1, epilogue_call_count);
   v8::V8::AddGCPrologueCallback(PrologueCallbackSecond);
   v8::V8::AddGCEpilogueCallback(EpilogueCallbackSecond);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(2, prologue_call_count);
   CHECK_EQ(2, epilogue_call_count);
   CHECK_EQ(1, prologue_call_count_second);
   CHECK_EQ(1, epilogue_call_count_second);
   v8::V8::RemoveGCPrologueCallback(PrologueCallback);
   v8::V8::RemoveGCEpilogueCallback(EpilogueCallback);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(2, prologue_call_count);
   CHECK_EQ(2, epilogue_call_count);
   CHECK_EQ(2, prologue_call_count_second);
   CHECK_EQ(2, epilogue_call_count_second);
   v8::V8::RemoveGCPrologueCallback(PrologueCallbackSecond);
   v8::V8::RemoveGCEpilogueCallback(EpilogueCallbackSecond);
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK_EQ(2, prologue_call_count);
   CHECK_EQ(2, epilogue_call_count);
   CHECK_EQ(2, prologue_call_count_second);
@@ -12520,7 +12548,7 @@ THREADED_TEST(AddToJSFunctionResultCache) {
       "    return 'Different results for ' + key1 + ': ' + r1 + ' vs. ' + r1_;"
       "  return 'PASSED';"
       "})()";
-  i::Heap::ClearJSFunctionResultCaches();
+  HEAP->ClearJSFunctionResultCaches();
   ExpectString(code, "PASSED");
 }
 
@@ -12544,7 +12572,7 @@ THREADED_TEST(FillJSFunctionResultCache) {
       "    return 'FAILED: k0CacheSize is too small';"
       "  return 'PASSED';"
       "})()";
-  i::Heap::ClearJSFunctionResultCaches();
+  HEAP->ClearJSFunctionResultCaches();
   ExpectString(code, "PASSED");
 }
 
@@ -12569,7 +12597,7 @@ THREADED_TEST(RoundRobinGetFromCache) {
       "  };"
       "  return 'PASSED';"
       "})()";
-  i::Heap::ClearJSFunctionResultCaches();
+  HEAP->ClearJSFunctionResultCaches();
   ExpectString(code, "PASSED");
 }
 
@@ -12594,7 +12622,7 @@ THREADED_TEST(ReverseGetFromCache) {
       "  };"
       "  return 'PASSED';"
       "})()";
-  i::Heap::ClearJSFunctionResultCaches();
+  HEAP->ClearJSFunctionResultCaches();
   ExpectString(code, "PASSED");
 }
 
@@ -12612,7 +12640,7 @@ THREADED_TEST(TestEviction) {
       "  };"
       "  return 'PASSED';"
       "})()";
-  i::Heap::ClearJSFunctionResultCaches();
+  HEAP->ClearJSFunctionResultCaches();
   ExpectString(code, "PASSED");
 }
 
@@ -12702,7 +12730,7 @@ THREADED_TEST(TwoByteStringInAsciiCons) {
 void FailedAccessCheckCallbackGC(Local<v8::Object> target,
                                  v8::AccessType type,
                                  Local<v8::Value> data) {
-  i::Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 }
 
 
@@ -12783,6 +12811,343 @@ TEST(GCInFailedAccessCheckCallback) {
   v8::V8::SetFailedAccessCheckCallbackFunction(NULL);
 }
 
+TEST(DefaultIsolateGetCurrent) {
+  CHECK(v8::Isolate::GetCurrent() != NULL);
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  CHECK(reinterpret_cast<i::Isolate*>(isolate)->IsDefaultIsolate());
+  printf("*** %s\n", "DefaultIsolateGetCurrent success");
+}
+
+TEST(IsolateNewDispose) {
+  v8::Isolate* current_isolate = v8::Isolate::GetCurrent();
+  v8::Isolate* isolate = v8::Isolate::New();
+  CHECK(isolate != NULL);
+  CHECK(!reinterpret_cast<i::Isolate*>(isolate)->IsDefaultIsolate());
+  CHECK(current_isolate != isolate);
+  CHECK(current_isolate == v8::Isolate::GetCurrent());
+
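+  // Disposing an isolate that has never been entered should succeed without
+  // invoking the fatal error handler.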
+  v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+  last_location = last_message = NULL;
+  isolate->Dispose();
+  CHECK_EQ(last_location, NULL);
+  CHECK_EQ(last_message, NULL);
+}
+
+TEST(IsolateEnterExitDefault) {
+  v8::HandleScope scope;
+  LocalContext context;
+  v8::Isolate* current_isolate = v8::Isolate::GetCurrent();
+  CHECK(current_isolate != NULL);  // Default isolate.
+  ExpectString("'hello'", "hello");
+  current_isolate->Enter();
+  ExpectString("'still working'", "still working");
+  current_isolate->Exit();
+  ExpectString("'still working 2'", "still working 2");
+  current_isolate->Exit();
+  // The default isolate always stays 'current', even after Exit().
+  CHECK_EQ(v8::Isolate::GetCurrent(), current_isolate);
+  // Still working: the default isolate is automatically entered on any
+  // thread that has no isolate and attempts to execute V8 APIs.
+  ExpectString("'still working 3'", "still working 3");
+}
+
+TEST(DisposeDefaultIsolate) {
+  v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+
+  // Run some V8 code to make the default isolate become 'current'.
+  v8::HandleScope scope;
+  LocalContext context;
+  ExpectString("'run some V8'", "run some V8");
+
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  CHECK(reinterpret_cast<i::Isolate*>(isolate)->IsDefaultIsolate());
+  last_location = last_message = NULL;
+  isolate->Dispose();
+  // The default isolate cannot be disposed through the Isolate API.
+  CHECK_NE(last_location, NULL);
+  CHECK_NE(last_message, NULL);
+}
+
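+// Run code in a freshly created isolate, then in the default isolate, and
+// check that globals defined in one are not visible in the other.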
+TEST(RunDefaultAndAnotherIsolate) {
+  v8::HandleScope scope;
+  LocalContext context;
+
+  // Enter new isolate.
+  v8::Isolate* isolate = v8::Isolate::New();
+  CHECK(isolate);
+  isolate->Enter();
+  { // Need this block because the subsequent Exit() deallocates the heap,
+    // so all scope objects must be destroyed before that happens.
+    v8::HandleScope scope_new;
+    LocalContext context_new;
+
+    // Run something in new isolate.
+    CompileRun("var foo = 153;");
+    ExpectTrue("function f() { return foo == 153; }; f()");
+  }
+  isolate->Exit();
+
+  // This runs automatically in the default isolate.
+  // Variables from the other isolate should not be available here.
+  ExpectTrue("function f() {"
+             "  try {"
+             "    foo;"
+             "    return false;"
+             "  } catch(e) {"
+             "    return true;"
+             "  }"
+             "};"
+             "var bar = 371;"
+             "f()");
+
+  v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+  last_location = last_message = NULL;
+  isolate->Dispose();
+  CHECK_EQ(last_location, NULL);
+  CHECK_EQ(last_message, NULL);
+
+  // Check that default isolate still runs.
+  ExpectTrue("function f() { return bar == 371; }; f()");
+}
+
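+// Disposing an isolate that is still entered should fail with a fatal error.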
+TEST(DisposeIsolateWhenInUse) {
+  v8::Isolate* isolate = v8::Isolate::New();
+  CHECK(isolate);
+  isolate->Enter();
+  v8::HandleScope scope;
+  LocalContext context;
+  // Run something in this isolate.
+  ExpectTrue("true");
+  v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+  last_location = last_message = NULL;
+  // Still entered, should fail.
+  isolate->Dispose();
+  CHECK_NE(last_location, NULL);
+  CHECK_NE(last_message, NULL);
+}
+
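+// Interleave execution in two freshly created isolates and the default
+// isolate on a single thread, checking that contexts and globals stay
+// separate.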
+TEST(RunTwoIsolatesOnSingleThread) {
+  // Run isolate 1.
+  v8::Isolate* isolate1 = v8::Isolate::New();
+  isolate1->Enter();
+  v8::Persistent<v8::Context> context1 = v8::Context::New();
+
+  {
+    v8::Context::Scope cscope(context1);
+    v8::HandleScope scope;
+    // Run something in new isolate.
+    CompileRun("var foo = 'isolate 1';");
+    ExpectString("function f() { return foo; }; f()", "isolate 1");
+  }
+
+  // Run isolate 2.
+  v8::Isolate* isolate2 = v8::Isolate::New();
+  v8::Persistent<v8::Context> context2;
+
+  {
+    v8::Isolate::Scope iscope(isolate2);
+    context2 = v8::Context::New();
+    v8::Context::Scope cscope(context2);
+    v8::HandleScope scope;
+
+    // Run something in new isolate.
+    CompileRun("var foo = 'isolate 2';");
+    ExpectString("function f() { return foo; }; f()", "isolate 2");
+  }
+
+  {
+    v8::Context::Scope cscope(context1);
+    v8::HandleScope scope;
+    // Now again in isolate 1
+    ExpectString("function f() { return foo; }; f()", "isolate 1");
+  }
+
+  isolate1->Exit();
+
+  // Run some stuff in default isolate.
+  v8::Persistent<v8::Context> context_default = v8::Context::New();
+
+  {
+    v8::Context::Scope cscope(context_default);
+    v8::HandleScope scope;
+    // Variables from the other isolates should not be available here; verify
+    // that accessing them throws an exception.
+    ExpectTrue("function f() {"
+               "  try {"
+               "    foo;"
+               "    return false;"
+               "  } catch(e) {"
+               "    return true;"
+               "  }"
+               "};"
+               "var isDefaultIsolate = true;"
+               "f()");
+  }
+
+  isolate1->Enter();
+
+  {
+    v8::Isolate::Scope iscope(isolate2);
+    v8::Context::Scope cscope(context2);
+    v8::HandleScope scope;
+    ExpectString("function f() { return foo; }; f()", "isolate 2");
+  }
+
+  {
+    v8::Context::Scope cscope(context1);
+    v8::HandleScope scope;
+    ExpectString("function f() { return foo; }; f()", "isolate 1");
+  }
+
+  {
+    v8::Isolate::Scope iscope(isolate2);
+    context2.Dispose();
+  }
+
+  context1.Dispose();
+  isolate1->Exit();
+
+  v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+  last_location = last_message = NULL;
+
+  isolate1->Dispose();
+  CHECK_EQ(last_location, NULL);
+  CHECK_EQ(last_message, NULL);
+
+  isolate2->Dispose();
+  CHECK_EQ(last_location, NULL);
+  CHECK_EQ(last_message, NULL);
+
+  // Check that default isolate still runs.
+  {
+    v8::Context::Scope cscope(context_default);
+    v8::HandleScope scope;
+    ExpectTrue("function f() { return isDefaultIsolate; }; f()");
+  }
+}
+
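+// Compiles and runs a recursive fib() function in the given isolate and
+// returns fib(limit).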
+static int CalcFibonacci(v8::Isolate* isolate, int limit) {
+  v8::Isolate::Scope isolate_scope(isolate);
+  v8::HandleScope scope;
+  LocalContext context;
+  i::ScopedVector<char> code(1024);
+  i::OS::SNPrintF(code, "function fib(n) {"
+                        "  if (n <= 2) return 1;"
+                        "  return fib(n-1) + fib(n-2);"
+                        "}"
+                        "fib(%d)", limit);
+  Local<Value> value = CompileRun(code.start());
+  CHECK(value->IsNumber());
+  return static_cast<int>(value->NumberValue());
+}
+
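+// Worker thread that computes a Fibonacci number inside the given isolate.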
+class IsolateThread : public v8::internal::Thread {
+ public:
+  explicit IsolateThread(v8::Isolate* isolate, int fib_limit)
+    : Thread(NULL),
+      isolate_(isolate),
+      fib_limit_(fib_limit),
+      result_(0) { }
+
+  void Run() {
+    result_ = CalcFibonacci(isolate_, fib_limit_);
+  }
+
+  int result() { return result_; }
+
+ private:
+  v8::Isolate* isolate_;
+  int fib_limit_;
+  int result_;
+};
+
+TEST(MultipleIsolatesOnIndividualThreads) {
+  v8::Isolate* isolate1 = v8::Isolate::New();
+  v8::Isolate* isolate2 = v8::Isolate::New();
+
+  IsolateThread thread1(isolate1, 21);
+  IsolateThread thread2(isolate2, 12);
+
+  // Compute some Fibonacci numbers on 3 threads in 3 isolates.
+  thread1.Start();
+  thread2.Start();
+
+  int result1 = CalcFibonacci(v8::Isolate::GetCurrent(), 21);
+  int result2 = CalcFibonacci(v8::Isolate::GetCurrent(), 12);
+
+  thread1.Join();
+  thread2.Join();
+
+  // Compare results. The expected Fibonacci numbers for 12 and 21 are taken
+  // from http://en.wikipedia.org/wiki/Fibonacci_number
+  CHECK_EQ(result1, 10946);
+  CHECK_EQ(result2, 144);
+  CHECK_EQ(result1, thread1.result());
+  CHECK_EQ(result2, thread2.result());
+
+  isolate1->Dispose();
+  isolate2->Dispose();
+}
+
+
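+// Thread that exercises V8 API calls which touch the default isolate from
+// a secondary thread.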
+class InitDefaultIsolateThread : public v8::internal::Thread {
+ public:
+  enum TestCase { IgnoreOOM, SetResourceConstraints, SetFatalHandler };
+
+  explicit InitDefaultIsolateThread(TestCase testCase)
+    : Thread(NULL),
+      testCase_(testCase),
+      result_(false) { }
+
+  void Run() {
+    switch (testCase_) {
+    case IgnoreOOM:
+      v8::V8::IgnoreOutOfMemoryException();
+      break;
+
+    case SetResourceConstraints: {
+      static const int K = 1024;
+      v8::ResourceConstraints constraints;
+      constraints.set_max_young_space_size(256 * K);
+      constraints.set_max_old_space_size(4 * K * K);
+      v8::SetResourceConstraints(&constraints);
+      break;
+    }
+
+    case SetFatalHandler:
+      v8::V8::SetFatalErrorHandler(NULL);
+      break;
+    }
+    result_ = true;
+  }
+
+  bool result() { return result_; }
+
+ private:
+  TestCase testCase_;
+  bool result_;
+};
+
+
+static void InitializeTestHelper(InitDefaultIsolateThread::TestCase testCase) {
+  InitDefaultIsolateThread thread(testCase);
+  thread.Start();
+  thread.Join();
+  CHECK_EQ(thread.result(), true);
+}
+
+TEST(InitializeDefaultIsolateOnSecondaryThread1) {
+  InitializeTestHelper(InitDefaultIsolateThread::IgnoreOOM);
+}
+
+TEST(InitializeDefaultIsolateOnSecondaryThread2) {
+  InitializeTestHelper(InitDefaultIsolateThread::SetResourceConstraints);
+}
+
+TEST(InitializeDefaultIsolateOnSecondaryThread3) {
+  InitializeTestHelper(InitDefaultIsolateThread::SetFatalHandler);
+}
+
 
 TEST(StringCheckMultipleContexts) {
   const char* code =
@@ -12886,7 +13251,7 @@ TEST(DontDeleteCellLoadIC) {
                  "})()",
                  "ReferenceError: cell is not defined");
     CompileRun("cell = \"new_second\";");
-    i::Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     ExpectString("readCell()", "new_second");
     ExpectString("readCell()", "new_second");
   }
index 29f5d10b17118dc1d924a22d16928602564a025a..a91886e7e502cbcf82f287764d9e8c55c5b3a054 100644 (file)
@@ -65,10 +65,10 @@ TEST(0) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef DEBUG
   Code::cast(code)->Print();
@@ -102,10 +102,10 @@ TEST(1) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef DEBUG
   Code::cast(code)->Print();
@@ -139,7 +139,7 @@ TEST(2) {
 
   // some relocated stuff here, not executed
   __ RecordComment("dead code, just testing relocations");
-  __ mov(r0, Operand(Factory::true_value()));
+  __ mov(r0, Operand(FACTORY->true_value()));
   __ RecordComment("dead code, just testing immediate operands");
   __ mov(r0, Operand(-1));
   __ mov(r0, Operand(0xFF000000));
@@ -148,10 +148,10 @@ TEST(2) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef DEBUG
   Code::cast(code)->Print();
@@ -196,10 +196,10 @@ TEST(3) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef DEBUG
   Code::cast(code)->Print();
@@ -245,7 +245,7 @@ TEST(4) {
   Label L, C;
 
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
 
     __ mov(ip, Operand(sp));
@@ -311,10 +311,10 @@ TEST(4) {
 
     CodeDesc desc;
     assm.GetCode(&desc);
-    Object* code = Heap::CreateCode(
+    Object* code = HEAP->CreateCode(
         desc,
         Code::ComputeFlags(Code::STUB),
-        Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+        Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
     CHECK(code->IsCode());
 #ifdef DEBUG
     Code::cast(code)->Print();
@@ -359,7 +359,7 @@ TEST(5) {
 
   Assembler assm(NULL, 0);
 
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     CpuFeatures::Scope scope(ARMv7);
     // On entry, r0 = 0xAAAAAAAA = 0b10..10101010.
     __ ubfx(r0, r0, 1, 12);  // 0b00..010101010101 = 0x555
@@ -371,10 +371,10 @@ TEST(5) {
 
     CodeDesc desc;
     assm.GetCode(&desc);
-    Object* code = Heap::CreateCode(
+    Object* code = HEAP->CreateCode(
         desc,
         Code::ComputeFlags(Code::STUB),
-        Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+        Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
     CHECK(code->IsCode());
 #ifdef DEBUG
     Code::cast(code)->Print();
@@ -395,7 +395,7 @@ TEST(6) {
 
   Assembler assm(NULL, 0);
 
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     CpuFeatures::Scope scope(ARMv7);
     __ usat(r1, 8, Operand(r0));           // Sat 0xFFFF to 0-255 = 0xFF.
     __ usat(r2, 12, Operand(r0, ASR, 9));  // Sat (0xFFFF>>9) to 0-4095 = 0x7F.
@@ -406,10 +406,10 @@ TEST(6) {
 
     CodeDesc desc;
     assm.GetCode(&desc);
-    Object* code = Heap::CreateCode(
+    Object* code = HEAP->CreateCode(
         desc,
         Code::ComputeFlags(Code::STUB),
-        Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+        Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
     CHECK(code->IsCode());
 #ifdef DEBUG
     Code::cast(code)->Print();
@@ -438,7 +438,7 @@ static void TestRoundingMode(VCVTTypes types,
 
   Assembler assm(NULL, 0);
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
 
     Label wrong_exception;
@@ -482,10 +482,10 @@ static void TestRoundingMode(VCVTTypes types,
 
     CodeDesc desc;
     assm.GetCode(&desc);
-    Object* code = Heap::CreateCode(
+    Object* code = HEAP->CreateCode(
         desc,
         Code::ComputeFlags(Code::STUB),
-        Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+        Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
     CHECK(code->IsCode());
 #ifdef DEBUG
     Code::cast(code)->Print();
index 14692ff1eb4bfd689cdd3286586d69c5adb1d788..694bd57bd163654c1d44eb9a370e98cffaad8be9 100644 (file)
@@ -69,10 +69,10 @@ TEST(AssemblerIa320) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(desc,
-                                  Code::ComputeFlags(Code::STUB),
-                                  Handle<Object>(Heap::undefined_value()))->
-      ToObjectChecked();
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
@@ -107,10 +107,10 @@ TEST(AssemblerIa321) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(desc,
-                                  Code::ComputeFlags(Code::STUB),
-                                  Handle<Object>(Heap::undefined_value()))->
-      ToObjectChecked();
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
@@ -144,16 +144,15 @@ TEST(AssemblerIa322) {
   __ ret(0);
 
   // some relocated stuff here, not executed
-  __ mov(eax, Factory::true_value());
+  __ mov(eax, FACTORY->true_value());
   __ jmp(NULL, RelocInfo::RUNTIME_ENTRY);
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
-
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
@@ -168,7 +167,7 @@ TEST(AssemblerIa322) {
 typedef int (*F3)(float x);
 
 TEST(AssemblerIa323) {
-  if (!CpuFeatures::IsSupported(SSE2)) return;
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) return;
 
   InitializeVM();
   v8::HandleScope scope;
@@ -176,7 +175,7 @@ TEST(AssemblerIa323) {
   v8::internal::byte buffer[256];
   Assembler assm(buffer, sizeof buffer);
 
-  CHECK(CpuFeatures::IsSupported(SSE2));
+  CHECK(Isolate::Current()->cpu_features()->IsSupported(SSE2));
   { CpuFeatures::Scope fscope(SSE2);
     __ cvttss2si(eax, Operand(esp, 4));
     __ ret(0);
@@ -184,10 +183,10 @@ TEST(AssemblerIa323) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
   // don't print the code - our disassembler can't handle cvttss2si
   // instead print bytes
   Disassembler::Dump(stdout,
@@ -203,7 +202,7 @@ TEST(AssemblerIa323) {
 typedef int (*F4)(double x);
 
 TEST(AssemblerIa324) {
-  if (!CpuFeatures::IsSupported(SSE2)) return;
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) return;
 
   InitializeVM();
   v8::HandleScope scope;
@@ -211,17 +210,17 @@ TEST(AssemblerIa324) {
   v8::internal::byte buffer[256];
   Assembler assm(buffer, sizeof buffer);
 
-  CHECK(CpuFeatures::IsSupported(SSE2));
+  CHECK(Isolate::Current()->cpu_features()->IsSupported(SSE2));
   CpuFeatures::Scope fscope(SSE2);
   __ cvttsd2si(eax, Operand(esp, 4));
   __ ret(0);
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
   // don't print the code - our disassembler can't handle cvttsd2si
   // instead print bytes
   Disassembler::Dump(stdout,
@@ -247,10 +246,10 @@ TEST(AssemblerIa325) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
   F0 f = FUNCTION_CAST<F0>(code->entry());
   int res = f();
   CHECK_EQ(42, res);
@@ -260,11 +259,11 @@ TEST(AssemblerIa325) {
 typedef double (*F5)(double x, double y);
 
 TEST(AssemblerIa326) {
-  if (!CpuFeatures::IsSupported(SSE2)) return;
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) return;
 
   InitializeVM();
   v8::HandleScope scope;
-  CHECK(CpuFeatures::IsSupported(SSE2));
+  CHECK(Isolate::Current()->cpu_features()->IsSupported(SSE2));
   CpuFeatures::Scope fscope(SSE2);
   v8::internal::byte buffer[256];
   Assembler assm(buffer, sizeof buffer);
@@ -284,10 +283,10 @@ TEST(AssemblerIa326) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
 #ifdef DEBUG
   ::printf("\n---\n");
   // don't print the code - our disassembler can't handle SSE instructions
@@ -306,11 +305,11 @@ TEST(AssemblerIa326) {
 typedef double (*F6)(int x);
 
 TEST(AssemblerIa328) {
-  if (!CpuFeatures::IsSupported(SSE2)) return;
+  if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) return;
 
   InitializeVM();
   v8::HandleScope scope;
-  CHECK(CpuFeatures::IsSupported(SSE2));
+  CHECK(Isolate::Current()->cpu_features()->IsSupported(SSE2));
   CpuFeatures::Scope fscope(SSE2);
   v8::internal::byte buffer[256];
   Assembler assm(buffer, sizeof buffer);
@@ -324,10 +323,10 @@ TEST(AssemblerIa328) {
   __ ret(0);
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
@@ -379,10 +378,10 @@ TEST(AssemblerIa329) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Code* code = Code::cast(Heap::CreateCode(
+  Code* code = Code::cast(HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked());
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
index 1554bda2badf62edbd00fcdad39bb2f9277f3bcf..7e2115a10bd8be4dab6eeabbde128c48e73dfd7a 100644 (file)
@@ -86,6 +86,7 @@ static const v8::internal::Register arg2 = rsi;
 
 
 TEST(AssemblerX64ReturnOperation) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -107,6 +108,7 @@ TEST(AssemblerX64ReturnOperation) {
 }
 
 TEST(AssemblerX64StackOperations) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -138,6 +140,7 @@ TEST(AssemblerX64StackOperations) {
 }
 
 TEST(AssemblerX64ArithmeticOperations) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -159,6 +162,7 @@ TEST(AssemblerX64ArithmeticOperations) {
 }
 
 TEST(AssemblerX64ImulOperation) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -186,6 +190,7 @@ TEST(AssemblerX64ImulOperation) {
 }
 
 TEST(AssemblerX64MemoryOperands) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -219,6 +224,7 @@ TEST(AssemblerX64MemoryOperands) {
 }
 
 TEST(AssemblerX64ControlFlow) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -247,6 +253,7 @@ TEST(AssemblerX64ControlFlow) {
 }
 
 TEST(AssemblerX64LoopImmediates) {
+  OS::Setup();
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
index 9c292bcfca98e43b64ba734244cf474ff93f5b1f..6183357b0382928a90c3144598e8d3730289bfee 100644 (file)
@@ -35,6 +35,7 @@
 using namespace v8::internal;
 
 TEST(List) {
+  v8::internal::V8::Initialize(NULL);
   List<AstNode*>* list = new List<AstNode*>(0);
   CHECK_EQ(0, list->length());
 
index ce9a42e81fb27535caafac60901187fb5f835f32..a0d1719227ca92f85d3c53a40b648a2136ed0f09 100644 (file)
@@ -84,11 +84,13 @@ class ProducerThread: public i::Thread {
  public:
   typedef SamplingCircularQueue::Cell Record;
 
-  ProducerThread(SamplingCircularQueue* scq,
+  ProducerThread(i::Isolate* isolate,
+                 SamplingCircularQueue* scq,
                  int records_per_chunk,
                  Record value,
                  i::Semaphore* finished)
-      : scq_(scq),
+      : Thread(isolate),
+        scq_(scq),
         records_per_chunk_(records_per_chunk),
         value_(value),
         finished_(finished) { }
@@ -131,9 +133,10 @@ TEST(SamplingCircularQueueMultithreading) {
   // Check that we are using non-reserved values.
   CHECK_NE(SamplingCircularQueue::kClear, 1);
   CHECK_NE(SamplingCircularQueue::kEnd, 1);
-  ProducerThread producer1(&scq, kRecordsPerChunk, 1, semaphore);
-  ProducerThread producer2(&scq, kRecordsPerChunk, 10, semaphore);
-  ProducerThread producer3(&scq, kRecordsPerChunk, 20, semaphore);
+  i::Isolate* isolate = i::Isolate::Current();
+  ProducerThread producer1(isolate, &scq, kRecordsPerChunk, 1, semaphore);
+  ProducerThread producer2(isolate, &scq, kRecordsPerChunk, 10, semaphore);
+  ProducerThread producer3(isolate, &scq, kRecordsPerChunk, 20, semaphore);
 
   CHECK_EQ(NULL, scq.StartDequeue());
   producer1.Start();
index 9f21b78dd6dc570b42903a4f88f3e7e42cbff050..d3dd9c6ff0e97b715604bedfc33026138ea46101 100644 (file)
@@ -34,7 +34,6 @@
 #include "execution.h"
 #include "factory.h"
 #include "platform.h"
-#include "top.h"
 #include "cctest.h"
 
 using namespace v8::internal;
@@ -99,21 +98,21 @@ static void InitializeVM() {
 
 
 static MaybeObject* GetGlobalProperty(const char* name) {
-  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
-  return Top::context()->global()->GetProperty(*symbol);
+  Handle<String> symbol = FACTORY->LookupAsciiSymbol(name);
+  return Isolate::Current()->context()->global()->GetProperty(*symbol);
 }
 
 
 static void SetGlobalProperty(const char* name, Object* value) {
   Handle<Object> object(value);
-  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
-  Handle<JSObject> global(Top::context()->global());
+  Handle<String> symbol = FACTORY->LookupAsciiSymbol(name);
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   SetProperty(global, symbol, object, NONE, kNonStrictMode);
 }
 
 
 static Handle<JSFunction> Compile(const char* source) {
-  Handle<String> source_code(Factory::NewStringFromUtf8(CStrVector(source)));
+  Handle<String> source_code(FACTORY->NewStringFromUtf8(CStrVector(source)));
   Handle<SharedFunctionInfo> shared_function =
       Compiler::Compile(source_code,
                         Handle<String>(),
@@ -123,8 +122,8 @@ static Handle<JSFunction> Compile(const char* source) {
                         NULL,
                         Handle<String>::null(),
                         NOT_NATIVES_CODE);
-  return Factory::NewFunctionFromSharedFunctionInfo(shared_function,
-                                                    Top::global_context());
+  return FACTORY->NewFunctionFromSharedFunctionInfo(shared_function,
+      Isolate::Current()->global_context());
 }
 
 
@@ -137,7 +136,7 @@ static double Inc(int x) {
   if (fun.is_null()) return -1;
 
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
   return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -158,7 +157,7 @@ static double Add(int x, int y) {
   SetGlobalProperty("x", Smi::FromInt(x));
   SetGlobalProperty("y", Smi::FromInt(y));
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
   return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -178,7 +177,7 @@ static double Abs(int x) {
 
   SetGlobalProperty("x", Smi::FromInt(x));
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
   return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -199,7 +198,7 @@ static double Sum(int n) {
 
   SetGlobalProperty("n", Smi::FromInt(n));
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
   return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -220,7 +219,7 @@ TEST(Print) {
   Handle<JSFunction> fun = Compile(source);
   if (fun.is_null()) return;
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
 }
@@ -253,7 +252,7 @@ TEST(Stuff) {
   Handle<JSFunction> fun = Compile(source);
   CHECK(!fun.is_null());
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
   CHECK_EQ(511.0, GetGlobalProperty("r")->ToObjectChecked()->Number());
@@ -268,11 +267,12 @@ TEST(UncaughtThrow) {
   Handle<JSFunction> fun = Compile(source);
   CHECK(!fun.is_null());
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Handle<Object> result =
       Execution::Call(fun, global, 0, NULL, &has_pending_exception);
   CHECK(has_pending_exception);
-  CHECK_EQ(42.0, Top::pending_exception()->ToObjectChecked()->Number());
+  CHECK_EQ(42.0, Isolate::Current()->pending_exception()->
+           ToObjectChecked()->Number());
 }
 
 
@@ -293,18 +293,18 @@ TEST(C2JSFrames) {
 
   // Run the generated code to populate the global object with 'foo'.
   bool has_pending_exception;
-  Handle<JSObject> global(Top::context()->global());
+  Handle<JSObject> global(Isolate::Current()->context()->global());
   Execution::Call(fun0, global, 0, NULL, &has_pending_exception);
   CHECK(!has_pending_exception);
 
-  Object* foo_symbol = Factory::LookupAsciiSymbol("foo")->ToObjectChecked();
-  MaybeObject* fun1_object =
-      Top::context()->global()->GetProperty(String::cast(foo_symbol));
+  Object* foo_symbol = FACTORY->LookupAsciiSymbol("foo")->ToObjectChecked();
+  MaybeObject* fun1_object = Isolate::Current()->context()->global()->
+      GetProperty(String::cast(foo_symbol));
   Handle<Object> fun1(fun1_object->ToObjectChecked());
   CHECK(fun1->IsJSFunction());
 
   Object** argv[1] = {
-    Handle<Object>::cast(Factory::LookupAsciiSymbol("hello")).location()
+    Handle<Object>::cast(FACTORY->LookupAsciiSymbol("hello")).location()
   };
   Execution::Call(Handle<JSFunction>::cast(fun1), global, 1, argv,
                   &has_pending_exception);
@@ -318,8 +318,8 @@ TEST(Regression236) {
   InitializeVM();
   v8::HandleScope scope;
 
-  Handle<Script> script = Factory::NewScript(Factory::empty_string());
-  script->set_source(Heap::undefined_value());
+  Handle<Script> script = FACTORY->NewScript(FACTORY->empty_string());
+  script->set_source(HEAP->undefined_value());
   CHECK_EQ(-1, GetScriptLineNumber(script, 0));
   CHECK_EQ(-1, GetScriptLineNumber(script, 100));
   CHECK_EQ(-1, GetScriptLineNumber(script, -1));
index 7f06bc34d861c75d28e5db992ed30f3141d3fb95..507f23682e574a4263edb247aa62847db47e62ab 100644 (file)
@@ -23,7 +23,7 @@ using i::TokenEnumerator;
 TEST(StartStop) {
   CpuProfilesCollection profiles;
   ProfileGenerator generator(&profiles);
-  ProfilerEventsProcessor processor(&generator);
+  ProfilerEventsProcessor processor(i::Isolate::Current(), &generator);
   processor.Start();
   while (!processor.running()) {
     i::Thread::YieldCPU();
@@ -87,7 +87,7 @@ TEST(CodeEvents) {
   CpuProfilesCollection profiles;
   profiles.StartProfiling("", 1);
   ProfileGenerator generator(&profiles);
-  ProfilerEventsProcessor processor(&generator);
+  ProfilerEventsProcessor processor(i::Isolate::Current(), &generator);
   processor.Start();
   while (!processor.running()) {
     i::Thread::YieldCPU();
@@ -96,11 +96,11 @@ TEST(CodeEvents) {
   // Enqueue code creation events.
   i::HandleScope scope;
   const char* aaa_str = "aaa";
-  i::Handle<i::String> aaa_name = i::Factory::NewStringFromAscii(
+  i::Handle<i::String> aaa_name = FACTORY->NewStringFromAscii(
       i::Vector<const char>(aaa_str, i::StrLength(aaa_str)));
   processor.CodeCreateEvent(i::Logger::FUNCTION_TAG,
                             *aaa_name,
-                            i::Heap::empty_string(),
+                            HEAP->empty_string(),
                             0,
                             ToAddress(0x1000),
                             0x100,
@@ -151,7 +151,7 @@ TEST(TickEvents) {
   CpuProfilesCollection profiles;
   profiles.StartProfiling("", 1);
   ProfileGenerator generator(&profiles);
-  ProfilerEventsProcessor processor(&generator);
+  ProfilerEventsProcessor processor(i::Isolate::Current(), &generator);
   processor.Start();
   while (!processor.running()) {
     i::Thread::YieldCPU();
index 5894de2a82cf793ba10c6988b1c2a42c90365d42..feae0b06cb174d332c029201a0daf617e09417c1 100644 (file)
@@ -35,6 +35,7 @@
 using namespace v8::internal;
 
 TEST(BitVector) {
+  v8::internal::V8::Initialize(NULL);
   ZoneScope zone(DELETE_ON_EXIT);
   {
     BitVector v(15);
index 7245e54be734e4bb59a4b577267231d451b7db9a..89c33672e7f8158dc39343f8c24ada7a82f4b657 100644 (file)
@@ -143,17 +143,18 @@ class DebugLocalContext {
   inline v8::Context* operator*() { return *context_; }
   inline bool IsReady() { return !context_.IsEmpty(); }
   void ExposeDebug() {
+    v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
     // Expose the debug context global object in the global object for testing.
-    Debug::Load();
-    Debug::debug_context()->set_security_token(
+    debug->Load();
+    debug->debug_context()->set_security_token(
         v8::Utils::OpenHandle(*context_)->security_token());
 
     Handle<JSGlobalProxy> global(Handle<JSGlobalProxy>::cast(
         v8::Utils::OpenHandle(*context_->Global())));
     Handle<v8::internal::String> debug_string =
-        v8::internal::Factory::LookupAsciiSymbol("debug");
+        FACTORY->LookupAsciiSymbol("debug");
     SetProperty(global, debug_string,
-        Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM,
+        Handle<Object>(debug->debug_context()->global_proxy()), DONT_ENUM,
         ::v8::internal::kNonStrictMode);
   }
  private:
@@ -196,7 +197,8 @@ static bool HasDebugInfo(v8::Handle<v8::Function> fun) {
 static int SetBreakPoint(Handle<v8::internal::JSFunction> fun, int position) {
   static int break_point = 0;
   Handle<v8::internal::SharedFunctionInfo> shared(fun->shared());
-  Debug::SetBreakPoint(
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+  debug->SetBreakPoint(
       shared,
       Handle<Object>(v8::internal::Smi::FromInt(++break_point)),
       &position);
@@ -279,7 +281,8 @@ static int SetScriptBreakPointByNameFromJS(const char* script_name,
 
 // Clear a break point.
 static void ClearBreakPoint(int break_point) {
-  Debug::ClearBreakPoint(
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+  debug->ClearBreakPoint(
       Handle<Object>(v8::internal::Smi::FromInt(break_point)));
 }
 
@@ -339,8 +342,9 @@ static void ChangeScriptBreakPointIgnoreCountFromJS(int break_point_number,
 
 // Change break on exception.
 static void ChangeBreakOnException(bool caught, bool uncaught) {
-  Debug::ChangeBreakOnException(v8::internal::BreakException, caught);
-  Debug::ChangeBreakOnException(v8::internal::BreakUncaughtException, uncaught);
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+  debug->ChangeBreakOnException(v8::internal::BreakException, caught);
+  debug->ChangeBreakOnException(v8::internal::BreakUncaughtException, uncaught);
 }
 
 
@@ -365,7 +369,8 @@ static void ChangeBreakOnExceptionFromJS(bool caught, bool uncaught) {
 
 // Prepare to step to next break location.
 static void PrepareStep(StepAction step_action) {
-  Debug::PrepareStep(step_action, 1);
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+  debug->PrepareStep(step_action, 1);
 }
 
 
@@ -376,7 +381,9 @@ namespace internal {
 
 // Collect the currently debugged functions.
 Handle<FixedArray> GetDebuggedFunctions() {
-  v8::internal::DebugInfoListNode* node = Debug::debug_info_list_;
+  Debug* debug = Isolate::Current()->debug();
+
+  v8::internal::DebugInfoListNode* node = debug->debug_info_list_;
 
   // Find the number of debugged functions.
   int count = 0;
@@ -387,7 +394,7 @@ Handle<FixedArray> GetDebuggedFunctions() {
 
   // Allocate array for the debugged functions
   Handle<FixedArray> debugged_functions =
-      v8::internal::Factory::NewFixedArray(count);
+      FACTORY->NewFixedArray(count);
 
   // Run through the debug info objects and collect all functions.
   count = 0;
@@ -402,7 +409,9 @@ Handle<FixedArray> GetDebuggedFunctions() {
 
 static Handle<Code> ComputeCallDebugBreak(int argc) {
   CALL_HEAP_FUNCTION(
-      v8::internal::StubCache::ComputeCallDebugBreak(argc, Code::CALL_IC),
+      v8::internal::Isolate::Current(),
+      v8::internal::Isolate::Current()->stub_cache()->ComputeCallDebugBreak(
+          argc, Code::CALL_IC),
       Code);
 }
 
@@ -411,12 +420,12 @@ static Handle<Code> ComputeCallDebugBreak(int argc) {
 void CheckDebuggerUnloaded(bool check_functions) {
   // Check that the debugger context is cleared and that there is no debug
   // information stored for the debugger.
-  CHECK(Debug::debug_context().is_null());
-  CHECK_EQ(NULL, Debug::debug_info_list_);
+  CHECK(Isolate::Current()->debug()->debug_context().is_null());
+  CHECK_EQ(NULL, Isolate::Current()->debug()->debug_info_list_);
 
   // Collect garbage to ensure weak handles are cleared.
-  Heap::CollectAllGarbage(false);
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   // Iterate the heap and check that there are no debugger related objects left.
   HeapIterator iterator;
@@ -444,8 +453,8 @@ void CheckDebuggerUnloaded(bool check_functions) {
 
 
 void ForceUnloadDebugger() {
-  Debugger::never_unload_debugger_ = false;
-  Debugger::UnloadDebugger();
+  Isolate::Current()->debugger()->never_unload_debugger_ = false;
+  Isolate::Current()->debugger()->UnloadDebugger();
 }
 
 
@@ -480,6 +489,8 @@ void CheckDebugBreakFunction(DebugLocalContext* env,
                              const char* source, const char* name,
                              int position, v8::internal::RelocInfo::Mode mode,
                              Code* debug_break) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+
   // Create function and set the break point.
   Handle<v8::internal::JSFunction> fun = v8::Utils::OpenHandle(
       *CompileFunction(env, source, name));
@@ -501,8 +512,8 @@ void CheckDebugBreakFunction(DebugLocalContext* env,
   // Clear the break point and check that the debug break function is no longer
   // there
   ClearBreakPoint(bp);
-  CHECK(!Debug::HasDebugInfo(shared));
-  CHECK(Debug::EnsureDebugInfo(shared));
+  CHECK(!debug->HasDebugInfo(shared));
+  CHECK(debug->EnsureDebugInfo(shared));
   TestBreakLocationIterator it2(Debug::GetDebugInfo(shared));
   it2.FindBreakLocationFromPosition(position);
   CHECK_EQ(mode, it2.it()->rinfo()->rmode());
@@ -634,8 +645,9 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
                                          v8::Handle<v8::Object> exec_state,
                                          v8::Handle<v8::Object> event_data,
                                          v8::Handle<v8::Value> data) {
+  Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   // Count the number of breaks.
   if (event == v8::Break) {
@@ -738,8 +750,10 @@ static void DebugEventCounter(v8::DebugEvent event,
                               v8::Handle<v8::Object> exec_state,
                               v8::Handle<v8::Object> event_data,
                               v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   // Count the number of breaks.
   if (event == v8::Break) {
@@ -796,8 +810,9 @@ static void DebugEventEvaluate(v8::DebugEvent event,
                                v8::Handle<v8::Object> exec_state,
                                v8::Handle<v8::Object> event_data,
                                v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break) {
     for (int i = 0; checks[i].expr != NULL; i++) {
@@ -822,8 +837,9 @@ static void DebugEventRemoveBreakPoint(v8::DebugEvent event,
                                        v8::Handle<v8::Object> exec_state,
                                        v8::Handle<v8::Object> event_data,
                                        v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break) {
     break_point_hit_count++;
@@ -840,8 +856,9 @@ static void DebugEventStep(v8::DebugEvent event,
                            v8::Handle<v8::Object> exec_state,
                            v8::Handle<v8::Object> event_data,
                            v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break) {
     break_point_hit_count++;
@@ -866,8 +883,9 @@ static void DebugEventStepSequence(v8::DebugEvent event,
                                    v8::Handle<v8::Object> exec_state,
                                    v8::Handle<v8::Object> event_data,
                                    v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break || event == v8::Exception) {
     // Check that the current function is the expected.
@@ -896,8 +914,9 @@ static void DebugEventBreakPointCollectGarbage(
     v8::Handle<v8::Object> exec_state,
     v8::Handle<v8::Object> event_data,
     v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   // Perform a garbage collection when break point is hit and continue. Based
   // on the number of break points hit either scavenge or mark compact
@@ -906,10 +925,10 @@ static void DebugEventBreakPointCollectGarbage(
     break_point_hit_count++;
     if (break_point_hit_count % 2 == 0) {
       // Scavenge.
-      Heap::CollectGarbage(v8::internal::NEW_SPACE);
+      HEAP->CollectGarbage(v8::internal::NEW_SPACE);
     } else {
       // Mark sweep compact.
-      Heap::CollectAllGarbage(true);
+      HEAP->CollectAllGarbage(true);
     }
   }
 }
@@ -921,8 +940,9 @@ static void DebugEventBreak(v8::DebugEvent event,
                             v8::Handle<v8::Object> exec_state,
                             v8::Handle<v8::Object> event_data,
                             v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break) {
     // Count the number of breaks.
@@ -930,7 +950,7 @@ static void DebugEventBreak(v8::DebugEvent event,
 
     // Run the garbage collector to enforce heap verification if option
     // --verify-heap is set.
-    Heap::CollectGarbage(v8::internal::NEW_SPACE);
+    HEAP->CollectGarbage(v8::internal::NEW_SPACE);
 
     // Set the break flag again to come back here as soon as possible.
     v8::Debug::DebugBreak();
@@ -946,8 +966,9 @@ static void DebugEventBreakMax(v8::DebugEvent event,
                                v8::Handle<v8::Object> exec_state,
                                v8::Handle<v8::Object> event_data,
                                v8::Handle<v8::Value> data) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // When hitting a debug event listener there must be a break set.
-  CHECK_NE(v8::internal::Debug::break_id(), 0);
+  CHECK_NE(debug->break_id(), 0);
 
   if (event == v8::Break) {
     if (break_point_hit_count < max_break_point_hit_count) {
@@ -987,6 +1008,7 @@ static void MessageCallbackCount(v8::Handle<v8::Message> message,
 // of break locations.
 TEST(DebugStub) {
   using ::v8::internal::Builtins;
+  using ::v8::internal::Isolate;
   v8::HandleScope scope;
   DebugLocalContext env;
 
@@ -999,12 +1021,14 @@ TEST(DebugStub) {
                           "function f2(){x=1;}", "f2",
                           0,
                           v8::internal::RelocInfo::CODE_TARGET_CONTEXT,
-                          Builtins::builtin(Builtins::StoreIC_DebugBreak));
+                          Isolate::Current()->builtins()->builtin(
+                              Builtins::StoreIC_DebugBreak));
   CheckDebugBreakFunction(&env,
                           "function f3(){var a=x;}", "f3",
                           0,
                           v8::internal::RelocInfo::CODE_TARGET_CONTEXT,
-                          Builtins::builtin(Builtins::LoadIC_DebugBreak));
+                          Isolate::Current()->builtins()->builtin(
+                              Builtins::LoadIC_DebugBreak));
 
 // TODO(1240753): Make the test architecture independent or split
 // parts of the debugger into architecture dependent files. This
@@ -1017,14 +1041,16 @@ TEST(DebugStub) {
       "f4",
       0,
       v8::internal::RelocInfo::CODE_TARGET,
-      Builtins::builtin(Builtins::KeyedStoreIC_DebugBreak));
+      Isolate::Current()->builtins()->builtin(
+          Builtins::KeyedStoreIC_DebugBreak));
   CheckDebugBreakFunction(
       &env,
       "function f5(){var index='propertyName'; var a={}; return a[index];}",
       "f5",
       0,
       v8::internal::RelocInfo::CODE_TARGET,
-      Builtins::builtin(Builtins::KeyedLoadIC_DebugBreak));
+      Isolate::Current()->builtins()->builtin(
+          Builtins::KeyedLoadIC_DebugBreak));
 #endif
 
   // Check the debug break code stubs for call ICs with different number of
@@ -1139,7 +1165,7 @@ TEST(BreakPointICLoad) {
   foo->Call(env->Global(), 0, NULL);
   CHECK_EQ(0, break_point_hit_count);
 
-  // Run with breakpoint
+  // Run with breakpoint.
   int bp = SetBreakPoint(foo, 0);
   foo->Call(env->Global(), 0, NULL);
   CHECK_EQ(1, break_point_hit_count);
@@ -1172,7 +1198,7 @@ TEST(BreakPointICCall) {
   foo->Call(env->Global(), 0, NULL);
   CHECK_EQ(0, break_point_hit_count);
 
-  // Run with breakpoint.
+  // Run with breakpoint
   int bp = SetBreakPoint(foo, 0);
   foo->Call(env->Global(), 0, NULL);
   CHECK_EQ(1, break_point_hit_count);
@@ -1367,12 +1393,12 @@ static void CallAndGC(v8::Local<v8::Object> recv,
     CHECK_EQ(1 + i * 3, break_point_hit_count);
 
     // Scavenge and call function.
-    Heap::CollectGarbage(v8::internal::NEW_SPACE);
+    HEAP->CollectGarbage(v8::internal::NEW_SPACE);
     f->Call(recv, 0, NULL);
     CHECK_EQ(2 + i * 3, break_point_hit_count);
 
     // Mark sweep (and perhaps compact) and call function.
-    Heap::CollectAllGarbage(force_compaction);
+    HEAP->CollectAllGarbage(force_compaction);
     f->Call(recv, 0, NULL);
     CHECK_EQ(3 + i * 3, break_point_hit_count);
   }
@@ -2199,7 +2225,7 @@ TEST(ScriptBreakPointLineTopLevel) {
   }
   f = v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("f")));
 
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   SetScriptBreakPointByNameFromJS("test.html", 3, -1);
 
@@ -3698,7 +3724,7 @@ TEST(BreakOnException) {
   DebugLocalContext env;
   env.ExposeDebug();
 
-  v8::internal::Top::TraceException(false);
+  v8::internal::Isolate::Current()->TraceException(false);
 
   // Create functions for testing break on exception.
   v8::Local<v8::Function> throws =
@@ -3845,7 +3871,7 @@ TEST(BreakOnCompileException) {
   // For this test, we want to break on uncaught exceptions:
   ChangeBreakOnException(false, true);
 
-  v8::internal::Top::TraceException(false);
+  v8::internal::Isolate::Current()->TraceException(false);
 
   // Create a function for checking the function when hitting a break point.
   frame_count = CompileFunction(&env, frame_count_source, "frame_count");
@@ -4694,6 +4720,8 @@ Barriers message_queue_barriers;
 // placing JSON debugger commands in the queue.
 class MessageQueueDebuggerThread : public v8::internal::Thread {
  public:
+  explicit MessageQueueDebuggerThread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
@@ -4793,10 +4821,12 @@ void MessageQueueDebuggerThread::Run() {
   // Main thread continues running source_3 to end, waits for this thread.
 }
 
-MessageQueueDebuggerThread message_queue_debugger_thread;
 
 // This thread runs the v8 engine.
 TEST(MessageQueues) {
+  MessageQueueDebuggerThread message_queue_debugger_thread(
+      i::Isolate::Current());
+
   // Create a V8 environment
   v8::HandleScope scope;
   DebugLocalContext env;
@@ -4942,11 +4972,13 @@ Barriers threaded_debugging_barriers;
 
 class V8Thread : public v8::internal::Thread {
  public:
+  explicit V8Thread(v8::internal::Isolate* isolate) : Thread(isolate) { }
   void Run();
 };
 
 class DebuggerThread : public v8::internal::Thread {
  public:
+  explicit DebuggerThread(v8::internal::Isolate* isolate) : Thread(isolate) { }
   void Run();
 };
 
@@ -5021,10 +5053,11 @@ void DebuggerThread::Run() {
   v8::Debug::SendCommand(buffer, AsciiToUtf16(command_2, buffer));
 }
 
-DebuggerThread debugger_thread;
-V8Thread v8_thread;
 
 TEST(ThreadedDebugging) {
+  DebuggerThread debugger_thread(i::Isolate::Current());
+  V8Thread v8_thread(i::Isolate::Current());
+
   // Create a V8 environment
   threaded_debugging_barriers.Initialize();
 
@@ -5044,13 +5077,16 @@ TEST(ThreadedDebugging) {
 
 class BreakpointsV8Thread : public v8::internal::Thread {
  public:
+  explicit BreakpointsV8Thread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
 class BreakpointsDebuggerThread : public v8::internal::Thread {
  public:
-  explicit BreakpointsDebuggerThread(bool global_evaluate)
-      : global_evaluate_(global_evaluate) {}
+  explicit BreakpointsDebuggerThread(v8::internal::Isolate* isolate,
+                                     bool global_evaluate)
+      : Thread(isolate), global_evaluate_(global_evaluate) {}
   void Run();
 
  private:
@@ -5226,8 +5262,9 @@ void BreakpointsDebuggerThread::Run() {
 void TestRecursiveBreakpointsGeneric(bool global_evaluate) {
   i::FLAG_debugger_auto_break = true;
 
-  BreakpointsDebuggerThread breakpoints_debugger_thread(global_evaluate);
-  BreakpointsV8Thread breakpoints_v8_thread;
+  BreakpointsDebuggerThread breakpoints_debugger_thread(i::Isolate::Current(),
+      global_evaluate);
+  BreakpointsV8Thread breakpoints_v8_thread(i::Isolate::Current());
 
   // Create a V8 environment
   Barriers stack_allocated_breakpoints_barriers;
@@ -5609,11 +5646,15 @@ TEST(DebuggerClearMessageHandlerWhileActive) {
 
 class HostDispatchV8Thread : public v8::internal::Thread {
  public:
+  explicit HostDispatchV8Thread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
 class HostDispatchDebuggerThread : public v8::internal::Thread {
  public:
+  explicit HostDispatchDebuggerThread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
@@ -5683,11 +5724,11 @@ void HostDispatchDebuggerThread::Run() {
   v8::Debug::SendCommand(buffer, AsciiToUtf16(command_2, buffer));
 }
 
-HostDispatchDebuggerThread host_dispatch_debugger_thread;
-HostDispatchV8Thread host_dispatch_v8_thread;
-
 
 TEST(DebuggerHostDispatch) {
+  HostDispatchDebuggerThread host_dispatch_debugger_thread(
+      i::Isolate::Current());
+  HostDispatchV8Thread host_dispatch_v8_thread(i::Isolate::Current());
   i::FLAG_debugger_auto_break = true;
 
   // Create a V8 environment
@@ -5711,11 +5752,15 @@ TEST(DebuggerHostDispatch) {
 
 class DebugMessageDispatchV8Thread : public v8::internal::Thread {
  public:
+  explicit DebugMessageDispatchV8Thread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
 class DebugMessageDispatchDebuggerThread : public v8::internal::Thread {
  public:
+  explicit DebugMessageDispatchDebuggerThread(v8::internal::Isolate* isolate)
+      : Thread(isolate) { }
   void Run();
 };
 
@@ -5747,11 +5792,13 @@ void DebugMessageDispatchDebuggerThread::Run() {
   debug_message_dispatch_barriers->barrier_2.Wait();
 }
 
-DebugMessageDispatchDebuggerThread debug_message_dispatch_debugger_thread;
-DebugMessageDispatchV8Thread debug_message_dispatch_v8_thread;
-
 
 TEST(DebuggerDebugMessageDispatch) {
+  DebugMessageDispatchDebuggerThread debug_message_dispatch_debugger_thread(
+      i::Isolate::Current());
+  DebugMessageDispatchV8Thread debug_message_dispatch_v8_thread(
+      i::Isolate::Current());
+
   i::FLAG_debugger_auto_break = true;
 
   // Create a V8 environment
@@ -5769,6 +5816,7 @@ TEST(DebuggerDebugMessageDispatch) {
 
 
 TEST(DebuggerAgent) {
+  i::Debugger* debugger = i::Isolate::Current()->debugger();
   // Make sure these ports are not used by other tests to allow tests to run in
   // parallel.
   const int kPort1 = 5858;
@@ -5786,18 +5834,18 @@ TEST(DebuggerAgent) {
   i::Socket::Setup();
 
   // Test starting and stopping the agent without any client connection.
-  i::Debugger::StartAgent("test", kPort1);
-  i::Debugger::StopAgent();
+  debugger->StartAgent("test", kPort1);
+  debugger->StopAgent();
 
   // Test starting the agent, connecting a client and shutting down the agent
   // with the client connected.
-  ok = i::Debugger::StartAgent("test", kPort2);
+  ok = debugger->StartAgent("test", kPort2);
   CHECK(ok);
-  i::Debugger::WaitForAgent();
+  debugger->WaitForAgent();
   i::Socket* client = i::OS::CreateSocket();
   ok = client->Connect("localhost", port2_str);
   CHECK(ok);
-  i::Debugger::StopAgent();
+  debugger->StopAgent();
   delete client;
 
   // Test starting and stopping the agent with the required port already
@@ -5805,8 +5853,8 @@ TEST(DebuggerAgent) {
   i::Socket* server = i::OS::CreateSocket();
   server->Bind(kPort3);
 
-  i::Debugger::StartAgent("test", kPort3);
-  i::Debugger::StopAgent();
+  debugger->StartAgent("test", kPort3);
+  debugger->StopAgent();
 
   delete server;
 }
@@ -5814,8 +5862,8 @@ TEST(DebuggerAgent) {
 
 class DebuggerAgentProtocolServerThread : public i::Thread {
  public:
-  explicit DebuggerAgentProtocolServerThread(int port)
-      : port_(port), server_(NULL), client_(NULL),
+  explicit DebuggerAgentProtocolServerThread(i::Isolate* isolate, int port)
+      : Thread(isolate), port_(port), server_(NULL), client_(NULL),
         listening_(OS::CreateSemaphore(0)) {
   }
   ~DebuggerAgentProtocolServerThread() {
@@ -5877,7 +5925,7 @@ TEST(DebuggerAgentProtocolOverflowHeader) {
 
   // Create a socket server to receive a debugger agent message.
   DebuggerAgentProtocolServerThread* server =
-      new DebuggerAgentProtocolServerThread(kPort);
+      new DebuggerAgentProtocolServerThread(i::Isolate::Current(), kPort);
   server->Start();
   server->WaitForListening();
 
@@ -6376,17 +6424,18 @@ static void DebugEventScriptCollectedEvent(v8::DebugEvent event,
 
 // Test that scripts collected are reported through the debug event listener.
 TEST(ScriptCollectedEvent) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   break_point_hit_count = 0;
   script_collected_count = 0;
   v8::HandleScope scope;
   DebugLocalContext env;
 
   // Request the loaded scripts to initialize the debugger script cache.
-  Debug::GetLoadedScripts();
+  debug->GetLoadedScripts();
 
   // Do garbage collection to ensure that only the script in this test will be
   // collected afterwards.
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   script_collected_count = 0;
   v8::Debug::SetDebugEventListener(DebugEventScriptCollectedEvent,
@@ -6398,7 +6447,7 @@ TEST(ScriptCollectedEvent) {
 
   // Do garbage collection to collect the script above which is no longer
   // referenced.
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   CHECK_EQ(2, script_collected_count);
 
@@ -6422,6 +6471,7 @@ static void ScriptCollectedMessageHandler(const v8::Debug::Message& message) {
 // Test that GetEventContext doesn't fail and returns an empty handle for
 // ScriptCollected events.
 TEST(ScriptCollectedEventContext) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   script_collected_message_count = 0;
   v8::HandleScope scope;
 
@@ -6429,11 +6479,11 @@ TEST(ScriptCollectedEventContext) {
     DebugLocalContext env;
 
     // Request the loaded scripts to initialize the debugger script cache.
-    Debug::GetLoadedScripts();
+    debug->GetLoadedScripts();
 
     // Do garbage collection to ensure that only the script in this test will be
     // collected afterwards.
-    Heap::CollectAllGarbage(false);
+    HEAP->CollectAllGarbage(false);
 
     v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler);
     {
@@ -6444,7 +6494,7 @@ TEST(ScriptCollectedEventContext) {
 
   // Do garbage collection to collect the script above which is no longer
   // referenced.
-  Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   CHECK_EQ(2, script_collected_message_count);
 
@@ -6592,6 +6642,7 @@ TEST(ProvisionalBreakpointOnLineOutOfRange) {
 
 
 static void BreakMessageHandler(const v8::Debug::Message& message) {
+  i::Isolate* isolate = i::Isolate::Current();
   if (message.IsEvent() && message.GetEvent() == v8::Break) {
     // Count the number of breaks.
     break_point_hit_count++;
@@ -6603,18 +6654,18 @@ static void BreakMessageHandler(const v8::Debug::Message& message) {
   } else if (message.IsEvent() && message.GetEvent() == v8::AfterCompile) {
     v8::HandleScope scope;
 
-    bool is_debug_break = i::StackGuard::IsDebugBreak();
+    bool is_debug_break = isolate->stack_guard()->IsDebugBreak();
     // Force DebugBreak flag while serializer is working.
-    i::StackGuard::DebugBreak();
+    isolate->stack_guard()->DebugBreak();
 
     // Force serialization to trigger some internal JS execution.
     v8::Handle<v8::String> json = message.GetJSON();
 
     // Restore previous state.
     if (is_debug_break) {
-      i::StackGuard::DebugBreak();
+      isolate->stack_guard()->DebugBreak();
     } else {
-      i::StackGuard::Continue(i::DEBUGBREAK);
+      isolate->stack_guard()->Continue(i::DEBUGBREAK);
     }
   }
 }
@@ -6843,6 +6894,7 @@ static void DebugEventGetAtgumentPropertyValue(
 
 
 TEST(CallingContextIsNotDebugContext) {
+  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
   // Create and enter a debuggee context.
   v8::HandleScope scope;
   DebugLocalContext env;
@@ -6851,7 +6903,7 @@ TEST(CallingContextIsNotDebugContext) {
   // Save handles to the debugger and debuggee contexts to be used in
   // NamedGetterWithCallingContextCheck.
   debugee_context = v8::Local<v8::Context>(*env);
-  debugger_context = v8::Utils::ToLocal(Debug::debug_context());
+  debugger_context = v8::Utils::ToLocal(debug->debug_context());
 
   // Create object with 'a' property accessor.
   v8::Handle<v8::ObjectTemplate> named = v8::ObjectTemplate::New();
index 6ea4c849ee71fd5439983b12cb99575abcaebc7a..619839185e651aab3e16787145ea154872357404 100644 (file)
@@ -130,7 +130,7 @@ void DeclarationContext::Check(const char* source,
   InitializeIfNeeded();
   // A retry after a GC may pollute the counts, so perform gc now
   // to avoid that.
-  v8::internal::Heap::CollectGarbage(v8::internal::NEW_SPACE);
+  HEAP->CollectGarbage(v8::internal::NEW_SPACE);
   HandleScope scope;
   TryCatch catcher;
   catcher.SetVerbose(true);
index 17453552e4d56cd673e6676c98824f84b0697bb2..5ab84f96cb087eefc367a569f45b35762b649495 100644 (file)
 #include "v8.h"
 
 #include "api.h"
+#include "cctest.h"
 #include "compilation-cache.h"
 #include "debug.h"
 #include "deoptimizer.h"
+#include "isolate.h"
 #include "platform.h"
 #include "stub-cache.h"
-#include "cctest.h"
-
 
-using ::v8::internal::Handle;
-using ::v8::internal::Object;
-using ::v8::internal::JSFunction;
 using ::v8::internal::Deoptimizer;
 using ::v8::internal::EmbeddedVector;
+using ::v8::internal::Handle;
+using ::v8::internal::Isolate;
+using ::v8::internal::JSFunction;
 using ::v8::internal::OS;
+using ::v8::internal::Object;
 
 // Size of temp buffer for formatting small strings.
 #define SMALL_STRING_BUFFER_SIZE 80
@@ -124,7 +125,7 @@ TEST(DeoptimizeSimple) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 
   // Test lazy deoptimization of a simple function. Call the function after the
   // deoptimization while it is still activated further down the stack.
@@ -140,7 +141,7 @@ TEST(DeoptimizeSimple) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -164,7 +165,7 @@ TEST(DeoptimizeSimpleWithArguments) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 
   // Test lazy deoptimization of a simple function with some arguments. Call the
   // function after the deoptimization while it is still activated further down
@@ -181,7 +182,7 @@ TEST(DeoptimizeSimpleWithArguments) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -207,7 +208,7 @@ TEST(DeoptimizeSimpleNested) {
     CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
     CHECK_EQ(6, env->Global()->Get(v8_str("result"))->Int32Value());
     CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
-    CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+    CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
   }
 }
 
@@ -232,7 +233,7 @@ TEST(DeoptimizeRecursive) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(11, env->Global()->Get(v8_str("calls"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 
   v8::Local<v8::Function> fun =
       v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("f")));
@@ -266,7 +267,7 @@ TEST(DeoptimizeMultiple) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(14, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -289,7 +290,7 @@ TEST(DeoptimizeConstructor) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK(env->Global()->Get(v8_str("result"))->IsTrue());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 
   {
     AlwaysOptimizeAllowNativesSyntaxNoInlining options;
@@ -306,7 +307,7 @@ TEST(DeoptimizeConstructor) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(3, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -337,7 +338,7 @@ TEST(DeoptimizeConstructorMultiple) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(14, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -385,7 +386,7 @@ TEST(DeoptimizeBinaryOperationADDString) {
   CHECK(result->IsString());
   v8::String::AsciiValue ascii(result);
   CHECK_EQ("a+an X", *ascii);
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -444,7 +445,7 @@ TEST(DeoptimizeBinaryOperationADD) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(15, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -458,7 +459,7 @@ TEST(DeoptimizeBinaryOperationSUB) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(-1, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -472,7 +473,7 @@ TEST(DeoptimizeBinaryOperationMUL) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(56, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -486,7 +487,7 @@ TEST(DeoptimizeBinaryOperationDIV) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(0, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -500,7 +501,7 @@ TEST(DeoptimizeBinaryOperationMOD) {
 
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(7, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -545,7 +546,7 @@ TEST(DeoptimizeCompare) {
   CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(true, env->Global()->Get(v8_str("result"))->BooleanValue());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -627,7 +628,7 @@ TEST(DeoptimizeLoadICStoreIC) {
   CHECK(!GetJSFunction(env->Global(), "g2")->IsOptimized());
   CHECK_EQ(4, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(13, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
 
 
@@ -710,5 +711,5 @@ TEST(DeoptimizeLoadICStoreICNested) {
   CHECK(!GetJSFunction(env->Global(), "g2")->IsOptimized());
   CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
   CHECK_EQ(13, env->Global()->Get(v8_str("result"))->Int32Value());
-  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount());
+  CHECK_EQ(0, Deoptimizer::GetDeoptimizedCodeCount(Isolate::Current()));
 }
index c702645eb81d38dd43c56c6c3ff4f4273d2c76ef..32216147be3956276e8effa5c5ee747887bec9e4 100644 (file)
@@ -270,7 +270,7 @@ TEST(Type0) {
           "13a06000       movne r6, #0");
 
   // mov -> movw.
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     COMPARE(mov(r5, Operand(0x01234), LeaveCC, ne),
             "13015234       movwne r5, #4660");
     // We only disassemble one instruction so the eor instruction is not here.
@@ -360,7 +360,7 @@ TEST(Type1) {
 TEST(Type3) {
   SETUP();
 
-  if (CpuFeatures::IsSupported(ARMv7)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
     COMPARE(ubfx(r0, r1, 5, 10),
             "e7e902d1       ubfx r0, r1, #5, #10");
     COMPARE(ubfx(r1, r0, 5, 10),
@@ -415,7 +415,7 @@ TEST(Type3) {
 TEST(Vfp) {
   SETUP();
 
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     COMPARE(vmov(d0, d1),
             "eeb00b41       vmov.f64 d0, d1");
index c995aa8ed04fc8663e1ad0c072214ac285377dea..2f10177775175bb12a637f71bebeaf55668f33b6 100644 (file)
@@ -68,7 +68,7 @@ TEST(DisasmIa320) {
   __ sub(Operand(eax), Immediate(12345678));
   __ xor_(eax, 12345678);
   __ and_(eax, 12345678);
-  Handle<FixedArray> foo = Factory::NewFixedArray(10, TENURED);
+  Handle<FixedArray> foo = FACTORY->NewFixedArray(10, TENURED);
   __ cmp(eax, foo);
 
   // ---- This one caused crash
@@ -99,7 +99,7 @@ TEST(DisasmIa320) {
   __ cmp(edx, 3);
   __ cmp(edx, Operand(esp, 4));
   __ cmp(Operand(ebp, ecx, times_4, 0), Immediate(1000));
-  Handle<FixedArray> foo2 = Factory::NewFixedArray(10, TENURED);
+  Handle<FixedArray> foo2 = FACTORY->NewFixedArray(10, TENURED);
   __ cmp(ebx, foo2);
   __ cmpb(ebx, Operand(ebp, ecx, times_2, 0));
   __ cmpb(Operand(ebp, ecx, times_2, 0), ebx);
@@ -107,12 +107,12 @@ TEST(DisasmIa320) {
   __ xor_(edx, 3);
   __ nop();
   {
-    CHECK(CpuFeatures::IsSupported(CPUID));
+    CHECK(Isolate::Current()->cpu_features()->IsSupported(CPUID));
     CpuFeatures::Scope fscope(CPUID);
     __ cpuid();
   }
   {
-    CHECK(CpuFeatures::IsSupported(RDTSC));
+    CHECK(Isolate::Current()->cpu_features()->IsSupported(RDTSC));
     CpuFeatures::Scope fscope(RDTSC);
     __ rdtsc();
   }
@@ -272,7 +272,8 @@ TEST(DisasmIa320) {
   __ bind(&L2);
   __ call(Operand(ebx, ecx, times_4, 10000));
   __ nop();
-  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::LoadIC_Initialize));
   __ call(ic, RelocInfo::CODE_TARGET);
   __ nop();
   __ call(FUNCTION_ADDR(DummyStaticFunction), RelocInfo::RUNTIME_ENTRY);
@@ -332,7 +333,7 @@ TEST(DisasmIa320) {
   __ j(zero, &Ljcc, taken);
   __ j(zero, &Ljcc, not_taken);
 
-  // __ mov(Operand::StaticVariable(Top::handler_address()), eax);
+  // __ mov(Operand::StaticVariable(Isolate::handler_address()), eax);
   // 0xD9 instructions
   __ nop();
 
@@ -373,7 +374,7 @@ TEST(DisasmIa320) {
   __ fwait();
   __ nop();
   {
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope fscope(SSE2);
       __ cvttss2si(edx, Operand(ebx, ecx, times_4, 10000));
       __ cvtsi2sd(xmm1, Operand(ebx, ecx, times_4, 10000));
@@ -395,7 +396,7 @@ TEST(DisasmIa320) {
 
   // cmov.
   {
-    if (CpuFeatures::IsSupported(CMOV)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(CMOV)) {
       CpuFeatures::Scope use_cmov(CMOV);
       __ cmov(overflow, eax, Operand(eax, 0));
       __ cmov(no_overflow, eax, Operand(eax, 1));
@@ -418,7 +419,7 @@ TEST(DisasmIa320) {
 
   // andpd, cmpltsd, movaps, psllq, psrlq, por.
   {
-    if (CpuFeatures::IsSupported(SSE2)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
       CpuFeatures::Scope fscope(SSE2);
       __ andpd(xmm0, xmm1);
       __ andpd(xmm1, xmm2);
@@ -447,7 +448,7 @@ TEST(DisasmIa320) {
   }
 
   {
-    if (CpuFeatures::IsSupported(SSE4_1)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) {
       CpuFeatures::Scope scope(SSE4_1);
       __ pextrd(Operand(eax), xmm0, 1);
       __ pinsrd(xmm1, Operand(eax), 0);
@@ -458,10 +459,10 @@ TEST(DisasmIa320) {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 #ifdef OBJECT_PRINT
   Code::cast(code)->Print();
index 563cc4bf4dceb9e75812237bfe0f7cb0d6919757..dea5c47735b8d847f442c16884ff8377611642ae 100644 (file)
@@ -36,6 +36,7 @@ using ::v8::internal::CStrVector;
 using ::v8::internal::Factory;
 using ::v8::internal::Handle;
 using ::v8::internal::Heap;
+using ::v8::internal::Isolate;
 using ::v8::internal::JSFunction;
 using ::v8::internal::Object;
 using ::v8::internal::Runtime;
@@ -77,15 +78,20 @@ static void CheckFunctionName(v8::Handle<v8::Script> script,
 
   // Find the position of a given func source substring in the source.
   Handle<String> func_pos_str =
-      Factory::NewStringFromAscii(CStrVector(func_pos_src));
-  int func_pos = Runtime::StringMatch(script_src, func_pos_str, 0);
+      FACTORY->NewStringFromAscii(CStrVector(func_pos_src));
+  int func_pos = Runtime::StringMatch(Isolate::Current(),
+                                      script_src,
+                                      func_pos_str,
+                                      0);
   CHECK_NE(0, func_pos);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Obtain SharedFunctionInfo for the function.
   Object* shared_func_info_ptr =
-      Runtime::FindSharedFunctionInfoInScript(i_script, func_pos);
-  CHECK(shared_func_info_ptr != Heap::undefined_value());
+      Runtime::FindSharedFunctionInfoInScript(Isolate::Current(),
+                                              i_script,
+                                              func_pos);
+  CHECK(shared_func_info_ptr != HEAP->undefined_value());
   Handle<SharedFunctionInfo> shared_func_info(
       SharedFunctionInfo::cast(shared_func_info_ptr));
 
index f1e9fcde2515bacf143c797abcc52227b80599f6..bda97a4a38d73cafb51b25f51527c8c88fca7389 100644 (file)
@@ -26,7 +26,7 @@ class ConstructorHeapProfileTestHelper : public i::ConstructorHeapProfile {
  public:
   ConstructorHeapProfileTestHelper()
     : i::ConstructorHeapProfile(),
-      f_name_(i::Factory::NewStringFromAscii(i::CStrVector("F"))),
+      f_name_(FACTORY->NewStringFromAscii(i::CStrVector("F"))),
       f_count_(0) {
   }
 
@@ -143,25 +143,25 @@ TEST(ClustersCoarserSimple) {
   i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
 
   JSObjectsRetainerTree tree;
-  JSObjectsCluster function(i::Heap::function_class_symbol());
-  JSObjectsCluster a(*i::Factory::NewStringFromAscii(i::CStrVector("A")));
-  JSObjectsCluster b(*i::Factory::NewStringFromAscii(i::CStrVector("B")));
+  JSObjectsCluster function(HEAP->function_class_symbol());
+  JSObjectsCluster a(*FACTORY->NewStringFromAscii(i::CStrVector("A")));
+  JSObjectsCluster b(*FACTORY->NewStringFromAscii(i::CStrVector("B")));
 
   // o1 <- Function
   JSObjectsCluster o1 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
   // o2 <- Function
   JSObjectsCluster o2 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
   // o3 <- A, B
   JSObjectsCluster o3 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &a, &b);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &a, &b);
   // o4 <- B, A
   JSObjectsCluster o4 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x400, &b, &a);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x400, &b, &a);
   // o5 <- A, B, Function
   JSObjectsCluster o5 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x500,
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x500,
                           &a, &b, &function);
 
   ClustersCoarser coarser;
@@ -181,20 +181,20 @@ TEST(ClustersCoarserMultipleConstructors) {
   i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
 
   JSObjectsRetainerTree tree;
-  JSObjectsCluster function(i::Heap::function_class_symbol());
+  JSObjectsCluster function(HEAP->function_class_symbol());
 
   // o1 <- Function
   JSObjectsCluster o1 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100, &function);
   // a1 <- Function
   JSObjectsCluster a1 =
-      AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x1000, &function);
+      AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x1000, &function);
   // o2 <- Function
   JSObjectsCluster o2 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x200, &function);
   // a2 <- Function
   JSObjectsCluster a2 =
-      AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x2000, &function);
+      AddHeapObjectToTree(&tree, HEAP->Array_symbol(), 0x2000, &function);
 
   ClustersCoarser coarser;
   coarser.Process(&tree);
@@ -224,21 +224,21 @@ TEST(ClustersCoarserPathsTraversal) {
   // o21 ~ o22, and o11 ~ o12.
 
   JSObjectsCluster o =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
   JSObjectsCluster o11 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x110, &o);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
   JSObjectsCluster o12 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x120, &o);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
   JSObjectsCluster o21 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x210, &o11);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x210, &o11);
   JSObjectsCluster o22 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x220, &o12);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x220, &o12);
   JSObjectsCluster p =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &o21);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o21);
   JSObjectsCluster q =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x310, &o21, &o22);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o21, &o22);
   JSObjectsCluster r =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x320, &o22);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o22);
 
   ClustersCoarser coarser;
   coarser.Process(&tree);
@@ -275,19 +275,19 @@ TEST(ClustersCoarserSelf) {
   // we expect that coarser will deduce equivalences: p ~ q ~ r, o1 ~ o2;
 
   JSObjectsCluster o =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x100);
   JSObjectsCluster o1 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x110, &o);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x110, &o);
   JSObjectsCluster o2 =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x120, &o);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x120, &o);
   JSObjectsCluster p =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &o1);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x300, &o1);
   AddSelfReferenceToTree(&tree, &p);
   JSObjectsCluster q =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x310, &o1, &o2);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x310, &o1, &o2);
   AddSelfReferenceToTree(&tree, &q);
   JSObjectsCluster r =
-      AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x320, &o2);
+      AddHeapObjectToTree(&tree, HEAP->Object_symbol(), 0x320, &o2);
   AddSelfReferenceToTree(&tree, &r);
 
   ClustersCoarser coarser;
@@ -730,7 +730,7 @@ TEST(HeapEntryIdsAndGC) {
   const v8::HeapSnapshot* snapshot1 =
       v8::HeapProfiler::TakeSnapshot(v8::String::New("s1"));
 
-  i::Heap::CollectAllGarbage(true);  // Enforce compaction.
+  HEAP->CollectAllGarbage(true);  // Enforce compaction.
 
   const v8::HeapSnapshot* snapshot2 =
       v8::HeapProfiler::TakeSnapshot(v8::String::New("s2"));
index 4cc7f8ba85a7180570bfda32a883fa0f9746a7bb..86860ce0174db535a9612a9644564949a5afab5f 100644 (file)
@@ -24,9 +24,9 @@ static void InitializeVM() {
 static void CheckMap(Map* map, int type, int instance_size) {
   CHECK(map->IsHeapObject());
 #ifdef DEBUG
-  CHECK(Heap::Contains(map));
+  CHECK(HEAP->Contains(map));
 #endif
-  CHECK_EQ(Heap::meta_map(), map->map());
+  CHECK_EQ(HEAP->meta_map(), map->map());
   CHECK_EQ(type, map->instance_type());
   CHECK_EQ(instance_size, map->instance_size());
 }
@@ -34,10 +34,10 @@ static void CheckMap(Map* map, int type, int instance_size) {
 
 TEST(HeapMaps) {
   InitializeVM();
-  CheckMap(Heap::meta_map(), MAP_TYPE, Map::kSize);
-  CheckMap(Heap::heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
-  CheckMap(Heap::fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
-  CheckMap(Heap::string_map(), STRING_TYPE, kVariableSizeSentinel);
+  CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
+  CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
+  CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+  CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
 }
 
 
@@ -58,7 +58,7 @@ static void CheckSmi(int value, const char* string) {
 
 
 static void CheckNumber(double value, const char* string) {
-  Object* obj = Heap::NumberFromDouble(value)->ToObjectChecked();
+  Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
   CHECK(obj->IsNumber());
   bool exc;
   Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
@@ -76,27 +76,27 @@ static void CheckFindCodeObject() {
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code = Heap::CreateCode(
+  Object* code = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(code->IsCode());
 
   HeapObject* obj = HeapObject::cast(code);
   Address obj_addr = obj->address();
 
   for (int i = 0; i < obj->Size(); i += kPointerSize) {
-    Object* found = Heap::FindCodeObject(obj_addr + i);
+    Object* found = HEAP->FindCodeObject(obj_addr + i);
     CHECK_EQ(code, found);
   }
 
-  Object* copy = Heap::CreateCode(
+  Object* copy = HEAP->CreateCode(
       desc,
       Code::ComputeFlags(Code::STUB),
-      Handle<Object>(Heap::undefined_value()))->ToObjectChecked();
+      Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
   CHECK(copy->IsCode());
   HeapObject* obj_copy = HeapObject::cast(copy);
-  Object* not_right = Heap::FindCodeObject(obj_copy->address() +
+  Object* not_right = HEAP->FindCodeObject(obj_copy->address() +
                                            obj_copy->Size() / 2);
   CHECK(not_right != code);
 }
@@ -106,41 +106,41 @@ TEST(HeapObjects) {
   InitializeVM();
 
   v8::HandleScope sc;
-  Object* value = Heap::NumberFromDouble(1.000123)->ToObjectChecked();
+  Object* value = HEAP->NumberFromDouble(1.000123)->ToObjectChecked();
   CHECK(value->IsHeapNumber());
   CHECK(value->IsNumber());
   CHECK_EQ(1.000123, value->Number());
 
-  value = Heap::NumberFromDouble(1.0)->ToObjectChecked();
+  value = HEAP->NumberFromDouble(1.0)->ToObjectChecked();
   CHECK(value->IsSmi());
   CHECK(value->IsNumber());
   CHECK_EQ(1.0, value->Number());
 
-  value = Heap::NumberFromInt32(1024)->ToObjectChecked();
+  value = HEAP->NumberFromInt32(1024)->ToObjectChecked();
   CHECK(value->IsSmi());
   CHECK(value->IsNumber());
   CHECK_EQ(1024.0, value->Number());
 
-  value = Heap::NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
+  value = HEAP->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
   CHECK(value->IsSmi());
   CHECK(value->IsNumber());
   CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
 
-  value = Heap::NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
+  value = HEAP->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
   CHECK(value->IsSmi());
   CHECK(value->IsNumber());
   CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
 
 #ifndef V8_TARGET_ARCH_X64
   // TODO(lrn): We need a NumberFromIntptr function in order to test this.
-  value = Heap::NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
+  value = HEAP->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
   CHECK(value->IsHeapNumber());
   CHECK(value->IsNumber());
   CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
 #endif
 
   MaybeObject* maybe_value =
-      Heap::NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
+      HEAP->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
   value = maybe_value->ToObjectChecked();
   CHECK(value->IsHeapNumber());
   CHECK(value->IsNumber());
@@ -148,21 +148,22 @@ TEST(HeapObjects) {
            value->Number());
 
   // nan oddball checks
-  CHECK(Heap::nan_value()->IsNumber());
-  CHECK(isnan(Heap::nan_value()->Number()));
+  CHECK(HEAP->nan_value()->IsNumber());
+  CHECK(isnan(HEAP->nan_value()->Number()));
 
-  Handle<String> s = Factory::NewStringFromAscii(CStrVector("fisk hest "));
+  Handle<String> s = FACTORY->NewStringFromAscii(CStrVector("fisk hest "));
   CHECK(s->IsString());
   CHECK_EQ(10, s->length());
 
-  String* object_symbol = String::cast(Heap::Object_symbol());
-  CHECK(Top::context()->global()->HasLocalProperty(object_symbol));
+  String* object_symbol = String::cast(HEAP->Object_symbol());
+  CHECK(
+      Isolate::Current()->context()->global()->HasLocalProperty(object_symbol));
 
   // Check ToString for oddballs
-  CheckOddball(Heap::true_value(), "true");
-  CheckOddball(Heap::false_value(), "false");
-  CheckOddball(Heap::null_value(), "null");
-  CheckOddball(Heap::undefined_value(), "undefined");
+  CheckOddball(HEAP->true_value(), "true");
+  CheckOddball(HEAP->false_value(), "false");
+  CheckOddball(HEAP->null_value(), "null");
+  CheckOddball(HEAP->undefined_value(), "undefined");
 
   // Check ToString for Smis
   CheckSmi(0, "0");
@@ -197,25 +198,25 @@ TEST(GarbageCollection) {
 
   v8::HandleScope sc;
   // Check GC.
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
-  Handle<String> name = Factory::LookupAsciiSymbol("theFunction");
-  Handle<String> prop_name = Factory::LookupAsciiSymbol("theSlot");
-  Handle<String> prop_namex = Factory::LookupAsciiSymbol("theSlotx");
-  Handle<String> obj_name = Factory::LookupAsciiSymbol("theObject");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
+  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
+  Handle<String> prop_namex = FACTORY->LookupAsciiSymbol("theSlotx");
+  Handle<String> obj_name = FACTORY->LookupAsciiSymbol("theObject");
 
   {
     v8::HandleScope inner_scope;
     // Allocate a function and keep it in global object's property.
     Handle<JSFunction> function =
-        Factory::NewFunction(name, Factory::undefined_value());
+        FACTORY->NewFunction(name, FACTORY->undefined_value());
     Handle<Map> initial_map =
-        Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+        FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
     function->set_initial_map(*initial_map);
-    Top::context()->global()->SetProperty(
+    Isolate::Current()->context()->global()->SetProperty(
         *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
     // Allocate an object.  Unrooted after leaving the scope.
-    Handle<JSObject> obj = Factory::NewJSObject(function);
+    Handle<JSObject> obj = FACTORY->NewJSObject(function);
     obj->SetProperty(
         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
     obj->SetProperty(
@@ -225,34 +226,34 @@ TEST(GarbageCollection) {
     CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
   }
 
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
   // Function should be alive.
-  CHECK(Top::context()->global()->HasLocalProperty(*name));
+  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*name));
   // Check function is retained.
-  Object* func_value =
-      Top::context()->global()->GetProperty(*name)->ToObjectChecked();
+  Object* func_value = Isolate::Current()->context()->global()->
+      GetProperty(*name)->ToObjectChecked();
   CHECK(func_value->IsJSFunction());
   Handle<JSFunction> function(JSFunction::cast(func_value));
 
   {
     HandleScope inner_scope;
     // Allocate another object, make it reachable from global.
-    Handle<JSObject> obj = Factory::NewJSObject(function);
-    Top::context()->global()->SetProperty(
+    Handle<JSObject> obj = FACTORY->NewJSObject(function);
+    Isolate::Current()->context()->global()->SetProperty(
         *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
     obj->SetProperty(
         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
   }
 
   // After gc, it should survive.
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
-  CHECK(Top::context()->global()->HasLocalProperty(*obj_name));
-  CHECK(Top::context()->global()->GetProperty(*obj_name)->ToObjectChecked()->
-            IsJSObject());
-  Object* obj =
-      Top::context()->global()->GetProperty(*obj_name)->ToObjectChecked();
+  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*obj_name));
+  CHECK(Isolate::Current()->context()->global()->
+        GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
+  Object* obj = Isolate::Current()->context()->global()->
+      GetProperty(*obj_name)->ToObjectChecked();
   JSObject* js_obj = JSObject::cast(obj);
   CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
 }
@@ -260,7 +261,7 @@ TEST(GarbageCollection) {
 
 static void VerifyStringAllocation(const char* string) {
   v8::HandleScope scope;
-  Handle<String> s = Factory::NewStringFromUtf8(CStrVector(string));
+  Handle<String> s = FACTORY->NewStringFromUtf8(CStrVector(string));
   CHECK_EQ(StrLength(string), s->length());
   for (int index = 0; index < s->length(); index++) {
     CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
@@ -284,12 +285,13 @@ TEST(LocalHandles) {
 
   v8::HandleScope scope;
   const char* name = "Kasper the spunky";
-  Handle<String> string = Factory::NewStringFromAscii(CStrVector(name));
+  Handle<String> string = FACTORY->NewStringFromAscii(CStrVector(name));
   CHECK_EQ(StrLength(name), string->length());
 }
 
 
 TEST(GlobalHandles) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
 
   Handle<Object> h1;
@@ -300,17 +302,17 @@ TEST(GlobalHandles) {
   {
     HandleScope scope;
 
-    Handle<Object> i = Factory::NewStringFromAscii(CStrVector("fisk"));
-    Handle<Object> u = Factory::NewNumber(1.12344);
+    Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
+    Handle<Object> u = FACTORY->NewNumber(1.12344);
 
-    h1 = GlobalHandles::Create(*i);
-    h2 = GlobalHandles::Create(*u);
-    h3 = GlobalHandles::Create(*i);
-    h4 = GlobalHandles::Create(*u);
+    h1 = global_handles->Create(*i);
+    h2 = global_handles->Create(*u);
+    h3 = global_handles->Create(*i);
+    h4 = global_handles->Create(*u);
   }
 
   // after gc, it should survive
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
   CHECK((*h1)->IsString());
   CHECK((*h2)->IsHeapNumber());
@@ -318,12 +320,12 @@ TEST(GlobalHandles) {
   CHECK((*h4)->IsHeapNumber());
 
   CHECK_EQ(*h3, *h1);
-  GlobalHandles::Destroy(h1.location());
-  GlobalHandles::Destroy(h3.location());
+  global_handles->Destroy(h1.location());
+  global_handles->Destroy(h3.location());
 
   CHECK_EQ(*h4, *h2);
-  GlobalHandles::Destroy(h2.location());
-  GlobalHandles::Destroy(h4.location());
+  global_handles->Destroy(h2.location());
+  global_handles->Destroy(h4.location());
 }
 
 
@@ -337,6 +339,7 @@ static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
 
 
 TEST(WeakGlobalHandlesScavenge) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
 
   WeakPointerCleared = false;
@@ -347,33 +350,34 @@ TEST(WeakGlobalHandlesScavenge) {
   {
     HandleScope scope;
 
-    Handle<Object> i = Factory::NewStringFromAscii(CStrVector("fisk"));
-    Handle<Object> u = Factory::NewNumber(1.12344);
+    Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
+    Handle<Object> u = FACTORY->NewNumber(1.12344);
 
-    h1 = GlobalHandles::Create(*i);
-    h2 = GlobalHandles::Create(*u);
+    h1 = global_handles->Create(*i);
+    h2 = global_handles->Create(*u);
   }
 
-  GlobalHandles::MakeWeak(h2.location(),
-                          reinterpret_cast<void*>(1234),
-                          &TestWeakGlobalHandleCallback);
+  global_handles->MakeWeak(h2.location(),
+                           reinterpret_cast<void*>(1234),
+                           &TestWeakGlobalHandleCallback);
 
   // Scavenge treats weak pointers as normal roots.
-  Heap::PerformScavenge();
+  HEAP->PerformScavenge();
 
   CHECK((*h1)->IsString());
   CHECK((*h2)->IsHeapNumber());
 
   CHECK(!WeakPointerCleared);
-  CHECK(!GlobalHandles::IsNearDeath(h2.location()));
-  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
+  CHECK(!global_handles->IsNearDeath(h2.location()));
+  CHECK(!global_handles->IsNearDeath(h1.location()));
 
-  GlobalHandles::Destroy(h1.location());
-  GlobalHandles::Destroy(h2.location());
+  global_handles->Destroy(h1.location());
+  global_handles->Destroy(h2.location());
 }
 
 
 TEST(WeakGlobalHandlesMark) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
 
   WeakPointerCleared = false;
@@ -384,34 +388,35 @@ TEST(WeakGlobalHandlesMark) {
   {
     HandleScope scope;
 
-    Handle<Object> i = Factory::NewStringFromAscii(CStrVector("fisk"));
-    Handle<Object> u = Factory::NewNumber(1.12344);
+    Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
+    Handle<Object> u = FACTORY->NewNumber(1.12344);
 
-    h1 = GlobalHandles::Create(*i);
-    h2 = GlobalHandles::Create(*u);
+    h1 = global_handles->Create(*i);
+    h2 = global_handles->Create(*u);
   }
 
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
   // Make sure the object is promoted.
 
-  GlobalHandles::MakeWeak(h2.location(),
-                          reinterpret_cast<void*>(1234),
-                          &TestWeakGlobalHandleCallback);
+  global_handles->MakeWeak(h2.location(),
+                           reinterpret_cast<void*>(1234),
+                           &TestWeakGlobalHandleCallback);
   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
 
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   CHECK((*h1)->IsString());
 
   CHECK(WeakPointerCleared);
   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
 
-  GlobalHandles::Destroy(h1.location());
+  global_handles->Destroy(h1.location());
 }
 
 TEST(DeleteWeakGlobalHandle) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
 
   WeakPointerCleared = false;
@@ -421,21 +426,21 @@ TEST(DeleteWeakGlobalHandle) {
   {
     HandleScope scope;
 
-    Handle<Object> i = Factory::NewStringFromAscii(CStrVector("fisk"));
-    h = GlobalHandles::Create(*i);
+    Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
+    h = global_handles->Create(*i);
   }
 
-  GlobalHandles::MakeWeak(h.location(),
-                          reinterpret_cast<void*>(1234),
-                          &TestWeakGlobalHandleCallback);
+  global_handles->MakeWeak(h.location(),
+                           reinterpret_cast<void*>(1234),
+                           &TestWeakGlobalHandleCallback);
 
   // Scavenge does not recognize the weak reference.
-  Heap::PerformScavenge();
+  HEAP->PerformScavenge();
 
   CHECK(!WeakPointerCleared);
 
   // Mark-compact treats weak reference properly.
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   CHECK(WeakPointerCleared);
 }
@@ -507,12 +512,12 @@ static const char* not_so_random_string_table[] = {
 static void CheckSymbols(const char** strings) {
   for (const char* string = *strings; *strings != 0; string = *strings++) {
     Object* a;
-    MaybeObject* maybe_a = Heap::LookupAsciiSymbol(string);
+    MaybeObject* maybe_a = HEAP->LookupAsciiSymbol(string);
     // LookupAsciiSymbol may return a failure if a GC is needed.
     if (!maybe_a->ToObject(&a)) continue;
     CHECK(a->IsSymbol());
     Object* b;
-    MaybeObject *maybe_b = Heap::LookupAsciiSymbol(string);
+    MaybeObject* maybe_b = HEAP->LookupAsciiSymbol(string);
     if (!maybe_b->ToObject(&b)) continue;
     CHECK_EQ(b, a);
     CHECK(String::cast(b)->IsEqualTo(CStrVector(string)));
@@ -532,15 +537,15 @@ TEST(FunctionAllocation) {
   InitializeVM();
 
   v8::HandleScope sc;
-  Handle<String> name = Factory::LookupAsciiSymbol("theFunction");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
   Handle<JSFunction> function =
-      Factory::NewFunction(name, Factory::undefined_value());
+      FACTORY->NewFunction(name, FACTORY->undefined_value());
   Handle<Map> initial_map =
-      Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+      FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   function->set_initial_map(*initial_map);
 
-  Handle<String> prop_name = Factory::LookupAsciiSymbol("theSlot");
-  Handle<JSObject> obj = Factory::NewJSObject(function);
+  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
+  Handle<JSObject> obj = FACTORY->NewJSObject(function);
   obj->SetProperty(
       *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
   CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
@@ -555,14 +560,14 @@ TEST(ObjectProperties) {
   InitializeVM();
 
   v8::HandleScope sc;
-  String* object_symbol = String::cast(Heap::Object_symbol());
-  Object* raw_object =
-      Top::context()->global()->GetProperty(object_symbol)->ToObjectChecked();
+  String* object_symbol = String::cast(HEAP->Object_symbol());
+  Object* raw_object = Isolate::Current()->context()->global()->
+      GetProperty(object_symbol)->ToObjectChecked();
   JSFunction* object_function = JSFunction::cast(raw_object);
   Handle<JSFunction> constructor(object_function);
-  Handle<JSObject> obj = Factory::NewJSObject(constructor);
-  Handle<String> first = Factory::LookupAsciiSymbol("first");
-  Handle<String> second = Factory::LookupAsciiSymbol("second");
+  Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
+  Handle<String> first = FACTORY->LookupAsciiSymbol("first");
+  Handle<String> second = FACTORY->LookupAsciiSymbol("second");
 
   // check for empty
   CHECK(!obj->HasLocalProperty(*first));
@@ -608,18 +613,18 @@ TEST(ObjectProperties) {
 
   // check string and symbol match
   static const char* string1 = "fisk";
-  Handle<String> s1 = Factory::NewStringFromAscii(CStrVector(string1));
+  Handle<String> s1 = FACTORY->NewStringFromAscii(CStrVector(string1));
   obj->SetProperty(
       *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
-  Handle<String> s1_symbol = Factory::LookupAsciiSymbol(string1);
+  Handle<String> s1_symbol = FACTORY->LookupAsciiSymbol(string1);
   CHECK(obj->HasLocalProperty(*s1_symbol));
 
   // check symbol and string match
   static const char* string2 = "fugl";
-  Handle<String> s2_symbol = Factory::LookupAsciiSymbol(string2);
+  Handle<String> s2_symbol = FACTORY->LookupAsciiSymbol(string2);
   obj->SetProperty(
       *s2_symbol, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
-  Handle<String> s2 = Factory::NewStringFromAscii(CStrVector(string2));
+  Handle<String> s2 = FACTORY->NewStringFromAscii(CStrVector(string2));
   CHECK(obj->HasLocalProperty(*s2));
 }
 
@@ -628,15 +633,15 @@ TEST(JSObjectMaps) {
   InitializeVM();
 
   v8::HandleScope sc;
-  Handle<String> name = Factory::LookupAsciiSymbol("theFunction");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
   Handle<JSFunction> function =
-      Factory::NewFunction(name, Factory::undefined_value());
+      FACTORY->NewFunction(name, FACTORY->undefined_value());
   Handle<Map> initial_map =
-      Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+      FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   function->set_initial_map(*initial_map);
 
-  Handle<String> prop_name = Factory::LookupAsciiSymbol("theSlot");
-  Handle<JSObject> obj = Factory::NewJSObject(function);
+  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
+  Handle<JSObject> obj = FACTORY->NewJSObject(function);
 
   // Set a property
   obj->SetProperty(
@@ -652,14 +657,14 @@ TEST(JSArray) {
   InitializeVM();
 
   v8::HandleScope sc;
-  Handle<String> name = Factory::LookupAsciiSymbol("Array");
-  Object* raw_object =
-      Top::context()->global()->GetProperty(*name)->ToObjectChecked();
+  Handle<String> name = FACTORY->LookupAsciiSymbol("Array");
+  Object* raw_object = Isolate::Current()->context()->global()->
+      GetProperty(*name)->ToObjectChecked();
   Handle<JSFunction> function = Handle<JSFunction>(
       JSFunction::cast(raw_object));
 
   // Allocate the object.
-  Handle<JSObject> object = Factory::NewJSObject(function);
+  Handle<JSObject> object = FACTORY->NewJSObject(function);
   Handle<JSArray> array = Handle<JSArray>::cast(object);
   // We just initialized the VM, no heap allocation failure yet.
   Object* ok = array->Initialize(0)->ToObjectChecked();
@@ -676,7 +681,7 @@ TEST(JSArray) {
 
   // Set array length with larger than smi value.
   Handle<Object> length =
-      Factory::NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
+      FACTORY->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
   ok = array->SetElementsLength(*length)->ToObjectChecked();
 
   uint32_t int_length = 0;
@@ -698,14 +703,14 @@ TEST(JSObjectCopy) {
   InitializeVM();
 
   v8::HandleScope sc;
-  String* object_symbol = String::cast(Heap::Object_symbol());
-  Object* raw_object =
-      Top::context()->global()->GetProperty(object_symbol)->ToObjectChecked();
+  String* object_symbol = String::cast(HEAP->Object_symbol());
+  Object* raw_object = Isolate::Current()->context()->global()->
+      GetProperty(object_symbol)->ToObjectChecked();
   JSFunction* object_function = JSFunction::cast(raw_object);
   Handle<JSFunction> constructor(object_function);
-  Handle<JSObject> obj = Factory::NewJSObject(constructor);
-  Handle<String> first = Factory::LookupAsciiSymbol("first");
-  Handle<String> second = Factory::LookupAsciiSymbol("second");
+  Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
+  Handle<String> first = FACTORY->LookupAsciiSymbol("first");
+  Handle<String> second = FACTORY->LookupAsciiSymbol("second");
 
   obj->SetProperty(
       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
@@ -761,17 +766,17 @@ TEST(StringAllocation) {
       non_ascii[3 * i + 2] = chars[2];
     }
     Handle<String> non_ascii_sym =
-        Factory::LookupSymbol(Vector<const char>(non_ascii, 3 * length));
+        FACTORY->LookupSymbol(Vector<const char>(non_ascii, 3 * length));
     CHECK_EQ(length, non_ascii_sym->length());
     Handle<String> ascii_sym =
-        Factory::LookupSymbol(Vector<const char>(ascii, length));
+        FACTORY->LookupSymbol(Vector<const char>(ascii, length));
     CHECK_EQ(length, ascii_sym->length());
     Handle<String> non_ascii_str =
-        Factory::NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
+        FACTORY->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
     non_ascii_str->Hash();
     CHECK_EQ(length, non_ascii_str->length());
     Handle<String> ascii_str =
-        Factory::NewStringFromUtf8(Vector<const char>(ascii, length));
+        FACTORY->NewStringFromUtf8(Vector<const char>(ascii, length));
     ascii_str->Hash();
     CHECK_EQ(length, ascii_str->length());
     DeleteArray(non_ascii);
@@ -805,22 +810,22 @@ TEST(Iteration) {
   int next_objs_index = 0;
 
   // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
-  objs[next_objs_index++] = Factory::NewJSArray(10);
-  objs[next_objs_index++] = Factory::NewJSArray(10, TENURED);
+  objs[next_objs_index++] = FACTORY->NewJSArray(10);
+  objs[next_objs_index++] = FACTORY->NewJSArray(10, TENURED);
 
   // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
   objs[next_objs_index++] =
-      Factory::NewStringFromAscii(CStrVector("abcdefghij"));
+      FACTORY->NewStringFromAscii(CStrVector("abcdefghij"));
   objs[next_objs_index++] =
-      Factory::NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
+      FACTORY->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
 
   // Allocate a large string (for large object space).
-  int large_size = Heap::MaxObjectSizeInPagedSpace() + 1;
+  int large_size = HEAP->MaxObjectSizeInPagedSpace() + 1;
   char* str = new char[large_size];
   for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
   str[large_size - 1] = '\0';
   objs[next_objs_index++] =
-      Factory::NewStringFromAscii(CStrVector(str), TENURED);
+      FACTORY->NewStringFromAscii(CStrVector(str), TENURED);
   delete[] str;
 
   // Add a Map object to look for.
@@ -834,9 +839,9 @@ TEST(Iteration) {
 TEST(LargeObjectSpaceContains) {
   InitializeVM();
 
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
-  Address current_top = Heap::new_space()->top();
+  Address current_top = HEAP->new_space()->top();
   Page* page = Page::FromAddress(current_top);
   Address current_page = page->address();
   Address next_page = current_page + Page::kPageSize;
@@ -859,7 +864,7 @@ TEST(LargeObjectSpaceContains) {
       kPointerSize;
   CHECK_EQ(bytes_to_allocate, FixedArray::SizeFor(n_elements));
   FixedArray* array = FixedArray::cast(
-      Heap::AllocateFixedArray(n_elements)->ToObjectChecked());
+      HEAP->AllocateFixedArray(n_elements)->ToObjectChecked());
 
   int index = n_elements - 1;
   CHECK_EQ(flags_ptr,
@@ -869,8 +874,8 @@ TEST(LargeObjectSpaceContains) {
   // CHECK(Page::FromAddress(next_page)->IsLargeObjectPage());
 
   HeapObject* addr = HeapObject::FromAddress(next_page + 2 * kPointerSize);
-  CHECK(Heap::new_space()->Contains(addr));
-  CHECK(!Heap::lo_space()->Contains(addr));
+  CHECK(HEAP->new_space()->Contains(addr));
+  CHECK(!HEAP->lo_space()->Contains(addr));
 }
 
 
@@ -901,7 +906,7 @@ TEST(Regression39128) {
 
   // Increase the chance of 'bump-the-pointer' allocation in old space.
   bool force_compaction = true;
-  Heap::CollectAllGarbage(force_compaction);
+  HEAP->CollectAllGarbage(force_compaction);
 
   v8::HandleScope scope;
 
@@ -910,11 +915,12 @@ TEST(Regression39128) {
   // that region dirty marks are updated correctly.
 
   // Step 1: prepare a map for the object.  We add 1 inobject property to it.
-  Handle<JSFunction> object_ctor(Top::global_context()->object_function());
+  Handle<JSFunction> object_ctor(
+      Isolate::Current()->global_context()->object_function());
   CHECK(object_ctor->has_initial_map());
   Handle<Map> object_map(object_ctor->initial_map());
   // Create a map with single inobject property.
-  Handle<Map> my_map = Factory::CopyMap(object_map, 1);
+  Handle<Map> my_map = FACTORY->CopyMap(object_map, 1);
   int n_properties = my_map->inobject_properties();
   CHECK_GT(n_properties, 0);
 
@@ -924,15 +930,15 @@ TEST(Regression39128) {
   // just enough room to allocate JSObject and thus fill the newspace.
 
   int allocation_amount = Min(FixedArray::kMaxSize,
-                              Heap::MaxObjectSizeInNewSpace());
+                              HEAP->MaxObjectSizeInNewSpace());
   int allocation_len = LenFromSize(allocation_amount);
-  NewSpace* new_space = Heap::new_space();
+  NewSpace* new_space = HEAP->new_space();
   Address* top_addr = new_space->allocation_top_address();
   Address* limit_addr = new_space->allocation_limit_address();
   while ((*limit_addr - *top_addr) > allocation_amount) {
-    CHECK(!Heap::always_allocate());
-    Object* array =
-        Heap::AllocateFixedArray(allocation_len)->ToObjectChecked();
+    CHECK(!HEAP->always_allocate());
+    Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
+    CHECK(!array->IsFailure());
     CHECK(new_space->Contains(array));
   }
 
@@ -941,12 +947,12 @@ TEST(Regression39128) {
   int fixed_array_len = LenFromSize(to_fill);
   CHECK(fixed_array_len < FixedArray::kMaxLength);
 
-  CHECK(!Heap::always_allocate());
-  Object* array =
-      Heap::AllocateFixedArray(fixed_array_len)->ToObjectChecked();
+  CHECK(!HEAP->always_allocate());
+  Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
+  CHECK(!array->IsFailure());
   CHECK(new_space->Contains(array));
 
-  Object* object = Heap::AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
+  Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
   CHECK(new_space->Contains(object));
   JSObject* jsobject = JSObject::cast(object);
   CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
@@ -958,15 +964,15 @@ TEST(Regression39128) {
 
   // Step 4: clone jsobject, but force always allocate first to create a clone
   // in old pointer space.
-  Address old_pointer_space_top = Heap::old_pointer_space()->top();
+  Address old_pointer_space_top = HEAP->old_pointer_space()->top();
   AlwaysAllocateScope aa_scope;
-  Object* clone_obj = Heap::CopyJSObject(jsobject)->ToObjectChecked();
+  Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
   JSObject* clone = JSObject::cast(clone_obj);
   if (clone->address() != old_pointer_space_top) {
     // Alas, got allocated from free list, we cannot do checks.
     return;
   }
-  CHECK(Heap::old_pointer_space()->Contains(clone->address()));
+  CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
 
   // Step 5: verify validity of region dirty marks.
   Address clone_addr = clone->address();
@@ -988,7 +994,7 @@ TEST(TestCodeFlushing) {
                        "  var z = x + y;"
                        "};"
                        "foo()";
-  Handle<String> foo_name = Factory::LookupAsciiSymbol("foo");
+  Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
 
   // This compile will add the code to the compilation cache.
   { v8::HandleScope scope;
@@ -996,23 +1002,23 @@ TEST(TestCodeFlushing) {
   }
 
   // Check function is compiled.
-  Object* func_value =
-      Top::context()->global()->GetProperty(*foo_name)->ToObjectChecked();
+  Object* func_value = Isolate::Current()->context()->global()->
+      GetProperty(*foo_name)->ToObjectChecked();
   CHECK(func_value->IsJSFunction());
   Handle<JSFunction> function(JSFunction::cast(func_value));
   CHECK(function->shared()->is_compiled());
 
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 
   CHECK(function->shared()->is_compiled());
 
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 
   // foo should no longer be in the compilation cache
   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -1027,7 +1033,7 @@ TEST(TestCodeFlushing) {
 // Count the number of global contexts in the weak list of global contexts.
 static int CountGlobalContexts() {
   int count = 0;
-  Object* object = Heap::global_contexts_list();
+  Object* object = HEAP->global_contexts_list();
   while (!object->IsUndefined()) {
     count++;
     object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
@@ -1051,6 +1057,8 @@ static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
 
 
 TEST(TestInternalWeakLists) {
+  v8::V8::Initialize();
+
   static const int kNumTestContexts = 10;
 
   v8::HandleScope scope;
@@ -1094,35 +1102,35 @@ TEST(TestInternalWeakLists) {
 
     // Scavenge treats these references as strong.
     for (int j = 0; j < 10; j++) {
-      Heap::PerformScavenge();
+      HEAP->PerformScavenge();
       CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
 
     // Mark compact handles the weak references.
-    Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
 
     // Get rid of f3 and f5 in the same way.
     CompileRun("f3=null");
     for (int j = 0; j < 10; j++) {
-      Heap::PerformScavenge();
+      HEAP->PerformScavenge();
       CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
-    Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     CompileRun("f5=null");
     for (int j = 0; j < 10; j++) {
-      Heap::PerformScavenge();
+      HEAP->PerformScavenge();
       CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
-    Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
 
     ctx[i]->Exit();
   }
 
   // Force compilation cache cleanup.
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 
   // Dispose the global contexts one by one.
   for (int i = 0; i < kNumTestContexts; i++) {
@@ -1131,12 +1139,12 @@ TEST(TestInternalWeakLists) {
 
     // Scavenge treats these references as strong.
     for (int j = 0; j < 10; j++) {
-      Heap::PerformScavenge();
+      HEAP->PerformScavenge();
       CHECK_EQ(kNumTestContexts - i, CountGlobalContexts());
     }
 
     // Mark compact handles the weak references.
-    Heap::CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(true);
     CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts());
   }
 
@@ -1148,10 +1156,10 @@ TEST(TestInternalWeakLists) {
 // causing a GC after the specified number of elements.
 static int CountGlobalContextsWithGC(int n) {
   int count = 0;
-  Handle<Object> object(Heap::global_contexts_list());
+  Handle<Object> object(HEAP->global_contexts_list());
   while (!object->IsUndefined()) {
     count++;
-    if (count == n) Heap::CollectAllGarbage(true);
+    if (count == n) HEAP->CollectAllGarbage(true);
     object =
         Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK));
   }
@@ -1170,7 +1178,7 @@ static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
   while (object->IsJSFunction() &&
          !Handle<JSFunction>::cast(object)->IsBuiltin()) {
     count++;
-    if (count == n) Heap::CollectAllGarbage(true);
+    if (count == n) HEAP->CollectAllGarbage(true);
     object = Handle<Object>(
         Object::cast(JSFunction::cast(*object)->next_function_link()));
   }
@@ -1179,6 +1187,8 @@ static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
 
 
 TEST(TestInternalWeakListsTraverseWithGC) {
+  v8::V8::Initialize();
+
   static const int kNumTestContexts = 10;
 
   v8::HandleScope scope;
@@ -1228,7 +1238,7 @@ TEST(TestInternalWeakListsTraverseWithGC) {
 
 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
   InitializeVM();
-  intptr_t size_of_objects_1 = Heap::SizeOfObjects();
+  intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
   HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
   intptr_t size_of_objects_2 = 0;
   for (HeapObject* obj = iterator.next();
@@ -1283,10 +1293,10 @@ TEST(HeapIteratorFilterUnreachable) {
   InitializeVM();
   v8::HandleScope scope;
   CompileRun("a = {}; b = {};");
-  v8::Handle<Object> a(Top::context()->global()->GetProperty(
-      *Factory::LookupAsciiSymbol("a"))->ToObjectChecked());
-  v8::Handle<Object> b(Top::context()->global()->GetProperty(
-      *Factory::LookupAsciiSymbol("b"))->ToObjectChecked());
+  v8::Handle<Object> a(ISOLATE->context()->global()->GetProperty(
+      *FACTORY->LookupAsciiSymbol("a"))->ToObjectChecked());
+  v8::Handle<Object> b(ISOLATE->context()->global()->GetProperty(
+      *FACTORY->LookupAsciiSymbol("b"))->ToObjectChecked());
   CHECK_NE(*a, *b);
   {
     HeapIteratorTestHelper helper(*a, *b);
@@ -1294,8 +1304,8 @@ TEST(HeapIteratorFilterUnreachable) {
     CHECK(helper.a_found());
     CHECK(helper.b_found());
   }
-  CHECK(Top::context()->global()->DeleteProperty(
-      *Factory::LookupAsciiSymbol("a"), JSObject::FORCE_DELETION));
+  CHECK(ISOLATE->context()->global()->DeleteProperty(
+      *FACTORY->LookupAsciiSymbol("a"), JSObject::FORCE_DELETION));
   // We ensure that GC will not happen, so our raw pointer stays valid.
   AssertNoAllocation no_alloc;
   Object* a_saved = *a;
index 244980a19482073c08ffddbe8032ea71c5cf2f90..9232354476300c757548856d4277a7bd8406934a 100644 (file)
@@ -158,6 +158,7 @@ void CompareStrings(const char* s1, const char* s2,
 // --- T h e   A c t u a l   T e s t s
 
 TEST(LiveEditDiffer) {
+  v8::internal::V8::Initialize(NULL);
   CompareStrings("zz1zzz12zz123zzz", "zzzzzzzzzz", 6);
   CompareStrings("zz1zzz12zz123zzz", "zz0zzz0zz0zzz", 9);
   CompareStrings("123456789", "987654321", 16);
index bf72184fb013d0a337f8238a4c523fc0e7014ef4..e87da3e7d24bb56c3880e0c7e68d224b6bb915f3 100644 (file)
@@ -36,7 +36,7 @@
 #include "api.h"
 #include "codegen.h"
 #include "log.h"
-#include "top.h"
+#include "isolate.h"
 #include "cctest.h"
 #include "disassembler.h"
 #include "register-allocator-inl.h"
@@ -52,10 +52,10 @@ using v8::Value;
 using v8::internal::byte;
 using v8::internal::Address;
 using v8::internal::Handle;
+using v8::internal::Isolate;
 using v8::internal::JSFunction;
 using v8::internal::StackTracer;
 using v8::internal::TickSample;
-using v8::internal::Top;
 
 namespace i = v8::internal;
 
@@ -78,18 +78,19 @@ static void DoTrace(Address fp) {
   // sp is only used to define stack high bound
   trace_env.sample->sp =
       reinterpret_cast<Address>(trace_env.sample) - 10240;
-  StackTracer::Trace(trace_env.sample);
+  StackTracer::Trace(Isolate::Current(), trace_env.sample);
 }
 
 
 // Hide c_entry_fp to emulate the situation when sampling is done while
 // pure JS code is being executed.
 static void DoTraceHideCEntryFPAddress(Address fp) {
-  v8::internal::Address saved_c_frame_fp = *(Top::c_entry_fp_address());
+  v8::internal::Address saved_c_frame_fp =
+      *(Isolate::Current()->c_entry_fp_address());
   CHECK(saved_c_frame_fp);
-  *(Top::c_entry_fp_address()) = 0;
+  *(Isolate::Current()->c_entry_fp_address()) = 0;
   DoTrace(fp);
-  *(Top::c_entry_fp_address()) = saved_c_frame_fp;
+  *(Isolate::Current()->c_entry_fp_address()) = saved_c_frame_fp;
 }
 
 
@@ -163,8 +164,8 @@ v8::Handle<v8::Value> TraceExtension::JSTrace(const v8::Arguments& args) {
 
 
 static Address GetJsEntrySp() {
-  CHECK_NE(NULL, Top::GetCurrentThread());
-  return Top::js_entry_sp(Top::GetCurrentThread());
+  CHECK_NE(NULL, i::Isolate::Current()->thread_local_top());
+  return Isolate::js_entry_sp(i::Isolate::Current()->thread_local_top());
 }
 
 
@@ -276,7 +277,7 @@ static void CreateTraceCallerFunction(const char* func_name,
 
 // This test verifies that stack tracing works when called during
 // execution of a native function called from JS code. In this case,
-// StackTracer uses Top::c_entry_fp as a starting point for stack
+// StackTracer uses Isolate::c_entry_fp as a starting point for stack
 // walking.
 TEST(CFromJSStackTrace) {
   TickSample sample;
@@ -318,7 +319,7 @@ TEST(CFromJSStackTrace) {
 // This test verifies that stack tracing works when called during
 // execution of JS code. However, as calling StackTracer requires
 // entering native code, we can only emulate pure JS by erasing
-// Top::c_entry_fp value. In this case, StackTracer uses passed frame
+// Isolate::c_entry_fp value. In this case, StackTracer uses passed frame
 // pointer value as a starting point for stack walking.
 TEST(PureJSStackTrace) {
   // This test does not pass with inlining enabled since inlined functions
@@ -397,6 +398,7 @@ static int CFunc(int depth) {
 TEST(PureCStackTrace) {
   TickSample sample;
   InitTraceEnv(&sample);
+  InitializeVM();
   // Check that sampler doesn't crash
   CHECK_EQ(10, CFunc(10));
 }
index 30b8a48dbf07eaa20370c5ce66e7e43f465f5160..17c73874ac72c40b0fbb8be2ab0b0ee94bc3f868 100644 (file)
@@ -29,61 +29,61 @@ static void SetUp() {
   // Log to memory buffer.
   i::FLAG_logfile = "*";
   i::FLAG_log = true;
-  Logger::Setup();
+  LOGGER->Setup();
 }
 
 static void TearDown() {
-  Logger::TearDown();
+  LOGGER->TearDown();
 }
 
 
 TEST(EmptyLog) {
   SetUp();
-  CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
-  CHECK_EQ(0, Logger::GetLogLines(100, NULL, 0));
-  CHECK_EQ(0, Logger::GetLogLines(0, NULL, 100));
-  CHECK_EQ(0, Logger::GetLogLines(100, NULL, 100));
+  CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
+  CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 0));
+  CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 100));
+  CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 100));
   TearDown();
 }
 
 
 TEST(GetMessages) {
   SetUp();
-  Logger::StringEvent("aaa", "bbb");
-  Logger::StringEvent("cccc", "dddd");
-  CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
+  LOGGER->StringEvent("aaa", "bbb");
+  LOGGER->StringEvent("cccc", "dddd");
+  CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
   char log_lines[100];
   memset(log_lines, 0, sizeof(log_lines));
   // See Logger::StringEvent.
   const char* line_1 = "aaa,\"bbb\"\n";
   const int line_1_len = StrLength(line_1);
   // The exact size.
-  CHECK_EQ(line_1_len, Logger::GetLogLines(0, log_lines, line_1_len));
+  CHECK_EQ(line_1_len, LOGGER->GetLogLines(0, log_lines, line_1_len));
   CHECK_EQ(line_1, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
   // A bit more than the first line length.
-  CHECK_EQ(line_1_len, Logger::GetLogLines(0, log_lines, line_1_len + 3));
+  CHECK_EQ(line_1_len, LOGGER->GetLogLines(0, log_lines, line_1_len + 3));
   log_lines[line_1_len] = '\0';
   CHECK_EQ(line_1, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
   const char* line_2 = "cccc,\"dddd\"\n";
   const int line_2_len = StrLength(line_2);
   // Now start with line_2 beginning.
-  CHECK_EQ(0, Logger::GetLogLines(line_1_len, log_lines, 0));
-  CHECK_EQ(line_2_len, Logger::GetLogLines(line_1_len, log_lines, line_2_len));
+  CHECK_EQ(0, LOGGER->GetLogLines(line_1_len, log_lines, 0));
+  CHECK_EQ(line_2_len, LOGGER->GetLogLines(line_1_len, log_lines, line_2_len));
   CHECK_EQ(line_2, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
   CHECK_EQ(line_2_len,
-           Logger::GetLogLines(line_1_len, log_lines, line_2_len + 3));
+           LOGGER->GetLogLines(line_1_len, log_lines, line_2_len + 3));
   CHECK_EQ(line_2, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
   // Now get entire buffer contents.
   const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
   const int all_lines_len = StrLength(all_lines);
-  CHECK_EQ(all_lines_len, Logger::GetLogLines(0, log_lines, all_lines_len));
+  CHECK_EQ(all_lines_len, LOGGER->GetLogLines(0, log_lines, all_lines_len));
   CHECK_EQ(all_lines, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
-  CHECK_EQ(all_lines_len, Logger::GetLogLines(0, log_lines, all_lines_len + 3));
+  CHECK_EQ(all_lines_len, LOGGER->GetLogLines(0, log_lines, all_lines_len + 3));
   CHECK_EQ(all_lines, log_lines);
   memset(log_lines, 0, sizeof(log_lines));
   TearDown();
@@ -91,26 +91,26 @@ TEST(GetMessages) {
 
 
 static int GetLogLines(int start_pos, i::Vector<char>* buffer) {
-  return Logger::GetLogLines(start_pos, buffer->start(), buffer->length());
+  return LOGGER->GetLogLines(start_pos, buffer->start(), buffer->length());
 }
 
 
 TEST(BeyondWritePosition) {
   SetUp();
-  Logger::StringEvent("aaa", "bbb");
-  Logger::StringEvent("cccc", "dddd");
+  LOGGER->StringEvent("aaa", "bbb");
+  LOGGER->StringEvent("cccc", "dddd");
   // See Logger::StringEvent.
   const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
   const int all_lines_len = StrLength(all_lines);
   EmbeddedVector<char, 100> buffer;
   const int beyond_write_pos = all_lines_len;
-  CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos, buffer.start(), 1));
+  CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos, buffer.start(), 1));
   CHECK_EQ(0, GetLogLines(beyond_write_pos, &buffer));
-  CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos + 1, buffer.start(), 1));
+  CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos + 1, buffer.start(), 1));
   CHECK_EQ(0, GetLogLines(beyond_write_pos + 1, &buffer));
-  CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos + 100, buffer.start(), 1));
+  CHECK_EQ(0, LOGGER->GetLogLines(beyond_write_pos + 100, buffer.start(), 1));
   CHECK_EQ(0, GetLogLines(beyond_write_pos + 100, &buffer));
-  CHECK_EQ(0, Logger::GetLogLines(10 * 1024 * 1024, buffer.start(), 1));
+  CHECK_EQ(0, LOGGER->GetLogLines(10 * 1024 * 1024, buffer.start(), 1));
   CHECK_EQ(0, GetLogLines(10 * 1024 * 1024, &buffer));
   TearDown();
 }
@@ -120,12 +120,12 @@ TEST(MemoryLoggingTurnedOff) {
   // Log to stdout
   i::FLAG_logfile = "-";
   i::FLAG_log = true;
-  Logger::Setup();
-  CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
-  CHECK_EQ(0, Logger::GetLogLines(100, NULL, 0));
-  CHECK_EQ(0, Logger::GetLogLines(0, NULL, 100));
-  CHECK_EQ(0, Logger::GetLogLines(100, NULL, 100));
-  Logger::TearDown();
+  LOGGER->Setup();
+  CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
+  CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 0));
+  CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 100));
+  CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 100));
+  LOGGER->TearDown();
 }
 
 
@@ -139,12 +139,12 @@ namespace internal {
 
 class LoggerTestHelper : public AllStatic {
  public:
-  static bool IsSamplerActive() { return Logger::IsProfilerSamplerActive(); }
+  static bool IsSamplerActive() { return LOGGER->IsProfilerSamplerActive(); }
   static void ResetSamplesTaken() {
-    reinterpret_cast<Sampler*>(Logger::ticker_)->ResetSamplesTaken();
+    reinterpret_cast<Sampler*>(LOGGER->ticker_)->ResetSamplesTaken();
   }
   static bool has_samples_taken() {
-    return reinterpret_cast<Sampler*>(Logger::ticker_)->samples_taken() > 0;
+    return reinterpret_cast<Sampler*>(LOGGER->ticker_)->samples_taken() > 0;
   }
 };
 
@@ -166,13 +166,13 @@ class ScopedLoggerInitializer {
         need_to_set_up_logger_(i::V8::IsRunning()),
         scope_(),
         env_(v8::Context::New()) {
-    if (need_to_set_up_logger_) Logger::Setup();
+    if (need_to_set_up_logger_) LOGGER->Setup();
     env_->Enter();
   }
 
   ~ScopedLoggerInitializer() {
     env_->Exit();
-    Logger::TearDown();
+    LOGGER->TearDown();
     i::FLAG_prof_lazy = saved_prof_lazy_;
     i::FLAG_prof = saved_prof_;
     i::FLAG_prof_auto = saved_prof_auto_;
@@ -251,7 +251,7 @@ static void CheckThatProfilerWorks(LogBufferMatcher* matcher) {
         !LoggerTestHelper::IsSamplerActive());
   LoggerTestHelper::ResetSamplesTaken();
 
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 0);
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 0);
   CHECK(LoggerTestHelper::IsSamplerActive());
 
   // Verify that the current map of compiled functions has been logged.
@@ -273,7 +273,7 @@ static void CheckThatProfilerWorks(LogBufferMatcher* matcher) {
     i::OS::Sleep(1);
   }
 
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 0);
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 0);
   CHECK(i::RuntimeProfiler::IsEnabled() ||
         !LoggerTestHelper::IsSamplerActive());
 
@@ -329,8 +329,8 @@ namespace {
 
 class LoopingThread : public v8::internal::Thread {
  public:
-  LoopingThread()
-      : v8::internal::Thread(),
+  explicit LoopingThread(v8::internal::Isolate* isolate)
+      : v8::internal::Thread(isolate),
         semaphore_(v8::internal::OS::CreateSemaphore(0)),
         run_(true) {
   }
@@ -369,9 +369,12 @@ class LoopingThread : public v8::internal::Thread {
 
 class LoopingJsThread : public LoopingThread {
  public:
+  explicit LoopingJsThread(v8::internal::Isolate* isolate)
+      : LoopingThread(isolate) { }
   void RunLoop() {
     v8::Locker locker;
-    CHECK(v8::internal::ThreadManager::HasId());
+    CHECK(i::Isolate::Current() != NULL);
+    CHECK_GT(i::Isolate::Current()->thread_manager()->CurrentId(), 0);
     SetV8ThreadId();
     while (IsRunning()) {
       v8::HandleScope scope;
@@ -392,11 +395,14 @@ class LoopingJsThread : public LoopingThread {
 
 class LoopingNonJsThread : public LoopingThread {
  public:
+  explicit LoopingNonJsThread(v8::internal::Isolate* isolate)
+      : LoopingThread(isolate) { }
   void RunLoop() {
     v8::Locker locker;
     v8::Unlocker unlocker;
     // Now thread has V8's id, but will not run VM code.
-    CHECK(v8::internal::ThreadManager::HasId());
+    CHECK(i::Isolate::Current() != NULL);
+    CHECK_GT(i::Isolate::Current()->thread_manager()->CurrentId(), 0);
     double i = 10;
     SignalRunning();
     while (IsRunning()) {
@@ -409,8 +415,8 @@ class LoopingNonJsThread : public LoopingThread {
 
 class TestSampler : public v8::internal::Sampler {
  public:
-  TestSampler()
-      : Sampler(0, true, true),
+  explicit TestSampler(v8::internal::Isolate* isolate)
+      : Sampler(isolate, 0, true, true),
         semaphore_(v8::internal::OS::CreateSemaphore(0)),
         was_sample_stack_called_(false) {
   }
@@ -441,14 +447,14 @@ TEST(ProfMultipleThreads) {
   TestSampler* sampler = NULL;
   {
     v8::Locker locker;
-    sampler = new TestSampler();
+    sampler = new TestSampler(v8::internal::Isolate::Current());
     sampler->Start();
     CHECK(sampler->IsActive());
   }
 
-  LoopingJsThread jsThread;
+  LoopingJsThread jsThread(v8::internal::Isolate::Current());
   jsThread.Start();
-  LoopingNonJsThread nonJsThread;
+  LoopingNonJsThread nonJsThread(v8::internal::Isolate::Current());
   nonJsThread.Start();
 
   CHECK(!sampler->WasSampleStackCalled());
@@ -515,7 +521,7 @@ TEST(Issue23768) {
   i_source->set_resource(NULL);
 
   // Must not crash.
-  i::Logger::LogCompiledFunctions();
+  LOGGER->LogCompiledFunctions();
 }
 
 
@@ -541,7 +547,7 @@ TEST(LogCallbacks) {
   initialize_logger.env()->Global()->Set(v8_str("Obj"), obj->GetFunction());
   CompileAndRunScript("Obj.prototype.method1.toString();");
 
-  i::Logger::LogCompiledFunctions();
+  LOGGER->LogCompiledFunctions();
   CHECK_GT(matcher.GetNextChunk(), 0);
 
   const char* callback_rec = "code-creation,Callback,";
@@ -584,7 +590,7 @@ TEST(LogAccessorCallbacks) {
   inst->SetAccessor(v8::String::New("prop1"), Prop1Getter, Prop1Setter);
   inst->SetAccessor(v8::String::New("prop2"), Prop2Getter);
 
-  i::Logger::LogAccessorCallbacks();
+  LOGGER->LogAccessorCallbacks();
   CHECK_GT(matcher.GetNextChunk(), 0);
   matcher.PrintBuffer();
 
@@ -616,11 +622,11 @@ TEST(LogTags) {
   const char* close_tag = "close-tag,";
 
   // Check compatibility with the old style behavior.
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 0);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 0);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 0);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 0);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
   CHECK_EQ(NULL, matcher.Find(open_tag));
   CHECK_EQ(NULL, matcher.Find(close_tag));
 
@@ -628,11 +634,11 @@ TEST(LogTags) {
   const char* close_tag1 = "close-tag,1\n";
 
   // Check non-nested tag case.
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
   CHECK_GT(matcher.GetNextChunk(), 0);
   CHECK(matcher.IsInSequence(open_tag1, close_tag1));
 
@@ -640,15 +646,15 @@ TEST(LogTags) {
   const char* close_tag2 = "close-tag,2\n";
 
   // Check nested tags case.
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
   CHECK_GT(matcher.GetNextChunk(), 0);
   // open_tag1 < open_tag2 < close_tag2 < close_tag1
   CHECK(matcher.IsInSequence(open_tag1, open_tag2));
@@ -656,15 +662,15 @@ TEST(LogTags) {
   CHECK(matcher.IsInSequence(close_tag2, close_tag1));
 
   // Check overlapped tags case.
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
   CHECK_GT(matcher.GetNextChunk(), 0);
   // open_tag1 < open_tag2 < close_tag1 < close_tag2
   CHECK(matcher.IsInSequence(open_tag1, open_tag2));
@@ -675,19 +681,19 @@ TEST(LogTags) {
   const char* close_tag3 = "close-tag,3\n";
 
   // Check pausing overflow case.
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
-  CHECK_EQ(v8::PROFILER_MODULE_CPU, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 3);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 3);
-  CHECK_EQ(v8::PROFILER_MODULE_NONE, Logger::GetActiveProfilerModules());
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 2);
+  CHECK_EQ(v8::PROFILER_MODULE_CPU, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 3);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 3);
+  CHECK_EQ(v8::PROFILER_MODULE_NONE, LOGGER->GetActiveProfilerModules());
   // Must be no tags, because logging must be disabled.
   CHECK_EQ(NULL, matcher.Find(open_tag3));
   CHECK_EQ(NULL, matcher.Find(close_tag3));
@@ -697,29 +703,29 @@ TEST(LogTags) {
 TEST(IsLoggingPreserved) {
   ScopedLoggerInitializer initialize_logger(false);
 
-  CHECK(Logger::is_logging());
-  Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK(Logger::is_logging());
-  Logger::PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
-  CHECK(Logger::is_logging());
+  CHECK(LOGGER->is_logging());
+  LOGGER->ResumeProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK(LOGGER->is_logging());
+  LOGGER->PauseProfiler(v8::PROFILER_MODULE_CPU, 1);
+  CHECK(LOGGER->is_logging());
 
-  CHECK(Logger::is_logging());
-  Logger::ResumeProfiler(
+  CHECK(LOGGER->is_logging());
+  LOGGER->ResumeProfiler(
       v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(Logger::is_logging());
-  Logger::PauseProfiler(
+  CHECK(LOGGER->is_logging());
+  LOGGER->PauseProfiler(
       v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(Logger::is_logging());
+  CHECK(LOGGER->is_logging());
 
-  CHECK(Logger::is_logging());
-  Logger::ResumeProfiler(
+  CHECK(LOGGER->is_logging());
+  LOGGER->ResumeProfiler(
       v8::PROFILER_MODULE_CPU |
       v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(Logger::is_logging());
-  Logger::PauseProfiler(
+  CHECK(LOGGER->is_logging());
+  LOGGER->PauseProfiler(
       v8::PROFILER_MODULE_CPU |
       v8::PROFILER_MODULE_HEAP_STATS | v8::PROFILER_MODULE_JS_CONSTRUCTORS, 1);
-  CHECK(Logger::is_logging());
+  CHECK(LOGGER->is_logging());
 }
 
 
@@ -1128,7 +1134,7 @@ TEST(EquivalenceOfLoggingAndTraversal) {
       "  obj.test =\n"
       "    (function a(j) { return function b() { return j; } })(100);\n"
       "})(this);");
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
 
   EmbeddedVector<char, 204800> buffer;
   int log_size;
@@ -1148,9 +1154,9 @@ TEST(EquivalenceOfLoggingAndTraversal) {
   }
 
   // Iterate heap to find compiled functions, will write to log.
-  i::Logger::LogCompiledFunctions();
+  LOGGER->LogCompiledFunctions();
   char* new_log_start = buffer.start() + log_size;
-  const int new_log_size = Logger::GetLogLines(
+  const int new_log_size = LOGGER->GetLogLines(
       log_size, new_log_start, buffer.length() - log_size);
   CHECK_GT(new_log_size, 0);
   CHECK_GT(buffer.length(), log_size + new_log_size);
@@ -1184,7 +1190,7 @@ TEST(EquivalenceOfLoggingAndTraversal) {
   CHECK(results_equal);
 
   env->Exit();
-  Logger::TearDown();
+  LOGGER->TearDown();
   i::FLAG_always_compact = saved_always_compact;
 }
 
index d208013be746a8f853da228c9c1f66010bc04d30..6e059dff7533be7e7f17dcf9bdbc8efe46309882 100755 (executable)
@@ -146,6 +146,7 @@ static void TestMoveSmi(MacroAssembler* masm, Label* exit, int id, Smi* value) {
 
 // Test that we can move a Smi value literally into a register.
 TEST(SmiMove) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -232,7 +233,7 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 // Test that we can compare smis for equality (and more).
 TEST(SmiCompare) {
-  v8::V8::Initialize();
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -283,6 +284,7 @@ TEST(SmiCompare) {
 
 
 TEST(Integer32ToSmi) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -410,6 +412,7 @@ void TestI64PlusConstantToSmi(MacroAssembler* masm,
 
 
 TEST(Integer64PlusConstantToSmi) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -453,6 +456,7 @@ TEST(Integer64PlusConstantToSmi) {
 
 
 TEST(SmiCheck) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -699,6 +703,7 @@ void TestSmiNeg(MacroAssembler* masm, Label* exit, int id, int x) {
 
 
 TEST(SmiNeg) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -787,6 +792,7 @@ static void SmiAddTest(MacroAssembler* masm,
 }
 
 TEST(SmiAdd) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -975,6 +981,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
 
 
 TEST(SmiSub) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1065,6 +1072,7 @@ void TestSmiMul(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiMul) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -1169,6 +1177,7 @@ void TestSmiDiv(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiDiv) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1278,6 +1287,7 @@ void TestSmiMod(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiMod) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1373,6 +1383,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
 }
 
 TEST(SmiIndex) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1441,6 +1452,7 @@ void TestSelectNonSmi(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiSelectNonSmi) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1519,6 +1531,7 @@ void TestSmiAnd(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiAnd) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1599,6 +1612,7 @@ void TestSmiOr(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiOr) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1681,6 +1695,7 @@ void TestSmiXor(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 
 
 TEST(SmiXor) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1747,6 +1762,7 @@ void TestSmiNot(MacroAssembler* masm, Label* exit, int id, int x) {
 
 
 TEST(SmiNot) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1842,6 +1858,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
 
 
 TEST(SmiShiftLeft) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -1947,6 +1964,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
 
 
 TEST(SmiShiftLogicalRight) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -2015,6 +2033,7 @@ void TestSmiShiftArithmeticRight(MacroAssembler* masm,
 
 
 TEST(SmiShiftArithmeticRight) {
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -2078,7 +2097,7 @@ void TestPositiveSmiPowerUp(MacroAssembler* masm, Label* exit, int id, int x) {
 
 
 TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
-  v8::V8::Initialize();
+  v8::internal::V8::Initialize(NULL);
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
@@ -2118,6 +2137,7 @@ TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
 
 
 TEST(OperandOffset) {
+  v8::internal::V8::Initialize(NULL);
   int data[256];
   for (int i = 0; i < 256; i++) { data[i] = i * 0x01010101; }
 
index f86aa16a27fa726a78efcb6c6c173cdc04cc38c4..6d1b5ce632529c7e0bf18037262cb35e0943633d 100644 (file)
@@ -31,7 +31,6 @@
 
 #include "global-handles.h"
 #include "snapshot.h"
-#include "top.h"
 #include "cctest.h"
 
 using namespace v8::internal;
@@ -79,7 +78,7 @@ TEST(Promotion) {
   // from new space.
   FLAG_gc_global = true;
   FLAG_always_compact = true;
-  Heap::ConfigureHeap(2*256*KB, 4*MB, 4*MB);
+  HEAP->ConfigureHeap(2*256*KB, 4*MB, 4*MB);
 
   InitializeVM();
 
@@ -87,25 +86,25 @@ TEST(Promotion) {
 
   // Allocate a fixed array in the new space.
   int array_size =
-      (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
+      (HEAP->MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
       (kPointerSize * 4);
-  Object* obj = Heap::AllocateFixedArray(array_size)->ToObjectChecked();
+  Object* obj = HEAP->AllocateFixedArray(array_size)->ToObjectChecked();
 
   Handle<FixedArray> array(FixedArray::cast(obj));
 
   // Array should be in the new space.
-  CHECK(Heap::InSpace(*array, NEW_SPACE));
+  CHECK(HEAP->InSpace(*array, NEW_SPACE));
 
   // Call the m-c collector, so array becomes an old object.
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // Array now sits in the old space
-  CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
+  CHECK(HEAP->InSpace(*array, OLD_POINTER_SPACE));
 }
 
 
 TEST(NoPromotion) {
-  Heap::ConfigureHeap(2*256*KB, 4*MB, 4*MB);
+  HEAP->ConfigureHeap(2*256*KB, 4*MB, 4*MB);
 
   // Test the situation that some objects in new space are promoted to
   // the old space
@@ -114,23 +113,23 @@ TEST(NoPromotion) {
   v8::HandleScope sc;
 
   // Do a mark compact GC to shrink the heap.
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // Allocate a big Fixed array in the new space.
-  int size = (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
+  int size = (HEAP->MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
       kPointerSize;
-  Object* obj = Heap::AllocateFixedArray(size)->ToObjectChecked();
+  Object* obj = HEAP->AllocateFixedArray(size)->ToObjectChecked();
 
   Handle<FixedArray> array(FixedArray::cast(obj));
 
   // Array still stays in the new space.
-  CHECK(Heap::InSpace(*array, NEW_SPACE));
+  CHECK(HEAP->InSpace(*array, NEW_SPACE));
 
   // Allocate objects in the old space until out of memory.
   FixedArray* host = *array;
   while (true) {
     Object* obj;
-    { MaybeObject* maybe_obj = Heap::AllocateFixedArray(100, TENURED);
+    { MaybeObject* maybe_obj = HEAP->AllocateFixedArray(100, TENURED);
       if (!maybe_obj->ToObject(&obj)) break;
     }
 
@@ -139,10 +138,10 @@ TEST(NoPromotion) {
   }
 
   // Call mark compact GC, and it should pass.
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // array should not be promoted because the old space is full.
-  CHECK(Heap::InSpace(*array, NEW_SPACE));
+  CHECK(HEAP->InSpace(*array, NEW_SPACE));
 }
 
 
@@ -151,90 +150,86 @@ TEST(MarkCompactCollector) {
 
   v8::HandleScope sc;
   // call mark-compact when heap is empty
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // keep allocating garbage in new space until it fails
   const int ARRAY_SIZE = 100;
   Object* array;
   MaybeObject* maybe_array;
   do {
-    maybe_array = Heap::AllocateFixedArray(ARRAY_SIZE);
+    maybe_array = HEAP->AllocateFixedArray(ARRAY_SIZE);
   } while (maybe_array->ToObject(&array));
-  Heap::CollectGarbage(NEW_SPACE);
+  HEAP->CollectGarbage(NEW_SPACE);
 
-  array = Heap::AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();
+  array = HEAP->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();
 
   // keep allocating maps until it fails
   Object* mapp;
   MaybeObject* maybe_mapp;
   do {
-    maybe_mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+    maybe_mapp = HEAP->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   } while (maybe_mapp->ToObject(&mapp));
-  Heap::CollectGarbage(MAP_SPACE);
-  mapp = Heap::AllocateMap(JS_OBJECT_TYPE,
+  HEAP->CollectGarbage(MAP_SPACE);
+  mapp = HEAP->AllocateMap(JS_OBJECT_TYPE,
                            JSObject::kHeaderSize)->ToObjectChecked();
 
   // allocate garbage
   String* func_name =
-      String::cast(Heap::LookupAsciiSymbol("theFunction")->ToObjectChecked());
+      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
   SharedFunctionInfo* function_share = SharedFunctionInfo::cast(
-      Heap::AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
+      HEAP->AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
   JSFunction* function = JSFunction::cast(
-      Heap::AllocateFunction(*Top::function_map(),
+      HEAP->AllocateFunction(*Isolate::Current()->function_map(),
                              function_share,
-                             Heap::undefined_value())->ToObjectChecked());
+                             HEAP->undefined_value())->ToObjectChecked());
   Map* initial_map =
-      Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE,
+      Map::cast(HEAP->AllocateMap(JS_OBJECT_TYPE,
                                   JSObject::kHeaderSize)->ToObjectChecked());
   function->set_initial_map(initial_map);
-  Top::context()->global()->SetProperty(func_name,
-                                        function,
-                                        NONE,
-                                        kNonStrictMode)->ToObjectChecked();
+  Isolate::Current()->context()->global()->SetProperty(
+      func_name, function, NONE, kNonStrictMode)->ToObjectChecked();
 
-  JSObject* obj =
-      JSObject::cast(Heap::AllocateJSObject(function)->ToObjectChecked());
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  JSObject* obj = JSObject::cast(
+      HEAP->AllocateJSObject(function)->ToObjectChecked());
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   func_name =
-      String::cast(Heap::LookupAsciiSymbol("theFunction")->ToObjectChecked());
-  CHECK(Top::context()->global()->HasLocalProperty(func_name));
-  Object* func_value =
-      Top::context()->global()->GetProperty(func_name)->ToObjectChecked();
+      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
+  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(func_name));
+  Object* func_value = Isolate::Current()->context()->global()->
+      GetProperty(func_name)->ToObjectChecked();
   CHECK(func_value->IsJSFunction());
   function = JSFunction::cast(func_value);
 
-  obj = JSObject::cast(Heap::AllocateJSObject(function)->ToObjectChecked());
+  obj = JSObject::cast(HEAP->AllocateJSObject(function)->ToObjectChecked());
   String* obj_name =
-      String::cast(Heap::LookupAsciiSymbol("theObject")->ToObjectChecked());
-  Top::context()->global()->SetProperty(obj_name,
-                                        obj,
-                                        NONE,
-                                        kNonStrictMode)->ToObjectChecked();
+      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
+  Isolate::Current()->context()->global()->SetProperty(
+      obj_name, obj, NONE, kNonStrictMode)->ToObjectChecked();
   String* prop_name =
-      String::cast(Heap::LookupAsciiSymbol("theSlot")->ToObjectChecked());
+      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
   obj->SetProperty(prop_name,
                    Smi::FromInt(23),
                    NONE,
                    kNonStrictMode)->ToObjectChecked();
 
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   obj_name =
-      String::cast(Heap::LookupAsciiSymbol("theObject")->ToObjectChecked());
-  CHECK(Top::context()->global()->HasLocalProperty(obj_name));
-  CHECK(Top::context()->global()->
-            GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
-  obj = JSObject::cast(
-      Top::context()->global()->GetProperty(obj_name)->ToObjectChecked());
+      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
+  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(obj_name));
+  CHECK(Isolate::Current()->context()->global()->
+        GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
+  obj = JSObject::cast(Isolate::Current()->context()->global()->
+                       GetProperty(obj_name)->ToObjectChecked());
   prop_name =
-      String::cast(Heap::LookupAsciiSymbol("theSlot")->ToObjectChecked());
-  CHECK(obj->GetProperty(prop_name)->ToObjectChecked() == Smi::FromInt(23));
+      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
+  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
 }
 
 
 static Handle<Map> CreateMap() {
-  return Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+  return FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
 }
 
 
@@ -246,20 +241,20 @@ TEST(MapCompact) {
     v8::HandleScope sc;
     // keep allocating maps while pointers are still encodable and thus
     // mark compact is permitted.
-    Handle<JSObject> root = Factory::NewJSObjectFromMap(CreateMap());
+    Handle<JSObject> root = FACTORY->NewJSObjectFromMap(CreateMap());
     do {
       Handle<Map> map = CreateMap();
       map->set_prototype(*root);
-      root = Factory::NewJSObjectFromMap(map);
-    } while (Heap::map_space()->MapPointersEncodable());
+      root = FACTORY->NewJSObjectFromMap(map);
+    } while (HEAP->map_space()->MapPointersEncodable());
   }
   // Now, as we don't have any handles to the just-allocated maps, we should
   // be able to trigger map compaction.
   // To give an additional chance to fail, try to force compaction which
   // should be impossible right now.
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
   // And now map pointers should be encodable again.
-  CHECK(Heap::map_space()->MapPointersEncodable());
+  CHECK(HEAP->map_space()->MapPointersEncodable());
 }
 
 
@@ -281,16 +276,16 @@ static void GCEpilogueCallbackFunc() {
 TEST(GCCallback) {
   InitializeVM();
 
-  Heap::SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
-  Heap::SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);
+  HEAP->SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
+  HEAP->SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);
 
   // Scavenge does not call GC callback functions.
-  Heap::PerformScavenge();
+  HEAP->PerformScavenge();
 
   CHECK_EQ(0, gc_starts);
   CHECK_EQ(gc_ends, gc_starts);
 
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
   CHECK_EQ(1, gc_starts);
   CHECK_EQ(gc_ends, gc_starts);
 }
@@ -304,44 +299,45 @@ static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
 }
 
 TEST(ObjectGroups) {
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
 
   NumberOfWeakCalls = 0;
   v8::HandleScope handle_scope;
 
   Handle<Object> g1s1 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
+      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   Handle<Object> g1s2 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
+    global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   Handle<Object> g1c1 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
-  GlobalHandles::MakeWeak(g1s1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-  GlobalHandles::MakeWeak(g1s2.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-  GlobalHandles::MakeWeak(g1c1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
+    global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
+  global_handles->MakeWeak(g1s1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+  global_handles->MakeWeak(g1s2.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+  global_handles->MakeWeak(g1c1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
 
   Handle<Object> g2s1 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
+      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   Handle<Object> g2s2 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
+    global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   Handle<Object> g2c1 =
-    GlobalHandles::Create(Heap::AllocateFixedArray(1)->ToObjectChecked());
-  GlobalHandles::MakeWeak(g2s1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-  GlobalHandles::MakeWeak(g2s2.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-  GlobalHandles::MakeWeak(g2c1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-
-  Handle<Object> root = GlobalHandles::Create(*g1s1);  // make a root.
+    global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
+  global_handles->MakeWeak(g2s1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+  global_handles->MakeWeak(g2s2.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+  global_handles->MakeWeak(g2c1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+
+  Handle<Object> root = global_handles->Create(*g1s1);  // make a root.
 
   // Connect group 1 and 2, make a cycle.
   Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
@@ -352,27 +348,27 @@ TEST(ObjectGroups) {
     Object** g1_children[] = { g1c1.location() };
     Object** g2_objects[] = { g2s1.location(), g2s2.location() };
     Object** g2_children[] = { g2c1.location() };
-    GlobalHandles::AddObjectGroup(g1_objects, 2, NULL);
-    GlobalHandles::AddImplicitReferences(HeapObject::cast(*g1s1),
-                                         g1_children, 1);
-    GlobalHandles::AddObjectGroup(g2_objects, 2, NULL);
-    GlobalHandles::AddImplicitReferences(HeapObject::cast(*g2s2),
-                                         g2_children, 1);
+    global_handles->AddObjectGroup(g1_objects, 2, NULL);
+    global_handles->AddImplicitReferences(HeapObject::cast(*g1s1),
+                                          g1_children, 1);
+    global_handles->AddObjectGroup(g2_objects, 2, NULL);
+    global_handles->AddImplicitReferences(HeapObject::cast(*g2s2),
+                                          g2_children, 1);
   }
   // Do a full GC
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // All objects should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
 
   // Weaken the root.
-  GlobalHandles::MakeWeak(root.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
+  global_handles->MakeWeak(root.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
   // But make children strong roots---all the objects (except for children)
   // should be collectable now.
-  GlobalHandles::ClearWeakness(g1c1.location());
-  GlobalHandles::ClearWeakness(g2c1.location());
+  global_handles->ClearWeakness(g1c1.location());
+  global_handles->ClearWeakness(g2c1.location());
 
   // Groups are deleted, rebuild groups.
   {
@@ -380,27 +376,27 @@ TEST(ObjectGroups) {
     Object** g1_children[] = { g1c1.location() };
     Object** g2_objects[] = { g2s1.location(), g2s2.location() };
     Object** g2_children[] = { g2c1.location() };
-    GlobalHandles::AddObjectGroup(g1_objects, 2, NULL);
-    GlobalHandles::AddImplicitReferences(HeapObject::cast(*g1s1),
+    global_handles->AddObjectGroup(g1_objects, 2, NULL);
+    global_handles->AddImplicitReferences(HeapObject::cast(*g1s1),
                                          g1_children, 1);
-    GlobalHandles::AddObjectGroup(g2_objects, 2, NULL);
-    GlobalHandles::AddImplicitReferences(HeapObject::cast(*g2s2),
+    global_handles->AddObjectGroup(g2_objects, 2, NULL);
+    global_handles->AddImplicitReferences(HeapObject::cast(*g2s2),
                                          g2_children, 1);
   }
 
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
 
   // All objects should be gone. 5 global handles in total.
   CHECK_EQ(5, NumberOfWeakCalls);
 
   // And now make children weak again and collect them.
-  GlobalHandles::MakeWeak(g1c1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-  GlobalHandles::MakeWeak(g2c1.location(),
-                          reinterpret_cast<void*>(1234),
-                          &WeakPointerCallback);
-
-  Heap::CollectGarbage(OLD_POINTER_SPACE);
+  global_handles->MakeWeak(g1c1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+  global_handles->MakeWeak(g2c1.location(),
+                           reinterpret_cast<void*>(1234),
+                           &WeakPointerCallback);
+
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
   CHECK_EQ(7, NumberOfWeakCalls);
 }
index 8ee40385a6e19cc53bf0770be3cb7ac8d779c61d..fc98bedd3ca695416abdcfe36918730f996ee012 100755 (executable)
@@ -31,6 +31,7 @@
 
 #include "v8.h"
 
+#include "isolate.h"
 #include "token.h"
 #include "scanner.h"
 #include "parser.h"
@@ -153,7 +154,7 @@ TEST(ScanHTMLEndComments) {
 
   // Parser/Scanner needs a stack limit.
   int marker;
-  i::StackGuard::SetStackLimit(
+  i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   for (int i = 0; tests[i]; i++) {
@@ -184,7 +185,7 @@ TEST(Preparsing) {
   v8::Persistent<v8::Context> context = v8::Context::New();
   v8::Context::Scope context_scope(context);
   int marker;
-  i::StackGuard::SetStackLimit(
+  i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   // Source containing functions that might be lazily compiled  and all types
@@ -245,7 +246,7 @@ TEST(Preparsing) {
 
 TEST(StandAlonePreParser) {
   int marker;
-  i::StackGuard::SetStackLimit(
+  i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   const char* programs[] = {
@@ -257,14 +258,14 @@ TEST(StandAlonePreParser) {
       NULL
   };
 
-  uintptr_t stack_limit = i::StackGuard::real_climit();
+  uintptr_t stack_limit = ISOLATE->stack_guard()->real_climit();
   for (int i = 0; programs[i]; i++) {
     const char* program = programs[i];
     i::Utf8ToUC16CharacterStream stream(
         reinterpret_cast<const i::byte*>(program),
         static_cast<unsigned>(strlen(program)));
     i::CompleteParserRecorder log;
-    i::V8JavaScriptScanner scanner;
+    i::V8JavaScriptScanner scanner(ISOLATE);
     scanner.Initialize(&stream);
 
     v8::preparser::PreParser::PreParseResult result =
@@ -281,7 +282,7 @@ TEST(StandAlonePreParser) {
 
 TEST(RegressChromium62639) {
   int marker;
-  i::StackGuard::SetStackLimit(
+  ISOLATE->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   const char* program = "var x = 'something';\n"
@@ -306,7 +307,7 @@ TEST(Regress928) {
   // the block could be lazily compiled, and an extra, unexpected,
   // entry was added to the data.
   int marker;
-  i::StackGuard::SetStackLimit(
+  ISOLATE->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   const char* program =
@@ -342,7 +343,7 @@ TEST(Regress928) {
 
 TEST(PreParseOverflow) {
   int marker;
-  i::StackGuard::SetStackLimit(
+  ISOLATE->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
 
   size_t kProgramSize = 1024 * 1024;
@@ -351,13 +352,13 @@ TEST(PreParseOverflow) {
   memset(*program, '(', kProgramSize);
   program[kProgramSize] = '\0';
 
-  uintptr_t stack_limit = i::StackGuard::real_climit();
+  uintptr_t stack_limit = ISOLATE->stack_guard()->real_climit();
 
   i::Utf8ToUC16CharacterStream stream(
       reinterpret_cast<const i::byte*>(*program),
       static_cast<unsigned>(kProgramSize));
   i::CompleteParserRecorder log;
-  i::V8JavaScriptScanner scanner;
+  i::V8JavaScriptScanner scanner(ISOLATE);
   scanner.Initialize(&stream);
 
 
@@ -405,10 +406,10 @@ void TestCharacterStream(const char* ascii_source,
   }
   i::Vector<const char> ascii_vector(ascii_source, static_cast<int>(length));
   i::Handle<i::String> ascii_string(
-      i::Factory::NewStringFromAscii(ascii_vector));
+      FACTORY->NewStringFromAscii(ascii_vector));
   TestExternalResource resource(*uc16_buffer, length);
   i::Handle<i::String> uc16_string(
-      i::Factory::NewExternalStringFromTwoByte(&resource));
+      FACTORY->NewExternalStringFromTwoByte(&resource));
 
   i::ExternalTwoByteStringUC16CharacterStream uc16_stream(
       i::Handle<i::ExternalTwoByteString>::cast(uc16_string), start, end);
@@ -575,7 +576,7 @@ void TestStreamScanner(i::UC16CharacterStream* stream,
                        i::Token::Value* expected_tokens,
                        int skip_pos = 0,  // Zero means not skipping.
                        int skip_to = 0) {
-  i::V8JavaScriptScanner scanner;
+  i::V8JavaScriptScanner scanner(ISOLATE);
   scanner.Initialize(stream);
 
   int i = 0;
@@ -654,7 +655,7 @@ void TestScanRegExp(const char* re_source, const char* expected) {
   i::Utf8ToUC16CharacterStream stream(
        reinterpret_cast<const i::byte*>(re_source),
        static_cast<unsigned>(strlen(re_source)));
-  i::V8JavaScriptScanner scanner;
+  i::V8JavaScriptScanner scanner(ISOLATE);
   scanner.Initialize(&stream);
 
   i::Token::Value start = scanner.peek();
index e1a00e1060c720b8cde191a45ae7bb0c480d120c..756b9473c9626b8f24ec3795291c175e6b6b372d 100644 (file)
@@ -67,6 +67,7 @@ TEST(BusyLock) {
 
 
 TEST(VirtualMemory) {
+  OS::Setup();
   VirtualMemory* vm = new VirtualMemory(1 * MB);
   CHECK(vm->IsReserved());
   void* block_addr = vm->address();
index a5a6dd58ac50fe0dd6ce7bfdf0cfffbcb0be5738..9bd0014c6f7d0abafc3005f3373c79fab49a9425 100644 (file)
@@ -13,6 +13,7 @@ using namespace ::v8::internal;
 
 
 TEST(VirtualMemory) {
+  OS::Setup();
   VirtualMemory* vm = new VirtualMemory(1 * MB);
   CHECK(vm->IsReserved());
   void* block_addr = vm->address();
index c60d0720aa5207334df8118cb6019c97b9b5f238..fbe5834e545002505cfc63f242b99d1154877fe8 100644 (file)
@@ -56,7 +56,7 @@ TEST(TokenEnumerator) {
     CHECK_EQ(0, te.GetTokenId(*v8::Utils::OpenHandle(*token1)));
   }
   CHECK(!i::TokenEnumeratorTester::token_removed(&te)->at(2));
-  i::Heap::CollectAllGarbage(false);
+  HEAP->CollectAllGarbage(false);
   CHECK(i::TokenEnumeratorTester::token_removed(&te)->at(2));
   CHECK_EQ(1, te.GetTokenId(*v8::Utils::OpenHandle(*token2)));
   CHECK_EQ(0, te.GetTokenId(*v8::Utils::OpenHandle(*token1)));
index 51fef71464d58b3a7395fee48d6373cfb9b0aea1..cb8ba87bbdd96297bbc714b3e8c2047c7073fbf4 100644 (file)
@@ -62,7 +62,7 @@ static bool CheckParse(const char* input) {
   V8::Initialize(NULL);
   v8::HandleScope scope;
   ZoneScope zone_scope(DELETE_ON_EXIT);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData result;
   return v8::internal::RegExpParser::ParseRegExp(&reader, false, &result);
 }
@@ -72,7 +72,7 @@ static SmartPointer<const char> Parse(const char* input) {
   V8::Initialize(NULL);
   v8::HandleScope scope;
   ZoneScope zone_scope(DELETE_ON_EXIT);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData result;
   CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
   CHECK(result.tree != NULL);
@@ -86,7 +86,7 @@ static bool CheckSimple(const char* input) {
   v8::HandleScope scope;
   unibrow::Utf8InputBuffer<> buffer(input, StrLength(input));
   ZoneScope zone_scope(DELETE_ON_EXIT);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData result;
   CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
   CHECK(result.tree != NULL);
@@ -104,7 +104,7 @@ static MinMaxPair CheckMinMaxMatch(const char* input) {
   v8::HandleScope scope;
   unibrow::Utf8InputBuffer<> buffer(input, StrLength(input));
   ZoneScope zone_scope(DELETE_ON_EXIT);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData result;
   CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
   CHECK(result.tree != NULL);
@@ -375,7 +375,7 @@ static void ExpectError(const char* input,
   V8::Initialize(NULL);
   v8::HandleScope scope;
   ZoneScope zone_scope(DELETE_ON_EXIT);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData result;
   CHECK(!v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
   CHECK(result.tree == NULL);
@@ -471,6 +471,7 @@ static void TestCharacterClassEscapes(uc16 c, bool (pred)(uc16 c)) {
 
 
 TEST(CharacterClassEscapes) {
+  v8::internal::V8::Initialize(NULL);
   TestCharacterClassEscapes('.', IsRegExpNewline);
   TestCharacterClassEscapes('d', IsDigit);
   TestCharacterClassEscapes('D', NotDigit);
@@ -483,12 +484,12 @@ TEST(CharacterClassEscapes) {
 
 static RegExpNode* Compile(const char* input, bool multiline, bool is_ascii) {
   V8::Initialize(NULL);
-  FlatStringReader reader(CStrVector(input));
+  FlatStringReader reader(Isolate::Current(), CStrVector(input));
   RegExpCompileData compile_data;
   if (!v8::internal::RegExpParser::ParseRegExp(&reader, multiline,
                                                &compile_data))
     return NULL;
-  Handle<String> pattern = Factory::NewStringFromUtf8(CStrVector(input));
+  Handle<String> pattern = FACTORY->NewStringFromUtf8(CStrVector(input));
   RegExpEngine::Compile(&compile_data, false, multiline, pattern, is_ascii);
   return compile_data.node;
 }
@@ -538,6 +539,7 @@ static unsigned PseudoRandom(int i, int j) {
 
 
 TEST(SplayTreeSimple) {
+  v8::internal::V8::Initialize(NULL);
   static const unsigned kLimit = 1000;
   ZoneScope zone_scope(DELETE_ON_EXIT);
   ZoneSplayTree<TestConfig> tree;
@@ -590,6 +592,7 @@ TEST(SplayTreeSimple) {
 
 
 TEST(DispatchTableConstruction) {
+  v8::internal::V8::Initialize(NULL);
   // Initialize test data.
   static const int kLimit = 1000;
   static const int kRangeCount = 8;
@@ -673,7 +676,7 @@ typedef RegExpMacroAssembler ArchRegExpMacroAssembler;
 class ContextInitializer {
  public:
   ContextInitializer()
-      : env_(), scope_(), zone_(DELETE_ON_EXIT), stack_guard_() {
+      : env_(), scope_(), zone_(DELETE_ON_EXIT) {
     env_ = v8::Context::New();
     env_->Enter();
   }
@@ -685,7 +688,6 @@ class ContextInitializer {
   v8::Persistent<v8::Context> env_;
   v8::HandleScope scope_;
   v8::internal::ZoneScope zone_;
-  v8::internal::StackGuard stack_guard_;
 };
 
 
@@ -701,7 +703,8 @@ static ArchRegExpMacroAssembler::Result Execute(Code* code,
       start_offset,
       input_start,
       input_end,
-      captures);
+      captures,
+      Isolate::Current());
 }
 
 
@@ -713,12 +716,12 @@ TEST(MacroAssemblerNativeSuccess) {
 
   m.Succeed();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector(""));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector(""));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   int captures[4] = {42, 37, 87, 117};
-  Handle<String> input = Factory::NewStringFromAscii(CStrVector("foofoo"));
+  Handle<String> input = FACTORY->NewStringFromAscii(CStrVector("foofoo"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   const byte* start_adr =
       reinterpret_cast<const byte*>(seq_input->GetCharsAddress());
@@ -757,12 +760,12 @@ TEST(MacroAssemblerNativeSimple) {
   m.Bind(&fail);
   m.Fail();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("^foo"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("^foo"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   int captures[4] = {42, 37, 87, 117};
-  Handle<String> input = Factory::NewStringFromAscii(CStrVector("foofoo"));
+  Handle<String> input = FACTORY->NewStringFromAscii(CStrVector("foofoo"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -780,7 +783,7 @@ TEST(MacroAssemblerNativeSimple) {
   CHECK_EQ(-1, captures[2]);
   CHECK_EQ(-1, captures[3]);
 
-  input = Factory::NewStringFromAscii(CStrVector("barbarbar"));
+  input = FACTORY->NewStringFromAscii(CStrVector("barbarbar"));
   seq_input = Handle<SeqAsciiString>::cast(input);
   start_adr = seq_input->GetCharsAddress();
 
@@ -813,14 +816,14 @@ TEST(MacroAssemblerNativeSimpleUC16) {
   m.Bind(&fail);
   m.Fail();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("^foo"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("^foo"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   int captures[4] = {42, 37, 87, 117};
   const uc16 input_data[6] = {'f', 'o', 'o', 'f', 'o', '\xa0'};
   Handle<String> input =
-      Factory::NewStringFromTwoByte(Vector<const uc16>(input_data, 6));
+      FACTORY->NewStringFromTwoByte(Vector<const uc16>(input_data, 6));
   Handle<SeqTwoByteString> seq_input = Handle<SeqTwoByteString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -839,7 +842,7 @@ TEST(MacroAssemblerNativeSimpleUC16) {
   CHECK_EQ(-1, captures[3]);
 
   const uc16 input_data2[9] = {'b', 'a', 'r', 'b', 'a', 'r', 'b', 'a', '\xa0'};
-  input = Factory::NewStringFromTwoByte(Vector<const uc16>(input_data2, 9));
+  input = FACTORY->NewStringFromTwoByte(Vector<const uc16>(input_data2, 9));
   seq_input = Handle<SeqTwoByteString>::cast(input);
   start_adr = seq_input->GetCharsAddress();
 
@@ -871,11 +874,11 @@ TEST(MacroAssemblerNativeBacktrack) {
   m.Bind(&backtrack);
   m.Fail();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector(".........."));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector(".........."));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
-  Handle<String> input = Factory::NewStringFromAscii(CStrVector("foofoo"));
+  Handle<String> input = FACTORY->NewStringFromAscii(CStrVector("foofoo"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -912,11 +915,11 @@ TEST(MacroAssemblerNativeBackReferenceASCII) {
   m.Bind(&missing_match);
   m.Fail();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("^(..)..\1"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("^(..)..\1"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
-  Handle<String> input = Factory::NewStringFromAscii(CStrVector("fooofo"));
+  Handle<String> input = FACTORY->NewStringFromAscii(CStrVector("fooofo"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -958,13 +961,13 @@ TEST(MacroAssemblerNativeBackReferenceUC16) {
   m.Bind(&missing_match);
   m.Fail();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("^(..)..\1"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("^(..)..\1"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   const uc16 input_data[6] = {'f', 0x2028, 'o', 'o', 'f', 0x2028};
   Handle<String> input =
-      Factory::NewStringFromTwoByte(Vector<const uc16>(input_data, 6));
+      FACTORY->NewStringFromTwoByte(Vector<const uc16>(input_data, 6));
   Handle<SeqTwoByteString> seq_input = Handle<SeqTwoByteString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1013,11 +1016,11 @@ TEST(MacroAssemblernativeAtStart) {
   m.CheckNotCharacter('b', &fail);
   m.Succeed();
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("(^f|ob)"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("(^f|ob)"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
-  Handle<String> input = Factory::NewStringFromAscii(CStrVector("foobar"));
+  Handle<String> input = FACTORY->NewStringFromAscii(CStrVector("foobar"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1071,12 +1074,12 @@ TEST(MacroAssemblerNativeBackRefNoCase) {
   m.Succeed();
 
   Handle<String> source =
-      Factory::NewStringFromAscii(CStrVector("^(abc)\1\1(?!\1)...(?!\1)"));
+      FACTORY->NewStringFromAscii(CStrVector("^(abc)\1\1(?!\1)...(?!\1)"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   Handle<String> input =
-      Factory::NewStringFromAscii(CStrVector("aBcAbCABCxYzab"));
+      FACTORY->NewStringFromAscii(CStrVector("aBcAbCABCxYzab"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1169,13 +1172,13 @@ TEST(MacroAssemblerNativeRegisters) {
   m.Fail();
 
   Handle<String> source =
-      Factory::NewStringFromAscii(CStrVector("<loop test>"));
+      FACTORY->NewStringFromAscii(CStrVector("<loop test>"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   // String long enough for test (content doesn't matter).
   Handle<String> input =
-      Factory::NewStringFromAscii(CStrVector("foofoofoofoofoo"));
+      FACTORY->NewStringFromAscii(CStrVector("foofoofoofoofoo"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1210,13 +1213,13 @@ TEST(MacroAssemblerStackOverflow) {
   m.GoTo(&loop);
 
   Handle<String> source =
-      Factory::NewStringFromAscii(CStrVector("<stack overflow test>"));
+      FACTORY->NewStringFromAscii(CStrVector("<stack overflow test>"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   // String long enough for test (content doesn't matter).
   Handle<String> input =
-      Factory::NewStringFromAscii(CStrVector("dummy"));
+      FACTORY->NewStringFromAscii(CStrVector("dummy"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1229,8 +1232,8 @@ TEST(MacroAssemblerStackOverflow) {
               NULL);
 
   CHECK_EQ(NativeRegExpMacroAssembler::EXCEPTION, result);
-  CHECK(Top::has_pending_exception());
-  Top::clear_pending_exception();
+  CHECK(Isolate::Current()->has_pending_exception());
+  Isolate::Current()->clear_pending_exception();
 }
 
 
@@ -1254,13 +1257,13 @@ TEST(MacroAssemblerNativeLotsOfRegisters) {
   m.Succeed();
 
   Handle<String> source =
-      Factory::NewStringFromAscii(CStrVector("<huge register space test>"));
+      FACTORY->NewStringFromAscii(CStrVector("<huge register space test>"));
   Handle<Object> code_object = m.GetCode(source);
   Handle<Code> code = Handle<Code>::cast(code_object);
 
   // String long enough for test (content doesn't matter).
   Handle<String> input =
-      Factory::NewStringFromAscii(CStrVector("sample text"));
+      FACTORY->NewStringFromAscii(CStrVector("sample text"));
   Handle<SeqAsciiString> seq_input = Handle<SeqAsciiString>::cast(input);
   Address start_adr = seq_input->GetCharsAddress();
 
@@ -1277,7 +1280,7 @@ TEST(MacroAssemblerNativeLotsOfRegisters) {
   CHECK_EQ(0, captures[0]);
   CHECK_EQ(42, captures[1]);
 
-  Top::clear_pending_exception();
+  Isolate::Current()->clear_pending_exception();
 }
 
 #else  // V8_INTERPRETED_REGEXP
@@ -1322,13 +1325,13 @@ TEST(MacroAssembler) {
 
   v8::HandleScope scope;
 
-  Handle<String> source = Factory::NewStringFromAscii(CStrVector("^f(o)o"));
+  Handle<String> source = FACTORY->NewStringFromAscii(CStrVector("^f(o)o"));
   Handle<ByteArray> array = Handle<ByteArray>::cast(m.GetCode(source));
   int captures[5];
 
   const uc16 str1[] = {'f', 'o', 'o', 'b', 'a', 'r'};
   Handle<String> f1_16 =
-      Factory::NewStringFromTwoByte(Vector<const uc16>(str1, 6));
+      FACTORY->NewStringFromTwoByte(Vector<const uc16>(str1, 6));
 
   CHECK(IrregexpInterpreter::Match(array, f1_16, captures, 0));
   CHECK_EQ(0, captures[0]);
@@ -1339,7 +1342,7 @@ TEST(MacroAssembler) {
 
   const uc16 str2[] = {'b', 'a', 'r', 'f', 'o', 'o'};
   Handle<String> f2_16 =
-      Factory::NewStringFromTwoByte(Vector<const uc16>(str2, 6));
+      FACTORY->NewStringFromTwoByte(Vector<const uc16>(str2, 6));
 
   CHECK(!IrregexpInterpreter::Match(array, f2_16, captures, 0));
   CHECK_EQ(42, captures[0]);
@@ -1349,6 +1352,7 @@ TEST(MacroAssembler) {
 
 
 TEST(AddInverseToTable) {
+  v8::internal::V8::Initialize(NULL);
   static const int kLimit = 1000;
   static const int kRangeCount = 16;
   for (int t = 0; t < 10; t++) {
@@ -1507,6 +1511,7 @@ static void TestSimpleRangeCaseIndependence(CharacterRange input,
 
 
 TEST(CharacterRangeCaseIndependence) {
+  v8::internal::V8::Initialize(NULL);
   TestSimpleRangeCaseIndependence(CharacterRange::Singleton('a'),
                                   CharacterRange::Singleton('A'));
   TestSimpleRangeCaseIndependence(CharacterRange::Singleton('z'),
@@ -1548,6 +1553,7 @@ static bool InClass(uc16 c, ZoneList<CharacterRange>* ranges) {
 
 
 TEST(CharClassDifference) {
+  v8::internal::V8::Initialize(NULL);
   ZoneScope zone_scope(DELETE_ON_EXIT);
   ZoneList<CharacterRange>* base = new ZoneList<CharacterRange>(1);
   base->Add(CharacterRange::Everything());
@@ -1574,6 +1580,7 @@ TEST(CharClassDifference) {
 
 
 TEST(CanonicalizeCharacterSets) {
+  v8::internal::V8::Initialize(NULL);
   ZoneScope scope(DELETE_ON_EXIT);
   ZoneList<CharacterRange>* list = new ZoneList<CharacterRange>(4);
   CharacterSet set(list);
@@ -1644,6 +1651,7 @@ static bool CharacterInSet(ZoneList<CharacterRange>* set, uc16 value) {
 }
 
 TEST(CharacterRangeMerge) {
+  v8::internal::V8::Initialize(NULL);
   ZoneScope zone_scope(DELETE_ON_EXIT);
   ZoneList<CharacterRange> l1(4);
   ZoneList<CharacterRange> l2(4);
index 80910c25642a4ffc8e7a06a75da3f16ad0a5aef5..b7a6ca98015e34a06c6c41a8f3572dcb5515d161 100644 (file)
@@ -99,8 +99,9 @@ static int make_code(TypeCode type, int id) {
 
 
 TEST(ExternalReferenceEncoder) {
-  StatsTable::SetCounterFunction(counter_function);
-  Heap::Setup(false);
+  OS::Setup();
+  i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
+  HEAP->Setup(false);
   ExternalReferenceEncoder encoder;
   CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode),
            Encode(encoder, Builtins::ArrayCode));
@@ -109,7 +110,7 @@ TEST(ExternalReferenceEncoder) {
   CHECK_EQ(make_code(IC_UTILITY, IC::kLoadCallbackProperty),
            Encode(encoder, IC_Utility(IC::kLoadCallbackProperty)));
   ExternalReference keyed_load_function_prototype =
-      ExternalReference(&Counters::keyed_load_function_prototype);
+      ExternalReference(COUNTERS->keyed_load_function_prototype());
   CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype),
            encoder.Encode(keyed_load_function_prototype.address()));
   ExternalReference the_hole_value_location =
@@ -136,8 +137,9 @@ TEST(ExternalReferenceEncoder) {
 
 
 TEST(ExternalReferenceDecoder) {
-  StatsTable::SetCounterFunction(counter_function);
-  Heap::Setup(false);
+  OS::Setup();
+  i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
+  HEAP->Setup(false);
   ExternalReferenceDecoder decoder;
   CHECK_EQ(AddressOf(Builtins::ArrayCode),
            decoder.Decode(make_code(BUILTIN, Builtins::ArrayCode)));
@@ -147,7 +149,7 @@ TEST(ExternalReferenceDecoder) {
   CHECK_EQ(AddressOf(IC_Utility(IC::kLoadCallbackProperty)),
            decoder.Decode(make_code(IC_UTILITY, IC::kLoadCallbackProperty)));
   ExternalReference keyed_load_function =
-      ExternalReference(&Counters::keyed_load_function_prototype);
+      ExternalReference(COUNTERS->keyed_load_function_prototype());
   CHECK_EQ(keyed_load_function.address(),
            decoder.Decode(
                make_code(STATS_COUNTER,
@@ -276,12 +278,12 @@ static void Deserialize() {
 static void SanityCheck() {
   v8::HandleScope scope;
 #ifdef DEBUG
-  Heap::Verify();
+  HEAP->Verify();
 #endif
-  CHECK(Top::global()->IsJSObject());
-  CHECK(Top::global_context()->IsContext());
-  CHECK(Heap::symbol_table()->IsSymbolTable());
-  CHECK(!Factory::LookupAsciiSymbol("Empty")->IsFailure());
+  CHECK(Isolate::Current()->global()->IsJSObject());
+  CHECK(Isolate::Current()->global_context()->IsContext());
+  CHECK(HEAP->symbol_table()->IsSymbolTable());
+  CHECK(!FACTORY->LookupAsciiSymbol("Empty")->IsFailure());
 }
 
 
@@ -291,7 +293,6 @@ DEPENDENT_TEST(Deserialize, Serialize) {
   // serialize a snapshot in a VM that is booted from a snapshot.
   if (!Snapshot::IsEnabled()) {
     v8::HandleScope scope;
-
     Deserialize();
 
     v8::Persistent<v8::Context> env = v8::Context::New();
@@ -305,7 +306,6 @@ DEPENDENT_TEST(Deserialize, Serialize) {
 DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
   if (!Snapshot::IsEnabled()) {
     v8::HandleScope scope;
-
     Deserialize();
 
     v8::Persistent<v8::Context> env = v8::Context::New();
@@ -319,7 +319,6 @@ DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
 DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
   if (!Snapshot::IsEnabled()) {
     v8::HandleScope scope;
-
     Deserialize();
 
     v8::Persistent<v8::Context> env = v8::Context::New();
@@ -337,7 +336,6 @@ DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2,
                SerializeTwice) {
   if (!Snapshot::IsEnabled()) {
     v8::HandleScope scope;
-
     Deserialize();
 
     v8::Persistent<v8::Context> env = v8::Context::New();
@@ -361,11 +359,11 @@ TEST(PartialSerialization) {
   // Make sure all builtin scripts are cached.
   { HandleScope scope;
     for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
-      Bootstrapper::NativesSourceLookup(i);
+      Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
     }
   }
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 
   Object* raw_foo;
   {
@@ -425,7 +423,7 @@ static void ReserveSpaceForPartialSnapshot(const char* file_name) {
 #undef fscanf
 #endif
   fclose(fp);
-  Heap::ReserveSpace(new_size,
+  HEAP->ReserveSpace(new_size,
                      pointer_size,
                      data_size,
                      code_size,
@@ -482,12 +480,12 @@ TEST(ContextSerialization) {
   // Make sure all builtin scripts are cached.
   { HandleScope scope;
     for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
-      Bootstrapper::NativesSourceLookup(i);
+      Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
     }
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of env.
-  Heap::CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(true);
 
   int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
   Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
@@ -561,7 +559,7 @@ TEST(LinearAllocation) {
 
   for (int size = 1000; size < 5 * MB; size += size >> 1) {
     int new_space_size = (size < new_space_max) ? size : new_space_max;
-    Heap::ReserveSpace(
+    HEAP->ReserveSpace(
         new_space_size,
         size,              // Old pointer space.
         size,              // Old data space.
@@ -584,7 +582,7 @@ TEST(LinearAllocation) {
          i + kSmallFixedArraySize <= new_space_size;
          i += kSmallFixedArraySize) {
       Object* obj =
-          Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked();
+          HEAP->AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked();
       if (new_last != NULL) {
         CHECK(reinterpret_cast<char*>(obj) ==
               reinterpret_cast<char*>(new_last) + kSmallFixedArraySize);
@@ -596,7 +594,7 @@ TEST(LinearAllocation) {
     for (int i = 0;
          i + kSmallFixedArraySize <= size;
          i += kSmallFixedArraySize) {
-      Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength,
+      Object* obj = HEAP->AllocateFixedArray(kSmallFixedArrayLength,
                                              TENURED)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize;
@@ -614,7 +612,7 @@ TEST(LinearAllocation) {
 
     Object* data_last = NULL;
     for (int i = 0; i + kSmallStringSize <= size; i += kSmallStringSize) {
-      Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength,
+      Object* obj = HEAP->AllocateRawAsciiString(kSmallStringLength,
                                                  TENURED)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kSmallStringSize) % Page::kPageSize;
@@ -632,7 +630,7 @@ TEST(LinearAllocation) {
 
     Object* map_last = NULL;
     for (int i = 0; i + kMapSize <= size; i += kMapSize) {
-      Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE,
+      Object* obj = HEAP->AllocateMap(JS_OBJECT_TYPE,
                                       42 * kPointerSize)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kMapSize) % Page::kPageSize;
@@ -654,7 +652,7 @@ TEST(LinearAllocation) {
       AlwaysAllocateScope always;
       int large_object_array_length =
           (size - FixedArray::kHeaderSize) / kPointerSize;
-      Object* obj = Heap::AllocateFixedArray(large_object_array_length,
+      Object* obj = HEAP->AllocateFixedArray(large_object_array_length,
                                              TENURED)->ToObjectChecked();
       CHECK(!obj->IsFailure());
     }
index 822a23fa95ff439b0c58c02f59ab5c33626a73aa..b6e3f4bae75eac29814a44fc7ea73314c227fdc2 100644 (file)
@@ -10,9 +10,9 @@ using namespace ::v8::internal;
 
 class SocketListenerThread : public Thread {
  public:
-  explicit SocketListenerThread(int port, int data_size)
-      : port_(port), data_size_(data_size), server_(NULL), client_(NULL),
-        listening_(OS::CreateSemaphore(0)) {
+  explicit SocketListenerThread(Isolate* isolate, int port, int data_size)
+      : Thread(isolate), port_(port), data_size_(data_size), server_(NULL),
+        client_(NULL), listening_(OS::CreateSemaphore(0)) {
     data_ = new char[data_size_];
   }
   ~SocketListenerThread() {
@@ -88,7 +88,8 @@ static void SendAndReceive(int port, char *data, int len) {
   OS::SNPrintF(Vector<char>(port_str, kPortBuferLen), "%d", port);
 
   // Create a socket listener.
-  SocketListenerThread* listener = new SocketListenerThread(port, len);
+  SocketListenerThread* listener = new SocketListenerThread(Isolate::Current(),
+      port, len);
   listener->Start();
   listener->WaitForListening();
 
index 706c6bf9a4f368360001c47b56db4ac632b53549..de0c41e2b83dfb9e32e46ebe7f72aedc8d2807cd 100644 (file)
@@ -65,6 +65,8 @@ TEST(Page) {
   Address page_start = RoundUp(start, Page::kPageSize);
 
   Page* p = Page::FromAddress(page_start);
+  // An initialized Page has a heap pointer, normally set by the memory_allocator.
+  p->heap_ = HEAP;
   CHECK(p->address() == page_start);
   CHECK(p->is_valid());
 
@@ -90,37 +92,45 @@ TEST(Page) {
 
 
 TEST(MemoryAllocator) {
-  CHECK(Heap::ConfigureHeapDefault());
-  CHECK(MemoryAllocator::Setup(Heap::MaxReserved(), Heap::MaxExecutableSize()));
-
-  OldSpace faked_space(Heap::MaxReserved(), OLD_POINTER_SPACE, NOT_EXECUTABLE);
+  OS::Setup();
+  Isolate* isolate = Isolate::Current();
+  CHECK(HEAP->ConfigureHeapDefault());
+  CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(),
+                                           HEAP->MaxExecutableSize()));
+
+  OldSpace faked_space(HEAP,
+                       HEAP->MaxReserved(),
+                       OLD_POINTER_SPACE,
+                       NOT_EXECUTABLE);
   int total_pages = 0;
   int requested = MemoryAllocator::kPagesPerChunk;
   int allocated;
   // If we request n pages, we should get n or n - 1.
   Page* first_page =
-      MemoryAllocator::AllocatePages(requested, &allocated, &faked_space);
+      isolate->memory_allocator()->AllocatePages(
+          requested, &allocated, &faked_space);
   CHECK(first_page->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
 
   Page* last_page = first_page;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(MemoryAllocator::IsPageInSpace(p, &faked_space));
+    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
     last_page = p;
   }
 
   // Again, we should get n or n - 1 pages.
   Page* others =
-      MemoryAllocator::AllocatePages(requested, &allocated, &faked_space);
+      isolate->memory_allocator()->AllocatePages(
+          requested, &allocated, &faked_space);
   CHECK(others->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
 
-  MemoryAllocator::SetNextPage(last_page, others);
+  isolate->memory_allocator()->SetNextPage(last_page, others);
   int page_count = 0;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(MemoryAllocator::IsPageInSpace(p, &faked_space));
+    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
     page_count++;
   }
   CHECK(total_pages == page_count);
@@ -131,31 +141,34 @@ TEST(MemoryAllocator) {
   // Freeing pages at the first chunk starting at or after the second page
   // should free the entire second chunk.  It will return the page it was passed
   // (since the second page was in the first chunk).
-  Page* free_return = MemoryAllocator::FreePages(second_page);
+  Page* free_return = isolate->memory_allocator()->FreePages(second_page);
   CHECK(free_return == second_page);
-  MemoryAllocator::SetNextPage(first_page, free_return);
+  isolate->memory_allocator()->SetNextPage(first_page, free_return);
 
   // Freeing pages in the first chunk starting at the first page should free
   // the first chunk and return an invalid page.
-  Page* invalid_page = MemoryAllocator::FreePages(first_page);
+  Page* invalid_page = isolate->memory_allocator()->FreePages(first_page);
   CHECK(!invalid_page->is_valid());
 
-  MemoryAllocator::TearDown();
+  isolate->memory_allocator()->TearDown();
 }
 
 
 TEST(NewSpace) {
-  CHECK(Heap::ConfigureHeapDefault());
-  CHECK(MemoryAllocator::Setup(Heap::MaxReserved(), Heap::MaxExecutableSize()));
+  OS::Setup();
+  CHECK(HEAP->ConfigureHeapDefault());
+  CHECK(Isolate::Current()->memory_allocator()->Setup(
+      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
 
-  NewSpace new_space;
+  NewSpace new_space(HEAP);
 
   void* chunk =
-      MemoryAllocator::ReserveInitialChunk(4 * Heap::ReservedSemiSpaceSize());
+      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
+          4 * HEAP->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = RoundUp(static_cast<Address>(chunk),
-                          2 * Heap::ReservedSemiSpaceSize());
-  CHECK(new_space.Setup(start, 2 * Heap::ReservedSemiSpaceSize()));
+                          2 * HEAP->ReservedSemiSpaceSize());
+  CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize()));
   CHECK(new_space.HasBeenSetup());
 
   while (new_space.Available() >= Page::kMaxHeapObjectSize) {
@@ -165,24 +178,28 @@ TEST(NewSpace) {
   }
 
   new_space.TearDown();
-  MemoryAllocator::TearDown();
+  Isolate::Current()->memory_allocator()->TearDown();
 }
 
 
 TEST(OldSpace) {
-  CHECK(Heap::ConfigureHeapDefault());
-  CHECK(MemoryAllocator::Setup(Heap::MaxReserved(), Heap::MaxExecutableSize()));
+  OS::Setup();
+  CHECK(HEAP->ConfigureHeapDefault());
+  CHECK(Isolate::Current()->memory_allocator()->Setup(
+      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
 
-  OldSpace* s = new OldSpace(Heap::MaxOldGenerationSize(),
+  OldSpace* s = new OldSpace(HEAP,
+                             HEAP->MaxOldGenerationSize(),
                              OLD_POINTER_SPACE,
                              NOT_EXECUTABLE);
   CHECK(s != NULL);
 
   void* chunk =
-      MemoryAllocator::ReserveInitialChunk(4 * Heap::ReservedSemiSpaceSize());
+      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
+          4 * HEAP->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = static_cast<Address>(chunk);
-  size_t size = RoundUp(start, 2 * Heap::ReservedSemiSpaceSize()) - start;
+  size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start;
 
   CHECK(s->Setup(start, size));
 
@@ -192,14 +209,15 @@ TEST(OldSpace) {
 
   s->TearDown();
   delete s;
-  MemoryAllocator::TearDown();
+  Isolate::Current()->memory_allocator()->TearDown();
 }
 
 
 TEST(LargeObjectSpace) {
-  CHECK(Heap::Setup(false));
+  OS::Setup();
+  CHECK(HEAP->Setup(false));
 
-  LargeObjectSpace* lo = Heap::lo_space();
+  LargeObjectSpace* lo = HEAP->lo_space();
   CHECK(lo != NULL);
 
   Map* faked_map = reinterpret_cast<Map*>(HeapObject::FromAddress(0));
@@ -233,5 +251,5 @@ TEST(LargeObjectSpace) {
   lo->TearDown();
   delete lo;
 
-  MemoryAllocator::TearDown();
+  Isolate::Current()->memory_allocator()->TearDown();
 }
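
Every test in this file now performs the same preamble before touching pages or spaces. The implied ordering, as a standalone sketch (SetupIsolateSpaces and TearDownIsolateSpaces are hypothetical names; the calls themselves are taken from the hunks above):

    // OS::Setup() runs first, before the heap or the memory allocator is used.
    static void SetupIsolateSpaces() {
      OS::Setup();
      CHECK(HEAP->ConfigureHeapDefault());
      CHECK(Isolate::Current()->memory_allocator()->Setup(
          HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
    }

    // Teardown mirrors setup on the same isolate.
    static void TearDownIsolateSpaces() {
      Isolate::Current()->memory_allocator()->TearDown();
    }
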
index 3f02b32b8c43331889f72ee4f95941c17b611bf1..9c76d2c9f9f2af9cca8a3961517976ed329ae5af 100644 (file)
@@ -94,7 +94,7 @@ static void InitializeBuildingBlocks(
           buf[j] = gen() % 65536;
         }
         building_blocks[i] =
-            Factory::NewStringFromTwoByte(Vector<const uc16>(buf, len));
+            FACTORY->NewStringFromTwoByte(Vector<const uc16>(buf, len));
         for (int j = 0; j < len; j++) {
           CHECK_EQ(buf[j], building_blocks[i]->Get(j));
         }
@@ -106,19 +106,19 @@ static void InitializeBuildingBlocks(
           buf[j] = gen() % 128;
         }
         building_blocks[i] =
-            Factory::NewStringFromAscii(Vector<const char>(buf, len));
+            FACTORY->NewStringFromAscii(Vector<const char>(buf, len));
         for (int j = 0; j < len; j++) {
           CHECK_EQ(buf[j], building_blocks[i]->Get(j));
         }
         break;
       }
       case 2: {
-        uc16* buf = Zone::NewArray<uc16>(len);
+        uc16* buf = ZONE->NewArray<uc16>(len);
         for (int j = 0; j < len; j++) {
           buf[j] = gen() % 65536;
         }
         Resource* resource = new Resource(Vector<const uc16>(buf, len));
-        building_blocks[i] = Factory::NewExternalStringFromTwoByte(resource);
+        building_blocks[i] = FACTORY->NewExternalStringFromTwoByte(resource);
         for (int j = 0; j < len; j++) {
           CHECK_EQ(buf[j], building_blocks[i]->Get(j));
         }
@@ -130,7 +130,7 @@ static void InitializeBuildingBlocks(
           buf[j] = gen() % 128;
         }
         building_blocks[i] =
-            Factory::NewStringFromAscii(Vector<const char>(buf, len));
+            FACTORY->NewStringFromAscii(Vector<const char>(buf, len));
         for (int j = 0; j < len; j++) {
           CHECK_EQ(buf[j], building_blocks[i]->Get(j));
         }
@@ -145,9 +145,9 @@ static void InitializeBuildingBlocks(
 static Handle<String> ConstructLeft(
     Handle<String> building_blocks[NUMBER_OF_BUILDING_BLOCKS],
     int depth) {
-  Handle<String> answer = Factory::NewStringFromAscii(CStrVector(""));
+  Handle<String> answer = FACTORY->NewStringFromAscii(CStrVector(""));
   for (int i = 0; i < depth; i++) {
-    answer = Factory::NewConsString(
+    answer = FACTORY->NewConsString(
         answer,
         building_blocks[i % NUMBER_OF_BUILDING_BLOCKS]);
   }
@@ -158,9 +158,9 @@ static Handle<String> ConstructLeft(
 static Handle<String> ConstructRight(
     Handle<String> building_blocks[NUMBER_OF_BUILDING_BLOCKS],
     int depth) {
-  Handle<String> answer = Factory::NewStringFromAscii(CStrVector(""));
+  Handle<String> answer = FACTORY->NewStringFromAscii(CStrVector(""));
   for (int i = depth - 1; i >= 0; i--) {
-    answer = Factory::NewConsString(
+    answer = FACTORY->NewConsString(
         building_blocks[i % NUMBER_OF_BUILDING_BLOCKS],
         answer);
   }
@@ -177,7 +177,7 @@ static Handle<String> ConstructBalancedHelper(
     return building_blocks[from % NUMBER_OF_BUILDING_BLOCKS];
   }
   if (to - from == 2) {
-    return Factory::NewConsString(
+    return FACTORY->NewConsString(
         building_blocks[from % NUMBER_OF_BUILDING_BLOCKS],
         building_blocks[(from+1) % NUMBER_OF_BUILDING_BLOCKS]);
   }
@@ -185,7 +185,7 @@ static Handle<String> ConstructBalancedHelper(
     ConstructBalancedHelper(building_blocks, from, from + ((to - from) / 2));
   Handle<String> part2 =
     ConstructBalancedHelper(building_blocks, from + ((to - from) / 2), to);
-  return Factory::NewConsString(part1, part2);
+  return FACTORY->NewConsString(part1, part2);
 }
 
 
@@ -286,12 +286,12 @@ TEST(DeepAscii) {
     foo[i] = "foo "[i % 4];
   }
   Handle<String> string =
-      Factory::NewStringFromAscii(Vector<const char>(foo, DEEP_ASCII_DEPTH));
-  Handle<String> foo_string = Factory::NewStringFromAscii(CStrVector("foo"));
+      FACTORY->NewStringFromAscii(Vector<const char>(foo, DEEP_ASCII_DEPTH));
+  Handle<String> foo_string = FACTORY->NewStringFromAscii(CStrVector("foo"));
   for (int i = 0; i < DEEP_ASCII_DEPTH; i += 10) {
-    string = Factory::NewConsString(string, foo_string);
+    string = FACTORY->NewConsString(string, foo_string);
   }
-  Handle<String> flat_string = Factory::NewConsString(string, foo_string);
+  Handle<String> flat_string = FACTORY->NewConsString(string, foo_string);
   FlattenString(flat_string);
 
   for (int i = 0; i < 500; i++) {
@@ -365,7 +365,7 @@ TEST(ExternalShortStringAdd) {
 
   // Generate short ascii and non-ascii external strings.
   for (int i = 0; i <= kMaxLength; i++) {
-    char* ascii = Zone::NewArray<char>(i + 1);
+    char* ascii = ZONE->NewArray<char>(i + 1);
     for (int j = 0; j < i; j++) {
       ascii[j] = 'a';
     }
@@ -377,7 +377,7 @@ TEST(ExternalShortStringAdd) {
         v8::String::NewExternal(ascii_resource);
 
     ascii_external_strings->Set(v8::Integer::New(i), ascii_external_string);
-    uc16* non_ascii = Zone::NewArray<uc16>(i + 1);
+    uc16* non_ascii = ZONE->NewArray<uc16>(i + 1);
     for (int j = 0; j < i; j++) {
       non_ascii[j] = 0x1234;
     }
@@ -459,10 +459,10 @@ TEST(CachedHashOverflow) {
   Handle<Smi> fortytwo(Smi::FromInt(42));
   Handle<Smi> thirtyseven(Smi::FromInt(37));
   Handle<Object> results[] = {
-      Factory::undefined_value(),
+      FACTORY->undefined_value(),
       fortytwo,
-      Factory::undefined_value(),
-      Factory::undefined_value(),
+      FACTORY->undefined_value(),
+      FACTORY->undefined_value(),
       thirtyseven,
       fortytwo,
       thirtyseven  // Bug yielded 42 here.
index aed7466a0992c0130a3a343791547015409b62af..b1ffd691c50894556a7a1cc31c4214ca34d39c45 100644 (file)
@@ -159,6 +159,8 @@ TEST(TerminateOnlyV8ThreadFromThreadItselfNoLoop) {
 
 
 class TerminatorThread : public v8::internal::Thread {
+ public:
+  explicit TerminatorThread(i::Isolate* isolate) : Thread(isolate) { }
   void Run() {
     semaphore->Wait();
     CHECK(!v8::V8::IsExecutionTerminating());
@@ -171,7 +173,7 @@ class TerminatorThread : public v8::internal::Thread {
 // from the side by another thread.
 TEST(TerminateOnlyV8ThreadFromOtherThread) {
   semaphore = v8::internal::OS::CreateSemaphore(0);
-  TerminatorThread thread;
+  TerminatorThread thread(i::Isolate::Current());
   thread.Start();
 
   v8::HandleScope scope;
@@ -193,6 +195,7 @@ TEST(TerminateOnlyV8ThreadFromOtherThread) {
 
 class LoopingThread : public v8::internal::Thread {
  public:
+  explicit LoopingThread(i::Isolate* isolate) : Thread(isolate) { }
   void Run() {
     v8::Locker locker;
     v8::HandleScope scope;
@@ -225,9 +228,9 @@ TEST(TerminateMultipleV8Threads) {
     v8::Locker::StartPreemption(1);
     semaphore = v8::internal::OS::CreateSemaphore(0);
   }
-  LoopingThread thread1;
+  LoopingThread thread1(i::Isolate::Current());
   thread1.Start();
-  LoopingThread thread2;
+  LoopingThread thread2(i::Isolate::Current());
   thread2.Start();
   // Wait until both threads have signaled the semaphore.
   semaphore->Wait();
index 0f48e248f715a63956f6c7401dd31633344ae337..3b20666ddc24c62fbac768e0ccb5424f30383d98 100644 (file)
@@ -64,6 +64,7 @@ static Turn turn = FILL_CACHE;
 
 class ThreadA: public v8::internal::Thread {
  public:
+  explicit ThreadA(i::Isolate* isolate) : Thread(isolate) { }
   void Run() {
     v8::Locker locker;
     v8::HandleScope scope;
@@ -99,6 +100,7 @@ class ThreadA: public v8::internal::Thread {
 
 class ThreadB: public v8::internal::Thread {
  public:
+  explicit ThreadB(i::Isolate* isolate) : Thread(isolate) { }
   void Run() {
     do {
       {
@@ -108,7 +110,7 @@ class ThreadB: public v8::internal::Thread {
           v8::Context::Scope context_scope(v8::Context::New());
 
           // Clear the caches by forcing major GC.
-          v8::internal::Heap::CollectAllGarbage(false);
+          HEAP->CollectAllGarbage(false);
           turn = SECOND_TIME_FILL_CACHE;
           break;
         }
@@ -123,8 +125,8 @@ class ThreadB: public v8::internal::Thread {
 TEST(JSFunctionResultCachesInTwoThreads) {
   v8::V8::Initialize();
 
-  ThreadA threadA;
-  ThreadB threadB;
+  ThreadA threadA(i::Isolate::Current());
+  ThreadB threadB(i::Isolate::Current());
 
   threadA.Start();
   threadB.Start();
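
The thread tests above show the other mechanical change in the test suite: subclasses of v8::internal::Thread now pass an Isolate to the base-class constructor. A hedged sketch of the pattern; WorkerThread is a hypothetical subclass, and only the Thread(isolate) constructor, Start() and Join() are assumed from the surrounding code:

    class WorkerThread : public v8::internal::Thread {
     public:
      explicit WorkerThread(v8::internal::Isolate* isolate)
          : Thread(isolate) { }   // was the default Thread() constructor
      virtual void Run() {
        // test body executes against the named isolate
      }
    };

    // Construction now names the isolate explicitly:
    WorkerThread worker(v8::internal::Isolate::Current());
    worker.Start();
    worker.Join();
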
index b48dcb8b6ee6c2453c3b22d1a73abff29dcec031..018018a131d603d66f874019ee54a15790f944db 100644 (file)
@@ -103,7 +103,7 @@ void TestMemCopy(Vector<byte> src,
 
 
 TEST(MemCopy) {
-  V8::Initialize(NULL);
+  OS::Setup();
   const int N = kMinComplexMemCopy + 128;
   Vector<byte> buffer1 = Vector<byte>::New(N);
   Vector<byte> buffer2 = Vector<byte>::New(N);
index 5cb46bc29a8baf274fd43d076eac6f27679ca24f..7df92f98664093c2c30e04911e3d0b5df08551f3 100644 (file)
@@ -38,24 +38,31 @@ SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: TEST_FILE_NAME")
 
 class MjsunitTestCase(test.TestCase):
 
-  def __init__(self, path, file, mode, context, config):
+  def __init__(self, path, file, mode, context, config, isolates):
     super(MjsunitTestCase, self).__init__(context, path, mode)
     self.file = file
     self.config = config
     self.self_script = False
+    self.isolates = isolates
 
   def GetLabel(self):
     return "%s %s" % (self.mode, self.GetName())
 
   def GetName(self):
-    return self.path[-1]
+    return self.path[-1] + ["", "-isolates"][self.isolates]
 
-  def GetCommand(self):
+  def TestsIsolates(self):
+    return self.isolates
+
+  def GetVmCommand(self, source):
     result = self.config.context.GetVmCommand(self, self.mode)
-    source = open(self.file).read()
     flags_match = FLAGS_PATTERN.search(source)
     if flags_match:
       result += flags_match.group(1).strip().split()
+    return result
+
+  def GetVmArguments(self, source):
+    result = []
     additional_files = []
     files_match = FILES_PATTERN.search(source);
     # Accept several lines of 'Files:'
@@ -73,6 +80,15 @@ class MjsunitTestCase(test.TestCase):
     result += [framework, self.file]
     return result
 
+  def GetCommand(self):
+    source = open(self.file).read()
+    result = self.GetVmCommand(source)
+    result += self.GetVmArguments(source)
+    if self.isolates:
+      result.append("--isolate")
+      result += self.GetVmArguments(source)
+    return result
+
   def GetSource(self):
     return open(self.file).read()
 
@@ -122,7 +138,8 @@ class MjsunitTestConfiguration(test.TestConfiguration):
     for test in all_tests:
       if self.Contains(path, test):
         file_path = join(self.root, reduce(join, test[1:], "") + ".js")
-        result.append(MjsunitTestCase(test, file_path, mode, self.context, self))
+        result.append(MjsunitTestCase(test, file_path, mode, self.context, self, False))
+        result.append(MjsunitTestCase(test, file_path, mode, self.context, self, True))
     return result
 
   def GetBuildRequirements(self):
index 6dab52d877dd5421f1af91dfadfa66b200b1c22a..cf40e4f24f981bceec888052b9cd5a43f3f21da6 100644 (file)
             '../../src/jump-target.h',
             '../../src/jsregexp.cc',
             '../../src/jsregexp.h',
+            '../../src/isolate.cc',
+            '../../src/isolate.h',
             '../../src/list-inl.h',
             '../../src/list.h',
             '../../src/lithium.cc',
index 79c3341e0858c9c54a9070b30ad6abb170ce5bb5..59f93be021893d141e53539e2fa3df1b40bf5ef3 100755 (executable)
@@ -340,6 +340,9 @@ class TestCase(object):
   def IsNegative(self):
     return False
 
+  def TestsIsolates(self):
+    return False
+
   def CompareTime(self, other):
     return cmp(other.duration, self.duration)
 
@@ -502,11 +505,19 @@ def PrintError(str):
 
 
 def CheckedUnlink(name):
-  try:
-    os.unlink(name)
-  except OSError, e:
-    PrintError("os.unlink() " + str(e))
-
+  # On Windows, when run with -jN in parallel processes,
+  # OS often fails to unlink the temp file. Not sure why.
+  # Need to retry.
+  # Idea from https://bugs.webkit.org/attachment.cgi?id=75982&action=prettypatch
+  retry_count = 0
+  while retry_count < 30:
+    try:
+      os.unlink(name)
+      return
+    except OSError, e:
+      retry_count += 1
+      time.sleep(retry_count * 0.1)
+  PrintError("os.unlink() " + str(e))
 
 def Execute(args, context, timeout=None):
   (fd_out, outname) = tempfile.mkstemp()
@@ -1013,6 +1024,9 @@ class ClassifiedTest(object):
     self.case = case
     self.outcomes = outcomes
 
+  def TestsIsolates(self):
+    return self.case.TestsIsolates()
+
 
 class Configuration(object):
   """The parsed contents of a configuration file"""
@@ -1172,6 +1186,7 @@ def BuildOptions():
   result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
         dest="suppress_dialogs", action="store_false")
   result.add_option("--shell", help="Path to V8 shell", default="shell")
+  result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
   result.add_option("--store-unexpected-output",
       help="Store the temporary JS files from tests that fails",
       dest="store_unexpected_output", default=True, action="store_true")
@@ -1438,6 +1453,8 @@ def Main():
   def DoSkip(case):
     return SKIP in case.outcomes or SLOW in case.outcomes
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
+  if not options.isolates:
+    cases_to_run = [c for c in cases_to_run if not c.TestsIsolates()]
   if len(cases_to_run) == 0:
     print "No tests to run."
     return 0