deps: upgrade v8 to 3.19.0
author    Ben Noordhuis <info@bnoordhuis.nl>
          Mon, 13 May 2013 01:28:40 +0000 (03:28 +0200)
committer Ben Noordhuis <info@bnoordhuis.nl>
          Mon, 13 May 2013 01:28:40 +0000 (03:28 +0200)
197 files changed:
deps/v8/ChangeLog
deps/v8/build/common.gypi
deps/v8/include/v8.h
deps/v8/samples/lineprocessor.cc
deps/v8/samples/process.cc
deps/v8/samples/shell.cc
deps/v8/src/api.cc
deps/v8/src/api.h
deps/v8/src/arguments.h
deps/v8/src/arm/code-stubs-arm.cc
deps/v8/src/arm/debug-arm.cc
deps/v8/src/arm/full-codegen-arm.cc
deps/v8/src/arm/ic-arm.cc
deps/v8/src/arm/lithium-arm.cc
deps/v8/src/arm/lithium-arm.h
deps/v8/src/arm/lithium-codegen-arm.cc
deps/v8/src/arm/lithium-codegen-arm.h
deps/v8/src/arm/macro-assembler-arm.cc
deps/v8/src/arm/macro-assembler-arm.h
deps/v8/src/arm/simulator-arm.cc
deps/v8/src/arm/simulator-arm.h
deps/v8/src/arm/stub-cache-arm.cc
deps/v8/src/arraybuffer.js [new file with mode: 0644]
deps/v8/src/assembler.cc
deps/v8/src/assembler.h
deps/v8/src/ast.h
deps/v8/src/atomicops_internals_x86_gcc.h
deps/v8/src/bootstrapper.cc
deps/v8/src/builtins-decls.h [new file with mode: 0644]
deps/v8/src/builtins.cc
deps/v8/src/builtins.h
deps/v8/src/code-stubs-hydrogen.cc
deps/v8/src/code-stubs.cc
deps/v8/src/code-stubs.h
deps/v8/src/compiler.cc
deps/v8/src/contexts.h
deps/v8/src/d8-debug.cc
deps/v8/src/d8.cc
deps/v8/src/d8.h
deps/v8/src/debug.cc
deps/v8/src/debug.h
deps/v8/src/factory.cc
deps/v8/src/factory.h
deps/v8/src/flag-definitions.h
deps/v8/src/frames-inl.h
deps/v8/src/frames.cc
deps/v8/src/frames.h
deps/v8/src/full-codegen.h
deps/v8/src/global-handles.cc
deps/v8/src/global-handles.h
deps/v8/src/handles-inl.h
deps/v8/src/heap-inl.h
deps/v8/src/heap-snapshot-generator.cc
deps/v8/src/heap.cc
deps/v8/src/heap.h
deps/v8/src/hydrogen-instructions.cc
deps/v8/src/hydrogen-instructions.h
deps/v8/src/hydrogen.cc
deps/v8/src/hydrogen.h
deps/v8/src/ia32/code-stubs-ia32.cc
deps/v8/src/ia32/debug-ia32.cc
deps/v8/src/ia32/full-codegen-ia32.cc
deps/v8/src/ia32/ic-ia32.cc
deps/v8/src/ia32/lithium-codegen-ia32.cc
deps/v8/src/ia32/lithium-codegen-ia32.h
deps/v8/src/ia32/lithium-ia32.cc
deps/v8/src/ia32/lithium-ia32.h
deps/v8/src/ia32/macro-assembler-ia32.cc
deps/v8/src/ia32/macro-assembler-ia32.h
deps/v8/src/ia32/stub-cache-ia32.cc
deps/v8/src/ic.cc
deps/v8/src/ic.h
deps/v8/src/incremental-marking.cc
deps/v8/src/incremental-marking.h
deps/v8/src/json-parser.h
deps/v8/src/json-stringifier.h
deps/v8/src/list-inl.h
deps/v8/src/list.h
deps/v8/src/lithium-allocator.cc
deps/v8/src/lithium-allocator.h
deps/v8/src/lithium.cc
deps/v8/src/lithium.h
deps/v8/src/macros.py
deps/v8/src/messages.js
deps/v8/src/mips/code-stubs-mips.cc
deps/v8/src/mips/debug-mips.cc
deps/v8/src/mips/full-codegen-mips.cc
deps/v8/src/mips/ic-mips.cc
deps/v8/src/mips/lithium-codegen-mips.cc
deps/v8/src/mips/lithium-codegen-mips.h
deps/v8/src/mips/lithium-mips.cc
deps/v8/src/mips/lithium-mips.h
deps/v8/src/mips/macro-assembler-mips.cc
deps/v8/src/mips/macro-assembler-mips.h
deps/v8/src/mips/stub-cache-mips.cc
deps/v8/src/mksnapshot.cc
deps/v8/src/object-observe.js
deps/v8/src/objects-debug.cc
deps/v8/src/objects-inl.h
deps/v8/src/objects-printer.cc
deps/v8/src/objects-visiting-inl.h
deps/v8/src/objects.cc
deps/v8/src/objects.h
deps/v8/src/parser.cc
deps/v8/src/parser.h
deps/v8/src/platform-posix.cc
deps/v8/src/prettyprinter.cc
deps/v8/src/prettyprinter.h
deps/v8/src/property-details.h
deps/v8/src/property.cc
deps/v8/src/property.h
deps/v8/src/runtime.cc
deps/v8/src/runtime.h
deps/v8/src/sampler.cc
deps/v8/src/scopes.cc
deps/v8/src/serialize.cc
deps/v8/src/spaces.cc
deps/v8/src/string-stream.cc
deps/v8/src/stub-cache.cc
deps/v8/src/stub-cache.h
deps/v8/src/third_party/vtune/v8-vtune.h
deps/v8/src/third_party/vtune/vtune-jit.cc
deps/v8/src/transitions.cc
deps/v8/src/type-info.cc
deps/v8/src/typedarray.js
deps/v8/src/v8.h
deps/v8/src/v8memory.h
deps/v8/src/v8natives.js
deps/v8/src/version.cc
deps/v8/src/x64/assembler-x64.h
deps/v8/src/x64/code-stubs-x64.cc
deps/v8/src/x64/debug-x64.cc
deps/v8/src/x64/full-codegen-x64.cc
deps/v8/src/x64/ic-x64.cc
deps/v8/src/x64/lithium-codegen-x64.cc
deps/v8/src/x64/lithium-codegen-x64.h
deps/v8/src/x64/lithium-x64.cc
deps/v8/src/x64/lithium-x64.h
deps/v8/src/x64/macro-assembler-x64.cc
deps/v8/src/x64/macro-assembler-x64.h
deps/v8/src/x64/stub-cache-x64.cc
deps/v8/test/cctest/cctest.cc
deps/v8/test/cctest/cctest.h
deps/v8/test/cctest/cctest.status
deps/v8/test/cctest/test-alloc.cc
deps/v8/test/cctest/test-api.cc
deps/v8/test/cctest/test-assembler-arm.cc
deps/v8/test/cctest/test-assembler-ia32.cc
deps/v8/test/cctest/test-assembler-x64.cc
deps/v8/test/cctest/test-compiler.cc
deps/v8/test/cctest/test-cpu-profiler.cc
deps/v8/test/cctest/test-debug.cc
deps/v8/test/cctest/test-declarative-accessors.cc
deps/v8/test/cctest/test-decls.cc
deps/v8/test/cctest/test-disasm-arm.cc
deps/v8/test/cctest/test-disasm-ia32.cc
deps/v8/test/cctest/test-func-name-inference.cc
deps/v8/test/cctest/test-global-object.cc
deps/v8/test/cctest/test-hashing.cc
deps/v8/test/cctest/test-heap-profiler.cc
deps/v8/test/cctest/test-heap.cc
deps/v8/test/cctest/test-lockers.cc
deps/v8/test/cctest/test-log-stack-tracer.cc
deps/v8/test/cctest/test-log.cc
deps/v8/test/cctest/test-mark-compact.cc
deps/v8/test/cctest/test-object-observe.cc
deps/v8/test/cctest/test-parsing.cc
deps/v8/test/cctest/test-profile-generator.cc
deps/v8/test/cctest/test-random.cc
deps/v8/test/cctest/test-regexp.cc
deps/v8/test/cctest/test-serialize.cc
deps/v8/test/cctest/test-strings.cc
deps/v8/test/cctest/test-symbols.cc
deps/v8/test/cctest/test-thread-termination.cc
deps/v8/test/cctest/test-threads.cc
deps/v8/test/cctest/test-weakmaps.cc
deps/v8/test/message/isvar.js [new file with mode: 0644]
deps/v8/test/message/isvar.out [new file with mode: 0644]
deps/v8/test/message/paren_in_arg_string.js [new file with mode: 0644]
deps/v8/test/message/paren_in_arg_string.out [new file with mode: 0644]
deps/v8/test/message/single-function-literal.js [new file with mode: 0644]
deps/v8/test/message/single-function-literal.out [new file with mode: 0644]
deps/v8/test/mjsunit/allocation-site-info.js
deps/v8/test/mjsunit/compare-nil.js [new file with mode: 0644]
deps/v8/test/mjsunit/elements-transition-hoisting.js
deps/v8/test/mjsunit/external-array-no-sse2.js
deps/v8/test/mjsunit/external-array.js
deps/v8/test/mjsunit/harmony/generators-iteration.js
deps/v8/test/mjsunit/harmony/generators-runtime.js
deps/v8/test/mjsunit/harmony/typedarrays.js
deps/v8/test/mjsunit/lea-add.js [new file with mode: 0644]
deps/v8/test/mjsunit/regress/regress-235311.js [new file with mode: 0644]
deps/v8/test/mjsunit/regress/regress-2671-1.js [new file with mode: 0644]
deps/v8/test/mjsunit/regress/regress-2671.js [new file with mode: 0644]
deps/v8/test/mjsunit/track-fields.js [new file with mode: 0644]
deps/v8/tools/grokdump.py
deps/v8/tools/gyp/v8.gyp

diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index e344643..d1dbb29 100644
@@ -1,3 +1,30 @@
+2013-05-10: Version 3.19.0
+
+        Deprecated Context::New which returns Persistent.
+
+        Added Persistent<T>::Reset which disposes the handle and redirects it to
+        point to another object.
+
+        Deprecated WriteAscii and MayContainNonAscii.
+
+        Exposed AssertNoAllocation to API.
+
+        Performance and stability improvements on all platforms.
+
+
+2013-04-30: Version 3.18.5
+
+        Allowed setting debugger breakpoints on CompareNilICs (issue 2660)
+
+        Fixed beyond-heap load on x64 Crankshafted StringCharFromCode
+        (Chromium issue 235311)
+
+        Change 'Parse error' to three more informative messages.
+        (Chromium issue 2636)
+
+        Performance and stability improvements on all platforms.
+
+
 2013-04-26: Version 3.18.4
 
         Added a preliminary API for ES6 ArrayBuffers
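The two headline API changes in the 3.19.0 notes above are the isolate-taking Context::New (which no longer returns a Persistent) and the new Persistent<T>::Reset. A minimal sketch of the embedder-side migration, assuming the names KeepContextAlive/keep_alive are illustrative and not part of this commit:

    void KeepContextAlive(v8::Isolate* isolate,
                          v8::Persistent<v8::Context>& keep_alive) {
      v8::HandleScope handle_scope(isolate);
      v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New();
      // The isolate-taking overload returns a scope-owned handle, not a
      // Persistent the caller must dispose.
      v8::Handle<v8::Context> context = v8::Context::New(isolate, NULL, global);
      // Reset disposes whatever keep_alive currently points at and redirects
      // it to the new context.
      keep_alive.Reset(isolate, context);
    }
    // Later, when the context is no longer needed:
    //   keep_alive.Dispose(isolate);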
diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi
index 8028b3e..0b1f397 100644
           }],
           ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" \
             or OS=="android"', {
+            'cflags!': [
+              '-O2',
+              '-Os',
+            ],
+            'cflags': [
+              '-fdata-sections',
+              '-ffunction-sections',
+              '-O3',
+            ],
             'conditions': [
               [ 'gcc_version==44 and clang==0', {
                 'cflags': [
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index f300884..3a86e86 100644
@@ -123,6 +123,7 @@ class ImplementationUtilities;
 class Int32;
 class Integer;
 class Isolate;
+class LocalContext;
 class Number;
 class NumberObject;
 class Object;
@@ -190,8 +191,13 @@ class UniqueId {
  * \param object the weak global object to be reclaimed by the garbage collector
  * \param parameter the value passed in when making the weak global object
  */
-typedef void (*WeakReferenceCallback)(Persistent<Value> object,
-                                      void* parameter);
+template<typename T, typename P>
+class WeakReferenceCallbacks {
+ public:
+  typedef void (*Revivable)(Isolate* isolate,
+                            Persistent<T>* object,
+                            P* parameter);
+};
 
 // TODO(svenpanne) Temporary definition until Chrome is in sync.
 typedef void (*NearDeathCallback)(Isolate* isolate,
@@ -205,6 +211,9 @@ typedef void (*NearDeathCallback)(Isolate* isolate,
     *(static_cast<T* volatile*>(0)) = static_cast<S*>(0);      \
   }
 
+
+#define V8_USE_UNSAFE_HANDLES
+
 /**
  * An object reference managed by the v8 garbage collector.
  *
@@ -237,10 +246,12 @@ template <class T> class Handle {
    */
   V8_INLINE(Handle()) : val_(0) {}
 
+#ifdef V8_USE_UNSAFE_HANDLES
   /**
    * Creates a new handle for the specified value.
    */
   V8_INLINE(explicit Handle(T* val)) : val_(val) {}
+#endif
 
   /**
    * Creates a handle for the contents of the specified handle.  This
@@ -282,7 +293,7 @@ template <class T> class Handle {
    * to which they refer are identical.
    * The handles' references are not checked.
    */
-  template <class S> V8_INLINE(bool operator==(Handle<S> that) const) {
+  template <class S> V8_INLINE(bool operator==(const Handle<S> that) const) {
     internal::Object** a = reinterpret_cast<internal::Object**>(**this);
     internal::Object** b = reinterpret_cast<internal::Object**>(*that);
     if (a == 0) return b == 0;
@@ -290,6 +301,17 @@ template <class T> class Handle {
     return *a == *b;
   }
 
+#ifndef V8_USE_UNSAFE_HANDLES
+  template <class S> V8_INLINE(
+      bool operator==(const Persistent<S>& that) const) {
+    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
+    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+    if (a == 0) return b == 0;
+    if (b == 0) return false;
+    return *a == *b;
+  }
+#endif
+
   /**
    * Checks whether two handles are different.
    * Returns true if only one of the handles is empty, or if
@@ -313,7 +335,46 @@ template <class T> class Handle {
     return Handle<S>::Cast(*this);
   }
 
+#ifndef V8_USE_UNSAFE_HANDLES
+  V8_INLINE(static Handle<T> New(Isolate* isolate, Handle<T> that)) {
+    return New(isolate, that.val_);
+  }
+  // TODO(dcarney): remove before cutover
+  V8_INLINE(static Handle<T> New(Isolate* isolate, const Persistent<T>& that)) {
+    return New(isolate, that.val_);
+  }
+
+#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+
+ private:
+#endif
+  /**
+   * Creates a new handle for the specified value.
+   */
+  V8_INLINE(explicit Handle(T* val)) : val_(val) {}
+#endif
+
  private:
+  template<class F>
+  friend class Persistent;
+  template<class F>
+  friend class Local;
+  friend class Arguments;
+  friend class String;
+  friend class Object;
+  friend class AccessorInfo;
+  friend Handle<Primitive> Undefined(Isolate* isolate);
+  friend Handle<Primitive> Null(Isolate* isolate);
+  friend Handle<Boolean> True(Isolate* isolate);
+  friend Handle<Boolean> False(Isolate* isolate);
+  friend class Context;
+  friend class InternalHandleHelper;
+  friend class LocalContext;
+
+#ifndef V8_USE_UNSAFE_HANDLES
+  V8_INLINE(static Handle<T> New(Isolate* isolate, T* that));
+#endif
+
   T* val_;
 };
 
@@ -325,6 +386,7 @@ template <class T> class Handle {
  * handle scope are destroyed when the handle scope is destroyed.  Hence it
  * is not necessary to explicitly deallocate local handles.
  */
+// TODO(dcarney): deprecate entire class
 template <class T> class Local : public Handle<T> {
  public:
   V8_INLINE(Local());
@@ -337,7 +399,12 @@ template <class T> class Local : public Handle<T> {
      */
     TYPE_CHECK(T, S);
   }
+
+
+#ifdef V8_USE_UNSAFE_HANDLES
   template <class S> V8_INLINE(Local(S* that) : Handle<T>(that)) { }
+#endif
+
   template <class S> V8_INLINE(static Local<T> Cast(Local<S> that)) {
 #ifdef V8_ENABLE_CHECKS
     // If we're going to perform the type check then we have to check
@@ -346,6 +413,12 @@ template <class T> class Local : public Handle<T> {
 #endif
     return Local<T>(T::Cast(*that));
   }
+#ifndef V8_USE_UNSAFE_HANDLES
+  template <class S> V8_INLINE(Local(Handle<S> that))
+      : Handle<T>(reinterpret_cast<T*>(*that)) {
+    TYPE_CHECK(T, S);
+  }
+#endif
 
   template <class S> V8_INLINE(Local<S> As()) {
     return Local<S>::Cast(*this);
@@ -358,8 +431,32 @@ template <class T> class Local : public Handle<T> {
    */
   V8_INLINE(static Local<T> New(Handle<T> that));
   V8_INLINE(static Local<T> New(Isolate* isolate, Handle<T> that));
-};
+#ifndef V8_USE_UNSAFE_HANDLES
+  // TODO(dcarney): remove before cutover
+  V8_INLINE(static Local<T> New(Isolate* isolate, const Persistent<T>& that));
+
+#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+
+ private:
+#endif
+  template <class S> V8_INLINE(Local(S* that) : Handle<T>(that)) { }
+#endif
 
+ private:
+  template<class F>
+  friend class Persistent;
+  template<class F>
+  friend class Handle;
+  friend class Arguments;
+  friend class String;
+  friend class Object;
+  friend class AccessorInfo;
+  friend class Context;
+  friend class InternalHandleHelper;
+  friend class LocalContext;
+
+  V8_INLINE(static Local<T> New(Isolate* isolate, T* that));
+};
 
 /**
  * An object reference that is independent of any handle scope.  Where
@@ -378,13 +475,38 @@ template <class T> class Local : public Handle<T> {
  * different storage cells but rather two references to the same
  * storage cell.
  */
-template <class T> class Persistent : public Handle<T> {
+template <class T> class Persistent // NOLINT
+#ifdef V8_USE_UNSAFE_HANDLES
+    : public Handle<T> {
+#else
+  { // NOLINT
+#endif
  public:
+#ifndef V8_USE_UNSAFE_HANDLES
+  V8_INLINE(Persistent()) : val_(0) { }
+  V8_INLINE(~Persistent()) {
+    // TODO(dcarney): add this back before cutover.
+    // Dispose();
+  }
+  V8_INLINE(bool IsEmpty() const) { return val_ == 0; }
+  // TODO(dcarney): remove somehow before cutover
+  // The handle should either be 0, or a pointer to a live cell.
+  V8_INLINE(void Clear()) { val_ = 0; }
+
+  /**
+   * A constructor that creates a new global cell pointing to that. In contrast
+   * to the copy constructor, this creates a new persistent handle which needs
+   * to be separately disposed.
+   */
+  template <class S> V8_INLINE(Persistent(Isolate* isolate, Handle<S> that))
+      : val_(*New(isolate, that)) { }
+
+#else
   /**
    * Creates an empty persistent handle that doesn't point to any
    * storage cell.
    */
-  V8_INLINE(Persistent());
+  V8_INLINE(Persistent()) : Handle<T>() { }
 
   /**
    * Creates a persistent handle for the same storage cell as the
@@ -424,6 +546,8 @@ template <class T> class Persistent : public Handle<T> {
   template <class S> explicit V8_INLINE(Persistent(Handle<S> that))
       : Handle<T>(*that) { }
 
+#endif
+
   template <class S> V8_INLINE(static Persistent<T> Cast(Persistent<S> that)) {
 #ifdef V8_ENABLE_CHECKS
     // If we're going to perform the type check then we have to check
@@ -437,16 +561,38 @@ template <class T> class Persistent : public Handle<T> {
     return Persistent<S>::Cast(*this);
   }
 
-  /** Deprecated. Use Isolate version instead. */
   V8_DEPRECATED(static Persistent<T> New(Handle<T> that));
 
   /**
    * Creates a new persistent handle for an existing local or persistent handle.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(static Persistent<T> New(Isolate* isolate, Handle<T> that));
+#ifndef V8_USE_UNSAFE_HANDLES
+  // TODO(dcarney): remove before cutover
+  V8_INLINE(static Persistent<T> New(Isolate* isolate, Persistent<T> that));
+#endif
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void Dispose());
+#ifndef V8_USE_UNSAFE_HANDLES
+  template <class S> V8_INLINE(
+      bool operator==(const Persistent<S>& that) const) {
+    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
+    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+    if (a == 0) return b == 0;
+    if (b == 0) return false;
+    return *a == *b;
+  }
+
+  template <class S> V8_INLINE(bool operator==(const Handle<S> that) const) {
+    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
+    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+    if (a == 0) return b == 0;
+    if (b == 0) return false;
+    return *a == *b;
+  }
+#endif
+
+  V8_INLINE(void Dispose());
 
   /**
    * Releases the storage cell referenced by this persistent handle.
@@ -454,11 +600,20 @@ template <class T> class Persistent : public Handle<T> {
    * This handle's reference, and any other references to the storage
    * cell remain and IsEmpty will still return false.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void Dispose(Isolate* isolate));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void MakeWeak(void* parameters,
-                              WeakReferenceCallback callback));
+  template<typename S, typename P>
+  V8_INLINE(void MakeWeak(
+      Isolate* isolate,
+      P* parameters,
+      typename WeakReferenceCallbacks<S, P>::Revivable callback));
+
+  template<typename P>
+  V8_INLINE(void MakeWeak(
+      Isolate* isolate,
+      P* parameters,
+      typename WeakReferenceCallbacks<T, P>::Revivable callback));
 
   /**
    * Make the reference to this object weak.  When only weak handles
@@ -466,18 +621,17 @@ template <class T> class Persistent : public Handle<T> {
    * callback to the given V8::NearDeathCallback function, passing
    * it the object reference and the given parameters.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void MakeWeak(Isolate* isolate,
                           void* parameters,
                           NearDeathCallback callback));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void ClearWeak());
+  V8_INLINE(void ClearWeak());
 
-  /** Clears the weak reference to this object. */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void ClearWeak(Isolate* isolate));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void MarkIndependent());
+  V8_INLINE(void MarkIndependent());
 
   /**
    * Marks the reference to this object independent. Garbage collector is free
@@ -485,10 +639,10 @@ template <class T> class Persistent : public Handle<T> {
    * independent handle should not assume that it will be preceded by a global
    * GC prologue callback or followed by a global GC epilogue callback.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void MarkIndependent(Isolate* isolate));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void MarkPartiallyDependent());
+  V8_INLINE(void MarkPartiallyDependent());
 
   /**
    * Marks the reference to this object partially dependent. Partially dependent
@@ -498,47 +652,103 @@ template <class T> class Persistent : public Handle<T> {
    * external dependencies. This mark is automatically cleared after each
    * garbage collection.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void MarkPartiallyDependent(Isolate* isolate));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(bool IsIndependent() const);
+  V8_INLINE(bool IsIndependent() const);
 
-  /** Returns true if this handle was previously marked as independent. */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(bool IsIndependent(Isolate* isolate) const);
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(bool IsNearDeath() const);
+  V8_INLINE(bool IsNearDeath() const);
 
   /** Checks if the handle holds the only reference to an object. */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(bool IsNearDeath(Isolate* isolate) const);
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(bool IsWeak() const);
+  V8_INLINE(bool IsWeak() const);
 
   /** Returns true if the handle's reference is weak.  */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(bool IsWeak(Isolate* isolate) const);
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(void SetWrapperClassId(uint16_t class_id));
+  V8_INLINE(void SetWrapperClassId(uint16_t class_id));
 
   /**
    * Assigns a wrapper class ID to the handle. See RetainedObjectInfo interface
    * description in v8-profiler.h for details.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(void SetWrapperClassId(Isolate* isolate, uint16_t class_id));
 
-  /** Deprecated. Use Isolate version instead. */
-  V8_DEPRECATED(uint16_t WrapperClassId() const);
+  V8_INLINE(uint16_t WrapperClassId() const);
 
   /**
    * Returns the class ID previously assigned to this handle or 0 if no class ID
    * was previously assigned.
    */
+  // TODO(dcarney): remove before cutover
   V8_INLINE(uint16_t WrapperClassId(Isolate* isolate) const);
 
+  /**
+   * Disposes the current contents of the handle and replaces it.
+   */
+  V8_INLINE(void Reset(Isolate* isolate, const Handle<T>& other));
+
+#ifndef V8_USE_UNSAFE_HANDLES
+
+#ifndef V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
+ private:
+#endif
+  // TODO(dcarney): make unlinkable before cutover
+  V8_INLINE(Persistent(const Persistent& that)) : val_(that.val_) {}
+  // TODO(dcarney): make unlinkable before cutover
+  V8_INLINE(Persistent& operator=(const Persistent& that)) {  // NOLINT
+    this->val_ = that.val_;
+    return *this;
+  }
+
+ public:
+#ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+
+ private:
+#endif
+  // TODO(dcarney): remove before cutover
+  template <class S> V8_INLINE(Persistent(S* that)) : val_(that) { }
+  // TODO(dcarney): remove before cutover
+  template <class S> V8_INLINE(Persistent(Persistent<S> that))
+      : val_(*that) {
+    TYPE_CHECK(T, S);
+  }
+  // TODO(dcarney): remove before cutover
+  V8_INLINE(T* operator*() const) { return val_; }
+ public:
+#ifndef V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
+ private:
+#endif
+  // TODO(dcarney): remove before cutover
+  V8_INLINE(T* operator->() const) { return val_; }
+ public:
+#endif
+
  private:
+  template<class F>
+  friend class Handle;
+  template<class F>
+  friend class Local;
   friend class ImplementationUtilities;
   friend class ObjectTemplate;
+  friend class Context;
+  friend class InternalHandleHelper;
+  friend class LocalContext;
+
+  V8_INLINE(static Persistent<T> New(Isolate* isolate, T* that));
+
+#ifndef V8_USE_UNSAFE_HANDLES
+  T* val_;
+#endif
 };
 
 
@@ -1108,6 +1318,73 @@ class V8EXPORT Value : public Data {
    */
   bool IsRegExp() const;
 
+
+  /**
+   * Returns true if this value is an ArrayBuffer.
+   * This is an experimental feature.
+   */
+  bool IsArrayBuffer() const;
+
+  /**
+   * Returns true if this value is one of TypedArrays.
+   * This is an experimental feature.
+   */
+  bool IsTypedArray() const;
+
+  /**
+   * Returns true if this value is an Uint8Array.
+   * This is an experimental feature.
+   */
+  bool IsUint8Array() const;
+
+  /**
+   * Returns true if this value is an Uint8ClampedArray.
+   * This is an experimental feature.
+   */
+  bool IsUint8ClampedArray() const;
+
+  /**
+   * Returns true if this value is an Int8Array.
+   * This is an experimental feature.
+   */
+  bool IsInt8Array() const;
+
+  /**
+   * Returns true if this value is an Uint16Array.
+   * This is an experimental feature.
+   */
+  bool IsUint16Array() const;
+
+  /**
+   * Returns true if this value is an Int16Array.
+   * This is an experimental feature.
+   */
+  bool IsInt16Array() const;
+
+  /**
+   * Returns true if this value is an Uint32Array.
+   * This is an experimental feature.
+   */
+  bool IsUint32Array() const;
+
+  /**
+   * Returns true if this value is an Int32Array.
+   * This is an experimental feature.
+   */
+  bool IsInt32Array() const;
+
+  /**
+   * Returns true if this value is a Float32Array.
+   * This is an experimental feature.
+   */
+  bool IsFloat32Array() const;
+
+  /**
+   * Returns true if this value is a Float64Array.
+   * This is an experimental feature.
+   */
+  bool IsFloat64Array() const;
+
   Local<Boolean> ToBoolean() const;
   Local<Number> ToNumber() const;
   Local<String> ToString() const;
@@ -1185,8 +1462,7 @@ class V8EXPORT String : public Primitive {
   /**
    * This function is no longer useful.
    */
-  // TODO(dcarney): deprecate
-  V8_INLINE(bool MayContainNonAscii()) const { return true; }
+  V8_DEPRECATED(V8_INLINE(bool MayContainNonAscii()) const) { return true; }
 
   /**
    * Returns whether this string contains only one byte data.
@@ -1231,10 +1507,10 @@ class V8EXPORT String : public Primitive {
             int length = -1,
             int options = NO_OPTIONS) const;
   // ASCII characters.
-  int WriteAscii(char* buffer,
-                 int start = 0,
-                 int length = -1,
-                 int options = NO_OPTIONS) const;
+  V8_DEPRECATED(int WriteAscii(char* buffer,
+                               int start = 0,
+                               int length = -1,
+                               int options = NO_OPTIONS) const);
   // One byte characters.
   int WriteOneByte(uint8_t* buffer,
                    int start = 0,
@@ -2047,6 +2323,185 @@ class V8EXPORT ArrayBuffer : public Object {
 
 
 /**
+ * A base class for an instance of TypedArray series of constructors
+ * (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT TypedArray : public Object {
+ public:
+  /**
+   * Returns underlying ArrayBuffer.
+   */
+  Local<ArrayBuffer> Buffer();
+  /**
+   * Byte offset in |Buffer|
+   */
+  size_t ByteOffset();
+  /**
+   * Number of elements in this typed array.
+   */
+  size_t Length();
+  /**
+   * Size of typed array in bytes (e.g. for Int16Array, 2*|Length|).
+   */
+  size_t ByteLength();
+  /**
+   * Base address of typed array.
+   */
+  void* BaseAddress();
+
+  V8_INLINE(static TypedArray* Cast(Value* obj));
+
+ private:
+  TypedArray();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Uint8Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Uint8Array : public TypedArray {
+ public:
+  static Local<Uint8Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Uint8Array* Cast(Value* obj));
+
+ private:
+  Uint8Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Uint8ClampedArray constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Uint8ClampedArray : public TypedArray {
+ public:
+  static Local<Uint8ClampedArray> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Uint8ClampedArray* Cast(Value* obj));
+
+ private:
+  Uint8ClampedArray();
+  static void CheckCast(Value* obj);
+};
+
+/**
+ * An instance of Int8Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Int8Array : public TypedArray {
+ public:
+  static Local<Int8Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Int8Array* Cast(Value* obj));
+
+ private:
+  Int8Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Uint16Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Uint16Array : public TypedArray {
+ public:
+  static Local<Uint16Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Uint16Array* Cast(Value* obj));
+
+ private:
+  Uint16Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Int16Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Int16Array : public TypedArray {
+ public:
+  static Local<Int16Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Int16Array* Cast(Value* obj));
+
+ private:
+  Int16Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Uint32Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Uint32Array : public TypedArray {
+ public:
+  static Local<Uint32Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Uint32Array* Cast(Value* obj));
+
+ private:
+  Uint32Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Int32Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Int32Array : public TypedArray {
+ public:
+  static Local<Int32Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Int32Array* Cast(Value* obj));
+
+ private:
+  Int32Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Float32Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Float32Array : public TypedArray {
+ public:
+  static Local<Float32Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Float32Array* Cast(Value* obj));
+
+ private:
+  Float32Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
+ * An instance of Float64Array constructor (ES6 draft 15.13.6).
+ * This API is experimental and may change significantly.
+ */
+class V8EXPORT Float64Array : public TypedArray {
+ public:
+  static Local<Float64Array> New(Handle<ArrayBuffer> array_buffer,
+                               size_t byte_offset, size_t length);
+  V8_INLINE(static Float64Array* Cast(Value* obj));
+
+ private:
+  Float64Array();
+  static void CheckCast(Value* obj);
+};
+
+
+/**
  * An instance of the built-in Date constructor (ECMA-262, 15.9).
  */
 class V8EXPORT Date : public Object {
@@ -3440,6 +3895,24 @@ class V8EXPORT PersistentHandleVisitor {  // NOLINT
 
 
 /**
+ * Asserts that no action is performed that could cause a handle's value
+ * to be modified. Useful when otherwise unsafe handle operations need to
+ * be performed.
+ */
+class V8EXPORT AssertNoGCScope {
+#ifndef DEBUG
+  V8_INLINE(AssertNoGCScope(Isolate* isolate)) {}
+#else
+  AssertNoGCScope(Isolate* isolate);
+  ~AssertNoGCScope();
+ private:
+  Isolate* isolate_;
+  bool last_state_;
+#endif
+};
+
+
+/**
  * Container class for static utility functions.
  */
 class V8EXPORT V8 {
@@ -3909,10 +4382,11 @@ class V8EXPORT V8 {
                                                internal::Object** handle);
   static void DisposeGlobal(internal::Isolate* isolate,
                             internal::Object** global_handle);
+  typedef WeakReferenceCallbacks<Value, void>::Revivable RevivableCallback;
   static void MakeWeak(internal::Isolate* isolate,
                        internal::Object** global_handle,
                        void* data,
-                       WeakReferenceCallback weak_reference_callback,
+                       RevivableCallback weak_reference_callback,
                        NearDeathCallback near_death_callback);
   static void ClearWeak(internal::Isolate* isolate,
                         internal::Object** global_handle);
@@ -4256,7 +4730,16 @@ class V8EXPORT Context {
     explicit V8_INLINE(Scope(Handle<Context> context)) : context_(context) {
       context_->Enter();
     }
+    V8_INLINE(Scope(Isolate* isolate, Persistent<Context>& context)) // NOLINT
+#ifndef V8_USE_UNSAFE_HANDLES
+    : context_(Handle<Context>::New(isolate, context)) {
+#else
+    : context_(Local<Context>::New(isolate, context)) {
+#endif
+      context_->Enter();
+    }
     V8_INLINE(~Scope()) { context_->Exit(); }
+
    private:
     Handle<Context> context_;
   };
@@ -4543,7 +5026,7 @@ class Internals {
   static const int kJSObjectHeaderSize = 3 * kApiPointerSize;
   static const int kFixedArrayHeaderSize = 2 * kApiPointerSize;
   static const int kContextHeaderSize = 2 * kApiPointerSize;
-  static const int kContextEmbedderDataIndex = 56;
+  static const int kContextEmbedderDataIndex = 65;
   static const int kFullStringRepresentationMask = 0x07;
   static const int kStringEncodingMask = 0x4;
   static const int kExternalTwoByteRepresentationTag = 0x02;
@@ -4695,8 +5178,30 @@ Local<T> Local<T>::New(Handle<T> that) {
 
 template <class T>
 Local<T> Local<T>::New(Isolate* isolate, Handle<T> that) {
-  if (that.IsEmpty()) return Local<T>();
-  T* that_ptr = *that;
+  return New(isolate, that.val_);
+}
+
+#ifndef V8_USE_UNSAFE_HANDLES
+template <class T>
+Local<T> Local<T>::New(Isolate* isolate, const Persistent<T>& that) {
+  return New(isolate, that.val_);
+}
+
+template <class T>
+Handle<T> Handle<T>::New(Isolate* isolate, T* that) {
+  if (that == NULL) return Handle<T>();
+  T* that_ptr = that;
+  internal::Object** p = reinterpret_cast<internal::Object**>(that_ptr);
+  return Handle<T>(reinterpret_cast<T*>(HandleScope::CreateHandle(
+      reinterpret_cast<internal::Isolate*>(isolate), *p)));
+}
+#endif
+
+
+template <class T>
+Local<T> Local<T>::New(Isolate* isolate, T* that) {
+  if (that == NULL) return Local<T>();
+  T* that_ptr = that;
   internal::Object** p = reinterpret_cast<internal::Object**>(that_ptr);
   return Local<T>(reinterpret_cast<T*>(HandleScope::CreateHandle(
       reinterpret_cast<internal::Isolate*>(isolate), *p)));
@@ -4705,14 +5210,26 @@ Local<T> Local<T>::New(Isolate* isolate, Handle<T> that) {
 
 template <class T>
 Persistent<T> Persistent<T>::New(Handle<T> that) {
-  return New(Isolate::GetCurrent(), that);
+  return New(Isolate::GetCurrent(), that.val_);
 }
 
 
 template <class T>
 Persistent<T> Persistent<T>::New(Isolate* isolate, Handle<T> that) {
-  if (that.IsEmpty()) return Persistent<T>();
-  internal::Object** p = reinterpret_cast<internal::Object**>(*that);
+  return New(Isolate::GetCurrent(), that.val_);
+}
+
+#ifndef V8_USE_UNSAFE_HANDLES
+template <class T>
+Persistent<T> Persistent<T>::New(Isolate* isolate, Persistent<T> that) {
+  return New(Isolate::GetCurrent(), that.val_);
+}
+#endif
+
+template <class T>
+Persistent<T> Persistent<T>::New(Isolate* isolate, T* that) {
+  if (that == NULL) return Persistent<T>();
+  internal::Object** p = reinterpret_cast<internal::Object**>(that);
   return Persistent<T>(reinterpret_cast<T*>(
       V8::GlobalizeReference(reinterpret_cast<internal::Isolate*>(isolate),
                              p)));
@@ -4730,7 +5247,7 @@ bool Persistent<T>::IsIndependent(Isolate* isolate) const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   if (!I::IsInitialized(isolate)) return false;
-  return I::GetNodeFlag(reinterpret_cast<internal::Object**>(**this),
+  return I::GetNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
                         I::kNodeIsIndependentShift);
 }
 
@@ -4746,7 +5263,7 @@ bool Persistent<T>::IsNearDeath(Isolate* isolate) const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   if (!I::IsInitialized(isolate)) return false;
-  return I::GetNodeState(reinterpret_cast<internal::Object**>(**this)) ==
+  return I::GetNodeState(reinterpret_cast<internal::Object**>(this->val_)) ==
       I::kNodeStateIsNearDeathValue;
 }
 
@@ -4762,7 +5279,7 @@ bool Persistent<T>::IsWeak(Isolate* isolate) const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   if (!I::IsInitialized(isolate)) return false;
-  return I::GetNodeState(reinterpret_cast<internal::Object**>(**this)) ==
+  return I::GetNodeState(reinterpret_cast<internal::Object**>(this->val_)) ==
       I::kNodeStateIsWeakValue;
 }
 
@@ -4777,29 +5294,45 @@ template <class T>
 void Persistent<T>::Dispose(Isolate* isolate) {
   if (this->IsEmpty()) return;
   V8::DisposeGlobal(reinterpret_cast<internal::Isolate*>(isolate),
-                    reinterpret_cast<internal::Object**>(**this));
+                    reinterpret_cast<internal::Object**>(this->val_));
+#ifndef V8_USE_UNSAFE_HANDLES
+  val_ = 0;
+#endif
 }
 
 
 template <class T>
-Persistent<T>::Persistent() : Handle<T>() { }
-
-template <class T>
-void Persistent<T>::MakeWeak(void* parameters, WeakReferenceCallback callback) {
-  Isolate* isolate = Isolate::GetCurrent();
+template <typename S, typename P>
+void Persistent<T>::MakeWeak(
+    Isolate* isolate,
+    P* parameters,
+    typename WeakReferenceCallbacks<S, P>::Revivable callback) {
+  TYPE_CHECK(S, T);
+  typedef typename WeakReferenceCallbacks<Value, void>::Revivable Revivable;
   V8::MakeWeak(reinterpret_cast<internal::Isolate*>(isolate),
-               reinterpret_cast<internal::Object**>(**this),
+               reinterpret_cast<internal::Object**>(this->val_),
                parameters,
-               callback,
+               reinterpret_cast<Revivable>(callback),
                NULL);
 }
 
+
+template <class T>
+template <typename P>
+void Persistent<T>::MakeWeak(
+    Isolate* isolate,
+    P* parameters,
+    typename WeakReferenceCallbacks<T, P>::Revivable callback) {
+  MakeWeak<T, P>(isolate, parameters, callback);
+}
+
+
 template <class T>
 void Persistent<T>::MakeWeak(Isolate* isolate,
                              void* parameters,
                              NearDeathCallback callback) {
   V8::MakeWeak(reinterpret_cast<internal::Isolate*>(isolate),
-               reinterpret_cast<internal::Object**>(**this),
+               reinterpret_cast<internal::Object**>(this->val_),
                parameters,
                NULL,
                callback);
@@ -4813,7 +5346,7 @@ void Persistent<T>::ClearWeak() {
 template <class T>
 void Persistent<T>::ClearWeak(Isolate* isolate) {
   V8::ClearWeak(reinterpret_cast<internal::Isolate*>(isolate),
-                reinterpret_cast<internal::Object**>(**this));
+                reinterpret_cast<internal::Object**>(this->val_));
 }
 
 template <class T>
@@ -4826,7 +5359,7 @@ void Persistent<T>::MarkIndependent(Isolate* isolate) {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   if (!I::IsInitialized(isolate)) return;
-  I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(**this),
+  I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
                     true,
                     I::kNodeIsIndependentShift);
 }
@@ -4841,7 +5374,7 @@ void Persistent<T>::MarkPartiallyDependent(Isolate* isolate) {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   if (!I::IsInitialized(isolate)) return;
-  I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(**this),
+  I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
                     true,
                     I::kNodeIsPartiallyDependentShift);
 }
@@ -4852,11 +5385,27 @@ void Persistent<T>::SetWrapperClassId(uint16_t class_id) {
 }
 
 template <class T>
+void Persistent<T>::Reset(Isolate* isolate, const Handle<T>& other) {
+  Dispose(isolate);
+#ifdef V8_USE_UNSAFE_HANDLES
+  *this = *New(isolate, other);
+#else
+  if (other.IsEmpty()) {
+    this->val_ = NULL;
+    return;
+  }
+  internal::Object** p = reinterpret_cast<internal::Object**>(other.val_);
+  this->val_ = reinterpret_cast<T*>(
+      V8::GlobalizeReference(reinterpret_cast<internal::Isolate*>(isolate), p));
+#endif
+}
+
+template <class T>
 void Persistent<T>::SetWrapperClassId(Isolate* isolate, uint16_t class_id) {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   if (!I::IsInitialized(isolate)) return;
-  internal::Object** obj = reinterpret_cast<internal::Object**>(**this);
+  internal::Object** obj = reinterpret_cast<internal::Object**>(this->val_);
   uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
   *reinterpret_cast<uint16_t*>(addr) = class_id;
 }
@@ -4871,7 +5420,7 @@ uint16_t Persistent<T>::WrapperClassId(Isolate* isolate) const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return 0;
   if (!I::IsInitialized(isolate)) return 0;
-  internal::Object** obj = reinterpret_cast<internal::Object**>(**this);
+  internal::Object** obj = reinterpret_cast<internal::Object**>(this->val_);
   uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
   return *reinterpret_cast<uint16_t*>(addr);
 }
@@ -5224,6 +5773,78 @@ ArrayBuffer* ArrayBuffer::Cast(v8::Value* value) {
 }
 
 
+TypedArray* TypedArray::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<TypedArray*>(value);
+}
+
+
+Uint8Array* Uint8Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Uint8Array*>(value);
+}
+
+
+Int8Array* Int8Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Int8Array*>(value);
+}
+
+
+Uint16Array* Uint16Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Uint16Array*>(value);
+}
+
+
+Int16Array* Int16Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Int16Array*>(value);
+}
+
+
+Uint32Array* Uint32Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Uint32Array*>(value);
+}
+
+
+Int32Array* Int32Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Int32Array*>(value);
+}
+
+
+Float32Array* Float32Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Float32Array*>(value);
+}
+
+
+Float64Array* Float64Array::Cast(v8::Value* value) {
+#ifdef V8_ENABLE_CHECKS
+  CheckCast(value);
+#endif
+  return static_cast<Float64Array*>(value);
+}
+
+
 Function* Function::Cast(v8::Value* value) {
 #ifdef V8_ENABLE_CHECKS
   CheckCast(value);
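End of the v8.h changes shown above. A short sketch of the experimental TypedArray surface they declare; InspectBytes is an illustrative helper, and ArrayBuffer::New(size_t) is assumed to exist alongside the New(void*, size_t) overload that appears later in this commit:

    void InspectBytes(v8::Isolate* isolate) {
      v8::HandleScope handle_scope(isolate);               // assumes V8 is initialized
      v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(1024);
      v8::Local<v8::Uint8Array> bytes =
          v8::Uint8Array::New(buffer, 0, 1024);            // view over the whole buffer
      if (bytes->IsTypedArray()) {                         // new Value query
        v8::TypedArray* array = v8::TypedArray::Cast(*bytes);
        size_t length = array->Length();                   // element count (1024)
        size_t offset = array->ByteOffset();               // offset into array->Buffer()
        void* base = array->BaseAddress();                 // backing store + offset
        (void)length; (void)offset; (void)base;
      }
    }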
diff --git a/deps/v8/samples/lineprocessor.cc b/deps/v8/samples/lineprocessor.cc
index b5b6367..2ce31b4 100644
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include <v8.h>
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -124,7 +128,9 @@ void DispatchDebugMessages() {
   // "evaluate" command, because it must be executed some context.
   // In our sample we have only one context, so there is nothing really to
   // think about.
-  v8::Context::Scope scope(debug_message_context);
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope scope(isolate, debug_message_context);
 
   v8::Debug::ProcessDebugMessages();
 }
@@ -136,8 +142,8 @@ int RunMain(int argc, char* argv[]) {
   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   v8::HandleScope handle_scope(isolate);
 
-  v8::Handle<v8::String> script_source(NULL);
-  v8::Handle<v8::Value> script_name(NULL);
+  v8::Handle<v8::String> script_source;
+  v8::Handle<v8::Value> script_name;
   int script_param_counter = 0;
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -209,7 +215,7 @@ int RunMain(int argc, char* argv[]) {
 
   // Create a new execution environment containing the built-in
   // functions
-  v8::Handle<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context = v8::Context::New(isolate, NULL, global);
   // Enter the newly created execution environment.
   v8::Context::Scope context_scope(context);
 
diff --git a/deps/v8/samples/process.cc b/deps/v8/samples/process.cc
index 4dcc09a..fd3a821 100644
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove this
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include <v8.h>
 
 #include <string>
@@ -163,11 +168,11 @@ bool JsHttpRequestProcessor::Initialize(map<string, string>* opts,
   // is what we need for the reference to remain after we return from
   // this method. That persistent handle has to be disposed in the
   // destructor.
-  context_ = Context::New(NULL, global);
+  context_.Reset(GetIsolate(), Context::New(GetIsolate(), NULL, global));
 
   // Enter the new context so all the following operations take place
   // within it.
-  Context::Scope context_scope(context_);
+  Context::Scope context_scope(GetIsolate(), context_);
 
   // Make the options mapping available within the context
   if (!InstallMaps(opts, output))
@@ -250,7 +255,7 @@ bool JsHttpRequestProcessor::Process(HttpRequest* request) {
 
   // Enter this processor's context so all the remaining operations
   // take place there
-  Context::Scope context_scope(context_);
+  Context::Scope context_scope(GetIsolate(), context_);
 
   // Wrap the C++ request object in a JavaScript wrapper
   Handle<Object> request_obj = WrapRequest(request);
@@ -303,7 +308,8 @@ Handle<Object> JsHttpRequestProcessor::WrapMap(map<string, string>* obj) {
     Handle<ObjectTemplate> raw_template = MakeMapTemplate(GetIsolate());
     map_template_ = Persistent<ObjectTemplate>::New(GetIsolate(), raw_template);
   }
-  Handle<ObjectTemplate> templ = map_template_;
+  Handle<ObjectTemplate> templ =
+      Local<ObjectTemplate>::New(GetIsolate(), map_template_);
 
   // Create an empty map wrapper.
   Handle<Object> result = templ->NewInstance();
@@ -410,7 +416,8 @@ Handle<Object> JsHttpRequestProcessor::WrapRequest(HttpRequest* request) {
     request_template_ =
         Persistent<ObjectTemplate>::New(GetIsolate(), raw_template);
   }
-  Handle<ObjectTemplate> templ = request_template_;
+  Handle<ObjectTemplate> templ =
+      Local<ObjectTemplate>::New(GetIsolate(), request_template_);
 
   // Create an empty http request wrapper.
   Handle<Object> result = templ->NewInstance();
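The process.cc hunks above switch template caching from the implicit Persistent-to-Handle conversion to an explicit Local::New(isolate, persistent). A minimal sketch of that caching pattern, with CachedTemplate and template_ as illustrative names:

    class CachedTemplate {
     public:
      v8::Handle<v8::ObjectTemplate> Get(v8::Isolate* isolate) {
        if (template_.IsEmpty()) {
          v8::Handle<v8::ObjectTemplate> raw = v8::ObjectTemplate::New();
          template_ = v8::Persistent<v8::ObjectTemplate>::New(isolate, raw);
        }
        // Materialize a Local for this use instead of relying on the implicit
        // Persistent -> Handle conversion that is being phased out.
        return v8::Local<v8::ObjectTemplate>::New(isolate, template_);
      }
     private:
      v8::Persistent<v8::ObjectTemplate> template_;
    };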
diff --git a/deps/v8/samples/shell.cc b/deps/v8/samples/shell.cc
index 0b71c2c..da18cc7 100644
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove this
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include <v8.h>
 #include <assert.h>
 #include <fcntl.h>
@@ -45,7 +50,7 @@
  */
 
 
-v8::Persistent<v8::Context> CreateShellContext();
+v8::Handle<v8::Context> CreateShellContext(v8::Isolate* isolate);
 void RunShell(v8::Handle<v8::Context> context);
 int RunMain(v8::Isolate* isolate, int argc, char* argv[]);
 bool ExecuteString(v8::Isolate* isolate,
@@ -72,7 +77,7 @@ int main(int argc, char* argv[]) {
   int result;
   {
     v8::HandleScope handle_scope(isolate);
-    v8::Persistent<v8::Context> context = CreateShellContext();
+    v8::Handle<v8::Context> context = CreateShellContext(isolate);
     if (context.IsEmpty()) {
       fprintf(stderr, "Error creating context\n");
       return 1;
@@ -81,7 +86,6 @@ int main(int argc, char* argv[]) {
     result = RunMain(isolate, argc, argv);
     if (run_shell) RunShell(context);
     context->Exit();
-    context.Dispose(isolate);
   }
   v8::V8::Dispose();
   return result;
@@ -96,7 +100,7 @@ const char* ToCString(const v8::String::Utf8Value& value) {
 
 // Creates a new execution environment containing the built-in
 // functions.
-v8::Persistent<v8::Context> CreateShellContext() {
+v8::Handle<v8::Context> CreateShellContext(v8::Isolate* isolate) {
   // Create a template for the global object.
   v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New();
   // Bind the global 'print' function to the C++ Print callback.
@@ -110,7 +114,7 @@ v8::Persistent<v8::Context> CreateShellContext() {
   // Bind the 'version' function
   global->Set(v8::String::New("version"), v8::FunctionTemplate::New(Version));
 
-  return v8::Context::New(NULL, global);
+  return v8::Context::New(isolate, NULL, global);
 }
 
 
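The shell.cc diff above also flips ownership: CreateShellContext now returns a handle owned by the caller's HandleScope, so main() no longer calls Dispose. A brief sketch of the call site under that assumption (RunShellScripts is an illustrative wrapper):

    int RunShellScripts(v8::Isolate* isolate) {
      v8::HandleScope handle_scope(isolate);
      v8::Handle<v8::Context> context = CreateShellContext(isolate);  // from the diff above
      if (context.IsEmpty()) return 1;
      v8::Context::Scope context_scope(context);
      // ... execute scripts ...
      return 0;
    }  // the context handle dies with the HandleScope; no Dispose(isolate) call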
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 2b24ab0..8a6eaf4 100644
@@ -25,6 +25,9 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "api.h"
 
 #include <string.h>  // For memcpy, strlen.
@@ -625,7 +628,7 @@ i::Object** V8::GlobalizeReference(i::Isolate* isolate, i::Object** obj) {
 void V8::MakeWeak(i::Isolate* isolate,
                   i::Object** object,
                   void* parameters,
-                  WeakReferenceCallback weak_reference_callback,
+                  RevivableCallback weak_reference_callback,
                   NearDeathCallback near_death_callback) {
   ASSERT(isolate == i::Isolate::Current());
   LOG_API(isolate, "MakeWeak");
@@ -2409,6 +2412,46 @@ bool Value::IsArray() const {
 }
 
 
+bool Value::IsArrayBuffer() const {
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsArrayBuffer()"))
+    return false;
+  return Utils::OpenHandle(this)->IsJSArrayBuffer();
+}
+
+
+bool Value::IsTypedArray() const {
+  if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsTypedArray()"))
+    return false;
+  return Utils::OpenHandle(this)->IsJSTypedArray();
+}
+
+
+#define TYPED_ARRAY_LIST(F) \
+F(Uint8Array, kExternalUnsignedByteArray) \
+F(Int8Array, kExternalByteArray) \
+F(Uint16Array, kExternalUnsignedShortArray) \
+F(Int16Array, kExternalShortArray) \
+F(Uint32Array, kExternalUnsignedIntArray) \
+F(Int32Array, kExternalIntArray) \
+F(Float32Array, kExternalFloatArray) \
+F(Float64Array, kExternalDoubleArray) \
+F(Uint8ClampedArray, kExternalPixelArray)
+
+
+#define VALUE_IS_TYPED_ARRAY(TypedArray, type_const)                          \
+  bool Value::Is##TypedArray() const {                                        \
+    if (IsDeadCheck(i::Isolate::Current(), "v8::Value::Is" #TypedArray "()")) \
+      return false;                                                           \
+    i::Handle<i::Object> obj = Utils::OpenHandle(this);                       \
+    if (!obj->IsJSTypedArray()) return false;                                 \
+    return i::JSTypedArray::cast(*obj)->type() == type_const;                 \
+  }
+
+TYPED_ARRAY_LIST(VALUE_IS_TYPED_ARRAY)
+
+#undef VALUE_IS_TYPED_ARRAY
+
+
 bool Value::IsObject() const {
   if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsObject()")) return false;
   return Utils::OpenHandle(this)->IsJSObject();
@@ -2755,6 +2798,32 @@ void v8::ArrayBuffer::CheckCast(Value* that) {
 }
 
 
+void v8::TypedArray::CheckCast(Value* that) {
+  if (IsDeadCheck(i::Isolate::Current(), "v8::TypedArray::Cast()")) return;
+  i::Handle<i::Object> obj = Utils::OpenHandle(that);
+  ApiCheck(obj->IsJSTypedArray(),
+           "v8::TypedArray::Cast()",
+           "Could not convert to TypedArray");
+}
+
+
+#define CHECK_TYPED_ARRAY_CAST(ApiClass, typeConst)                         \
+  void v8::ApiClass::CheckCast(Value* that) {                               \
+    if (IsDeadCheck(i::Isolate::Current(), "v8::" #ApiClass "::Cast()"))    \
+      return;                                                               \
+    i::Handle<i::Object> obj = Utils::OpenHandle(that);                     \
+    ApiCheck(obj->IsJSTypedArray() &&                                       \
+             i::JSTypedArray::cast(*obj)->type() == typeConst,              \
+             "v8::" #ApiClass "::Cast()",                                   \
+             "Could not convert to " #ApiClass);                            \
+  }
+
+
+TYPED_ARRAY_LIST(CHECK_TYPED_ARRAY_CAST)
+
+#undef CHECK_TYPED_ARRAY_CAST
+
+
 void v8::Date::CheckCast(v8::Value* that) {
   i::Isolate* isolate = i::Isolate::Current();
   if (IsDeadCheck(isolate, "v8::Date::Cast()")) return;
@@ -3281,7 +3350,7 @@ Local<String> v8::Object::ObjectProtoToString() {
       const char* postfix = "]";
 
       int prefix_len = i::StrLength(prefix);
-      int str_len = str->Length();
+      int str_len = str->Utf8Length();
       int postfix_len = i::StrLength(postfix);
 
       int buf_len = prefix_len + str_len + postfix_len;
@@ -3293,7 +3362,7 @@ Local<String> v8::Object::ObjectProtoToString() {
       ptr += prefix_len;
 
       // Write real content.
-      str->WriteAscii(ptr, 0, str_len);
+      str->WriteUtf8(ptr, str_len);
       ptr += str_len;
 
       // Write postfix.
@@ -4061,7 +4130,7 @@ bool String::IsOneByte() const {
   if (IsDeadCheck(str->GetIsolate(), "v8::String::IsOneByte()")) {
     return false;
   }
-  return str->IsOneByteConvertible();
+  return str->HasOnlyOneByteChars();
 }
 
 
@@ -5806,6 +5875,131 @@ Local<ArrayBuffer> v8::ArrayBuffer::New(void* data, size_t byte_length) {
 }
 
 
+Local<ArrayBuffer> v8::TypedArray::Buffer() {
+  i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+  if (IsDeadCheck(isolate, "v8::TypedArray::Buffer()"))
+    return Local<ArrayBuffer>();
+  i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
+  ASSERT(obj->buffer()->IsJSArrayBuffer());
+  i::Handle<i::JSArrayBuffer> buffer(i::JSArrayBuffer::cast(obj->buffer()));
+  return Utils::ToLocal(buffer);
+}
+
+
+size_t v8::TypedArray::ByteOffset() {
+  i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+  if (IsDeadCheck(isolate, "v8::TypedArray::ByteOffset()")) return 0;
+  i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
+  return static_cast<size_t>(obj->byte_offset()->Number());
+}
+
+
+size_t v8::TypedArray::ByteLength() {
+  i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+  if (IsDeadCheck(isolate, "v8::TypedArray::ByteLength()")) return 0;
+  i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
+  return static_cast<size_t>(obj->byte_length()->Number());
+}
+
+
+size_t v8::TypedArray::Length() {
+  i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+  if (IsDeadCheck(isolate, "v8::TypedArray::Length()")) return 0;
+  i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
+  return static_cast<size_t>(obj->length()->Number());
+}
+
+
+void* v8::TypedArray::BaseAddress() {
+  i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+  if (IsDeadCheck(isolate, "v8::TypedArray::BaseAddress()")) return NULL;
+  i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
+  i::Handle<i::JSArrayBuffer> buffer(i::JSArrayBuffer::cast(obj->buffer()));
+  void* buffer_data = buffer->backing_store();
+  size_t byte_offset = static_cast<size_t>(obj->byte_offset()->Number());
+  return static_cast<uint8_t*>(buffer_data) + byte_offset;
+}
+
+
+template<typename ElementType,
+         ExternalArrayType array_type,
+         i::ElementsKind elements_kind>
+i::Handle<i::JSTypedArray> NewTypedArray(
+    i::Isolate* isolate,
+    Handle<ArrayBuffer> array_buffer, size_t byte_offset, size_t length) {
+  i::Handle<i::JSTypedArray> obj =
+      isolate->factory()->NewJSTypedArray(array_type);
+  i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*array_buffer);
+
+  ASSERT(byte_offset % sizeof(ElementType) == 0);
+  ASSERT(byte_offset + length * sizeof(ElementType) <=
+      static_cast<size_t>(buffer->byte_length()->Number()));
+
+  obj->set_buffer(*buffer);
+
+  i::Handle<i::Object> byte_offset_object = isolate->factory()->NewNumber(
+        static_cast<double>(byte_offset));
+  obj->set_byte_offset(*byte_offset_object);
+
+  i::Handle<i::Object> byte_length_object = isolate->factory()->NewNumber(
+        static_cast<double>(length * sizeof(ElementType)));
+  obj->set_byte_length(*byte_length_object);
+
+  i::Handle<i::Object> length_object = isolate->factory()->NewNumber(
+        static_cast<double>(length));
+  obj->set_length(*length_object);
+
+  i::Handle<i::ExternalArray> elements =
+      isolate->factory()->NewExternalArray(
+          static_cast<int>(length), array_type,
+          static_cast<uint8_t*>(buffer->backing_store()) + byte_offset);
+  i::Handle<i::Map> map =
+      isolate->factory()->GetElementsTransitionMap(
+          obj, elements_kind);
+  obj->set_map(*map);
+  obj->set_elements(*elements);
+  return obj;
+}
+
+
+#define TYPED_ARRAY_NEW(TypedArray, element_type, array_type, elements_kind) \
+  Local<TypedArray> TypedArray::New(Handle<ArrayBuffer> array_buffer,        \
+                                    size_t byte_offset, size_t length) {     \
+    i::Isolate* isolate = i::Isolate::Current();                             \
+    EnsureInitializedForIsolate(isolate,                                     \
+        "v8::" #TypedArray "::New(Handle<ArrayBuffer>, size_t, size_t)");    \
+    LOG_API(isolate,                                                         \
+        "v8::" #TypedArray "::New(Handle<ArrayBuffer>, size_t, size_t)");    \
+    ENTER_V8(isolate);                                                       \
+    i::Handle<i::JSTypedArray> obj =                                         \
+        NewTypedArray<element_type, array_type, elements_kind>(              \
+            isolate, array_buffer, byte_offset, length);                     \
+    return Utils::ToLocal##TypedArray(obj);                                  \
+  }
+
+
+TYPED_ARRAY_NEW(Uint8Array, uint8_t, kExternalUnsignedByteArray,
+                i::EXTERNAL_UNSIGNED_BYTE_ELEMENTS)
+TYPED_ARRAY_NEW(Uint8ClampedArray, uint8_t, kExternalPixelArray,
+                i::EXTERNAL_PIXEL_ELEMENTS)
+TYPED_ARRAY_NEW(Int8Array, int8_t, kExternalByteArray,
+                i::EXTERNAL_BYTE_ELEMENTS)
+TYPED_ARRAY_NEW(Uint16Array, uint16_t, kExternalUnsignedShortArray,
+                i::EXTERNAL_UNSIGNED_SHORT_ELEMENTS)
+TYPED_ARRAY_NEW(Int16Array, int16_t, kExternalShortArray,
+                i::EXTERNAL_SHORT_ELEMENTS)
+TYPED_ARRAY_NEW(Uint32Array, uint32_t, kExternalUnsignedIntArray,
+                i::EXTERNAL_UNSIGNED_INT_ELEMENTS)
+TYPED_ARRAY_NEW(Int32Array, int32_t, kExternalIntArray,
+                i::EXTERNAL_INT_ELEMENTS)
+TYPED_ARRAY_NEW(Float32Array, float, kExternalFloatArray,
+                i::EXTERNAL_FLOAT_ELEMENTS)
+TYPED_ARRAY_NEW(Float64Array, double, kExternalDoubleArray,
+                i::EXTERNAL_DOUBLE_ELEMENTS)
+
+#undef TYPED_ARRAY_NEW
+
+
 Local<Symbol> v8::Symbol::New(Isolate* isolate) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   EnsureInitializedForIsolate(i_isolate, "v8::Symbol::New()");
@@ -5883,6 +6077,19 @@ Local<Integer> v8::Integer::NewFromUnsigned(uint32_t value, Isolate* isolate) {
 }
 
 
+#ifdef DEBUG
+v8::AssertNoGCScope::AssertNoGCScope(v8::Isolate* isolate)
+  : isolate_(isolate),
+    last_state_(i::EnterAllocationScope(
+        reinterpret_cast<i::Isolate*>(isolate), false)) {
+}
+
+v8::AssertNoGCScope::~AssertNoGCScope() {
+  i::ExitAllocationScope(reinterpret_cast<i::Isolate*>(isolate_), last_state_);
+}
+#endif
+
+
 void V8::IgnoreOutOfMemoryException() {
   EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
 }
@@ -6295,9 +6502,10 @@ String::AsciiValue::AsciiValue(v8::Handle<v8::Value> obj)
   TryCatch try_catch;
   Handle<String> str = obj->ToString();
   if (str.IsEmpty()) return;
-  length_ = str->Length();
+  length_ = str->Utf8Length();
   str_ = i::NewArray<char>(length_ + 1);
-  str->WriteAscii(str_);
+  str->WriteUtf8(str_);
+  ASSERT(i::String::NonAsciiStart(str_, length_) >= length_);
 }
 
 
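For orientation, below is a minimal sketch of how an embedder might exercise the TypedArray constructors and accessors added above. It assumes V8 is initialized with an Isolate and Context entered, reuses the ArrayBuffer::New(void*, size_t) overload visible in the hunk header, and omits error handling; the function and variable names are illustrative only.

#include <v8.h>

// Illustrative only: create a Float64Array view over part of an externally
// backed ArrayBuffer and read it back through the new accessors.
void InspectTypedArray() {
  v8::HandleScope scope;

  static double storage[8] = { 0 };
  v8::Local<v8::ArrayBuffer> buf =
      v8::ArrayBuffer::New(storage, sizeof(storage));

  // Four doubles starting at byte offset 16 (alignment and bounds satisfy the
  // ASSERTs in NewTypedArray above: 16 % 8 == 0 and 16 + 32 <= 64).
  v8::Local<v8::Float64Array> view = v8::Float64Array::New(buf, 16, 4);

  size_t byte_offset = view->ByteOffset();   // 16
  size_t byte_length = view->ByteLength();   // 4 * sizeof(double) == 32
  size_t length      = view->Length();       // 4
  void* base         = view->BaseAddress();  // backing store + byte_offset
  v8::Local<v8::ArrayBuffer> same = view->Buffer();  // the underlying buffer

  (void)byte_offset; (void)byte_length; (void)length; (void)base; (void)same;
}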
index f62541d..686abf7 100644
@@ -171,6 +171,16 @@ class RegisteredExtension {
   V(Object, JSObject)                          \
   V(Array, JSArray)                            \
   V(ArrayBuffer, JSArrayBuffer)                \
+  V(TypedArray, JSTypedArray)                  \
+  V(Uint8Array, JSTypedArray)                  \
+  V(Uint8ClampedArray, JSTypedArray)           \
+  V(Int8Array, JSTypedArray)                   \
+  V(Uint16Array, JSTypedArray)                 \
+  V(Int16Array, JSTypedArray)                  \
+  V(Uint32Array, JSTypedArray)                 \
+  V(Int32Array, JSTypedArray)                  \
+  V(Float32Array, JSTypedArray)                \
+  V(Float64Array, JSTypedArray)                \
   V(String, String)                            \
   V(Symbol, Symbol)                            \
   V(Script, Object)                            \
@@ -208,6 +218,28 @@ class Utils {
       v8::internal::Handle<v8::internal::JSArray> obj);
   static inline Local<ArrayBuffer> ToLocal(
       v8::internal::Handle<v8::internal::JSArrayBuffer> obj);
+
+  static inline Local<TypedArray> ToLocal(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Uint8Array> ToLocalUint8Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Uint8ClampedArray> ToLocalUint8ClampedArray(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Int8Array> ToLocalInt8Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Uint16Array> ToLocalUint16Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Int16Array> ToLocalInt16Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Uint32Array> ToLocalUint32Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Int32Array> ToLocalInt32Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Float32Array> ToLocalFloat32Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+  static inline Local<Float64Array> ToLocalFloat64Array(
+      v8::internal::Handle<v8::internal::JSTypedArray> obj);
+
   static inline Local<Message> MessageToLocal(
       v8::internal::Handle<v8::internal::Object> obj);
   static inline Local<StackTrace> StackTraceToLocal(
@@ -262,14 +294,34 @@ v8::internal::Handle<T> v8::internal::Handle<T>::EscapeFrom(
 }
 
 
+class InternalHandleHelper {
+ public:
+  template<class From, class To>
+  static inline Local<To> Convert(v8::internal::Handle<From> obj) {
+    return Local<To>(reinterpret_cast<To*>(obj.location()));
+  }
+};
+
+
 // Implementations of ToLocal
 
 #define MAKE_TO_LOCAL(Name, From, To)                                       \
   Local<v8::To> Utils::Name(v8::internal::Handle<v8::internal::From> obj) { \
     ASSERT(obj.is_null() || !obj->IsTheHole());                             \
-    return Local<To>(reinterpret_cast<To*>(obj.location()));                \
+    return InternalHandleHelper::Convert<v8::internal::From, v8::To>(obj);  \
   }
 
+
+#define MAKE_TO_LOCAL_TYPED_ARRAY(TypedArray, typeConst)                    \
+  Local<v8::TypedArray> Utils::ToLocal##TypedArray(                         \
+      v8::internal::Handle<v8::internal::JSTypedArray> obj) {               \
+    ASSERT(obj.is_null() || !obj->IsTheHole());                             \
+    ASSERT(obj->type() == typeConst);                                       \
+    return InternalHandleHelper::                                           \
+        Convert<v8::internal::JSTypedArray, v8::TypedArray>(obj);           \
+  }
+
+
 MAKE_TO_LOCAL(ToLocal, Context, Context)
 MAKE_TO_LOCAL(ToLocal, Object, Value)
 MAKE_TO_LOCAL(ToLocal, JSFunction, Function)
@@ -279,6 +331,18 @@ MAKE_TO_LOCAL(ToLocal, JSRegExp, RegExp)
 MAKE_TO_LOCAL(ToLocal, JSObject, Object)
 MAKE_TO_LOCAL(ToLocal, JSArray, Array)
 MAKE_TO_LOCAL(ToLocal, JSArrayBuffer, ArrayBuffer)
+MAKE_TO_LOCAL(ToLocal, JSTypedArray, TypedArray)
+
+MAKE_TO_LOCAL_TYPED_ARRAY(Uint8Array, kExternalUnsignedByteArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Uint8ClampedArray, kExternalPixelArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Int8Array, kExternalByteArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Uint16Array, kExternalUnsignedShortArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Int16Array, kExternalShortArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Uint32Array, kExternalUnsignedIntArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Int32Array, kExternalIntArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Float32Array, kExternalFloatArray)
+MAKE_TO_LOCAL_TYPED_ARRAY(Float64Array, kExternalDoubleArray)
+
 MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate)
 MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate)
 MAKE_TO_LOCAL(ToLocal, SignatureInfo, Signature)
@@ -293,6 +357,7 @@ MAKE_TO_LOCAL(Uint32ToLocal, Object, Uint32)
 MAKE_TO_LOCAL(ExternalToLocal, JSObject, External)
 MAKE_TO_LOCAL(ToLocal, DeclaredAccessorDescriptor, DeclaredAccessorDescriptor)
 
+#undef MAKE_TO_LOCAL_TYPED_ARRAY
 #undef MAKE_TO_LOCAL
 
 
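For reference, substituting one row of the table into MAKE_TO_LOCAL_TYPED_ARRAY above gives the following hand expansion for Uint8Array (whitespace added; otherwise this is what the preprocessor emits, shown only to make the type-tag check visible):

Local<v8::Uint8Array> Utils::ToLocalUint8Array(
    v8::internal::Handle<v8::internal::JSTypedArray> obj) {
  ASSERT(obj.is_null() || !obj->IsTheHole());
  ASSERT(obj->type() == kExternalUnsignedByteArray);
  return InternalHandleHelper::
      Convert<v8::internal::JSTypedArray, v8::Uint8Array>(obj);
}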
index f8fb00c..1423d56 100644
@@ -115,15 +115,18 @@ class CustomArguments : public Relocatable {
 
 
 #define DECLARE_RUNTIME_FUNCTION(Type, Name)    \
-Type Name(Arguments args, Isolate* isolate)
-
-
-#define RUNTIME_FUNCTION(Type, Name)            \
-Type Name(Arguments args, Isolate* isolate)
-
-
-#define RUNTIME_ARGUMENTS(isolate, args) args, isolate
-
+Type Name(int args_length, Object** args_object, Isolate* isolate)
+
+#define RUNTIME_FUNCTION(Type, Name)                                  \
+static Type __RT_impl_##Name(Arguments args, Isolate* isolate);       \
+Type Name(int args_length, Object** args_object, Isolate* isolate) {  \
+  Arguments args(args_length, args_object);                           \
+  return __RT_impl_##Name(args, isolate);                             \
+}                                                                     \
+static Type __RT_impl_##Name(Arguments args, Isolate* isolate)
+
+#define RUNTIME_ARGUMENTS(isolate, args) \
+  args.length(), args.arguments(), isolate
 
 } }  // namespace v8::internal
 
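The reworked RUNTIME_FUNCTION macro now emits a thin exported entry point that re-wraps the raw argument pointer and length before forwarding to a static implementation. A hand expansion for a hypothetical Runtime_Example follows; the name and the MaybeObject* return type are illustrative, not part of the patch.

// RUNTIME_FUNCTION(MaybeObject*, Runtime_Example) { /* body */ } expands to:

static MaybeObject* __RT_impl_Runtime_Example(Arguments args, Isolate* isolate);

MaybeObject* Runtime_Example(int args_length, Object** args_object,
                             Isolate* isolate) {
  Arguments args(args_length, args_object);  // rebuild the Arguments view
  return __RT_impl_Runtime_Example(args, isolate);
}

static MaybeObject* __RT_impl_Runtime_Example(Arguments args,
                                              Isolate* isolate) {
  /* body */
}

// A call site written as Name(RUNTIME_ARGUMENTS(isolate, args)) accordingly
// becomes Name(args.length(), args.arguments(), isolate).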
index cc6caca..86da76a 100644
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_ARM)
 
 #include "bootstrapper.h"
+#include "builtins-decls.h"
 #include "code-stubs.h"
 #include "regexp-macro-assembler.h"
 #include "stub-cache.h"
@@ -73,6 +74,28 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
 }
 
 
+void LoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r0 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r1 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -115,9 +138,10 @@ static void InitializeArrayConstructorDescriptor(
     int constant_stack_parameter_count) {
   // register state
   // r0 -- number of arguments
+  // r1 -- function
   // r2 -- type info cell with elements kind
-  static Register registers[] = { r2 };
-  descriptor->register_param_count_ = 1;
+  static Register registers[] = { r1, r2 };
+  descriptor->register_param_count_ = 2;
   if (constant_stack_parameter_count != 0) {
     // stack param count needs (constructor pointer, and single argument)
     descriptor->stack_parameter_count_ = &r0;
@@ -3776,12 +3800,6 @@ Register InstanceofStub::left() { return r0; }
 Register InstanceofStub::right() { return r1; }
 
 
-void LoadFieldStub::Generate(MacroAssembler* masm) {
-  StubCompiler::DoGenerateFastPropertyLoad(masm, r0, reg_, inobject_, index_);
-  __ Ret();
-}
-
-
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The displacement is the offset of the last parameter (if any)
   // relative to the frame pointer.
@@ -4733,6 +4751,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   Handle<Object> terminal_kind_sentinel =
       TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
                                                   LAST_FAST_ELEMENTS_KIND);
+  __ JumpIfNotSmi(r3, &miss);
   __ cmp(r3, Operand(terminal_kind_sentinel));
   __ b(gt, &miss);
   // Make sure the function is the Array() function
@@ -5941,8 +5960,36 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
   __ bind(&allocated);
   // Fill the fields of the cons string.
+  Label skip_write_barrier, after_writing;
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(masm->isolate());
+  __ mov(r4, Operand(high_promotion_mode));
+  __ ldr(r4, MemOperand(r4, 0));
+  __ cmp(r4, Operand::Zero());
+  __ b(eq, &skip_write_barrier);
+
   __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
+  __ RecordWriteField(r7,
+                      ConsString::kFirstOffset,
+                      r0,
+                      r4,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs);
   __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+  __ RecordWriteField(r7,
+                      ConsString::kSecondOffset,
+                      r1,
+                      r4,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs);
+  __ jmp(&after_writing);
+
+  __ bind(&skip_write_barrier);
+  __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
+  __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+
+  __ bind(&after_writing);
+
   __ mov(r0, Operand(r7));
   __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
   __ add(sp, sp, Operand(2 * kPointerSize));
@@ -6788,6 +6835,9 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
   { REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
   // FastNewClosureStub::Generate
   { REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
+  // StringAddStub::Generate
+  { REG(r7), REG(r1), REG(r4), EMIT_REMEMBERED_SET },
+  { REG(r7), REG(r0), REG(r4), EMIT_REMEMBERED_SET },
   // Null termination.
   { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
 };
@@ -7312,14 +7362,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     // Get the elements kind and case on that.
     __ cmp(r2, Operand(undefined_sentinel));
     __ b(eq, &no_info);
-    __ ldr(r3, FieldMemOperand(r2, kPointerSize));
-
-    // There is no info if the call site went megamorphic either
-    // TODO(mvstanton): Really? I thought if it was the array function that
-    // the cell wouldn't get stamped as megamorphic.
-    __ cmp(r3,
-           Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
-    __ b(eq, &no_info);
+    __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+    __ JumpIfNotSmi(r3, &no_info);
     __ SmiUntag(r3);
     __ jmp(&switch_ready);
     __ bind(&no_info);
index 848fae2..6bfaf41 100644
@@ -224,6 +224,15 @@ void Debug::GenerateKeyedStoreICDebugBreak(MacroAssembler* masm) {
 }
 
 
+void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
+  // Register state for CompareNil IC
+  // ----------- S t a t e -------------
+  //  -- r0    : value
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, r0.bit(), 0);
+}
+
+
 void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC call (from ic-arm.cc)
   // ----------- S t a t e -------------
index 0ef4be0..0bc1f48 100644
@@ -1593,7 +1593,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
       : ObjectLiteral::kNoFlags;
   __ mov(r0, Operand(Smi::FromInt(flags)));
   int properties_count = constant_properties->length() / 2;
-  if (expr->depth() > 1) {
+  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
+      expr->depth() > 1) {
     __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
@@ -1939,11 +1940,12 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       Label resume;
       __ CompareRoot(result_register(), Heap::kTheHoleValueRootIndex);
       __ b(ne, &resume);
-      __ pop(result_register());
       if (expr->yield_kind() == Yield::SUSPEND) {
-        // TODO(wingo): Box into { value: VALUE, done: false }.
+        EmitReturnIteratorResult(false);
+      } else {
+        __ pop(result_register());
+        EmitReturnSequence();
       }
-      EmitReturnSequence();
 
       __ bind(&resume);
       context()->Plug(result_register());
@@ -1955,18 +1957,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
       __ str(r1, FieldMemOperand(result_register(),
                                  JSGeneratorObject::kContinuationOffset));
-      __ pop(result_register());
-      // TODO(wingo): Box into { value: VALUE, done: true }.
-
-      // Exit all nested statements.
-      NestedStatement* current = nesting_stack_;
-      int stack_depth = 0;
-      int context_length = 0;
-      while (current != NULL) {
-        current = current->Exit(&stack_depth, &context_length);
-      }
-      __ Drop(stack_depth);
-      EmitReturnSequence();
+      EmitReturnIteratorResult(true);
       break;
     }
 
@@ -2074,6 +2065,55 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
 }
 
 
+void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
+  Label gc_required;
+  Label allocated;
+
+  Handle<Map> map(isolate()->native_context()->generator_result_map());
+
+  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
+
+  __ bind(&allocated);
+  __ mov(r1, Operand(map));
+  __ pop(r2);
+  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
+  __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
+  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
+  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
+  __ str(r2,
+         FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
+  __ str(r3,
+         FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
+
+  // Only the value field needs a write barrier, as the other values are in the
+  // root set.
+  __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
+                      r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
+
+  if (done) {
+    // Exit all nested statements.
+    NestedStatement* current = nesting_stack_;
+    int stack_depth = 0;
+    int context_length = 0;
+    while (current != NULL) {
+      current = current->Exit(&stack_depth, &context_length);
+    }
+    __ Drop(stack_depth);
+  }
+
+  EmitReturnSequence();
+
+  __ bind(&gc_required);
+  __ Push(Smi::FromInt(map->instance_size()));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  __ ldr(context_register(),
+         MemOperand(fp, StandardFrameConstants::kContextOffset));
+  __ jmp(&allocated);
+}
+
+
 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
index 893ac4e..c644be5 100644
@@ -1180,6 +1180,25 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
 }
 
 
+void StoreIC::GenerateSlow(MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- r0     : value
+  //  -- r2     : key
+  //  -- r1     : receiver
+  //  -- lr     : return address
+  // -----------------------------------
+
+  // Push receiver, key and value for runtime call.
+  __ Push(r1, r2, r0);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
+  __ TailCallExternalReference(ref, 3, 1);
+}
+
+
 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   // ---------- S t a t e --------------
   //  -- r0     : value
index 66c108d..3fe46ff 100644
@@ -552,6 +552,11 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
 }
 
 
+LOperand* LChunkBuilder::UseConstant(HValue* value) {
+  return chunk_->DefineConstantOperand(HConstant::cast(value));
+}
+
+
 LOperand* LChunkBuilder::UseAny(HValue* value) {
   return value->IsConstant()
       ? chunk_->DefineConstantOperand(HConstant::cast(value))
@@ -672,7 +677,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
   int vreg = allocator_->GetVirtualRegister();
   if (!allocator_->AllocationOk()) {
     Abort("Out of virtual registers while trying to allocate temp register.");
-    return NULL;
+    vreg = 0;
   }
   operand->set_virtual_register(vreg);
   return operand;
@@ -1300,8 +1305,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
 
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     return DefineAsRegister(new(zone()) LBitI(left, right));
   } else {
     ASSERT(instr->representation().IsTagged());
@@ -1479,15 +1484,15 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
     LOperand* left;
-    LOperand* right = UseOrConstant(instr->MostConstantOperand());
+    LOperand* right = UseOrConstant(instr->BetterRightOperand());
     LOperand* temp = NULL;
     if (instr->CheckFlag(HValue::kBailoutOnMinusZero) &&
         (instr->CheckFlag(HValue::kCanOverflow) ||
         !right->IsConstantOperand())) {
-      left = UseRegister(instr->LeastConstantOperand());
+      left = UseRegister(instr->BetterLeftOperand());
       temp = TempRegister();
     } else {
-      left = UseRegisterAtStart(instr->LeastConstantOperand());
+      left = UseRegisterAtStart(instr->BetterLeftOperand());
     }
     LMulI* mul = new(zone()) LMulI(left, right, temp);
     if (instr->CheckFlag(HValue::kCanOverflow) ||
@@ -1597,8 +1602,8 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     LAddI* add = new(zone()) LAddI(left, right);
     LInstruction* result = DefineAsRegister(add);
     if (instr->CheckFlag(HValue::kCanOverflow)) {
@@ -1629,8 +1634,8 @@ LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    left = UseRegisterAtStart(instr->LeastConstantOperand());
-    right = UseOrConstantAtStart(instr->MostConstantOperand());
+    left = UseRegisterAtStart(instr->BetterLeftOperand());
+    right = UseOrConstantAtStart(instr->BetterRightOperand());
   } else {
     ASSERT(instr->representation().IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
@@ -2114,8 +2119,8 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
 
 
 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
-  return DefineAsRegister(
-      new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
+  LOperand* obj = UseRegisterAtStart(instr->object());
+  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
 }
 
 
@@ -2150,12 +2155,6 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
 }
 
 
-LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
-  LOperand* input = UseRegisterAtStart(instr->value());
-  return DefineAsRegister(new(zone()) LLoadElements(input));
-}
-
-
 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
     HLoadExternalArrayPointer* instr) {
   LOperand* input = UseRegisterAtStart(instr->value());
@@ -2319,14 +2318,25 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
         : UseRegisterAtStart(instr->object());
   }
 
-  LOperand* val = needs_write_barrier
-      ? UseTempRegister(instr->value())
-      : UseRegister(instr->value());
+  LOperand* val;
+  if (needs_write_barrier ||
+      (FLAG_track_fields && instr->field_representation().IsSmi())) {
+    val = UseTempRegister(instr->value());
+  } else if (FLAG_track_double_fields &&
+             instr->field_representation().IsDouble()) {
+    val = UseRegisterAtStart(instr->value());
+  } else {
+    val = UseRegister(instr->value());
+  }
 
   // We need a temporary register for write barrier of the map field.
   LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
 
-  return new(zone()) LStoreNamedField(obj, val, temp);
+  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
+  if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    return AssignEnvironment(result);
+  }
+  return result;
 }
 
 
@@ -2378,7 +2388,9 @@ LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
 
 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
   info()->MarkAsDeferredCalling();
-  LOperand* size = UseTempRegister(instr->size());
+  LOperand* size = instr->size()->IsConstant()
+      ? UseConstant(instr->size())
+      : UseTempRegister(instr->size());
   LOperand* temp1 = TempRegister();
   LOperand* temp2 = TempRegister();
   LAllocate* result = new(zone()) LAllocate(size, temp1, temp2);
@@ -2440,7 +2452,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
 
 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
-  if (spill_index > LUnallocated::kMaxFixedIndex) {
+  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
     Abort("Too many spill slots needed for OSR");
     spill_index = 0;
   }
index d81881e..116d576 100644
@@ -122,7 +122,6 @@ class LCodeGen;
   V(Label)                                      \
   V(LazyBailout)                                \
   V(LoadContextSlot)                            \
-  V(LoadElements)                               \
   V(LoadExternalArrayPointer)                   \
   V(LoadFunctionPrototype)                      \
   V(LoadGlobalCell)                             \
@@ -1574,18 +1573,6 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
 };
 
 
-class LLoadElements: public LTemplateInstruction<1, 1, 0> {
- public:
-  explicit LLoadElements(LOperand* object) {
-    inputs_[0] = object;
-  }
-
-  LOperand* object() { return inputs_[0]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
-};
-
-
 class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LLoadExternalArrayPointer(LOperand* object) {
@@ -2142,6 +2129,9 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
+  Representation representation() const {
+    return hydrogen()->field_representation();
+  }
 };
 
 
@@ -2787,6 +2777,9 @@ class LChunkBuilder BASE_EMBEDDED {
   MUST_USE_RESULT LOperand* UseRegisterOrConstant(HValue* value);
   MUST_USE_RESULT LOperand* UseRegisterOrConstantAtStart(HValue* value);
 
+  // An input operand in a constant operand.
+  MUST_USE_RESULT LOperand* UseConstant(HValue* value);
+
   // An input operand in register, stack slot or a constant operand.
   // Will not be moved to a register even if one is freely available.
   MUST_USE_RESULT LOperand* UseAny(HValue* value);
index 29e01b9..3a0f476 100644
@@ -91,6 +91,10 @@ void LCodeGen::FinishCode(Handle<Code> code) {
     prototype_maps_.at(i)->AddDependentCode(
         DependentCode::kPrototypeCheckGroup, code);
   }
+  for (int i = 0; i < transition_maps_.length(); i++) {
+    transition_maps_.at(i)->AddDependentCode(
+        DependentCode::kTransitionGroup, code);
+  }
 }
 
 
@@ -1161,14 +1165,14 @@ void LCodeGen::DoModI(LModI* instr) {
   Register result = ToRegister(instr->result());
   Label done;
 
-  if (CpuFeatures::IsSupported(SUDIV)) {
-    CpuFeatureScope scope(masm(), SUDIV);
     // Check for x % 0.
-    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
-      __ cmp(right, Operand::Zero());
-      DeoptimizeIf(eq, instr->environment());
-    }
+  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
+    __ cmp(right, Operand::Zero());
+    DeoptimizeIf(eq, instr->environment());
+  }
 
+  if (CpuFeatures::IsSupported(SUDIV)) {
+    CpuFeatureScope scope(masm(), SUDIV);
     // Check for (kMinInt % -1).
     if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
       Label left_not_min_int;
@@ -1185,12 +1189,12 @@ void LCodeGen::DoModI(LModI* instr) {
 
     __ sdiv(result, left, right);
     __ mls(result, result, right, left);
-    __ cmp(result, Operand::Zero());
-    __ b(ne, &done);
 
     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
-       __ cmp(left, Operand::Zero());
-       DeoptimizeIf(lt, instr->environment());
+      __ cmp(result, Operand::Zero());
+      __ b(ne, &done);
+      __ cmp(left, Operand::Zero());
+      DeoptimizeIf(lt, instr->environment());
     }
   } else {
     Register scratch = scratch0();
@@ -1206,13 +1210,7 @@ void LCodeGen::DoModI(LModI* instr) {
     ASSERT(!scratch.is(right));
     ASSERT(!scratch.is(result));
 
-    Label vfp_modulo, both_positive, right_negative;
-
-    // Check for x % 0.
-    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
-      __ cmp(right, Operand::Zero());
-      DeoptimizeIf(eq, instr->environment());
-    }
+    Label vfp_modulo, right_negative;
 
     __ Move(result, left);
 
@@ -1230,7 +1228,7 @@ void LCodeGen::DoModI(LModI* instr) {
     __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                        scratch,
                                        &right_negative,
-                                       &both_positive);
+                                       &vfp_modulo);
     // Perform modulo operation (scratch contains right - 1).
     __ and_(result, scratch, Operand(left));
     __ b(&done);
@@ -1239,23 +1237,6 @@ void LCodeGen::DoModI(LModI* instr) {
     // Negate right. The sign of the divisor does not matter.
     __ rsb(right, right, Operand::Zero());
 
-    __ bind(&both_positive);
-    const int kUnfolds = 3;
-    // If the right hand side is smaller than the (nonnegative)
-    // left hand side, the left hand side is the result.
-    // Else try a few subtractions of the left hand side.
-    __ mov(scratch, left);
-    for (int i = 0; i < kUnfolds; i++) {
-      // Check if the left hand side is less or equal than the
-      // the right hand side.
-      __ cmp(scratch, Operand(right));
-      __ mov(result, scratch, LeaveCC, lt);
-      __ b(lt, &done);
-      // If not, reduce the left hand side by the right hand
-      // side and check again.
-      if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
-    }
-
     __ bind(&vfp_modulo);
     // Load the arguments in VFP registers.
     // The divisor value is preloaded before. Be careful that 'right'
@@ -3076,13 +3057,20 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
 
 
 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
+  int offset = instr->hydrogen()->offset();
   Register object = ToRegister(instr->object());
+  if (instr->hydrogen()->representation().IsDouble()) {
+    DwVfpRegister result = ToDoubleRegister(instr->result());
+    __ vldr(result, FieldMemOperand(object, offset));
+    return;
+  }
+
   Register result = ToRegister(instr->result());
   if (instr->hydrogen()->is_in_object()) {
-    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
+    __ ldr(result, FieldMemOperand(object, offset));
   } else {
     __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
-    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
+    __ ldr(result, FieldMemOperand(result, offset));
   }
 }
 
@@ -3228,40 +3216,6 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
 }
 
 
-void LCodeGen::DoLoadElements(LLoadElements* instr) {
-  Register result = ToRegister(instr->result());
-  Register input = ToRegister(instr->object());
-  Register scratch = scratch0();
-
-  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
-  if (FLAG_debug_code) {
-    Label done, fail;
-    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
-    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-    __ cmp(scratch, ip);
-    __ b(eq, &done);
-    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
-    __ cmp(scratch, ip);
-    __ b(eq, &done);
-    // |scratch| still contains |input|'s map.
-    __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
-    __ ubfx(scratch, scratch, Map::kElementsKindShift,
-            Map::kElementsKindBitCount);
-    __ cmp(scratch, Operand(GetInitialFastElementsKind()));
-    __ b(lt, &fail);
-    __ cmp(scratch, Operand(TERMINAL_FAST_ELEMENTS_KIND));
-    __ b(le, &done);
-    __ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ b(lt, &fail);
-    __ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ b(le, &done);
-    __ bind(&fail);
-    __ Abort("Check for fast or external elements failed.");
-    __ bind(&done);
-  }
-}
-
-
 void LCodeGen::DoLoadExternalArrayPointer(
     LLoadExternalArrayPointer* instr) {
   Register to_reg = ToRegister(instr->result());
@@ -4234,8 +4188,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
 
   __ mov(r0, Operand(instr->arity()));
   __ mov(r2, Operand(instr->hydrogen()->property_cell()));
-  Object* cell_value = instr->hydrogen()->property_cell()->value();
-  ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+  ElementsKind kind = instr->hydrogen()->elements_kind();
   if (instr->arity() == 0) {
     ArrayNoArgumentConstructorStub stub(kind);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
@@ -4262,15 +4215,34 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
+  Representation representation = instr->representation();
+
   Register object = ToRegister(instr->object());
-  Register value = ToRegister(instr->value());
   Register scratch = scratch0();
   int offset = instr->offset();
 
-  ASSERT(!object.is(value));
+  Handle<Map> transition = instr->transition();
 
-  if (!instr->transition().is_null()) {
-    __ mov(scratch, Operand(instr->transition()));
+  if (FLAG_track_fields && representation.IsSmi()) {
+    Register value = ToRegister(instr->value());
+    __ SmiTag(value, value, SetCC);
+    if (!instr->hydrogen()->value()->range()->IsInSmiRange()) {
+      DeoptimizeIf(vs, instr->environment());
+    }
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    ASSERT(transition.is_null());
+    ASSERT(instr->is_in_object());
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    DwVfpRegister value = ToDoubleRegister(instr->value());
+    __ vstr(value, FieldMemOperand(object, offset));
+    return;
+  }
+
+  if (!transition.is_null()) {
+    if (transition->CanBeDeprecated()) {
+      transition_maps_.Add(transition, info()->zone());
+    }
+    __ mov(scratch, Operand(transition));
     __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
     if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
       Register temp = ToRegister(instr->temp());
@@ -4287,6 +4259,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   }
 
   // Do the store.
+  Register value = ToRegister(instr->value());
+  ASSERT(!object.is(value));
   HType type = instr->hydrogen()->value()->type();
   SmiCheck check_needed =
       type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
@@ -5138,6 +5112,8 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
       } else {
         mode = NUMBER_CANDIDATE_IS_SMI;
       }
+    } else {
+      mode = NUMBER_CANDIDATE_IS_SMI;
     }
   }
 
@@ -5473,7 +5449,6 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
 
 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
 
   // TODO(3095996): Get rid of this. For now, we need to make the
@@ -5482,8 +5457,16 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   __ mov(result, Operand(Smi::FromInt(0)));
 
   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
-  __ SmiTag(size, size);
-  __ push(size);
+  if (instr->size()->IsRegister()) {
+    Register size = ToRegister(instr->size());
+    ASSERT(!size.is(result));
+    __ SmiTag(size);
+    __ push(size);
+  } else {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ Push(Smi::FromInt(size));
+  }
+
   if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldPointerSpace, 1, instr);
@@ -5566,7 +5549,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
 
   // Pick the right runtime function or stub to call.
   int properties_count = instr->hydrogen()->constant_properties_length() / 2;
-  if (instr->hydrogen()->depth() > 1) {
+  if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) ||
+      instr->hydrogen()->depth() > 1) {
     __ Push(r3, r2, r1, r0);
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
index ae175e5..294dcf2 100644
@@ -57,6 +57,7 @@ class LCodeGen BASE_EMBEDDED {
         deopt_jump_table_(4, info->zone()),
         deoptimization_literals_(8, info->zone()),
         prototype_maps_(0, info->zone()),
+        transition_maps_(0, info->zone()),
         inlined_function_count_(0),
         scope_(info->scope()),
         status_(UNUSED),
@@ -418,6 +419,7 @@ class LCodeGen BASE_EMBEDDED {
   ZoneList<JumpTableEntry> deopt_jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   ZoneList<Handle<Map> > prototype_maps_;
+  ZoneList<Handle<Map> > transition_maps_;
   int inlined_function_count_;
   Scope* const scope_;
   Status status_;
index b7cd3db..6e0b4a7 100644
@@ -1933,8 +1933,34 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
-           TAG_OBJECT);
+  Label allocate_new_space, install_map;
+  AllocationFlags flags = TAG_OBJECT;
+
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(isolate());
+  mov(scratch1, Operand(high_promotion_mode));
+  ldr(scratch1, MemOperand(scratch1, 0));
+  cmp(scratch1, Operand::Zero());
+  b(eq, &allocate_new_space);
+
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+  jmp(&install_map);
+
+  bind(&allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           flags);
+
+  bind(&install_map);
 
   InitializeNewString(result,
                       length,
@@ -3473,6 +3499,18 @@ void MacroAssembler::CheckPageFlag(
 }
 
 
+void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
+                                        Register scratch,
+                                        Label* if_deprecated) {
+  if (map->CanBeDeprecated()) {
+    mov(scratch, Operand(map));
+    ldr(scratch, FieldMemOperand(scratch, Map::kBitField3Offset));
+    tst(scratch, Operand(Smi::FromInt(Map::Deprecated::kMask)));
+    b(ne, if_deprecated);
+  }
+}
+
+
 void MacroAssembler::JumpIfBlack(Register object,
                                  Register scratch0,
                                  Register scratch1,
index b736c8f..9027291 100644
@@ -213,6 +213,10 @@ class MacroAssembler: public Assembler {
                      Condition cc,
                      Label* condition_met);
 
+  void CheckMapDeprecated(Handle<Map> map,
+                          Register scratch,
+                          Label* if_deprecated);
+
   // Check if object is in new space.  Jumps if the object is not in new space.
   // The register scratch can be object itself, but scratch will be clobbered.
   void JumpIfNotInNewSpace(Register object,
index 036fd7f..af65bc7 100644
@@ -975,12 +975,14 @@ ReturnType Simulator::GetFromVFPRegister(int reg_index) {
 }
 
 
-// For use in calls that take two double values, constructed either
+// Runtime FP routines take up to two double arguments and zero
+// or one integer argument. All are constructed either
 // from r0-r3 or d0 and d1.
-void Simulator::GetFpArgs(double* x, double* y) {
+void Simulator::GetFpArgs(double* x, double* y, int32_t* z) {
   if (use_eabi_hardfloat()) {
     *x = vfp_registers_[0];
     *y = vfp_registers_[1];
+    *z = registers_[1];
   } else {
     // We use a char buffer to get around the strict-aliasing rules which
     // otherwise allow the compiler to optimize away the copy.
@@ -988,44 +990,12 @@ void Simulator::GetFpArgs(double* x, double* y) {
     // Registers 0 and 1 -> x.
     OS::MemCopy(buffer, registers_, sizeof(*x));
     OS::MemCopy(x, buffer, sizeof(*x));
-    // Registers 2 and 3 -> y.
-    OS::MemCopy(buffer, registers_ + 2, sizeof(*y));
-    OS::MemCopy(y, buffer, sizeof(*y));
-  }
-}
-
-// For use in calls that take one double value, constructed either
-// from r0 and r1 or d0.
-void Simulator::GetFpArgs(double* x) {
-  if (use_eabi_hardfloat()) {
-    *x = vfp_registers_[0];
-  } else {
-    // We use a char buffer to get around the strict-aliasing rules which
-    // otherwise allow the compiler to optimize away the copy.
-    char buffer[sizeof(*x)];
-    // Registers 0 and 1 -> x.
-    OS::MemCopy(buffer, registers_, sizeof(*x));
-    OS::MemCopy(x, buffer, sizeof(*x));
-  }
-}
-
-
-// For use in calls that take one double value constructed either
-// from r0 and r1 or d0 and one integer value.
-void Simulator::GetFpArgs(double* x, int32_t* y) {
-  if (use_eabi_hardfloat()) {
-    *x = vfp_registers_[0];
-    *y = registers_[1];
-  } else {
-    // We use a char buffer to get around the strict-aliasing rules which
-    // otherwise allow the compiler to optimize away the copy.
-    char buffer[sizeof(*x)];
-    // Registers 0 and 1 -> x.
-    OS::MemCopy(buffer, registers_, sizeof(*x));
-    OS::MemCopy(x, buffer, sizeof(*x));
-    // Register 2 -> y.
+    // Registers 2 and 3 -> y.
     OS::MemCopy(buffer, registers_ + 2, sizeof(*y));
     OS::MemCopy(y, buffer, sizeof(*y));
+    // Register 2 -> z.
+    memcpy(buffer, registers_ + 2, sizeof(*z));
+    memcpy(z, buffer, sizeof(*z));
   }
 }
 
@@ -1648,10 +1618,12 @@ typedef int64_t (*SimulatorRuntimeCall)(int32_t arg0,
                                         int32_t arg3,
                                         int32_t arg4,
                                         int32_t arg5);
-typedef double (*SimulatorRuntimeFPCall)(int32_t arg0,
-                                         int32_t arg1,
-                                         int32_t arg2,
-                                         int32_t arg3);
+
+// These prototypes handle the four types of FP calls.
+typedef int64_t (*SimulatorRuntimeCompareCall)(double darg0, double darg1);
+typedef double (*SimulatorRuntimeFPFPCall)(double darg0, double darg1);
+typedef double (*SimulatorRuntimeFPCall)(double darg0);
+typedef double (*SimulatorRuntimeFPIntCall)(double darg0, int32_t arg0);
 
 // This signature supports direct call in to API function native callback
 // (refer to InvocationCallback in v8.h).
@@ -1717,27 +1689,27 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       intptr_t external =
           reinterpret_cast<intptr_t>(redirection->external_function());
       if (fp_call) {
+        double dval0, dval1;  // one or two double parameters
+        int32_t ival;         // zero or one integer parameter
+        int64_t iresult = 0;  // integer return value
+        double dresult = 0;   // double return value
+        GetFpArgs(&dval0, &dval1, &ival);
         if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
-          SimulatorRuntimeFPCall target =
-              reinterpret_cast<SimulatorRuntimeFPCall>(external);
-          double dval0, dval1;
-          int32_t ival;
+          SimulatorRuntimeCall generic_target =
+            reinterpret_cast<SimulatorRuntimeCall>(external);
           switch (redirection->type()) {
           case ExternalReference::BUILTIN_FP_FP_CALL:
           case ExternalReference::BUILTIN_COMPARE_CALL:
-            GetFpArgs(&dval0, &dval1);
             PrintF("Call to host function at %p with args %f, %f",
-                FUNCTION_ADDR(target), dval0, dval1);
+                   FUNCTION_ADDR(generic_target), dval0, dval1);
             break;
           case ExternalReference::BUILTIN_FP_CALL:
-            GetFpArgs(&dval0);
             PrintF("Call to host function at %p with arg %f",
-                FUNCTION_ADDR(target), dval0);
+                FUNCTION_ADDR(generic_target), dval0);
             break;
           case ExternalReference::BUILTIN_FP_INT_CALL:
-            GetFpArgs(&dval0, &ival);
             PrintF("Call to host function at %p with args %f, %d",
-                FUNCTION_ADDR(target), dval0, ival);
+                   FUNCTION_ADDR(generic_target), dval0, ival);
             break;
           default:
             UNREACHABLE();
@@ -1749,22 +1721,54 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
           PrintF("\n");
         }
         CHECK(stack_aligned);
-        if (redirection->type() != ExternalReference::BUILTIN_COMPARE_CALL) {
+        switch (redirection->type()) {
+        case ExternalReference::BUILTIN_COMPARE_CALL: {
+          SimulatorRuntimeCompareCall target =
+            reinterpret_cast<SimulatorRuntimeCompareCall>(external);
+          iresult = target(dval0, dval1);
+          set_register(r0, static_cast<int32_t>(iresult));
+          set_register(r1, static_cast<int32_t>(iresult >> 32));
+          break;
+        }
+        case ExternalReference::BUILTIN_FP_FP_CALL: {
+          SimulatorRuntimeFPFPCall target =
+            reinterpret_cast<SimulatorRuntimeFPFPCall>(external);
+          dresult = target(dval0, dval1);
+          SetFpResult(dresult);
+          break;
+        }
+        case ExternalReference::BUILTIN_FP_CALL: {
           SimulatorRuntimeFPCall target =
-              reinterpret_cast<SimulatorRuntimeFPCall>(external);
-          double result = target(arg0, arg1, arg2, arg3);
-          SetFpResult(result);
-        } else {
-          SimulatorRuntimeCall target =
-              reinterpret_cast<SimulatorRuntimeCall>(external);
-          int64_t result = target(arg0, arg1, arg2, arg3, arg4, arg5);
-          int32_t lo_res = static_cast<int32_t>(result);
-          int32_t hi_res = static_cast<int32_t>(result >> 32);
-          if (::v8::internal::FLAG_trace_sim) {
-            PrintF("Returned %08x\n", lo_res);
+            reinterpret_cast<SimulatorRuntimeFPCall>(external);
+          dresult = target(dval0);
+          SetFpResult(dresult);
+          break;
+        }
+        case ExternalReference::BUILTIN_FP_INT_CALL: {
+          SimulatorRuntimeFPIntCall target =
+            reinterpret_cast<SimulatorRuntimeFPIntCall>(external);
+          dresult = target(dval0, ival);
+          SetFpResult(dresult);
+          break;
+        }
+        default:
+          UNREACHABLE();
+          break;
+        }
+        if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
+          switch (redirection->type()) {
+          case ExternalReference::BUILTIN_COMPARE_CALL:
+            PrintF("Returned %08x\n", static_cast<int32_t>(iresult));
+            break;
+          case ExternalReference::BUILTIN_FP_FP_CALL:
+          case ExternalReference::BUILTIN_FP_CALL:
+          case ExternalReference::BUILTIN_FP_INT_CALL:
+            PrintF("Returned %f\n", dresult);
+            break;
+          default:
+            UNREACHABLE();
+            break;
           }
-          set_register(r0, lo_res);
-          set_register(r1, hi_res);
         }
       } else if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
         SimulatorRuntimeDirectApiCall target =
index 674ff42..45ae999 100644
@@ -348,10 +348,8 @@ class Simulator {
       void* external_function,
       v8::internal::ExternalReference::Type type);
 
-  // For use in calls that take double value arguments.
-  void GetFpArgs(double* x, double* y);
-  void GetFpArgs(double* x);
-  void GetFpArgs(double* x, int32_t* y);
+  // Handle arguments and return value for runtime FP functions.
+  void GetFpArgs(double* x, double* y, int32_t* z);
   void SetFpResult(const double& result);
   void TrashCallerSaveRegisters();
 
index ddcbd62..127bf3f 100644
@@ -315,11 +315,13 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
 }
 
 
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -451,8 +453,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register value_reg,
                                            Register scratch1,
                                            Register scratch2,
+                                           Register scratch3,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // r0 : value
   Label exit;
 
@@ -465,6 +469,15 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
   }
 
+  int descriptor = transition->LastAdded();
+  DescriptorArray* descriptors = transition->instance_descriptors();
+  PropertyDetails details = descriptors->GetDetails(descriptor);
+  Representation representation = details.representation();
+  ASSERT(!representation.IsNone());
+
+  // Ensure no transitions to deprecated maps are followed.
+  __ CheckMapDeprecated(transition, scratch1, miss_label);
+
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -480,7 +493,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
     Register holder_reg = CheckPrototypes(
         object, receiver_reg, Handle<JSObject>(holder), name_reg,
-        scratch1, scratch2, name, miss_restore_name);
+        scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -499,6 +512,30 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
 
+  Register storage_reg = name_reg;
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    Label do_store, heap_number;
+    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
+    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
+
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch1, value_reg);
+    __ vmov(s0, scratch1);
+    __ vcvt_f64_s32(d0, s0);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
+                miss_restore_name, DONT_DO_SMI_CHECK);
+    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
@@ -527,7 +564,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kLRHasNotBeenSaved,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
@@ -545,40 +582,60 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ str(value_reg, FieldMemOperand(receiver_reg, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ str(value_reg, FieldMemOperand(receiver_reg, offset));
+    }
+
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Pass the now unused name_reg as a scratch register.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kLRHasNotBeenSaved,
-                        kDontSaveFPRegs);
+      // Update the write barrier for the array address.
+      // Pass the now unused name_reg as a scratch register.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kLRHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array
     __ ldr(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ str(value_reg, FieldMemOperand(scratch1, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ str(storage_reg, FieldMemOperand(scratch1, offset));
+    } else {
+      __ str(value_reg, FieldMemOperand(scratch1, offset));
+    }
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
 
-    // Update the write barrier for the array address.
-    // Ok to clobber receiver_reg and name_reg, since we return.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kLRHasNotBeenSaved,
-                        kDontSaveFPRegs);
+      // Update the write barrier for the array address.
+      // Ok to clobber receiver_reg and name_reg, since we return.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kLRHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register r0).
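
The transition stub above is where the new per-field representation tracking is most visible: smi fields reject non-smi values up front, double fields box the incoming value into a freshly allocated HeapNumber (untagging smis via vcvt_f64_s32 or loading an existing HeapNumber's payload), and the write barrier is only emitted when an actual pointer gets written. A rough standalone C++ sketch of that decision, using made-up stand-in types rather than V8's:

#include <cstdint>
#include <cstdio>

// Stand-ins only: a "value" is either a small integer (smi) or a boxed double.
enum Representation { SMI, DOUBLE, TAGGED };

struct Value {
  bool is_smi;
  int32_t smi;      // valid when is_smi
  bool is_number;   // a heap number, when !is_smi
  double number;    // valid when is_number
};

struct StoreResult {
  bool miss;                 // the stub would jump to the miss label
  bool needs_write_barrier;  // a pointer was written that the GC must see
  double double_storage;     // what ends up in the HeapNumber backing a double field
};

StoreResult StoreWithRepresentation(Representation rep, const Value& v) {
  StoreResult r = { false, false, 0.0 };
  switch (rep) {
    case SMI:
      if (!v.is_smi) r.miss = true;    // JumpIfNotSmi(value_reg, miss)
      break;                           // smi stores never need a write barrier
    case DOUBLE:
      if (v.is_smi) {
        r.double_storage = v.smi;      // SmiUntag + vcvt_f64_s32
        r.needs_write_barrier = true;  // the freshly boxed HeapNumber is a pointer
      } else if (v.is_number) {
        r.double_storage = v.number;   // vldr from HeapNumber::kValueOffset
        r.needs_write_barrier = true;
      } else {
        r.miss = true;                 // CheckMap(value_reg, heap_number_map, miss)
      }
      break;
    case TAGGED:
      r.needs_write_barrier = !v.is_smi;  // skip the barrier when storing a smi
      break;
  }
  return r;
}

int main() {
  Value three = { true, 3, false, 0.0 };
  StoreResult r = StoreWithRepresentation(DOUBLE, three);
  std::printf("miss=%d barrier=%d stored=%g\n", r.miss, r.needs_write_barrier, r.double_storage);
  return 0;
}
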
@@ -624,24 +681,63 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();
 
+  Representation representation = lookup->representation();
+  ASSERT(!representation.IsNone());
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_label);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ ldr(scratch1,
+             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ ldr(scratch1, FieldMemOperand(scratch1, offset));
+    }
+
+    // Store the value into the storage.
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch2, value_reg);
+    __ vmov(s0, scratch2);
+    __ vcvt_f64_s32(d0, s0);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
+                miss_label, DONT_DO_SMI_CHECK);
+    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
+    // Return the value (register r0).
+    ASSERT(value_reg.is(r0));
+    __ Ret();
+    return;
+  }
+
   // TODO(verwaest): Share this code as a code stub.
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     __ str(value_reg, FieldMemOperand(receiver_reg, offset));
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Pass the now unused name_reg as a scratch register.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kLRHasNotBeenSaved,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
+
+      // Update the write barrier for the array address.
+      // Pass the now unused name_reg as a scratch register.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kLRHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
@@ -650,18 +746,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
     __ str(value_reg, FieldMemOperand(scratch1, offset));
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Ok to clobber receiver_reg and name_reg, since we return.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kLRHasNotBeenSaved,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
+
+      // Update the write barrier for the array address.
+      // Ok to clobber receiver_reg and name_reg, since we return.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kLRHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register r0).
@@ -1270,9 +1368,20 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex index) {
-  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
-  __ Ret();
+                                             PropertyIndex field,
+                                             Representation representation) {
+  if (!reg.is(receiver())) __ mov(receiver(), reg);
+  if (kind() == Code::LOAD_IC) {
+    LoadFieldStub stub(field.is_inobject(holder),
+                       field.translate(holder),
+                       representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  } else {
+    KeyedLoadFieldStub stub(field.is_inobject(holder),
+                            field.translate(holder),
+                            representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  }
 }
 
 
@@ -1496,7 +1605,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 
   // Do the right check and compute the holder register.
   Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
-  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
+  GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder),
+                           index.translate(holder), Representation::Tagged());
 
   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
 
@@ -2907,19 +3017,25 @@ Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC(
   Register map_reg = scratch1();
 
   int receiver_count = receiver_maps->length();
+  int number_of_handled_maps = 0;
   __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
   for (int current = 0; current < receiver_count; ++current) {
-    __ mov(ip, Operand(receiver_maps->at(current)));
-    __ cmp(map_reg, ip);
-    __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
+    Handle<Map> map = receiver_maps->at(current);
+    if (!map->is_deprecated()) {
+      number_of_handled_maps++;
+      __ mov(ip, Operand(receiver_maps->at(current)));
+      __ cmp(map_reg, ip);
+      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
+    }
   }
+  ASSERT(number_of_handled_maps != 0);
 
   __ bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
   // Return the generated code.
   InlineCacheState state =
-      receiver_maps->length() > 1 ? POLYMORPHIC : MONOMORPHIC;
+      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   return GetICCode(kind(), type, name, state);
 }
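
CompilePolymorphicIC now ignores receiver maps that have been deprecated by a representation change, and reports MONOMORPHIC when only one map is actually handled. The same selection logic as a self-contained C++ sketch with placeholder Map and Handler types (not V8's):

#include <cassert>
#include <cstddef>
#include <utility>
#include <vector>

struct Map { bool is_deprecated; };
struct Handler { int id; };

enum InlineCacheState { MONOMORPHIC, POLYMORPHIC };

// Deprecated maps get no compare-and-jump emitted for them; the IC state
// depends on how many maps were actually handled, not how many were requested.
InlineCacheState SelectHandlers(const std::vector<Map*>& receiver_maps,
                                const std::vector<Handler*>& handlers,
                                std::vector<std::pair<Map*, Handler*> >* dispatch) {
  int number_of_handled_maps = 0;
  for (size_t i = 0; i < receiver_maps.size(); ++i) {
    if (receiver_maps[i]->is_deprecated) continue;  // its field layout is obsolete
    ++number_of_handled_maps;
    dispatch->push_back(std::make_pair(receiver_maps[i], handlers[i]));
  }
  assert(number_of_handled_maps != 0);
  return number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
}

int main() {
  Map live = { false }, dead = { true };
  Handler h0 = { 0 }, h1 = { 1 };
  std::vector<Map*> maps;
  maps.push_back(&dead);
  maps.push_back(&live);
  std::vector<Handler*> handlers;
  handlers.push_back(&h0);
  handlers.push_back(&h1);
  std::vector<std::pair<Map*, Handler*> > dispatch;
  assert(SelectHandlers(maps, handlers, &dispatch) == MONOMORPHIC);
  assert(dispatch.size() == 1 && dispatch[0].second->id == 1);
  return 0;
}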
 
diff --git a/deps/v8/src/arraybuffer.js b/deps/v8/src/arraybuffer.js
new file mode 100644
index 0000000..2b0c3dd
--- /dev/null
+++ b/deps/v8/src/arraybuffer.js
@@ -0,0 +1,100 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"use strict";
+
+var $ArrayBuffer = global.ArrayBuffer;
+
+// -------------------------------------------------------------------
+
+function ArrayBufferConstructor(byteLength) { // length = 1
+  if (%_IsConstructCall()) {
+    var l = TO_POSITIVE_INTEGER(byteLength);
+    %ArrayBufferInitialize(this, l);
+  } else {
+    return new $ArrayBuffer(byteLength);
+  }
+}
+
+function ArrayBufferGetByteLength() {
+  if (!IS_ARRAYBUFFER(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['ArrayBuffer.prototype.byteLength', this]);
+  }
+  return %ArrayBufferGetByteLength(this);
+}
+
+// ES6 Draft 15.13.5.5.3
+function ArrayBufferSlice(start, end) {
+  if (!IS_ARRAYBUFFER(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['ArrayBuffer.prototype.slice', this]);
+  }
+
+  var relativeStart = TO_INTEGER(start);
+  var first;
+  if (relativeStart < 0) {
+    first = MathMax(this.byteLength + relativeStart, 0);
+  } else {
+    first = MathMin(relativeStart, this.byteLength);
+  }
+  var relativeEnd = IS_UNDEFINED(end) ? this.byteLength : TO_INTEGER(end);
+  var fin;
+  if (relativeEnd < 0) {
+    fin = MathMax(this.byteLength + relativeEnd, 0);
+  } else {
+    fin = MathMin(relativeEnd, this.byteLength);
+  }
+
+  var newLen = fin - first;
+  // TODO(dslomov): implement inheritance
+  var result = new $ArrayBuffer(newLen);
+
+  %ArrayBufferSliceImpl(this, result, first);
+  return result;
+}
+
+function SetUpArrayBuffer() {
+  %CheckIsBootstrapping();
+
+  // Set up the ArrayBuffer constructor function.
+  %SetCode($ArrayBuffer, ArrayBufferConstructor);
+  %FunctionSetPrototype($ArrayBuffer, new $Object());
+
+  // Set up the constructor property on the ArrayBuffer prototype object.
+  %SetProperty($ArrayBuffer.prototype, "constructor", $ArrayBuffer, DONT_ENUM);
+
+  InstallGetter($ArrayBuffer.prototype, "byteLength", ArrayBufferGetByteLength);
+
+  InstallFunctions($ArrayBuffer.prototype, DONT_ENUM, $Array(
+      "slice", ArrayBufferSlice
+  ));
+}
+
+SetUpArrayBuffer();
+
+
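
ArrayBufferSlice above follows the ES6 draft's index handling: a negative start or end counts back from byteLength, both are clamped into [0, byteLength], and the new length is then fin - first, which goes straight to the ArrayBuffer constructor. The same arithmetic as a small standalone C++ sketch, with plain integers standing in for the JS values:

#include <algorithm>
#include <cassert>
#include <cstdint>

// Mirrors the relativeStart/relativeEnd handling: negative indices count back
// from byte_length, and the result is clamped to [0, byte_length].
int64_t ClampIndex(int64_t relative, int64_t byte_length) {
  if (relative < 0) return std::max<int64_t>(byte_length + relative, 0);
  return std::min(relative, byte_length);
}

// Length of the resulting buffer; the JS code hands this to new $ArrayBuffer.
int64_t SliceLength(int64_t byte_length, int64_t start, int64_t end) {
  int64_t first = ClampIndex(start, byte_length);
  int64_t fin = ClampIndex(end, byte_length);
  return fin - first;
}

int main() {
  assert(SliceLength(16, 4, 12) == 8);    // plain positive indices
  assert(SliceLength(16, -4, 16) == 4);   // negative start counts from the end
  assert(SliceLength(16, 0, 100) == 16);  // end is clamped to byteLength
  return 0;
}
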
index fff588a..6b0c4b8 100644
@@ -1203,6 +1203,13 @@ ExternalReference ExternalReference::old_data_space_allocation_limit_address(
 }
 
 
+ExternalReference ExternalReference::
+    new_space_high_promotion_mode_active_address(Isolate* isolate) {
+  return ExternalReference(
+      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
+}
+
+
 ExternalReference ExternalReference::handle_scope_level_address(
     Isolate* isolate) {
   return ExternalReference(HandleScope::current_level_address(isolate));
index 32424cf..6abd5c5 100644
@@ -757,6 +757,8 @@ class ExternalReference BASE_EMBEDDED {
       Isolate* isolate);
   static ExternalReference old_data_space_allocation_limit_address(
       Isolate* isolate);
+  static ExternalReference new_space_high_promotion_mode_active_address(
+      Isolate* isolate);
 
   static ExternalReference double_fp_operation(Token::Value operation,
                                                Isolate* isolate);
index 10ae7de..9ffb00d 100644
@@ -277,6 +277,14 @@ class SmallMapList {
   bool is_empty() const { return list_.is_empty(); }
   int length() const { return list_.length(); }
 
+  void AddMapIfMissing(Handle<Map> map, Zone* zone) {
+    map = Map::CurrentMapForDeprecated(map);
+    for (int i = 0; i < length(); ++i) {
+      if (at(i).is_identical_to(map)) return;
+    }
+    Add(map, zone);
+  }
+
   void Add(Handle<Map> handle, Zone* zone) {
     list_.Add(handle.location(), zone);
   }
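
AddMapIfMissing canonicalizes a possibly deprecated map through Map::CurrentMapForDeprecated before doing the linear duplicate check, so a deprecated map and its migrated replacement collapse into a single entry. A toy C++ version of that idea, where a deprecated map simply points at its replacement (the real lookup walks the transition tree, so this is only a stand-in):

#include <cassert>
#include <cstddef>
#include <vector>

struct Map { Map* migration_target; };  // non-null means this map is deprecated

// Stand-in for Map::CurrentMapForDeprecated: follow the chain to the live map.
Map* CurrentMap(Map* map) {
  while (map->migration_target != 0) map = map->migration_target;
  return map;
}

// Canonicalize first, then scan for duplicates before appending.
void AddMapIfMissing(std::vector<Map*>* list, Map* map) {
  map = CurrentMap(map);
  for (size_t i = 0; i < list->size(); ++i) {
    if ((*list)[i] == map) return;
  }
  list->push_back(map);
}

int main() {
  Map current = { 0 };
  Map deprecated = { &current };
  std::vector<Map*> list;
  AddMapIfMissing(&list, &deprecated);
  AddMapIfMissing(&list, &current);
  assert(list.size() == 1 && list[0] == &current);
  return 0;
}
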
@@ -1324,10 +1332,9 @@ class ObjectLiteral: public MaterializedLiteral {
     return constant_properties_;
   }
   ZoneList<Property*>* properties() const { return properties_; }
-
   bool fast_elements() const { return fast_elements_; }
-
-  bool has_function() { return has_function_; }
+  bool may_store_doubles() const { return may_store_doubles_; }
+  bool has_function() const { return has_function_; }
 
   // Mark all computed expressions that are bound to a key that
   // is shadowed by a later occurrence of the same key. For the
@@ -1354,17 +1361,20 @@ class ObjectLiteral: public MaterializedLiteral {
                 bool is_simple,
                 bool fast_elements,
                 int depth,
+                bool may_store_doubles,
                 bool has_function)
       : MaterializedLiteral(isolate, literal_index, is_simple, depth),
         constant_properties_(constant_properties),
         properties_(properties),
         fast_elements_(fast_elements),
+        may_store_doubles_(may_store_doubles),
         has_function_(has_function) {}
 
  private:
   Handle<FixedArray> constant_properties_;
   ZoneList<Property*>* properties_;
   bool fast_elements_;
+  bool may_store_doubles_;
   bool has_function_;
 };
 
@@ -2849,10 +2859,11 @@ class AstNodeFactory BASE_EMBEDDED {
       bool is_simple,
       bool fast_elements,
       int depth,
+      bool may_store_doubles,
       bool has_function) {
     ObjectLiteral* lit = new(zone_) ObjectLiteral(
         isolate_, constant_properties, properties, literal_index,
-        is_simple, fast_elements, depth, has_function);
+        is_simple, fast_elements, depth, may_store_doubles, has_function);
     VISIT_AND_RETURN(ObjectLiteral, lit)
   }
 
index 6e55b50..e58d598 100644
@@ -168,7 +168,7 @@ inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
   return *ptr;
 }
 
-#if defined(__x86_64__)
+#if defined(__x86_64__) && defined(V8_HOST_ARCH_64_BIT)
 
 // 64-bit low-level operations on 64-bit platform.
 
index 85bf96e..b0d3a5e 100644
@@ -201,7 +201,7 @@ class Genesis BASE_EMBEDDED {
                                           ElementsKind elements_kind);
   bool InstallNatives();
 
-  void InstallTypedArray(const char* name);
+  Handle<JSFunction> InstallTypedArray(const char* name);
   bool InstallExperimentalNatives();
   void InstallBuiltinFunctionIds();
   void InstallJSFunctionResultCaches();
@@ -979,28 +979,32 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
       // ECMA-262, section 15.10.7.1.
       FieldDescriptor field(heap->source_string(),
                             JSRegExp::kSourceFieldIndex,
-                            final);
+                            final,
+                            Representation::Tagged());
       initial_map->AppendDescriptor(&field, witness);
     }
     {
       // ECMA-262, section 15.10.7.2.
       FieldDescriptor field(heap->global_string(),
                             JSRegExp::kGlobalFieldIndex,
-                            final);
+                            final,
+                            Representation::Tagged());
       initial_map->AppendDescriptor(&field, witness);
     }
     {
       // ECMA-262, section 15.10.7.3.
       FieldDescriptor field(heap->ignore_case_string(),
                             JSRegExp::kIgnoreCaseFieldIndex,
-                            final);
+                            final,
+                            Representation::Tagged());
       initial_map->AppendDescriptor(&field, witness);
     }
     {
       // ECMA-262, section 15.10.7.4.
       FieldDescriptor field(heap->multiline_string(),
                             JSRegExp::kMultilineFieldIndex,
-                            final);
+                            final,
+                            Representation::Tagged());
       initial_map->AppendDescriptor(&field, witness);
     }
     {
@@ -1009,7 +1013,8 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
           static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
       FieldDescriptor field(heap->last_index_string(),
                             JSRegExp::kLastIndexFieldIndex,
-                            writable);
+                            writable,
+                            Representation::Tagged());
       initial_map->AppendDescriptor(&field, witness);
     }
 
@@ -1161,7 +1166,8 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     map->set_instance_descriptors(*descriptors);
 
     {  // length
-      FieldDescriptor d(*factory->length_string(), 0, DONT_ENUM);
+      FieldDescriptor d(
+          *factory->length_string(), 0, DONT_ENUM, Representation::Tagged());
       map->AppendDescriptor(&d, witness);
     }
     {  // callee
@@ -1270,11 +1276,11 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
 }
 
 
-void Genesis::InstallTypedArray(const char* name) {
+Handle<JSFunction> Genesis::InstallTypedArray(const char* name) {
   Handle<JSObject> global = Handle<JSObject>(native_context()->global_object());
-  InstallFunction(global, name, JS_TYPED_ARRAY_TYPE,
-                  JSTypedArray::kSize, isolate()->initial_object_prototype(),
-                  Builtins::kIllegal, true);
+  return InstallFunction(global, name, JS_TYPED_ARRAY_TYPE,
+      JSTypedArray::kSize, isolate()->initial_object_prototype(),
+      Builtins::kIllegal, true);
 }
 
 
@@ -1311,26 +1317,36 @@ void Genesis::InitializeExperimentalGlobal() {
     }
   }
 
+  if (FLAG_harmony_array_buffer) {
+    // -- A r r a y B u f f e r
+    Handle<JSFunction> array_buffer_fun =
+        InstallFunction(global, "ArrayBuffer", JS_ARRAY_BUFFER_TYPE,
+                        JSArrayBuffer::kSize,
+                        isolate()->initial_object_prototype(),
+                        Builtins::kIllegal, true);
+    native_context()->set_array_buffer_fun(*array_buffer_fun);
+  }
+
   if (FLAG_harmony_typed_arrays) {
-    {  // -- A r r a y B u f f e r
-      Handle<JSFunction> array_buffer_fun =
-          InstallFunction(global, "__ArrayBuffer", JS_ARRAY_BUFFER_TYPE,
-                          JSArrayBuffer::kSize,
-                          isolate()->initial_object_prototype(),
-                          Builtins::kIllegal, true);
-      native_context()->set_array_buffer_fun(*array_buffer_fun);
-    }
-    {
-      // -- T y p e d A r r a y s
-      InstallTypedArray("__Int8Array");
-      InstallTypedArray("__Uint8Array");
-      InstallTypedArray("__Int16Array");
-      InstallTypedArray("__Uint16Array");
-      InstallTypedArray("__Int32Array");
-      InstallTypedArray("__Uint32Array");
-      InstallTypedArray("__Float32Array");
-      InstallTypedArray("__Float64Array");
-    }
+    // -- T y p e d A r r a y s
+    Handle<JSFunction> int8_fun = InstallTypedArray("Int8Array");
+    native_context()->set_int8_array_fun(*int8_fun);
+    Handle<JSFunction> uint8_fun = InstallTypedArray("Uint8Array");
+    native_context()->set_uint8_array_fun(*uint8_fun);
+    Handle<JSFunction> int16_fun = InstallTypedArray("Int16Array");
+    native_context()->set_int16_array_fun(*int16_fun);
+    Handle<JSFunction> uint16_fun = InstallTypedArray("Uint16Array");
+    native_context()->set_uint16_array_fun(*uint16_fun);
+    Handle<JSFunction> int32_fun = InstallTypedArray("Int32Array");
+    native_context()->set_int32_array_fun(*int32_fun);
+    Handle<JSFunction> uint32_fun = InstallTypedArray("Uint32Array");
+    native_context()->set_uint32_array_fun(*uint32_fun);
+    Handle<JSFunction> float_fun = InstallTypedArray("Float32Array");
+    native_context()->set_float_array_fun(*float_fun);
+    Handle<JSFunction> double_fun = InstallTypedArray("Float64Array");
+    native_context()->set_double_array_fun(*double_fun);
+    Handle<JSFunction> uint8c_fun = InstallTypedArray("Uint8ClampedArray");
+    native_context()->set_uint8c_array_fun(*uint8c_fun);
   }
 
   if (FLAG_harmony_generators) {
@@ -1371,6 +1387,40 @@ void Genesis::InitializeExperimentalGlobal() {
         *generator_object_prototype);
     native_context()->set_generator_object_prototype_map(
         *generator_object_prototype_map);
+
+    // Create a map for generator result objects.
+    ASSERT(object_map->inobject_properties() == 0);
+    STATIC_ASSERT(JSGeneratorObject::kResultPropertyCount == 2);
+    Handle<Map> generator_result_map = factory()->CopyMap(object_map,
+        JSGeneratorObject::kResultPropertyCount);
+    ASSERT(generator_result_map->inobject_properties() ==
+        JSGeneratorObject::kResultPropertyCount);
+
+    Handle<DescriptorArray> descriptors = factory()->NewDescriptorArray(0,
+        JSGeneratorObject::kResultPropertyCount);
+    DescriptorArray::WhitenessWitness witness(*descriptors);
+    generator_result_map->set_instance_descriptors(*descriptors);
+
+    Handle<String> value_string = factory()->InternalizeOneByteString(
+        STATIC_ASCII_VECTOR("value"));
+    FieldDescriptor value_descr(*value_string,
+                                JSGeneratorObject::kResultValuePropertyIndex,
+                                NONE,
+                                Representation::Tagged());
+    generator_result_map->AppendDescriptor(&value_descr, witness);
+
+    Handle<String> done_string = factory()->InternalizeOneByteString(
+        STATIC_ASCII_VECTOR("done"));
+    FieldDescriptor done_descr(*done_string,
+                               JSGeneratorObject::kResultDonePropertyIndex,
+                               NONE,
+                               Representation::Tagged());
+    generator_result_map->AppendDescriptor(&done_descr, witness);
+
+    generator_result_map->set_unused_property_fields(0);
+    ASSERT_EQ(JSGeneratorObject::kResultSize,
+              generator_result_map->instance_size());
+    native_context()->set_generator_result_map(*generator_result_map);
   }
 }
 
@@ -1924,14 +1974,16 @@ bool Genesis::InstallNatives() {
     {
       FieldDescriptor index_field(heap()->index_string(),
                                   JSRegExpResult::kIndexIndex,
-                                  NONE);
+                                  NONE,
+                                  Representation::Tagged());
       initial_map->AppendDescriptor(&index_field, witness);
     }
 
     {
       FieldDescriptor input_field(heap()->input_string(),
                                   JSRegExpResult::kInputIndex,
-                                  NONE);
+                                  NONE,
+                                  Representation::Tagged());
       initial_map->AppendDescriptor(&input_field, witness);
     }
 
@@ -1974,6 +2026,11 @@ bool Genesis::InstallExperimentalNatives() {
                "native object-observe.js") == 0) {
       if (!CompileExperimentalBuiltin(isolate(), i)) return false;
     }
+    if (FLAG_harmony_array_buffer &&
+        strcmp(ExperimentalNatives::GetScriptName(i).start(),
+               "native arraybuffer.js") == 0) {
+      if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+    }
     if (FLAG_harmony_typed_arrays &&
         strcmp(ExperimentalNatives::GetScriptName(i).start(),
                "native typedarray.js") == 0) {
@@ -2352,14 +2409,15 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
   if (from->HasFastProperties()) {
     Handle<DescriptorArray> descs =
         Handle<DescriptorArray>(from->map()->instance_descriptors());
-    for (int i = 0; i < descs->number_of_descriptors(); i++) {
+    for (int i = 0; i < from->map()->NumberOfOwnDescriptors(); i++) {
       PropertyDetails details = descs->GetDetails(i);
       switch (details.type()) {
         case FIELD: {
           HandleScope inner(isolate());
           Handle<Name> key = Handle<Name>(descs->GetKey(i));
           int index = descs->GetFieldIndex(i);
-          Handle<Object> value = Handle<Object>(from->FastPropertyAt(index),
+          ASSERT(!descs->GetDetails(i).representation().IsDouble());
+          Handle<Object> value = Handle<Object>(from->RawFastPropertyAt(index),
                                                 isolate());
           CHECK_NOT_EMPTY_HANDLE(isolate(),
                                  JSObject::SetLocalPropertyIgnoreAttributes(
@@ -2386,9 +2444,8 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
           // Add to dictionary.
           Handle<Name> key = Handle<Name>(descs->GetKey(i));
           Handle<Object> callbacks(descs->GetCallbacksObject(i), isolate());
-          PropertyDetails d = PropertyDetails(details.attributes(),
-                                              CALLBACKS,
-                                              details.descriptor_index());
+          PropertyDetails d = PropertyDetails(
+              details.attributes(), CALLBACKS, i + 1);
           JSObject::SetNormalizedProperty(to, key, callbacks, d);
           break;
         }
diff --git a/deps/v8/src/builtins-decls.h b/deps/v8/src/builtins-decls.h
new file mode 100644
index 0000000..beb5dd1
--- /dev/null
+++ b/deps/v8/src/builtins-decls.h
@@ -0,0 +1,40 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_BUILTINS_DECLS_H_
+#define V8_BUILTINS_DECLS_H_
+
+#include "arguments.h"
+
+namespace v8 {
+namespace internal {
+
+DECLARE_RUNTIME_FUNCTION(MaybeObject*, ArrayConstructor_StubFailure);
+
+} }  // namespace v8::internal
+
+#endif  // V8_BUILTINS_DECLS_H_
index 5718180..661ee94 100644
@@ -125,23 +125,31 @@ BUILTIN_LIST_C(DEF_ARG_TYPE)
 
 #ifdef DEBUG
 
-#define BUILTIN(name)                                      \
-  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
-      name##ArgumentsType args, Isolate* isolate);         \
-  MUST_USE_RESULT static MaybeObject* Builtin_##name(      \
-      name##ArgumentsType args, Isolate* isolate) {        \
-    ASSERT(isolate == Isolate::Current());                 \
-    args.Verify();                                         \
-    return Builtin_Impl_##name(args, isolate);             \
-  }                                                        \
-  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
+#define BUILTIN(name)                                            \
+  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
+      name##ArgumentsType args, Isolate* isolate);               \
+  MUST_USE_RESULT static MaybeObject* Builtin_##name(            \
+      int args_length, Object** args_object, Isolate* isolate) { \
+    name##ArgumentsType args(args_length, args_object);          \
+    ASSERT(isolate == Isolate::Current());                       \
+    args.Verify();                                               \
+    return Builtin_Impl_##name(args, isolate);                   \
+  }                                                              \
+  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
       name##ArgumentsType args, Isolate* isolate)
 
 #else  // For release mode.
 
-#define BUILTIN(name)                                      \
-  static MaybeObject* Builtin_##name(name##ArgumentsType args, Isolate* isolate)
-
+#define BUILTIN(name)                                            \
+  static MaybeObject* Builtin_impl##name(                        \
+      name##ArgumentsType args, Isolate* isolate);               \
+  static MaybeObject* Builtin_##name(                            \
+      int args_length, Object** args_object, Isolate* isolate) { \
+    name##ArgumentsType args(args_length, args_object);          \
+    return Builtin_impl##name(args, isolate);                    \
+  }                                                              \
+  static MaybeObject* Builtin_impl##name(                        \
+      name##ArgumentsType args, Isolate* isolate)
 #endif
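
The interesting part of the new BUILTIN macro is the calling convention: the generated Builtin_##name entry point now receives a raw (length, Object**) pair and builds the typed Arguments wrapper itself, instead of being handed an Arguments object directly. A compilable toy expansion of the same shape, with trivial stand-ins for Object, Isolate and Arguments (these are not V8's types):

#include <cstdio>

struct Object {};
struct Isolate {};

struct Arguments {
  Arguments(int length, Object** values) : length_(length), values_(values) {}
  int length() const { return length_; }
  int length_;
  Object** values_;
};

// Same two-level shape as the release-mode macro above: the external entry
// point wraps the raw arguments and forwards to the real implementation.
#define BUILTIN(name)                                                    \
  static Object* Builtin_impl##name(Arguments args, Isolate* isolate);   \
  static Object* Builtin_##name(int args_length, Object** args_object,   \
                                Isolate* isolate) {                      \
    Arguments args(args_length, args_object);                            \
    return Builtin_impl##name(args, isolate);                            \
  }                                                                      \
  static Object* Builtin_impl##name(Arguments args, Isolate* isolate)

BUILTIN(Demo) {
  (void) isolate;
  std::printf("Demo builtin called with %d argument(s)\n", args.length());
  return 0;
}

int main() {
  Isolate isolate;
  Object* no_args[1] = { 0 };
  Builtin_Demo(0, no_args, &isolate);
  return 0;
}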
 
 
@@ -186,55 +194,58 @@ BUILTIN(EmptyFunction) {
 }
 
 
-#define CONVERT_ARG_STUB_CALLER_ARGS(name)                           \
-  Arguments* name = reinterpret_cast<Arguments*>(args[0]);
-
-
 RUNTIME_FUNCTION(MaybeObject*, ArrayConstructor_StubFailure) {
-  CONVERT_ARG_STUB_CALLER_ARGS(caller_args);
-  ASSERT(args.length() == 2);
-  Handle<Object> type_info = args.at<Object>(1);
+  // If we get 2 arguments then they are the stub parameters (constructor, type
+  // info).  If we get 3, then the first one is a pointer to the arguments
+  // passed by the caller.
+  Arguments empty_args(0, NULL);
+  bool no_caller_args = args.length() == 2;
+  ASSERT(no_caller_args || args.length() == 3);
+  int parameters_start = no_caller_args ? 0 : 1;
+  Arguments* caller_args = no_caller_args
+      ? &empty_args
+      : reinterpret_cast<Arguments*>(args[0]);
+  Handle<JSFunction> constructor = args.at<JSFunction>(parameters_start);
+  Handle<Object> type_info = args.at<Object>(parameters_start + 1);
 
-  JSArray* array = NULL;
   bool holey = false;
   if (caller_args->length() == 1 && (*caller_args)[0]->IsSmi()) {
     int value = Smi::cast((*caller_args)[0])->value();
     holey = (value > 0 && value < JSObject::kInitialMaxFastElementArray);
   }
 
+  JSArray* array;
   MaybeObject* maybe_array;
-  if (*type_info != isolate->heap()->undefined_value()) {
+  if (*type_info != isolate->heap()->undefined_value() &&
+      JSGlobalPropertyCell::cast(*type_info)->value()->IsSmi()) {
     JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(*type_info);
-    if (cell->value()->IsSmi()) {
-      Smi* smi = Smi::cast(cell->value());
-      ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
-      if (holey && !IsFastHoleyElementsKind(to_kind)) {
-        to_kind = GetHoleyElementsKind(to_kind);
-        // Update the allocation site info to reflect the advice alteration.
-        cell->set_value(Smi::FromInt(to_kind));
-      }
-
-      AllocationSiteMode mode = AllocationSiteInfo::GetMode(to_kind);
-      if (mode == TRACK_ALLOCATION_SITE) {
-        maybe_array = isolate->heap()->AllocateEmptyJSArrayWithAllocationSite(
-            to_kind, type_info);
-      } else {
-        maybe_array = isolate->heap()->AllocateEmptyJSArray(to_kind);
-      }
-      if (!maybe_array->To(&array)) return maybe_array;
+    Smi* smi = Smi::cast(cell->value());
+    ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+    if (holey && !IsFastHoleyElementsKind(to_kind)) {
+      to_kind = GetHoleyElementsKind(to_kind);
+      // Update the allocation site info to reflect the advice alteration.
+      cell->set_value(Smi::FromInt(to_kind));
     }
-  }
-
-  ElementsKind kind = GetInitialFastElementsKind();
-  if (holey) {
-    kind = GetHoleyElementsKind(kind);
-  }
 
-  if (array == NULL) {
-    maybe_array = isolate->heap()->AllocateEmptyJSArray(kind);
+    maybe_array = isolate->heap()->AllocateJSObjectWithAllocationSite(
+        *constructor, type_info);
+    if (!maybe_array->To(&array)) return maybe_array;
+  } else {
+    ElementsKind kind = constructor->initial_map()->elements_kind();
+    ASSERT(kind == GetInitialFastElementsKind());
+    maybe_array = isolate->heap()->AllocateJSObject(*constructor);
     if (!maybe_array->To(&array)) return maybe_array;
+    // We might need to transition to holey
+    if (holey) {
+      kind = GetHoleyElementsKind(kind);
+      maybe_array = array->TransitionElementsKind(kind);
+      if (maybe_array->IsFailure()) return maybe_array;
+    }
   }
 
+  maybe_array = isolate->heap()->AllocateJSArrayStorage(array, 0, 0,
+      DONT_INITIALIZE_ARRAY_ELEMENTS);
+  if (maybe_array->IsFailure()) return maybe_array;
   maybe_array = ArrayConstructInitializeElements(array, caller_args);
   if (maybe_array->IsFailure()) return maybe_array;
   return array;
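
ArrayConstructor_StubFailure now works both with and without the caller's arguments pointer, and it still honours the allocation-site advice: if the type-info cell recorded an ElementsKind and the requested length implies holes, the advice itself is upgraded to the holey kind before allocating; with no usable advice the array gets the constructor's initial kind and is transitioned to holey afterwards if needed. The kind-selection half of that, as a hedged standalone C++ sketch (the enum is a small illustrative subset):

#include <cassert>

enum ElementsKind {
  NO_ADVICE = -1,
  FAST_SMI_ELEMENTS,
  FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS,
  FAST_HOLEY_ELEMENTS
};

bool IsHoley(ElementsKind kind) {
  return kind == FAST_HOLEY_SMI_ELEMENTS || kind == FAST_HOLEY_ELEMENTS;
}

ElementsKind ToHoley(ElementsKind kind) {
  return kind == FAST_SMI_ELEMENTS ? FAST_HOLEY_SMI_ELEMENTS : FAST_HOLEY_ELEMENTS;
}

// advice: what the allocation-site property cell recorded, or NO_ADVICE.
// holey:  true when the single length argument forces a holey backing store.
ElementsKind ChooseElementsKind(ElementsKind* advice, bool holey) {
  if (*advice != NO_ADVICE) {
    ElementsKind kind = *advice;
    if (holey && !IsHoley(kind)) {
      kind = ToHoley(kind);
      *advice = kind;  // cell->set_value(Smi::FromInt(to_kind)) in the builtin
    }
    return kind;
  }
  ElementsKind kind = FAST_SMI_ELEMENTS;  // roughly GetInitialFastElementsKind()
  return holey ? ToHoley(kind) : kind;
}

int main() {
  ElementsKind advice = FAST_ELEMENTS;
  assert(ChooseElementsKind(&advice, true) == FAST_HOLEY_ELEMENTS);
  assert(advice == FAST_HOLEY_ELEMENTS);  // the advice was upgraded in place

  ElementsKind none = NO_ADVICE;
  assert(ChooseElementsKind(&none, false) == FAST_SMI_ELEMENTS);
  return 0;
}
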
@@ -1500,6 +1511,11 @@ static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
   KeyedLoadIC::GenerateNonStrictArguments(masm);
 }
 
+static void Generate_StoreIC_Slow(MacroAssembler* masm) {
+  StoreIC::GenerateSlow(masm);
+}
+
+
 static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
   StoreIC::GenerateInitialize(masm);
 }
@@ -1617,6 +1633,11 @@ static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
 }
 
 
+static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
+  Debug::GenerateCompareNilICDebugBreak(masm);
+}
+
+
 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
   Debug::GenerateReturnDebugBreak(masm);
 }
index ab77228..6fc17c4 100644
@@ -124,6 +124,8 @@ enum BuiltinExtraArguments {
                                     Code::kNoExtraICState)              \
   V(StoreIC_Miss,                   BUILTIN, UNINITIALIZED,             \
                                     Code::kNoExtraICState)              \
+  V(StoreIC_Slow,                   BUILTIN, UNINITIALIZED,             \
+                                    Code::kNoExtraICState)              \
   V(KeyedStoreIC_Miss,              BUILTIN, UNINITIALIZED,             \
                                     Code::kNoExtraICState)              \
   V(KeyedStoreIC_MissForceGeneric,  BUILTIN, UNINITIALIZED,             \
@@ -230,6 +232,8 @@ enum BuiltinExtraArguments {
                                                DEBUG_BREAK)                   \
   V(KeyedStoreIC_DebugBreak,                   KEYED_STORE_IC, DEBUG_STUB,    \
                                                DEBUG_BREAK)                   \
+  V(CompareNilIC_DebugBreak,                   COMPARE_NIL_IC, DEBUG_STUB,    \
+                                               DEBUG_BREAK)                   \
   V(Slot_DebugBreak,                           BUILTIN, DEBUG_STUB,           \
                                                DEBUG_BREAK)                   \
   V(PlainReturn_LiveEdit,                      BUILTIN, DEBUG_STUB,           \
@@ -274,8 +278,6 @@ enum BuiltinExtraArguments {
   V(APPLY_PREPARE, 1)                    \
   V(APPLY_OVERFLOW, 1)
 
-MaybeObject* ArrayConstructor_StubFailure(Arguments args, Isolate* isolate);
-
 class BuiltinFunctionTable;
 class ObjectVisitor;
 
index b672079..31431b7 100644
@@ -82,6 +82,24 @@ class CodeStubGraphBuilderBase : public HGraphBuilder {
   HContext* context() { return context_; }
   Isolate* isolate() { return info_.isolate(); }
 
+  class ArrayContextChecker {
+   public:
+    ArrayContextChecker(HGraphBuilder* builder, HValue* constructor,
+                        HValue* array_function)
+        : checker_(builder) {
+      checker_.If<HCompareObjectEqAndBranch, HValue*>(constructor,
+                                                      array_function);
+      checker_.Then();
+    }
+
+    ~ArrayContextChecker() {
+      checker_.ElseDeopt();
+      checker_.End();
+    }
+   private:
+    IfBuilder checker_;
+  };
+
  private:
   SmartArrayPointer<HParameter*> parameters_;
   HValue* arguments_length_;
@@ -240,7 +258,8 @@ Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
       GetCodeKind(),
       GetICState(),
       GetExtraICState(),
-      GetStubType(), -1);
+      GetStubType(),
+      GetStubFlags());
   Handle<Code> new_object = factory->NewCode(
       desc, flags, masm.CodeObject(), NeedsImmovableCode());
   return new_object;
@@ -290,8 +309,7 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
   checker.Then();
 
   if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
-    HValue* elements =
-        AddInstruction(new(zone) HLoadElements(boilerplate, NULL));
+    HValue* elements = AddLoadElements(boilerplate);
 
     IfBuilder if_fixed_cow(this);
     if_fixed_cow.IfCompareMap(elements, factory->fixed_cow_array_map());
@@ -377,11 +395,12 @@ HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
 
   for (int i = 0; i < size; i += kPointerSize) {
     HInstruction* value =
-        AddInstruction(new(zone) HLoadNamedField(boilerplate, true, i));
+        AddInstruction(new(zone) HLoadNamedField(
+            boilerplate, true, Representation::Tagged(), i));
     AddInstruction(new(zone) HStoreNamedField(object,
                                               factory->empty_string(),
-                                              value,
-                                              true, i));
+                                              value, true,
+                                              Representation::Tagged(), i));
   }
 
   checker.ElseDeopt();
@@ -409,6 +428,36 @@ Handle<Code> KeyedLoadFastElementStub::GenerateCode() {
 }
 
 
+template<>
+HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
+  Representation representation = casted_stub()->representation();
+  HInstruction* load = AddInstruction(DoBuildLoadNamedField(
+      GetParameter(0), casted_stub()->is_inobject(),
+      representation, casted_stub()->offset()));
+  return load;
+}
+
+
+Handle<Code> LoadFieldStub::GenerateCode() {
+  return DoGenerateCode(this);
+}
+
+
+template<>
+HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
+  Representation representation = casted_stub()->representation();
+  HInstruction* load = AddInstruction(DoBuildLoadNamedField(
+      GetParameter(0), casted_stub()->is_inobject(),
+      representation, casted_stub()->offset()));
+  return load;
+}
+
+
+Handle<Code> KeyedLoadFieldStub::GenerateCode() {
+  return DoGenerateCode(this);
+}
+
+
 template <>
 HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
   BuildUncheckedMonomorphicElementAccess(
@@ -452,8 +501,7 @@ HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
 
   if_builder.Else();
 
-  HInstruction* elements =
-      AddInstruction(new(zone) HLoadElements(js_array, js_array));
+  HInstruction* elements = AddLoadElements(js_array);
 
   HInstruction* elements_length =
       AddInstruction(new(zone) HFixedArrayBaseLength(elements));
@@ -470,12 +518,15 @@ HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
   AddInstruction(new(zone) HStoreNamedField(js_array,
                                             factory->elements_field_string(),
                                             new_elements, true,
+                                            Representation::Tagged(),
                                             JSArray::kElementsOffset));
 
   if_builder.End();
 
   AddInstruction(new(zone) HStoreNamedField(js_array, factory->length_string(),
-                                            map, true, JSArray::kMapOffset));
+                                            map, true,
+                                            Representation::Tagged(),
+                                            JSArray::kMapOffset));
   return js_array;
 }
 
@@ -491,6 +542,10 @@ HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
   //  -- Parameter 1 : type info cell
   //  -- Parameter 0 : constructor
   // -----------------------------------
+  HInstruction* array_function = BuildGetArrayFunction(context());
+  ArrayContextChecker(this,
+                      GetParameter(ArrayConstructorStubBase::kConstructor),
+                      array_function);
   // Get the right map
   // Should be a constant
   JSArrayBuilder array_builder(
@@ -510,6 +565,10 @@ Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
 template <>
 HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
     BuildCodeStub() {
+  HInstruction* array_function = BuildGetArrayFunction(context());
+  ArrayContextChecker(this,
+                      GetParameter(ArrayConstructorStubBase::kConstructor),
+                      array_function);
   // Smi check and range check on the input arg.
   HValue* constant_one = graph()->GetConstant1();
   HValue* constant_zero = graph()->GetConstant0();
@@ -563,6 +622,10 @@ Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
 
 template <>
 HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
+  HInstruction* array_function = BuildGetArrayFunction(context());
+  ArrayContextChecker(this,
+                      GetParameter(ArrayConstructorStubBase::kConstructor),
+                      array_function);
   ElementsKind kind = casted_stub()->elements_kind();
   HValue* length = GetArgumentsLength();
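
ArrayContextChecker is a small RAII guard: its constructor opens an "if (constructor == array function)" branch in the graph under construction and its destructor closes that branch with an else-deopt, so a mismatched constructor bails out to the runtime. The scoping pattern, reduced to a printing toy (the Builder here is a stand-in, not Hydrogen's IfBuilder):

#include <cstdio>

// Just enough builder to show the shape: If/Then open a branch,
// ElseDeopt/End close it again.
struct Builder {
  void If(const char* condition) { std::printf("if (%s) {\n", condition); }
  void Then() {}
  void ElseDeopt() { std::printf("} else {\n  deoptimize\n"); }
  void End() { std::printf("}\n"); }
};

class ArrayContextChecker {
 public:
  explicit ArrayContextChecker(Builder* builder) : builder_(builder) {
    builder_->If("constructor == array_function");
    builder_->Then();
  }
  ~ArrayContextChecker() {
    builder_->ElseDeopt();  // anything else falls back to the runtime
    builder_->End();
  }
 private:
  Builder* builder_;
};

int main() {
  Builder builder;
  {
    ArrayContextChecker checker(&builder);
    std::printf("  code emitted while the guard is alive lands in the then-branch\n");
  }
  return 0;
}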
 
index df9855d..aa2c821 100644
@@ -431,7 +431,9 @@ CompareNilICStub::Types CompareNilICStub::GetPatchedICFlags(
     } else if (object->IsUndefined()) {
       types = static_cast<CompareNilICStub::Types>(
           types | CompareNilICStub::kCompareAgainstUndefined);
-    } else if (object->IsUndetectableObject() || !object->IsHeapObject()) {
+    } else if (object->IsUndetectableObject() ||
+               object->IsOddball() ||
+               !object->IsHeapObject()) {
         types = CompareNilICStub::kFullCompare;
     } else if ((types & CompareNilICStub::kCompareAgainstMonomorphicMap) != 0) {
       types = CompareNilICStub::kFullCompare;
index ea895d6..646aee2 100644
@@ -87,7 +87,8 @@ namespace internal {
   V(ArrayConstructor)                    \
   V(ProfileEntryHook)                    \
   /* IC Handler stubs */                 \
-  V(LoadField)
+  V(LoadField)                           \
+  V(KeyedLoadField)
 
 // List of code stubs only used on ARM platforms.
 #ifdef V8_TARGET_ARCH_ARM
@@ -185,6 +186,12 @@ class CodeStub BASE_EMBEDDED {
   virtual Code::ExtraICState GetExtraICState() {
     return Code::kNoExtraICState;
   }
+  virtual Code::StubType GetStubType() {
+    return Code::NORMAL;
+  }
+  virtual int GetStubFlags() {
+    return -1;
+  }
 
  protected:
   static bool CanUseFPRegisters();
@@ -192,9 +199,6 @@ class CodeStub BASE_EMBEDDED {
   // Generates the assembler code for the stub.
   virtual Handle<Code> GenerateCode() = 0;
 
-  virtual Code::StubType GetStubType() {
-    return Code::NORMAL;
-  }
 
   // Returns whether the code generated for this stub needs to be allocated as
   // a fixed (non-moveable) code object.
@@ -253,7 +257,6 @@ class PlatformCodeStub : public CodeStub {
   virtual Handle<Code> GenerateCode();
 
   virtual Code::Kind GetCodeKind() const { return Code::STUB; }
-  virtual int GetStubFlags() { return -1; }
 
  protected:
   // Generates the assembler code for the stub.
@@ -754,42 +757,108 @@ class StoreArrayLengthStub: public StoreICStub {
 };
 
 
-class HandlerStub: public ICStub {
+class HICStub: public HydrogenCodeStub {
+ public:
+  virtual Code::Kind GetCodeKind() const { return kind(); }
+  virtual InlineCacheState GetICState() { return MONOMORPHIC; }
+
+ protected:
+  HICStub() : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) { }
+  class KindBits: public BitField<Code::Kind, 0, 4> {};
+  virtual Code::Kind kind() const = 0;
+};
+
+
+class HandlerStub: public HICStub {
  public:
-  explicit HandlerStub(Code::Kind kind) : ICStub(kind) { }
   virtual Code::Kind GetCodeKind() const { return Code::STUB; }
   virtual int GetStubFlags() { return kind(); }
+
+ protected:
+  HandlerStub() : HICStub() { }
 };
 
 
 class LoadFieldStub: public HandlerStub {
  public:
-  LoadFieldStub(Register reg, bool inobject, int index)
-      : HandlerStub(Code::LOAD_IC),
-        reg_(reg),
-        inobject_(inobject),
-        index_(index) { }
-  virtual void Generate(MacroAssembler* masm);
+  LoadFieldStub(bool inobject, int index, Representation representation)
+      : HandlerStub() {
+    Initialize(Code::LOAD_IC, inobject, index, representation);
+  }
+
+  virtual Handle<Code> GenerateCode();
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+  Representation representation() {
+    if (unboxed_double()) return Representation::Double();
+    return Representation::Tagged();
+  }
+
+  virtual Code::Kind kind() const {
+    return KindBits::decode(bit_field_);
+  }
+
+  bool is_inobject() {
+    return InobjectBits::decode(bit_field_);
+  }
+
+  int offset() {
+    int index = IndexBits::decode(bit_field_);
+    int offset = index * kPointerSize;
+    if (is_inobject()) return offset;
+    return FixedArray::kHeaderSize + offset;
+  }
+
+  bool unboxed_double() {
+    return UnboxedDoubleBits::decode(bit_field_);
+  }
 
- protected:
   virtual Code::StubType GetStubType() { return Code::FIELD; }
 
+ protected:
+  LoadFieldStub() : HandlerStub() { }
+
+  void Initialize(Code::Kind kind,
+                  bool inobject,
+                  int index,
+                  Representation representation) {
+    bool unboxed_double = FLAG_track_double_fields && representation.IsDouble();
+    bit_field_ = KindBits::encode(kind)
+        | InobjectBits::encode(inobject)
+        | IndexBits::encode(index)
+        | UnboxedDoubleBits::encode(unboxed_double);
+  }
+
  private:
   STATIC_ASSERT(KindBits::kSize == 4);
-  class RegisterBits: public BitField<int, 4, 6> {};
-  class InobjectBits: public BitField<bool, 10, 1> {};
-  class IndexBits: public BitField<int, 11, 11> {};
+  class InobjectBits: public BitField<bool, 4, 1> {};
+  class IndexBits: public BitField<int, 5, 11> {};
+  class UnboxedDoubleBits: public BitField<bool, 16, 1> {};
   virtual CodeStub::Major MajorKey() { return LoadField; }
-  virtual int MinorKey() {
-    return KindBits::encode(kind())
-        | RegisterBits::encode(reg_.code())
-        | InobjectBits::encode(inobject_)
-        | IndexBits::encode(index_);
+  virtual int NotMissMinorKey() { return bit_field_; }
+
+  int bit_field_;
+};
+
+
+class KeyedLoadFieldStub: public LoadFieldStub {
+ public:
+  KeyedLoadFieldStub(bool inobject, int index, Representation representation)
+      : LoadFieldStub() {
+    Initialize(Code::KEYED_LOAD_IC, inobject, index, representation);
   }
 
-  Register reg_;
-  bool inobject_;
-  int index_;
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+  virtual Handle<Code> GenerateCode();
+
+ private:
+  virtual CodeStub::Major MajorKey() { return KeyedLoadField; }
 };
 
 
@@ -1596,7 +1665,8 @@ class ArrayConstructorStubBase : public HydrogenCodeStub {
   static void InstallDescriptors(Isolate* isolate);
 
   // Parameters accessed via CodeStubGraphBuilder::GetParameter()
-  static const int kPropertyCell = 0;
+  static const int kConstructor = 0;
+  static const int kPropertyCell = 1;
 
  private:
   int NotMissMinorKey() { return bit_field_; }
index dce8171..b7ff92a 100644
@@ -144,7 +144,8 @@ Code::Flags CompilationInfo::flags() const {
     return Code::ComputeFlags(code_stub()->GetCodeKind(),
                               code_stub()->GetICState(),
                               code_stub()->GetExtraICState(),
-                              Code::NORMAL, -1);
+                              code_stub()->GetStubType(),
+                              code_stub()->GetStubFlags());
   } else {
     return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
   }
@@ -299,14 +300,14 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
   //
   // The encoding is as a signed value, with parameters and receiver using
   // the negative indices and locals the non-negative ones.
-  const int parameter_limit = -LUnallocated::kMinFixedIndex;
+  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
   Scope* scope = info()->scope();
   if ((scope->num_parameters() + 1) > parameter_limit) {
     info()->set_bailout_reason("too many parameters");
     return AbortOptimization();
   }
 
-  const int locals_limit = LUnallocated::kMaxFixedIndex;
+  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
   if (!info()->osr_ast_id().IsNone() &&
       scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
     info()->set_bailout_reason("too many parameters/locals");
index 0024e13..434b274 100644
@@ -124,6 +124,15 @@ enum BindingFlags {
   V(INSTANTIATE_FUN_INDEX, JSFunction, instantiate_fun) \
   V(CONFIGURE_INSTANCE_FUN_INDEX, JSFunction, configure_instance_fun) \
   V(ARRAY_BUFFER_FUN_INDEX, JSFunction, array_buffer_fun) \
+  V(UINT8_ARRAY_FUN_INDEX, JSFunction, uint8_array_fun) \
+  V(INT8_ARRAY_FUN_INDEX, JSFunction, int8_array_fun) \
+  V(UINT16_ARRAY_FUN_INDEX, JSFunction, uint16_array_fun) \
+  V(INT16_ARRAY_FUN_INDEX, JSFunction, int16_array_fun) \
+  V(UINT32_ARRAY_FUN_INDEX, JSFunction, uint32_array_fun) \
+  V(INT32_ARRAY_FUN_INDEX, JSFunction, int32_array_fun) \
+  V(FLOAT_ARRAY_FUN_INDEX, JSFunction, float_array_fun) \
+  V(DOUBLE_ARRAY_FUN_INDEX, JSFunction, double_array_fun) \
+  V(UINT8C_ARRAY_FUN_INDEX, JSFunction, uint8c_array_fun) \
   V(FUNCTION_MAP_INDEX, Map, function_map) \
   V(STRICT_MODE_FUNCTION_MAP_INDEX, Map, strict_mode_function_map) \
   V(FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX, Map, function_without_prototype_map) \
@@ -171,6 +180,7 @@ enum BindingFlags {
     strict_mode_generator_function_map) \
   V(GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX, Map, \
     generator_object_prototype_map) \
+  V(GENERATOR_RESULT_MAP_INDEX, Map, generator_result_map) \
   V(RANDOM_SEED_INDEX, ByteArray, random_seed)
 
 // JSFunctions are pairs (context, function code), sometimes also called
@@ -278,6 +288,15 @@ class Context: public FixedArray {
     INSTANTIATE_FUN_INDEX,
     CONFIGURE_INSTANCE_FUN_INDEX,
     ARRAY_BUFFER_FUN_INDEX,
+    UINT8_ARRAY_FUN_INDEX,
+    INT8_ARRAY_FUN_INDEX,
+    UINT16_ARRAY_FUN_INDEX,
+    INT16_ARRAY_FUN_INDEX,
+    UINT32_ARRAY_FUN_INDEX,
+    INT32_ARRAY_FUN_INDEX,
+    FLOAT_ARRAY_FUN_INDEX,
+    DOUBLE_ARRAY_FUN_INDEX,
+    UINT8C_ARRAY_FUN_INDEX,
     MESSAGE_LISTENERS_INDEX,
     MAKE_MESSAGE_FUN_INDEX,
     GET_STACK_TRACE_LINE_INDEX,
@@ -305,6 +324,7 @@ class Context: public FixedArray {
     GENERATOR_FUNCTION_MAP_INDEX,
     STRICT_MODE_GENERATOR_FUNCTION_MAP_INDEX,
     GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX,
+    GENERATOR_RESULT_MAP_INDEX,
     RANDOM_SEED_INDEX,
 
     // Properties from here are treated as weak references by the full GC.
index a20de43..e1d29d9 100644
@@ -77,7 +77,7 @@ void HandleDebugEvent(DebugEvent event,
 
   // Print the event details.
   Handle<Object> details =
-      Shell::DebugMessageDetails(Handle<String>::Cast(event_json));
+      Shell::DebugMessageDetails(isolate, Handle<String>::Cast(event_json));
   if (try_catch.HasCaught()) {
     Shell::ReportException(isolate, &try_catch);
     return;
@@ -114,7 +114,7 @@ void HandleDebugEvent(DebugEvent event,
 
     // Convert the debugger command to a JSON debugger request.
     Handle<Value> request =
-        Shell::DebugCommandToJSONRequest(String::New(command));
+        Shell::DebugCommandToJSONRequest(isolate, String::New(command));
     if (try_catch.HasCaught()) {
       Shell::ReportException(isolate, &try_catch);
       continue;
@@ -146,7 +146,8 @@ void HandleDebugEvent(DebugEvent event,
     Handle<String> response = Handle<String>::Cast(response_val);
 
     // Convert the debugger response into text details and the running state.
-    Handle<Object> response_details = Shell::DebugMessageDetails(response);
+    Handle<Object> response_details =
+        Shell::DebugMessageDetails(isolate, response);
     if (try_catch.HasCaught()) {
       Shell::ReportException(isolate, &try_catch);
       continue;
@@ -281,7 +282,8 @@ void RemoteDebugger::HandleMessageReceived(char* message) {
   // Print the event details.
   TryCatch try_catch;
   Handle<Object> details =
-      Shell::DebugMessageDetails(Handle<String>::Cast(String::New(message)));
+      Shell::DebugMessageDetails(isolate_,
+                                 Handle<String>::Cast(String::New(message)));
   if (try_catch.HasCaught()) {
     Shell::ReportException(isolate_, &try_catch);
     PrintPrompt();
@@ -310,7 +312,7 @@ void RemoteDebugger::HandleKeyboardCommand(char* command) {
   // Convert the debugger command to a JSON debugger request.
   TryCatch try_catch;
   Handle<Value> request =
-      Shell::DebugCommandToJSONRequest(String::New(command));
+      Shell::DebugCommandToJSONRequest(isolate_, String::New(command));
   if (try_catch.HasCaught()) {
     Shell::ReportException(isolate_, &try_catch);
     PrintPrompt();
index 22ace17..1889556 100644
 #include <string.h>
 #include <sys/stat.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #ifdef V8_SHARED
 #include <assert.h>
 #endif  // V8_SHARED
@@ -124,8 +129,8 @@ class PerIsolateData {
   }
 
 #define DEFINE_STRING_GETTER(name, value) \
-  static Persistent<String> name##_string(Isolate* isolate) { \
-    return Get(isolate)->name##_string_; \
+  static Handle<String> name##_string(Isolate* isolate) { \
+    return Handle<String>(*Get(isolate)->name##_string_); \
   }
   FOR_EACH_STRING(DEFINE_STRING_GETTER)
 #undef DEFINE_STRING_GETTER
@@ -245,7 +250,7 @@ bool Shell::ExecuteString(Isolate* isolate,
   } else {
     PerIsolateData* data = PerIsolateData::Get(isolate);
     Local<Context> realm =
-        Local<Context>::New(data->realms_[data->realm_current_]);
+        Local<Context>::New(isolate, data->realms_[data->realm_current_]);
     realm->Enter();
     Handle<Value> result = script->Run();
     realm->Exit();
@@ -272,7 +277,7 @@ bool Shell::ExecuteString(Isolate* isolate,
 #if !defined(V8_SHARED)
         } else {
           v8::TryCatch try_catch;
-          Context::Scope context_scope(utility_context_);
+          Context::Scope context_scope(isolate, utility_context_);
           Handle<Object> global = utility_context_->Global();
           Handle<Value> fun = global->Get(String::New("Stringify"));
           Handle<Value> argv[1] = { result };
@@ -421,7 +426,7 @@ Handle<Value> Shell::RealmEval(const Arguments& args) {
   }
   Handle<Script> script = Script::New(args[1]->ToString());
   if (script.IsEmpty()) return Undefined(isolate);
-  Local<Context> realm = Local<Context>::New(data->realms_[index]);
+  Local<Context> realm = Local<Context>::New(isolate, data->realms_[index]);
   realm->Enter();
   Handle<Value> result = script->Run();
   realm->Exit();
@@ -435,7 +440,7 @@ Handle<Value> Shell::RealmSharedGet(Local<String> property,
   Isolate* isolate = info.GetIsolate();
   PerIsolateData* data = PerIsolateData::Get(isolate);
   if (data->realm_shared_.IsEmpty()) return Undefined(isolate);
-  return data->realm_shared_;
+  return Local<Value>::New(isolate, data->realm_shared_);
 }
 
 void Shell::RealmSharedSet(Local<String> property,
@@ -1057,14 +1062,14 @@ Handle<Value> Shell::ArraySet(const Arguments& args) {
 
 
 void Shell::ExternalArrayWeakCallback(v8::Isolate* isolate,
-                                      Persistent<Value> object,
-                                      void* data) {
+                                      Persistent<Object>* object,
+                                      uint8_t* data) {
   HandleScope scope(isolate);
-  int32_t length = object->ToObject()->Get(
+  int32_t length = (*object)->Get(
       PerIsolateData::byteLength_string(isolate))->Uint32Value();
   isolate->AdjustAmountOfExternalAllocatedMemory(-length);
-  delete[] static_cast<uint8_t*>(data);
-  object.Dispose(isolate);
+  delete[] data;
+  object->Dispose(isolate);
 }
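
    The hunk above moves d8's external-array finalizer to the weak-callback shape used by the
    upgraded API: the callback now receives the Isolate, a pointer to the Persistent handle, and
    the embedder data with its concrete type. A minimal sketch of that shape, built only from
    calls that appear in this diff; the function and variable names are illustrative, not part of
    the patch.

    #include <v8.h>

    // Sketch only: weak callback for externally allocated storage under the
    // v8 3.19-style signature shown above.
    static void OnExternalArrayCollected(v8::Isolate* isolate,
                                         v8::Persistent<v8::Object>* object,
                                         uint8_t* backing) {
      v8::HandleScope scope(isolate);
      delete[] backing;           // release the external buffer
      object->Dispose(isolate);   // drop the weak handle itself
    }
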
 
 
@@ -1180,7 +1185,7 @@ Handle<Array> Shell::GetCompletions(Isolate* isolate,
                                     Handle<String> text,
                                     Handle<String> full) {
   HandleScope handle_scope(isolate);
-  Context::Scope context_scope(utility_context_);
+  Context::Scope context_scope(isolate, utility_context_);
   Handle<Object> global = utility_context_->Global();
   Handle<Value> fun = global->Get(String::New("GetCompletions"));
   static const int kArgc = 3;
@@ -1191,8 +1196,10 @@ Handle<Array> Shell::GetCompletions(Isolate* isolate,
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-Handle<Object> Shell::DebugMessageDetails(Handle<String> message) {
-  Context::Scope context_scope(utility_context_);
+Handle<Object> Shell::DebugMessageDetails(Isolate* isolate,
+                                          Handle<String> message) {
+  HandleScope handle_scope(isolate);
+  Context::Scope context_scope(isolate, utility_context_);
   Handle<Object> global = utility_context_->Global();
   Handle<Value> fun = global->Get(String::New("DebugMessageDetails"));
   static const int kArgc = 1;
@@ -1202,8 +1209,10 @@ Handle<Object> Shell::DebugMessageDetails(Handle<String> message) {
 }
 
 
-Handle<Value> Shell::DebugCommandToJSONRequest(Handle<String> command) {
-  Context::Scope context_scope(utility_context_);
+Handle<Value> Shell::DebugCommandToJSONRequest(Isolate* isolate,
+                                               Handle<String> command) {
+  HandleScope handle_scope(isolate);
+  Context::Scope context_scope(isolate, utility_context_);
   Handle<Object> global = utility_context_->Global();
   Handle<Value> fun = global->Get(String::New("DebugCommandToJSONRequest"));
   static const int kArgc = 1;
@@ -1214,7 +1223,9 @@ Handle<Value> Shell::DebugCommandToJSONRequest(Handle<String> command) {
 
 
 void Shell::DispatchDebugMessages() {
-  v8::Context::Scope scope(Shell::evaluation_context_);
+  Isolate* isolate = v8::Isolate::GetCurrent();
+  HandleScope handle_scope(isolate);
+  v8::Context::Scope scope(isolate, Shell::evaluation_context_);
   v8::Debug::ProcessDebugMessages();
 }
 #endif  // ENABLE_DEBUGGER_SUPPORT
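
    The new body of DispatchDebugMessages above shows the idiom this upgrade requires throughout
    d8: fetch the current Isolate explicitly, open a HandleScope, and enter a persistent context
    through the (isolate, handle) Context::Scope overload. A hedged, stand-alone sketch of the
    same idiom; the function name and the context parameter are assumptions for illustration.

    #include <v8.h>
    #include <v8-debug.h>

    // Sketch only: pump pending debugger messages inside explicit isolate,
    // handle and context scopes, mirroring the pattern in the hunk above.
    void PumpDebugMessages(v8::Persistent<v8::Context>& context) {
      v8::Isolate* isolate = v8::Isolate::GetCurrent();
      v8::HandleScope handle_scope(isolate);
      v8::Context::Scope context_scope(isolate, context);
      v8::Debug::ProcessDebugMessages();
    }
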
@@ -1327,7 +1338,7 @@ void Shell::InstallUtilityScript(Isolate* isolate) {
   // utility, evaluation and debug context can all access each other.
   utility_context_->SetSecurityToken(Undefined(isolate));
   evaluation_context_->SetSecurityToken(Undefined(isolate));
-  Context::Scope utility_scope(utility_context_);
+  Context::Scope utility_scope(isolate, utility_context_);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   if (i::FLAG_debugger) printf("JavaScript debugger enabled\n");
@@ -1459,28 +1470,34 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) {
   global_template->Set(String::New("Realm"), realm_template);
 
   // Bind the handlers for external arrays.
-  PropertyAttribute attr =
-      static_cast<PropertyAttribute>(ReadOnly | DontDelete);
-  global_template->Set(PerIsolateData::ArrayBuffer_string(isolate),
-                       CreateArrayBufferTemplate(ArrayBuffer), attr);
-  global_template->Set(String::New("Int8Array"),
-                       CreateArrayTemplate(Int8Array), attr);
-  global_template->Set(String::New("Uint8Array"),
-                       CreateArrayTemplate(Uint8Array), attr);
-  global_template->Set(String::New("Int16Array"),
-                       CreateArrayTemplate(Int16Array), attr);
-  global_template->Set(String::New("Uint16Array"),
-                       CreateArrayTemplate(Uint16Array), attr);
-  global_template->Set(String::New("Int32Array"),
-                       CreateArrayTemplate(Int32Array), attr);
-  global_template->Set(String::New("Uint32Array"),
-                       CreateArrayTemplate(Uint32Array), attr);
-  global_template->Set(String::New("Float32Array"),
-                       CreateArrayTemplate(Float32Array), attr);
-  global_template->Set(String::New("Float64Array"),
-                       CreateArrayTemplate(Float64Array), attr);
-  global_template->Set(String::New("Uint8ClampedArray"),
-                       CreateArrayTemplate(Uint8ClampedArray), attr);
+#ifndef V8_SHARED
+  if (!i::FLAG_harmony_typed_arrays) {
+#endif  // V8_SHARED
+    PropertyAttribute attr =
+        static_cast<PropertyAttribute>(ReadOnly | DontDelete);
+    global_template->Set(PerIsolateData::ArrayBuffer_string(isolate),
+                         CreateArrayBufferTemplate(ArrayBuffer), attr);
+    global_template->Set(String::New("Int8Array"),
+                         CreateArrayTemplate(Int8Array), attr);
+    global_template->Set(String::New("Uint8Array"),
+                         CreateArrayTemplate(Uint8Array), attr);
+    global_template->Set(String::New("Int16Array"),
+                         CreateArrayTemplate(Int16Array), attr);
+    global_template->Set(String::New("Uint16Array"),
+                         CreateArrayTemplate(Uint16Array), attr);
+    global_template->Set(String::New("Int32Array"),
+                         CreateArrayTemplate(Int32Array), attr);
+    global_template->Set(String::New("Uint32Array"),
+                         CreateArrayTemplate(Uint32Array), attr);
+    global_template->Set(String::New("Float32Array"),
+                         CreateArrayTemplate(Float32Array), attr);
+    global_template->Set(String::New("Float64Array"),
+                         CreateArrayTemplate(Float64Array), attr);
+    global_template->Set(String::New("Uint8ClampedArray"),
+                         CreateArrayTemplate(Uint8ClampedArray), attr);
+#ifndef V8_SHARED
+  }
+#endif  // V8_SHARED
 
 #if !defined(V8_SHARED) && !defined(_WIN32) && !defined(_WIN64)
   Handle<ObjectTemplate> os_templ = ObjectTemplate::New();
@@ -1522,7 +1539,8 @@ void Shell::InitializeDebugger(Isolate* isolate) {
   Locker lock(isolate);
   HandleScope scope(isolate);
   Handle<ObjectTemplate> global_template = CreateGlobalTemplate(isolate);
-  utility_context_ = Context::New(NULL, global_template);
+  utility_context_.Reset(isolate,
+                         Context::New(isolate, NULL, global_template));
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Start the debugger agent if requested.
@@ -1535,14 +1553,15 @@ void Shell::InitializeDebugger(Isolate* isolate) {
 }
 
 
-Persistent<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
+Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
 #ifndef V8_SHARED
   // This needs to be a critical section since this is not thread-safe
   i::ScopedLock lock(context_mutex_);
 #endif  // V8_SHARED
   // Initialize the global objects
   Handle<ObjectTemplate> global_template = CreateGlobalTemplate(isolate);
-  Persistent<Context> context = Context::New(NULL, global_template);
+  HandleScope handle_scope(isolate);
+  Local<Context> context = Context::New(isolate, NULL, global_template);
   ASSERT(!context.IsEmpty());
   Context::Scope scope(context);
 
@@ -1560,7 +1579,7 @@ Persistent<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
   context->Global()->Set(String::New("arguments"),
                          Utils::ToLocal(arguments_jsarray));
 #endif  // V8_SHARED
-  return context;
+  return handle_scope.Close(context);
 }
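
    CreateEvaluationContext now hands out a Local that escapes through HandleScope::Close(), and
    callers that need to keep the context alive Reset() a Persistent from it instead of assigning
    one directly (see the RunMain hunk further down). A minimal sketch of that pattern using only
    calls visible in this diff; keep_alive, MakeContext and RememberContext are illustrative
    names.

    #include <v8.h>

    static v8::Persistent<v8::Context> keep_alive;  // illustrative holder

    // Returns a fresh context as a Local that survives the local HandleScope.
    v8::Local<v8::Context> MakeContext(v8::Isolate* isolate) {
      v8::HandleScope handle_scope(isolate);
      v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
      v8::Local<v8::Context> context =
          v8::Context::New(isolate, NULL, global_template);
      return handle_scope.Close(context);
    }

    void RememberContext(v8::Isolate* isolate) {
      v8::HandleScope handle_scope(isolate);
      v8::Local<v8::Context> context = MakeContext(isolate);
      keep_alive.Reset(isolate, context);  // replaces direct Persistent assignment
    }
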
 
 
@@ -1740,9 +1759,9 @@ Handle<String> Shell::ReadFile(Isolate* isolate, const char* name) {
 
 void Shell::RunShell(Isolate* isolate) {
   Locker locker(isolate);
-  Context::Scope context_scope(evaluation_context_);
-  PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
   HandleScope outer_scope(isolate);
+  Context::Scope context_scope(isolate, evaluation_context_);
+  PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
   Handle<String> name = String::New("(d8)");
   LineEditor* console = LineEditor::Get();
   printf("V8 version %s [console: %s]\n", V8::GetVersion(), console->name());
@@ -1791,7 +1810,7 @@ void ShellThread::Run() {
     // Prepare the context for this thread.
     Locker locker(isolate_);
     HandleScope outer_scope(isolate_);
-    Persistent<Context> thread_context =
+    Local<Context> thread_context =
         Shell::CreateEvaluationContext(isolate_);
     Context::Scope context_scope(thread_context);
     PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate_));
@@ -1815,7 +1834,6 @@ void ShellThread::Run() {
       Shell::ExecuteString(isolate_, str, String::New(filename), false, false);
     }
 
-    thread_context.Dispose(thread_context->GetIsolate());
     ptr = next_line;
   }
 }
@@ -1892,15 +1910,16 @@ void SourceGroup::ExecuteInThread() {
     {
       Isolate::Scope iscope(isolate);
       Locker lock(isolate);
-      HandleScope scope(isolate);
-      PerIsolateData data(isolate);
-      Persistent<Context> context = Shell::CreateEvaluationContext(isolate);
       {
-        Context::Scope cscope(context);
-        PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
-        Execute(isolate);
+        HandleScope scope(isolate);
+        PerIsolateData data(isolate);
+        Local<Context> context = Shell::CreateEvaluationContext(isolate);
+        {
+          Context::Scope cscope(context);
+          PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
+          Execute(isolate);
+        }
       }
-      context.Dispose(isolate);
       if (Shell::options.send_idle_notification) {
         const int kLongIdlePauseInMs = 1000;
         V8::ContextDisposedNotification();
@@ -2091,26 +2110,27 @@ int Shell::RunMain(Isolate* isolate, int argc, char* argv[]) {
 #endif  // V8_SHARED
   {  // NOLINT
     Locker lock(isolate);
-    HandleScope scope(isolate);
-    Persistent<Context> context = CreateEvaluationContext(isolate);
-    if (options.last_run) {
-      // Keep using the same context in the interactive shell.
-      evaluation_context_ = context;
+    {
+      HandleScope scope(isolate);
+      Local<Context> context = CreateEvaluationContext(isolate);
+      if (options.last_run) {
+        // Keep using the same context in the interactive shell.
+        evaluation_context_.Reset(isolate, context);
 #if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
-      // If the interactive debugger is enabled make sure to activate
-      // it before running the files passed on the command line.
-      if (i::FLAG_debugger) {
-        InstallUtilityScript(isolate);
-      }
+        // If the interactive debugger is enabled make sure to activate
+        // it before running the files passed on the command line.
+        if (i::FLAG_debugger) {
+          InstallUtilityScript(isolate);
+        }
 #endif  // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT
-    }
-    {
-      Context::Scope cscope(context);
-      PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
-      options.isolate_sources[0].Execute(isolate);
+      }
+      {
+        Context::Scope cscope(context);
+        PerIsolateData::RealmScope realm_scope(PerIsolateData::Get(isolate));
+        options.isolate_sources[0].Execute(isolate);
+      }
     }
     if (!options.last_run) {
-      context.Dispose(isolate);
       if (options.send_idle_notification) {
         const int kLongIdlePauseInMs = 1000;
         V8::ContextDisposedNotification();
@@ -2155,7 +2175,7 @@ int Shell::Main(int argc, char* argv[]) {
   {
     Initialize(isolate);
 #ifdef ENABLE_VTUNE_JIT_INTERFACE
-    vTune::InitilizeVtuneForV8();
+    vTune::InitializeVtuneForV8();
 #endif
     PerIsolateData data(isolate);
     InitializeDebugger(isolate);
index 4d9504f..c068dd9 100644 (file)
@@ -273,7 +273,7 @@ class Shell : public i::AllStatic {
   static const char* ToCString(const v8::String::Utf8Value& value);
   static void ReportException(Isolate* isolate, TryCatch* try_catch);
   static Handle<String> ReadFile(Isolate* isolate, const char* name);
-  static Persistent<Context> CreateEvaluationContext(Isolate* isolate);
+  static Local<Context> CreateEvaluationContext(Isolate* isolate);
   static int RunMain(Isolate* isolate, int argc, char* argv[]);
   static int Main(int argc, char* argv[]);
   static void Exit(int exit_code);
@@ -292,8 +292,10 @@ class Shell : public i::AllStatic {
   static void MapCounters(const char* name);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  static Handle<Object> DebugMessageDetails(Handle<String> message);
-  static Handle<Value> DebugCommandToJSONRequest(Handle<String> command);
+  static Handle<Object> DebugMessageDetails(Isolate* isolate,
+                                            Handle<String> message);
+  static Handle<Value> DebugCommandToJSONRequest(Isolate* isolate,
+                                                 Handle<String> command);
   static void DispatchDebugMessages();
 #endif  // ENABLE_DEBUGGER_SUPPORT
 #endif  // V8_SHARED
@@ -414,8 +416,8 @@ class Shell : public i::AllStatic {
                                            ExternalArrayType type,
                                            int32_t element_size);
   static void ExternalArrayWeakCallback(Isolate* isolate,
-                                        Persistent<Value> object,
-                                        void* data);
+                                        Persistent<Object>* object,
+                                        uint8_t* data);
 };
 
 
index efba8e5..02ec124 100644 (file)
@@ -76,12 +76,12 @@ Debug::~Debug() {
 
 static void PrintLn(v8::Local<v8::Value> value) {
   v8::Local<v8::String> s = value->ToString();
-  ScopedVector<char> data(s->Length() + 1);
+  ScopedVector<char> data(s->Utf8Length() + 1);
   if (data.start() == NULL) {
     V8::FatalProcessOutOfMemory("PrintLn");
     return;
   }
-  s->WriteAscii(data.start());
+  s->WriteUtf8(data.start());
   PrintF("%s\n", data.start());
 }
 
@@ -1644,6 +1644,9 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
       case Code::KEYED_STORE_IC:
         return isolate->builtins()->KeyedStoreIC_DebugBreak();
 
+      case Code::COMPARE_NIL_IC:
+        return isolate->builtins()->CompareNilIC_DebugBreak();
+
       default:
         UNREACHABLE();
     }
index 459073f..ccdc0c0 100644 (file)
@@ -418,6 +418,7 @@ class Debug {
   static void GenerateStoreICDebugBreak(MacroAssembler* masm);
   static void GenerateKeyedLoadICDebugBreak(MacroAssembler* masm);
   static void GenerateKeyedStoreICDebugBreak(MacroAssembler* masm);
+  static void GenerateCompareNilICDebugBreak(MacroAssembler* masm);
   static void GenerateReturnDebugBreak(MacroAssembler* masm);
   static void GenerateCallFunctionStubDebugBreak(MacroAssembler* masm);
   static void GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm);
index f36006c..fe71a22 100644 (file)
@@ -660,6 +660,14 @@ Handle<Object> Factory::NewNumberFromUint(uint32_t value,
 }
 
 
+Handle<HeapNumber> Factory::NewHeapNumber(double value,
+                                          PretenureFlag pretenure) {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateHeapNumber(value, pretenure), HeapNumber);
+}
+
+
 Handle<JSObject> Factory::NewNeanderObject() {
   CALL_HEAP_FUNCTION(
       isolate(),
@@ -1056,6 +1064,58 @@ Handle<JSArrayBuffer> Factory::NewJSArrayBuffer() {
 }
 
 
+Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type) {
+  JSFunction* typed_array_fun;
+  Context* native_context = isolate()->context()->native_context();
+  switch (type) {
+    case kExternalUnsignedByteArray:
+      typed_array_fun = native_context->uint8_array_fun();
+      break;
+
+    case kExternalByteArray:
+      typed_array_fun = native_context->int8_array_fun();
+      break;
+
+    case kExternalUnsignedShortArray:
+      typed_array_fun = native_context->uint16_array_fun();
+      break;
+
+    case kExternalShortArray:
+      typed_array_fun = native_context->int16_array_fun();
+      break;
+
+    case kExternalUnsignedIntArray:
+      typed_array_fun = native_context->uint32_array_fun();
+      break;
+
+    case kExternalIntArray:
+      typed_array_fun = native_context->int32_array_fun();
+      break;
+
+    case kExternalFloatArray:
+      typed_array_fun = native_context->float_array_fun();
+      break;
+
+    case kExternalDoubleArray:
+      typed_array_fun = native_context->double_array_fun();
+      break;
+
+    case kExternalPixelArray:
+      typed_array_fun = native_context->uint8c_array_fun();
+      break;
+
+    default:
+      UNREACHABLE();
+      return Handle<JSTypedArray>();
+  }
+
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObject(typed_array_fun),
+      JSTypedArray);
+}
+
+
 Handle<JSProxy> Factory::NewJSProxy(Handle<Object> handler,
                                     Handle<Object> prototype) {
   CALL_HEAP_FUNCTION(
index caac78d..5e89708 100644 (file)
@@ -267,6 +267,9 @@ class Factory {
   Handle<Object> NewNumberFromUint(uint32_t value,
                                   PretenureFlag pretenure = NOT_TENURED);
 
+  Handle<HeapNumber> NewHeapNumber(double value,
+                                   PretenureFlag pretenure = NOT_TENURED);
+
   // These objects are used by the api to create env-independent data
   // structures in the heap.
   Handle<JSObject> NewNeanderObject();
@@ -315,6 +318,8 @@ class Factory {
 
   Handle<JSArrayBuffer> NewJSArrayBuffer();
 
+  Handle<JSTypedArray> NewJSTypedArray(ExternalArrayType type);
+
   Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype);
 
   // Change the type of the argument into a JS object/function and reinitialize.
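
    The two factory helpers added above (NewHeapNumber and NewJSTypedArray) wrap heap allocation
    behind CALL_HEAP_FUNCTION, with NewJSTypedArray selecting the matching typed-array constructor
    from the native context. A hedged sketch of how V8-internal runtime code might call them,
    assuming an internal Isolate* isolate is in scope; the snippet presupposes V8-internal headers
    and is illustrative only.

    // Sketch only: allocate a tenured heap number and an empty
    // Uint8Array-backed object via the new factory helpers.
    Handle<HeapNumber> boxed = isolate->factory()->NewHeapNumber(0.5, TENURED);
    Handle<JSTypedArray> typed =
        isolate->factory()->NewJSTypedArray(kExternalUnsignedByteArray);
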
index 0a6bf67..d5d58a7 100644 (file)
@@ -166,6 +166,9 @@ DEFINE_bool(harmony_observation, false,
             "enable harmony object observation (implies harmony collections")
 DEFINE_bool(harmony_typed_arrays, false,
             "enable harmony typed arrays")
+DEFINE_bool(harmony_array_buffer, false,
+            "enable harmony array buffer")
+DEFINE_implication(harmony_typed_arrays, harmony_array_buffer)
 DEFINE_bool(harmony_generators, false, "enable harmony generators")
 DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
 DEFINE_implication(harmony, harmony_scoping)
@@ -177,7 +180,7 @@ DEFINE_implication(harmony, harmony_observation)
 DEFINE_implication(harmony, harmony_generators)
 DEFINE_implication(harmony_modules, harmony_scoping)
 DEFINE_implication(harmony_observation, harmony_collections)
-DEFINE_implication(harmony, harmony_typed_arrays)
+// TODO(dslomov): add harmony => harmony_typed_arrays

 
 // Flags for experimental implementation features.
 DEFINE_bool(packed_arrays, true, "optimizes arrays that have no holes")
@@ -190,6 +193,9 @@ DEFINE_bool(clever_optimizations,
             true,
             "Optimize object size, Array shift, DOM strings and string +")
 DEFINE_bool(pretenure_literals, true, "allocate literals in old space")
+DEFINE_bool(track_fields, false, "track fields with only smi values")
+DEFINE_bool(track_double_fields, false, "track fields with double values")
+DEFINE_implication(track_double_fields, track_fields)
 
 // Flags for data representation optimizations
 DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
@@ -225,6 +231,8 @@ DEFINE_bool(trace_gvn, false, "trace global value numbering")
 DEFINE_bool(trace_representation, false, "trace representation types")
 DEFINE_bool(trace_track_allocation_sites, false,
             "trace the tracking of allocation sites")
+DEFINE_bool(trace_migration, false, "trace object migration")
+DEFINE_bool(trace_generalization, false, "trace map generalization")
 DEFINE_bool(stress_pointer_maps, false, "pointer map for every instruction")
 DEFINE_bool(stress_environments, false, "environment for every instruction")
 DEFINE_int(deopt_every_n_times,
index 83b37a5..1228ccf 100644 (file)
@@ -98,6 +98,12 @@ inline StackHandler::Kind StackHandler::kind() const {
 }
 
 
+inline unsigned StackHandler::index() const {
+  const int offset = StackHandlerConstants::kStateOffset;
+  return IndexField::decode(Memory::unsigned_at(address() + offset));
+}
+
+
 inline Object** StackHandler::context_address() const {
   const int offset = StackHandlerConstants::kContextOffset;
   return reinterpret_cast<Object**>(address() + offset);
@@ -213,6 +219,34 @@ Object* JavaScriptFrame::GetParameter(int index) const {
 }
 
 
+inline Address JavaScriptFrame::GetOperandSlot(int index) const {
+  Address base = fp() + JavaScriptFrameConstants::kLocal0Offset;
+  ASSERT(IsAddressAligned(base, kPointerSize));
+  ASSERT_EQ(type(), JAVA_SCRIPT);
+  ASSERT_LT(index, ComputeOperandsCount());
+  ASSERT_LE(0, index);
+  // Operand stack grows down.
+  return base - index * kPointerSize;
+}
+
+
+inline Object* JavaScriptFrame::GetOperand(int index) const {
+  return Memory::Object_at(GetOperandSlot(index));
+}
+
+
+inline int JavaScriptFrame::ComputeOperandsCount() const {
+  Address base = fp() + JavaScriptFrameConstants::kLocal0Offset;
+  // Base points to low address of first operand and stack grows down, so add
+  // kPointerSize to get the actual stack size.
+  intptr_t stack_size_in_bytes = (base + kPointerSize) - sp();
+  ASSERT(IsAligned(stack_size_in_bytes, kPointerSize));
+  ASSERT(type() == JAVA_SCRIPT);
+  ASSERT(stack_size_in_bytes >= 0);
+  return static_cast<int>(stack_size_in_bytes >> kPointerSizeLog2);
+}
+
+
 inline Object* JavaScriptFrame::receiver() const {
   return GetParameter(-1);
 }
index aaf8c79..152cd30 100644 (file)
@@ -840,6 +840,72 @@ void JavaScriptFrame::PrintTop(Isolate* isolate,
 }
 
 
+void JavaScriptFrame::SaveOperandStack(FixedArray* store,
+                                       int* stack_handler_index) const {
+  int operands_count = store->length();
+  ASSERT_LE(operands_count, ComputeOperandsCount());
+
+  // Visit the stack in LIFO order, saving operands and stack handlers into the
+  // array.  The saved stack handlers store a link to the next stack handler,
+  // which will allow RestoreOperandStack to rewind the handlers.
+  StackHandlerIterator it(this, top_handler());
+  int i = operands_count - 1;
+  *stack_handler_index = -1;
+  for (; !it.done(); it.Advance()) {
+    StackHandler* handler = it.handler();
+    // Save operands pushed after the handler was pushed.
+    for (; GetOperandSlot(i) < handler->address(); i--) {
+      store->set(i, GetOperand(i));
+    }
+    ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount);
+    ASSERT_EQ(handler->address(), GetOperandSlot(i));
+    int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
+    handler->Unwind(isolate(), store, next_stack_handler_index,
+                    *stack_handler_index);
+    *stack_handler_index = next_stack_handler_index;
+    i -= StackHandlerConstants::kSlotCount;
+  }
+
+  // Save any remaining operands.
+  for (; i >= 0; i--) {
+    store->set(i, GetOperand(i));
+  }
+}
+
+
+void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
+                                          int stack_handler_index) {
+  int operands_count = store->length();
+  ASSERT_LE(operands_count, ComputeOperandsCount());
+  int i = 0;
+  while (i <= stack_handler_index) {
+    if (i < stack_handler_index) {
+      // An operand.
+      ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
+      Memory::Object_at(GetOperandSlot(i)) = store->get(i);
+      i++;
+    } else {
+      // A stack handler.
+      ASSERT_EQ(i, stack_handler_index);
+      // The FixedArray store grows up.  The stack grows down.  So the operand
+      // slot for i actually points to the bottom of the top word in the
+      // handler.  The base of the StackHandler* is the address of the bottom
+      // word, which will be the last slot that is in the handler.
+      int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
+      StackHandler *handler =
+          StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
+      stack_handler_index = handler->Rewind(isolate(), store, i, fp());
+      i += StackHandlerConstants::kSlotCount;
+    }
+  }
+
+  for (; i < operands_count; i++) {
+    ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
+    Memory::Object_at(GetOperandSlot(i)) = store->get(i);
+  }
+}
+
+
 void FrameSummary::Print() {
   PrintF("receiver: ");
   receiver_->ShortPrint();
@@ -1438,6 +1504,60 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
 
 // -------------------------------------------------------------------------
 
+
+void StackHandler::Unwind(Isolate* isolate,
+                          FixedArray* array,
+                          int offset,
+                          int previous_handler_offset) const {
+  STATIC_ASSERT(StackHandlerConstants::kSlotCount == 5);
+  ASSERT_LE(0, offset);
+  ASSERT_GE(array->length(), offset + 5);
+  // Unwinding a stack handler into an array chains it in the opposite
+  // direction, re-using the "next" slot as a "previous" link, so that stack
+  // handlers can be later re-wound in the correct order.  Decode the "state"
+  // slot into "index" and "kind" and store them separately, using the fp slot.
+  array->set(offset, Smi::FromInt(previous_handler_offset));        // next
+  array->set(offset + 1, *code_address());                          // code
+  array->set(offset + 2, Smi::FromInt(static_cast<int>(index())));  // state
+  array->set(offset + 3, *context_address());                       // context
+  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind())));   // fp
+
+  *isolate->handler_address() = next()->address();
+}
+
+
+int StackHandler::Rewind(Isolate* isolate,
+                         FixedArray* array,
+                         int offset,
+                         Address fp) {
+  STATIC_ASSERT(StackHandlerConstants::kSlotCount == 5);
+  ASSERT_LE(0, offset);
+  ASSERT_GE(array->length(), offset + 5);
+  Smi* prev_handler_offset = Smi::cast(array->get(offset));
+  Code* code = Code::cast(array->get(offset + 1));
+  Smi* smi_index = Smi::cast(array->get(offset + 2));
+  Object* context = array->get(offset + 3);
+  Smi* smi_kind = Smi::cast(array->get(offset + 4));
+
+  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
+      IndexField::encode(static_cast<unsigned>(smi_index->value()));
+
+  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
+      *isolate->handler_address();
+  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
+  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
+  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
+      context;
+  Memory::Address_at(address() + StackHandlerConstants::kFPOffset) = fp;
+
+  *isolate->handler_address() = address();
+
+  return prev_handler_offset->value();
+}
+
+
+// -------------------------------------------------------------------------
+
 int NumRegs(RegList reglist) {
   return CompilerIntrinsics::CountSetBits(reglist);
 }
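
    StackHandler::Unwind above flattens each handler into five FixedArray slots and Rewind
    re-creates the handler from them; the layout is fixed by the array->set() calls in the hunk.
    A small illustrative enum restating that layout (the enum itself is not part of the patch).

    // Slot layout of one unwound stack handler inside the FixedArray store.
    enum SavedStackHandlerSlot {
      kSavedNext    = 0,  // Smi: offset of the previously unwound handler, -1 if none
      kSavedCode    = 1,  // the handler's Code object
      kSavedIndex   = 2,  // Smi: index decoded from the state word
      kSavedContext = 3,  // context captured when the handler was pushed
      kSavedKind    = 4   // Smi: handler kind, re-encoded into state on rewind
    };
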
index 11e8d28..3c44f5e 100644 (file)
@@ -93,6 +93,7 @@ class StackHandlerConstants : public AllStatic {
   static const int kFPOffset       = 4 * kPointerSize;
 
   static const int kSize = kFPOffset + kPointerSize;
+  static const int kSlotCount = kSize >> kPointerSizeLog2;
 };
 
 
@@ -131,9 +132,15 @@ class StackHandler BASE_EMBEDDED {
   inline bool is_catch() const;
   inline bool is_finally() const;
 
+  // Generator support to preserve stack handlers.
+  void Unwind(Isolate* isolate, FixedArray* array, int offset,
+              int previous_handler_offset) const;
+  int Rewind(Isolate* isolate, FixedArray* array, int offset, Address fp);
+
  private:
   // Accessors.
   inline Kind kind() const;
+  inline unsigned index() const;
 
   inline Object** context_address() const;
   inline Object** code_address() const;
@@ -536,6 +543,15 @@ class JavaScriptFrame: public StandardFrame {
     return GetNumberOfIncomingArguments();
   }
 
+  // Access the operand stack.
+  inline Address GetOperandSlot(int index) const;
+  inline Object* GetOperand(int index) const;
+  inline int ComputeOperandsCount() const;
+
+  // Generator support to preserve operand stack and stack handlers.
+  void SaveOperandStack(FixedArray* store, int* stack_handler_index) const;
+  void RestoreOperandStack(FixedArray* store, int stack_handler_index);
+
   // Debugger access.
   void SetParameterValue(int index, Object* value) const;
 
index 3734ae5..32242b2 100644 (file)
@@ -410,6 +410,11 @@ class FullCodeGenerator: public AstVisitor {
   // this has to be a separate pass _before_ populating or executing any module.
   void AllocateModules(ZoneList<Declaration*>* declarations);
 
+  // Generator code to return a fresh iterator result object.  The "value"
+  // property is set to a value popped from the stack, and "done" is set
+  // according to the argument.
+  void EmitReturnIteratorResult(bool done);
+
   // Try to perform a comparison as a fast inlined literal compare if
   // the operands allow it.  Returns true if the compare operations
   // has been matched and all code generated; false otherwise.
index 7ee89d7..29432bb 100644 (file)
@@ -25,6 +25,9 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "api.h"
@@ -232,7 +235,7 @@ class GlobalHandles::Node {
 
   void MakeWeak(GlobalHandles* global_handles,
                 void* parameter,
-                WeakReferenceCallback weak_reference_callback,
+                RevivableCallback weak_reference_callback,
                 NearDeathCallback near_death_callback) {
     ASSERT(state() != FREE);
     set_state(WEAK);
@@ -264,7 +267,7 @@ class GlobalHandles::Node {
     set_state(NEAR_DEATH);
     set_parameter(NULL);
 
-    v8::Persistent<v8::Object> object = ToApi<v8::Object>(handle());
+    v8::Persistent<v8::Value> object = ToApi<v8::Value>(handle());
     {
       // Check that we are not passing a finalized external string to
       // the callback.
@@ -276,9 +279,11 @@ class GlobalHandles::Node {
       VMState<EXTERNAL> state(isolate);
       if (near_death_callback_ != NULL) {
         if (IsWeakCallback::decode(flags_)) {
-          WeakReferenceCallback callback =
-              reinterpret_cast<WeakReferenceCallback>(near_death_callback_);
-          callback(object, par);
+          RevivableCallback callback =
+              reinterpret_cast<RevivableCallback>(near_death_callback_);
+          callback(reinterpret_cast<v8::Isolate*>(isolate),
+                   &object,
+                   par);
         } else {
           near_death_callback_(reinterpret_cast<v8::Isolate*>(isolate),
                                object,
@@ -490,9 +495,9 @@ void GlobalHandles::Destroy(Object** location) {
 
 void GlobalHandles::MakeWeak(Object** location,
                              void* parameter,
-                             WeakReferenceCallback weak_reference_callback,
+                             RevivableCallback weak_reference_callback,
                              NearDeathCallback near_death_callback) {
-  ASSERT(near_death_callback != NULL);
+  ASSERT((weak_reference_callback == NULL) != (near_death_callback == NULL));
   Node::FromLocation(location)->MakeWeak(this,
                                          parameter,
                                          weak_reference_callback,
index 81e1476..f502dfa 100644 (file)
@@ -130,6 +130,8 @@ class GlobalHandles {
   // Destroy a global handle.
   void Destroy(Object** location);
 
+  typedef WeakReferenceCallbacks<v8::Value, void>::Revivable RevivableCallback;
+
   // Make the global handle weak and set the callback parameter for the
   // handle.  When the garbage collector recognizes that only weak global
   // handles point to an object the handles are cleared and the callback
@@ -138,7 +140,7 @@ class GlobalHandles {
   // reason is that Smi::FromInt(0) does not change during garage collection.
   void MakeWeak(Object** location,
                 void* parameter,
-                WeakReferenceCallback weak_reference_callback,
+                RevivableCallback weak_reference_callback,
                 NearDeathCallback near_death_callback);
 
   void RecordStats(HeapStats* stats);
index 5a3e9ed..4f5e9fe 100644 (file)
@@ -53,8 +53,9 @@ Handle<T>::Handle(T* obj, Isolate* isolate) {
 
 template <typename T>
 inline bool Handle<T>::is_identical_to(const Handle<T> other) const {
-  ASSERT(location_ == NULL ||
-         reinterpret_cast<Address>(*location_) != kZapValue);
+  ASSERT(location_ == NULL || !(*location_)->IsFailure());
+  if (location_ == other.location_) return true;
+  if (location_ == NULL || other.location_ == NULL) return false;
   // Dereferencing deferred handles to check object equality is safe.
   SLOW_ASSERT(IsDereferenceAllowed(true) && other.IsDereferenceAllowed(true));
   return *location_ == *other.location_;
@@ -63,24 +64,22 @@ inline bool Handle<T>::is_identical_to(const Handle<T> other) const {
 
 template <typename T>
 inline T* Handle<T>::operator*() const {
-  ASSERT(location_ != NULL);
-  ASSERT(reinterpret_cast<Address>(*location_) != kHandleZapValue);
+  ASSERT(location_ != NULL && !(*location_)->IsFailure());
   SLOW_ASSERT(IsDereferenceAllowed(false));
   return *BitCast<T**>(location_);
 }
 
 template <typename T>
 inline T** Handle<T>::location() const {
-  ASSERT(location_ == NULL ||
-         reinterpret_cast<Address>(*location_) != kZapValue);
-  SLOW_ASSERT(IsDereferenceAllowed(false));
+  ASSERT(location_ == NULL || !(*location_)->IsFailure());
+  SLOW_ASSERT(location_ == NULL || IsDereferenceAllowed(false));
   return location_;
 }
 
 #ifdef DEBUG
 template <typename T>
 bool Handle<T>::IsDereferenceAllowed(bool allow_deferred) const {
-  if (location_ == NULL) return true;
+  ASSERT(location_ != NULL);
   Object* object = *BitCast<T**>(location_);
   if (object->IsSmi()) return true;
   HeapObject* heap_object = HeapObject::cast(object);
index f937426..b71978b 100644 (file)
@@ -650,6 +650,10 @@ inline bool Heap::allow_allocation(bool new_state) {
   return old;
 }
 
+inline void Heap::set_allow_allocation(bool allocation_allowed) {
+  allocation_allowed_ = allocation_allowed;
+}
+
 #endif
 
 
@@ -864,33 +868,41 @@ DisallowAllocationFailure::~DisallowAllocationFailure() {
 
 
 #ifdef DEBUG
-AssertNoAllocation::AssertNoAllocation() {
-  Isolate* isolate = ISOLATE;
-  active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
-  if (active_) {
-    old_state_ = isolate->heap()->allow_allocation(false);
+bool EnterAllocationScope(Isolate* isolate, bool allow_allocation) {
+  bool active = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
+  bool last_state = isolate->heap()->IsAllocationAllowed();
+  if (active) {
+    // TODO(yangguo): Make HandleDereferenceGuard avoid isolate mutation in the
+    // same way if running on the optimizer thread.
+    isolate->heap()->set_allow_allocation(allow_allocation);
   }
+  return last_state;
 }
 
 
-AssertNoAllocation::~AssertNoAllocation() {
-  if (active_) HEAP->allow_allocation(old_state_);
+void ExitAllocationScope(Isolate* isolate, bool last_state) {
+  bool active = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
+  if (active) {
+    isolate->heap()->set_allow_allocation(last_state);
+  }
 }
 
 
-DisableAssertNoAllocation::DisableAssertNoAllocation() {
-  Isolate* isolate = ISOLATE;
-  active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
-  if (active_) {
-    old_state_ = isolate->heap()->allow_allocation(true);
-  }
+AssertNoAllocation::AssertNoAllocation()
+    : last_state_(EnterAllocationScope(ISOLATE, false)) {
 }
 
+AssertNoAllocation::~AssertNoAllocation() {
+  ExitAllocationScope(ISOLATE, last_state_);
+}
 
-DisableAssertNoAllocation::~DisableAssertNoAllocation() {
-  if (active_) HEAP->allow_allocation(old_state_);
+DisableAssertNoAllocation::DisableAssertNoAllocation()
+  : last_state_(EnterAllocationScope(ISOLATE, true)) {
 }
 
+DisableAssertNoAllocation::~DisableAssertNoAllocation() {
+  ExitAllocationScope(ISOLATE, last_state_);
+}
 #else
 
 AssertNoAllocation::AssertNoAllocation() { }
index 3d890f7..f488304 100644 (file)
@@ -1309,8 +1309,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
   if (js_obj->HasFastProperties()) {
     DescriptorArray* descs = js_obj->map()->instance_descriptors();
     int real_size = js_obj->map()->NumberOfOwnDescriptors();
-    for (int i = 0; i < descs->number_of_descriptors(); i++) {
-      if (descs->GetDetails(i).descriptor_index() > real_size) continue;
+    for (int i = 0; i < real_size; i++) {
       switch (descs->GetType(i)) {
         case FIELD: {
           int index = descs->GetFieldIndex(i);
@@ -1332,7 +1331,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
                   js_obj->GetInObjectPropertyOffset(index));
             }
           } else {
-            Object* value = js_obj->FastPropertyAt(index);
+            Object* value = js_obj->RawFastPropertyAt(index);
             if (k != heap_->hidden_string()) {
               SetPropertyReference(js_obj, entry, k, value);
             } else {
index fb2f9d9..6139080 100644 (file)
@@ -3176,7 +3176,8 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
 
 
 MaybeObject* Heap::NumberToString(Object* number,
-                                  bool check_number_string_cache) {
+                                  bool check_number_string_cache,
+                                  PretenureFlag pretenure) {
   isolate_->counters()->number_to_string_runtime()->Increment();
   if (check_number_string_cache) {
     Object* cached = GetNumberStringCache(number);
@@ -3197,7 +3198,8 @@ MaybeObject* Heap::NumberToString(Object* number,
   }
 
   Object* js_string;
-  MaybeObject* maybe_js_string = AllocateStringFromOneByte(CStrVector(str));
+  MaybeObject* maybe_js_string =
+      AllocateStringFromOneByte(CStrVector(str), pretenure);
   if (maybe_js_string->ToObject(&js_string)) {
     SetNumberStringCache(number, String::cast(js_string));
   }
@@ -4156,7 +4158,9 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
       for (int i = 0; i < count; i++) {
         String* name = fun->shared()->GetThisPropertyAssignmentName(i);
         ASSERT(name->IsInternalizedString());
-        FieldDescriptor field(name, i, NONE, i + 1);
+        // TODO(verwaest): Since we cannot update the boilerplate's map yet,
+        // initialize to the worst case.
+        FieldDescriptor field(name, i, NONE, Representation::Tagged());
         descriptors->Set(i, &field, witness);
       }
       descriptors->Sort();
@@ -4336,8 +4340,7 @@ MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
   ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
   AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
   if (to_kind != initial_map->elements_kind()) {
-    MaybeObject* maybe_new_map = constructor->GetElementsTransitionMap(
-        isolate(), to_kind);
+    MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind);
     if (!maybe_new_map->To(&initial_map)) return maybe_new_map;
     // Possibly alter the mode, since we found an updated elements kind
     // in the type info cell.
@@ -4585,12 +4588,10 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
   // The global object might be created from an object template with accessors.
   // Fill these accessors into the dictionary.
   DescriptorArray* descs = map->instance_descriptors();
-  for (int i = 0; i < descs->number_of_descriptors(); i++) {
+  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
     PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
-    PropertyDetails d = PropertyDetails(details.attributes(),
-                                        CALLBACKS,
-                                        details.descriptor_index());
+    PropertyDetails d = PropertyDetails(details.attributes(), CALLBACKS, i + 1);
     Object* value = descs->GetCallbacksObject(i);
     MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
     if (!maybe_value->ToObject(&value)) return maybe_value;
index 7722079..add42c0 100644 (file)
@@ -1476,6 +1476,7 @@ class Heap {
 
 #ifdef DEBUG
   bool IsAllocationAllowed() { return allocation_allowed_; }
+  inline void set_allow_allocation(bool allocation_allowed);
   inline bool allow_allocation(bool enable);
 
   bool disallow_allocation_failure() {
@@ -1530,6 +1531,14 @@ class Heap {
     return new_space_high_promotion_mode_active_;
   }
 
+  inline PretenureFlag GetPretenureMode() {
+    return new_space_high_promotion_mode_active_ ? TENURED : NOT_TENURED;
+  }
+
+  inline Address* NewSpaceHighPromotionModeActiveAddress() {
+    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+  }
+
   inline intptr_t PromotedTotalSize() {
     return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
   }
@@ -1608,7 +1617,8 @@ class Heap {
   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
 
   MUST_USE_RESULT MaybeObject* NumberToString(
-      Object* number, bool check_number_string_cache = true);
+      Object* number, bool check_number_string_cache = true,
+      PretenureFlag pretenure = NOT_TENURED);
   MUST_USE_RESULT MaybeObject* Uint32ToString(
       uint32_t value, bool check_number_string_cache = true);
 
@@ -1975,7 +1985,8 @@ class Heap {
 
   // Indicates that the new space should be kept small due to high promotion
   // rates caused by the mutator allocating a lot of long-lived objects.
-  bool new_space_high_promotion_mode_active_;
+  // TODO(hpayer): change to bool if no longer accessed from generated code
+  intptr_t new_space_high_promotion_mode_active_;
 
   // Limit that triggers a global GC on the next (normally caused) GC.  This
   // is checked when we have already decided to do a GC to help determine
@@ -2691,6 +2702,13 @@ class DescriptorLookupCache {
 // { AssertNoAllocation nogc;
 //   ...
 // }
+
+#ifdef DEBUG
+inline bool EnterAllocationScope(Isolate* isolate, bool allow_allocation);
+inline void ExitAllocationScope(Isolate* isolate, bool last_state);
+#endif
+
+
 class AssertNoAllocation {
  public:
   inline AssertNoAllocation();
@@ -2698,8 +2716,7 @@ class AssertNoAllocation {
 
 #ifdef DEBUG
  private:
-  bool old_state_;
-  bool active_;
+  bool last_state_;
 #endif
 };
 
@@ -2711,8 +2728,7 @@ class DisableAssertNoAllocation {
 
 #ifdef DEBUG
  private:
-  bool old_state_;
-  bool active_;
+  bool last_state_;
 #endif
 };
 
index 5f0cd9d..8f8c59e 100644 (file)
@@ -54,20 +54,6 @@ HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
 #undef DEFINE_COMPILE
 
 
-const char* Representation::Mnemonic() const {
-  switch (kind_) {
-    case kNone: return "v";
-    case kTagged: return "t";
-    case kDouble: return "d";
-    case kInteger32: return "i";
-    case kExternal: return "x";
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
-
-
 int HValue::LoopWeight() const {
   const int w = FLAG_loop_weight;
   static const int weights[] = { 1, w, w*w, w*w*w, w*w*w*w };
@@ -1615,15 +1601,6 @@ void HCheckMaps::SetSideEffectDominator(GVNFlag side_effect,
 }
 
 
-void HLoadElements::PrintDataTo(StringStream* stream) {
-  value()->PrintNameTo(stream);
-  if (HasTypeCheck()) {
-    stream->Add(" ");
-    typecheck()->PrintNameTo(stream);
-  }
-}
-
-
 void HCheckMaps::PrintDataTo(StringStream* stream) {
   value()->PrintNameTo(stream);
   stream->Add(" [%p", *map_set()->first());
@@ -1972,6 +1949,10 @@ void HPhi::DeleteFromGraph() {
 void HPhi::InitRealUses(int phi_id) {
   // Initialize real uses.
   phi_id_ = phi_id;
+  // Compute a conservative approximation of truncating uses before inferring
+  // representations. The proper, exact computation will be done later, when
+  // inserting representation changes.
+  SetFlag(kTruncatingToInt32);
   for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
     HValue* value = it.value();
     if (!value->IsPhi()) {
@@ -1981,6 +1962,9 @@ void HPhi::InitRealUses(int phi_id) {
         PrintF("#%d Phi is used by real #%d %s as %s\n",
                id(), value->id(), value->Mnemonic(), rep.Mnemonic());
       }
+      if (!value->IsSimulate() && !value->CheckFlag(kTruncatingToInt32)) {
+        ClearFlag(kTruncatingToInt32);
+      }
     }
   }
 }
@@ -2076,7 +2060,12 @@ HConstant::HConstant(Handle<Object> handle, Representation r)
     has_int32_value_(false),
     has_double_value_(false),
     is_internalized_string_(false),
+    is_not_in_new_space_(true),
     boolean_value_(handle->BooleanValue()) {
+  if (handle_->IsHeapObject()) {
+    Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
+    is_not_in_new_space_ = !heap->InNewSpace(*handle);
+  }
   if (handle_->IsNumber()) {
     double n = handle_->Number();
     has_int32_value_ = IsInteger32(n);
@@ -2105,12 +2094,14 @@ HConstant::HConstant(Handle<Object> handle,
                      Representation r,
                      HType type,
                      bool is_internalize_string,
+                     bool is_not_in_new_space,
                      bool boolean_value)
     : handle_(handle),
       unique_id_(unique_id),
       has_int32_value_(false),
       has_double_value_(false),
       is_internalized_string_(is_internalize_string),
+      is_not_in_new_space_(is_not_in_new_space),
       boolean_value_(boolean_value),
       type_from_value_(type) {
   ASSERT(!handle.is_null());
@@ -2122,12 +2113,14 @@ HConstant::HConstant(Handle<Object> handle,
 
 HConstant::HConstant(int32_t integer_value,
                      Representation r,
+                     bool is_not_in_new_space,
                      Handle<Object> optional_handle)
     : handle_(optional_handle),
       unique_id_(),
       has_int32_value_(true),
       has_double_value_(true),
       is_internalized_string_(false),
+      is_not_in_new_space_(is_not_in_new_space),
       boolean_value_(integer_value != 0),
       int32_value_(integer_value),
       double_value_(FastI2D(integer_value)) {
@@ -2137,12 +2130,14 @@ HConstant::HConstant(int32_t integer_value,
 
 HConstant::HConstant(double double_value,
                      Representation r,
+                     bool is_not_in_new_space,
                      Handle<Object> optional_handle)
     : handle_(optional_handle),
       unique_id_(),
       has_int32_value_(IsInteger32(double_value)),
       has_double_value_(true),
       is_internalized_string_(false),
+      is_not_in_new_space_(is_not_in_new_space),
       boolean_value_(double_value != 0 && !std::isnan(double_value)),
       int32_value_(DoubleToInt32(double_value)),
       double_value_(double_value) {
@@ -2162,26 +2157,35 @@ void HConstant::Initialize(Representation r) {
 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
   if (r.IsInteger32() && !has_int32_value_) return NULL;
   if (r.IsDouble() && !has_double_value_) return NULL;
-  if (has_int32_value_) return new(zone) HConstant(int32_value_, r, handle_);
-  if (has_double_value_) return new(zone) HConstant(double_value_, r, handle_);
+  if (has_int32_value_) {
+    return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, handle_);
+  }
+  if (has_double_value_) {
+    return new(zone) HConstant(double_value_, r, is_not_in_new_space_, handle_);
+  }
   ASSERT(!handle_.is_null());
   return new(zone) HConstant(handle_,
                              unique_id_,
                              r,
                              type_from_value_,
                              is_internalized_string_,
+                             is_not_in_new_space_,
                              boolean_value_);
 }
 
 
 HConstant* HConstant::CopyToTruncatedInt32(Zone* zone) const {
   if (has_int32_value_) {
-    return new(zone) HConstant(
-        int32_value_, Representation::Integer32(), handle_);
+    return new(zone) HConstant(int32_value_,
+                               Representation::Integer32(),
+                               is_not_in_new_space_,
+                               handle_);
   }
   if (has_double_value_) {
-    return new(zone) HConstant(
-        DoubleToInt32(double_value_), Representation::Integer32(), handle_);
+    return new(zone) HConstant(DoubleToInt32(double_value_),
+                               Representation::Integer32(),
+                               is_not_in_new_space_,
+                               handle_);
   }
   return NULL;
 }
@@ -2517,6 +2521,8 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
        i < types->length() && types_.length() < kMaxLoadPolymorphism;
        ++i) {
     Handle<Map> map = types->at(i);
+    // Deprecated maps are updated to the current map in the type oracle.
+    ASSERT(!map->is_deprecated());
     LookupResult lookup(map->GetIsolate());
     map->LookupDescriptor(NULL, *name, &lookup);
     if (lookup.IsFound()) {
@@ -2528,6 +2534,12 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
           } else {
             SetGVNFlag(kDependsOnBackingStoreFields);
           }
+          if (FLAG_track_double_fields &&
+              lookup.representation().IsDouble()) {
+            // Since the value needs to be boxed, use a generic handler for
+            // loading doubles.
+            continue;
+          }
           types_.Add(types->at(i), zone);
           break;
         }
@@ -3501,14 +3513,7 @@ void HPhi::SimplifyConstantInputs() {
 
 void HPhi::InferRepresentation(HInferRepresentation* h_infer) {
   ASSERT(CheckFlag(kFlexibleRepresentation));
-  // If there are non-Phi uses, and all of them have observed the same
-  // representation, than that's what this Phi is going to use.
-  Representation new_rep = RepresentationObservedByAllNonPhiUses();
-  if (!new_rep.IsNone()) {
-    UpdateRepresentation(new_rep, h_infer, "unanimous use observations");
-    return;
-  }
-  new_rep = RepresentationFromInputs();
+  Representation new_rep = RepresentationFromInputs();
   UpdateRepresentation(new_rep, h_infer, "inputs");
   new_rep = RepresentationFromUses();
   UpdateRepresentation(new_rep, h_infer, "uses");
@@ -3517,22 +3522,6 @@ void HPhi::InferRepresentation(HInferRepresentation* h_infer) {
 }
 
 
-Representation HPhi::RepresentationObservedByAllNonPhiUses() {
-  int non_phi_use_count = 0;
-  for (int i = Representation::kInteger32;
-       i < Representation::kNumRepresentations; ++i) {
-    non_phi_use_count += non_phi_uses_[i];
-  }
-  if (non_phi_use_count <= 1) return Representation::None();
-  for (int i = 0; i < Representation::kNumRepresentations; ++i) {
-    if (non_phi_uses_[i] == non_phi_use_count) {
-      return Representation::FromKind(static_cast<Representation::Kind>(i));
-    }
-  }
-  return Representation::None();
-}
-
-
 Representation HPhi::RepresentationFromInputs() {
   bool double_occurred = false;
   bool int32_occurred = false;
index aa89f71..3ea99d4 100644 (file)
@@ -135,7 +135,6 @@ class LChunkBuilder;
   V(IsUndetectableAndBranch)                   \
   V(LeaveInlined)                              \
   V(LoadContextSlot)                           \
-  V(LoadElements)                              \
   V(LoadExternalArrayPointer)                  \
   V(LoadFunctionPrototype)                     \
   V(LoadGlobalCell)                            \
@@ -207,6 +206,7 @@ class LChunkBuilder;
   V(Calls)                                     \
   V(InobjectFields)                            \
   V(BackingStoreFields)                        \
+  V(DoubleFields)                              \
   V(ElementsKind)                              \
   V(ElementsPointer)                           \
   V(ArrayElements)                             \
@@ -304,58 +304,6 @@ class Range: public ZoneObject {
 };
 
 
-class Representation {
- public:
-  enum Kind {
-    kNone,
-    kInteger32,
-    kDouble,
-    kTagged,
-    kExternal,
-    kNumRepresentations
-  };
-
-  Representation() : kind_(kNone) { }
-
-  static Representation None() { return Representation(kNone); }
-  static Representation Tagged() { return Representation(kTagged); }
-  static Representation Integer32() { return Representation(kInteger32); }
-  static Representation Double() { return Representation(kDouble); }
-  static Representation External() { return Representation(kExternal); }
-
-  static Representation FromKind(Kind kind) { return Representation(kind); }
-
-  bool Equals(const Representation& other) {
-    return kind_ == other.kind_;
-  }
-
-  bool is_more_general_than(const Representation& other) {
-    ASSERT(kind_ != kExternal);
-    ASSERT(other.kind_ != kExternal);
-    return kind_ > other.kind_;
-  }
-
-  Kind kind() const { return static_cast<Kind>(kind_); }
-  bool IsNone() const { return kind_ == kNone; }
-  bool IsTagged() const { return kind_ == kTagged; }
-  bool IsInteger32() const { return kind_ == kInteger32; }
-  bool IsDouble() const { return kind_ == kDouble; }
-  bool IsExternal() const { return kind_ == kExternal; }
-  bool IsSpecialization() const {
-    return kind_ == kInteger32 || kind_ == kDouble;
-  }
-  const char* Mnemonic() const;
-
- private:
-  explicit Representation(Kind k) : kind_(k) { }
-
-  // Make sure kind fits in int8.
-  STATIC_ASSERT(kNumRepresentations <= (1 << kBitsPerByte));
-
-  int8_t kind_;
-};
-
-
 class UniqueValueId {
  public:
   UniqueValueId() : raw_address_(NULL) { }
@@ -2405,15 +2353,20 @@ class HCallNewArray: public HCallNew {
               Handle<JSGlobalPropertyCell> type_cell)
       : HCallNew(context, constructor, argument_count),
         type_cell_(type_cell) {
+    elements_kind_ = static_cast<ElementsKind>(
+        Smi::cast(type_cell->value())->value());
   }
 
   Handle<JSGlobalPropertyCell> property_cell() const {
     return type_cell_;
   }
 
+  ElementsKind elements_kind() const { return elements_kind_; }
+
   DECLARE_CONCRETE_INSTRUCTION(CallNewArray)
 
  private:
+  ElementsKind elements_kind_;
   Handle<JSGlobalPropertyCell> type_cell_;
 };
 
@@ -2637,39 +2590,6 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
 };
 
 
-class HLoadElements: public HTemplateInstruction<2> {
- public:
-  HLoadElements(HValue* value, HValue* typecheck) {
-    SetOperandAt(0, value);
-    SetOperandAt(1, typecheck != NULL ? typecheck : value);
-    set_representation(Representation::Tagged());
-    SetFlag(kUseGVN);
-    SetGVNFlag(kDependsOnElementsPointer);
-  }
-
-  HValue* value() { return OperandAt(0); }
-  HValue* typecheck() {
-    ASSERT(HasTypeCheck());
-    return OperandAt(1);
-  }
-  bool HasTypeCheck() const { return OperandAt(0) != OperandAt(1); }
-
-  virtual void PrintDataTo(StringStream* stream);
-
-  virtual Representation RequiredInputRepresentation(int index) {
-    return Representation::Tagged();
-  }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadElements)
-
- protected:
-  virtual bool DataEquals(HValue* other) { return true; }
-
- private:
-  virtual bool IsDeletable() const { return true; }
-};
-
-
 class HLoadExternalArrayPointer: public HUnaryOperation {
  public:
   explicit HLoadExternalArrayPointer(HValue* value)
@@ -3065,7 +2985,6 @@ class HPhi: public HValue {
 
   virtual Range* InferRange(Zone* zone);
   virtual void InferRepresentation(HInferRepresentation* h_infer);
-  Representation RepresentationObservedByAllNonPhiUses();
   Representation RepresentationFromUseRequirements();
   virtual Representation RequiredInputRepresentation(int index) {
     return representation();
@@ -3238,19 +3157,24 @@ class HConstant: public HTemplateInstruction<0> {
   HConstant(Handle<Object> handle, Representation r);
   HConstant(int32_t value,
             Representation r,
+            bool is_not_in_new_space = true,
             Handle<Object> optional_handle = Handle<Object>::null());
   HConstant(double value,
             Representation r,
+            bool is_not_in_new_space = true,
             Handle<Object> optional_handle = Handle<Object>::null());
   HConstant(Handle<Object> handle,
             UniqueValueId unique_id,
             Representation r,
             HType type,
             bool is_internalized_string,
+            bool is_not_in_new_space,
             bool boolean_value);
 
   Handle<Object> handle() {
     if (handle_.is_null()) {
+      // Default arguments to is_not_in_new_space depend on this heap number
+      // being tenured so that it's guaranteed not to be located in new space.
       handle_ = FACTORY->NewNumber(double_value_, TENURED);
     }
     ALLOW_HANDLE_DEREF(Isolate::Current(), "smi check");
@@ -3265,6 +3189,10 @@ class HConstant: public HTemplateInstruction<0> {
          std::isnan(double_value_));
   }
 
+  bool NotInNewSpace() const {
+    return is_not_in_new_space_;
+  }
+
   bool ImmortalImmovable() const {
     if (has_int32_value_) {
       return false;
@@ -3411,6 +3339,7 @@ class HConstant: public HTemplateInstruction<0> {
   bool has_int32_value_ : 1;
   bool has_double_value_ : 1;
   bool is_internalized_string_ : 1;  // TODO(yangguo): make this part of HType.
+  bool is_not_in_new_space_ : 1;
   bool boolean_value_ : 1;
   int32_t int32_value_;
   double double_value_;
@@ -3434,16 +3363,27 @@ class HBinaryOperation: public HTemplateInstruction<3> {
   HValue* left() { return OperandAt(1); }
   HValue* right() { return OperandAt(2); }
 
-  // TODO(kasperl): Move these helpers to the IA-32 Lithium
-  // instruction sequence builder.
-  HValue* LeastConstantOperand() {
-    if (IsCommutative() && left()->IsConstant()) return right();
-    return left();
+  // True if switching left and right operands likely generates better code.
+  bool AreOperandsBetterSwitched() {
+    if (!IsCommutative()) return false;
+
+    // Constant operands are better off on the right; they can be inlined in
+    // many situations on most platforms.
+    if (left()->IsConstant()) return true;
+    if (right()->IsConstant()) return false;
+
+    // Otherwise, if there is only one use of the right operand, it would be
+    // better off on the left for platforms that only have 2-arg arithmetic
+    // ops (e.g. ia32, x64) that clobber the left operand.
+    return (right()->UseCount() == 1);
   }
 
-  HValue* MostConstantOperand() {
-    if (IsCommutative() && left()->IsConstant()) return left();
-    return right();
+  HValue* BetterLeftOperand() {
+    return AreOperandsBetterSwitched() ? right() : left();
+  }
+
+  HValue* BetterRightOperand() {
+    return AreOperandsBetterSwitched() ? left() : right();
   }
 
   void set_observed_input_representation(int index, Representation rep) {
@@ -5265,29 +5205,45 @@ class HStoreContextSlot: public HTemplateInstruction<2> {
 
 class HLoadNamedField: public HTemplateInstruction<2> {
  public:
-  HLoadNamedField(HValue* object, bool is_in_object, int offset,
-                  HValue* typecheck = NULL)
+  HLoadNamedField(HValue* object, bool is_in_object,
+                  Representation field_representation,
+                  int offset, HValue* typecheck = NULL)
       : is_in_object_(is_in_object),
+        field_representation_(field_representation),
         offset_(offset) {
     ASSERT(object != NULL);
     SetOperandAt(0, object);
     SetOperandAt(1, typecheck != NULL ? typecheck : object);
 
-    set_representation(Representation::Tagged());
+    if (FLAG_track_fields && field_representation.IsSmi()) {
+      set_type(HType::Smi());
+      set_representation(Representation::Tagged());
+    } else if (FLAG_track_double_fields && field_representation.IsDouble()) {
+      set_representation(field_representation);
+    } else {
+      set_representation(Representation::Tagged());
+    }
     SetFlag(kUseGVN);
-    SetGVNFlag(kDependsOnMaps);
-    if (is_in_object) {
+    if (FLAG_track_double_fields && representation().IsDouble()) {
+      ASSERT(is_in_object);
+      ASSERT(offset == HeapNumber::kValueOffset);
+      SetGVNFlag(kDependsOnDoubleFields);
+    } else if (is_in_object) {
       SetGVNFlag(kDependsOnInobjectFields);
+      SetGVNFlag(kDependsOnMaps);
     } else {
       SetGVNFlag(kDependsOnBackingStoreFields);
+      SetGVNFlag(kDependsOnMaps);
     }
   }
 
   static HLoadNamedField* NewArrayLength(Zone* zone, HValue* object,
                                          HValue* typecheck,
                                          HType type = HType::Tagged()) {
+    Representation representation =
+        type.IsSmi() ? Representation::Smi() : Representation::Tagged();
     HLoadNamedField* result = new(zone) HLoadNamedField(
-        object, true, JSArray::kLengthOffset, typecheck);
+        object, true, representation, JSArray::kLengthOffset, typecheck);
     result->set_type(type);
     result->SetGVNFlag(kDependsOnArrayLengths);
     result->ClearGVNFlag(kDependsOnInobjectFields);
@@ -5302,6 +5258,7 @@ class HLoadNamedField: public HTemplateInstruction<2> {
 
   bool HasTypeCheck() const { return OperandAt(0) != OperandAt(1); }
   bool is_in_object() const { return is_in_object_; }
+  Representation field_representation() const { return representation_; }
   int offset() const { return offset_; }
 
   virtual Representation RequiredInputRepresentation(int index) {
@@ -5321,6 +5278,7 @@ class HLoadNamedField: public HTemplateInstruction<2> {
   virtual bool IsDeletable() const { return true; }
 
   bool is_in_object_;
+  Representation field_representation_;
   int offset_;
 };
 
@@ -5615,29 +5573,41 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> {
 class HStoreNamedField: public HTemplateInstruction<2> {
  public:
   HStoreNamedField(HValue* obj,
-                   Handle<String> name,
+                   Handle<Name> name,
                    HValue* val,
                    bool in_object,
+                   Representation field_representation,
                    int offset)
       : name_(name),
         is_in_object_(in_object),
+        field_representation_(field_representation),
         offset_(offset),
         transition_unique_id_(),
         new_space_dominator_(NULL) {
     SetOperandAt(0, obj);
     SetOperandAt(1, val);
     SetFlag(kTrackSideEffectDominators);
-    SetGVNFlag(kDependsOnNewSpacePromotion);
-    if (is_in_object_) {
+    if (FLAG_track_double_fields && field_representation.IsDouble()) {
+      SetGVNFlag(kChangesDoubleFields);
+    } else if (is_in_object_) {
       SetGVNFlag(kChangesInobjectFields);
+      SetGVNFlag(kDependsOnNewSpacePromotion);
     } else {
       SetGVNFlag(kChangesBackingStoreFields);
+      SetGVNFlag(kDependsOnNewSpacePromotion);
     }
   }
 
   DECLARE_CONCRETE_INSTRUCTION(StoreNamedField)
 
   virtual Representation RequiredInputRepresentation(int index) {
+    if (FLAG_track_double_fields &&
+        index == 1 && field_representation_.IsDouble()) {
+      return field_representation_;
+    } else if (FLAG_track_fields &&
+               index == 1 && field_representation_.IsSmi()) {
+      return Representation::Integer32();
+    }
     return Representation::Tagged();
   }
   virtual void SetSideEffectDominator(GVNFlag side_effect, HValue* dominator) {
@@ -5649,7 +5619,7 @@ class HStoreNamedField: public HTemplateInstruction<2> {
   HValue* object() { return OperandAt(0); }
   HValue* value() { return OperandAt(1); }
 
-  Handle<String> name() const { return name_; }
+  Handle<Name> name() const { return name_; }
   bool is_in_object() const { return is_in_object_; }
   int offset() const { return offset_; }
   Handle<Map> transition() const { return transition_; }
@@ -5658,7 +5628,12 @@ class HStoreNamedField: public HTemplateInstruction<2> {
   HValue* new_space_dominator() const { return new_space_dominator_; }
 
   bool NeedsWriteBarrier() {
-    return StoringValueNeedsWriteBarrier(value()) &&
+    ASSERT(!(FLAG_track_double_fields && field_representation_.IsDouble()) ||
+           transition_.is_null());
+    return (!FLAG_track_fields || !field_representation_.IsSmi()) &&
+        // If there is a transition, a new storage object needs to be allocated.
+        !(FLAG_track_double_fields && field_representation_.IsDouble()) &&
+        StoringValueNeedsWriteBarrier(value()) &&
         ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
   }
 
@@ -5670,9 +5645,14 @@ class HStoreNamedField: public HTemplateInstruction<2> {
     transition_unique_id_ = UniqueValueId(transition_);
   }
 
+  Representation field_representation() const {
+    return field_representation_;
+  }
+
  private:
-  Handle<String> name_;
+  Handle<Name> name_;
   bool is_in_object_;
+  Representation field_representation_;
   int offset_;
   Handle<Map> transition_;
   UniqueValueId transition_unique_id_;
@@ -6159,12 +6139,14 @@ class HObjectLiteral: public HMaterializedLiteral<1> {
                  bool fast_elements,
                  int literal_index,
                  int depth,
+                 bool may_store_doubles,
                  bool has_function)
       : HMaterializedLiteral<1>(literal_index, depth),
         constant_properties_(constant_properties),
         constant_properties_length_(constant_properties->length()),
         literals_(literals),
         fast_elements_(fast_elements),
+        may_store_doubles_(may_store_doubles),
         has_function_(has_function) {
     SetOperandAt(0, context);
     SetGVNFlag(kChangesNewSpacePromotion);
@@ -6179,6 +6161,7 @@ class HObjectLiteral: public HMaterializedLiteral<1> {
   }
   Handle<FixedArray> literals() const { return literals_; }
   bool fast_elements() const { return fast_elements_; }
+  bool may_store_doubles() const { return may_store_doubles_; }
   bool has_function() const { return has_function_; }
 
   virtual Representation RequiredInputRepresentation(int index) {
@@ -6193,6 +6176,7 @@ class HObjectLiteral: public HMaterializedLiteral<1> {
   int constant_properties_length_;
   Handle<FixedArray> literals_;
   bool fast_elements_ : 1;
+  bool may_store_doubles_ : 1;
   bool has_function_ : 1;
 };
 
index a978834..5c573fe 100644 (file)
@@ -641,6 +641,7 @@ HConstant* HGraph::GetConstant##Name() {                                       \
         Representation::Tagged(),                                              \
         htype,                                                                 \
         false,                                                                 \
+        true,                                                                  \
         boolean_value);                                                        \
     constant->InsertAfter(GetConstantUndefined());                             \
     constant_##name##_.set(constant);                                          \
@@ -1191,10 +1192,13 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
     new_length->ClearFlag(HValue::kCanOverflow);
 
     Factory* factory = isolate()->factory();
+    Representation representation = IsFastElementsKind(kind)
+        ? Representation::Smi() : Representation::Tagged();
     HInstruction* length_store = AddInstruction(new(zone) HStoreNamedField(
         object,
         factory->length_field_string(),
         new_length, true,
+        representation,
         JSArray::kLengthOffset));
     length_store->SetGVNFlag(kChangesArrayLengths);
   }
@@ -1267,8 +1271,7 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
   }
   bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
   bool fast_elements = IsFastObjectElementsKind(elements_kind);
-  HValue* elements =
-      AddInstruction(new(zone) HLoadElements(object, mapcheck));
+  HValue* elements = AddLoadElements(object, mapcheck);
   if (is_store && (fast_elements || fast_smi_only_elements) &&
       store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
     HCheckMaps* check_cow_map = HCheckMaps::New(
@@ -1413,9 +1416,12 @@ void HGraphBuilder::BuildInitializeElements(HValue* elements,
   BuildStoreMap(elements, map);
 
   Handle<String> fixed_array_length_field_name = factory->length_field_string();
+  Representation representation = IsFastElementsKind(kind)
+      ? Representation::Smi() : Representation::Tagged();
   HInstruction* store_length =
       new(zone) HStoreNamedField(elements, fixed_array_length_field_name,
-                                 capacity, true, FixedArray::kLengthOffset);
+                                 capacity, true, representation,
+                                 FixedArray::kLengthOffset);
   AddInstruction(store_length);
 }
 
@@ -1447,6 +1453,7 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
       isolate()->factory()->properties_field_symbol(),
       empty_fixed_array,
       true,
+      Representation::Tagged(),
       JSArray::kPropertiesOffset));
 
   HInstruction* length_store = AddInstruction(
@@ -1454,6 +1461,7 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                    isolate()->factory()->length_field_string(),
                                    length_field,
                                    true,
+                                   Representation::Tagged(),
                                    JSArray::kLengthOffset));
   length_store->SetGVNFlag(kChangesArrayLengths);
 
@@ -1479,6 +1487,7 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
           isolate()->factory()->elements_field_string(),
           elements,
           true,
+          Representation::Tagged(),
           JSArray::kElementsOffset));
   elements_store->SetGVNFlag(kChangesElementsPointer);
 
@@ -1493,7 +1502,9 @@ HInstruction* HGraphBuilder::BuildStoreMap(HValue* object,
   Handle<String> map_field_name = factory->map_field_string();
   HInstruction* store_map =
       new(zone) HStoreNamedField(object, map_field_name, map,
-                                 true, JSObject::kMapOffset);
+                                 true, Representation::Tagged(),
+                                 JSObject::kMapOffset);
+  store_map->ClearGVNFlag(kChangesInobjectFields);
   store_map->SetGVNFlag(kChangesMaps);
   AddInstruction(store_map);
   return store_map;
@@ -1509,6 +1520,18 @@ HInstruction* HGraphBuilder::BuildStoreMap(HValue* object,
 }
 
 
+HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
+    HValue* typecheck) {
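+  // Load the object's elements pointer as an in-object field load that
+  // depends only on the elements pointer, not on maps or in-object fields.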
+  HLoadNamedField* instr = new(zone()) HLoadNamedField(object, true,
+      Representation::Tagged(), JSObject::kElementsOffset, typecheck);
+  AddInstruction(instr);
+  instr->SetGVNFlag(kDependsOnElementsPointer);
+  instr->ClearGVNFlag(kDependsOnMaps);
+  instr->ClearGVNFlag(kDependsOnInobjectFields);
+  return instr;
+}
+
+
 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* context,
                                                 HValue* old_capacity) {
   Zone* zone = this->zone();
@@ -1574,7 +1597,7 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
   HInstruction* elements_store = AddInstruction(new(zone) HStoreNamedField(
       object,
       factory->elements_field_string(),
-      new_elements, true,
+      new_elements, true, Representation::Tagged(),
       JSArray::kElementsOffset));
   elements_store->SetGVNFlag(kChangesElementsPointer);
 
@@ -1711,13 +1734,13 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
     if ((i != JSArray::kElementsOffset) || (length == 0)) {
-      HInstruction* value =
-          AddInstruction(new(zone) HLoadNamedField(boilerplate, true, i));
+      HInstruction* value = AddInstruction(new(zone) HLoadNamedField(
+          boilerplate, true, Representation::Tagged(), i));
       if (i != JSArray::kMapOffset) {
         AddInstruction(new(zone) HStoreNamedField(object,
                                                   factory->empty_string(),
-                                                  value,
-                                                  true, i));
+                                                  value, true,
+                                                  Representation::Tagged(), i));
       } else {
         BuildStoreMap(object, value);
       }
@@ -1732,24 +1755,24 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
   if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
-    HValue* boilerplate_elements =
-        AddInstruction(new(zone) HLoadElements(boilerplate, NULL));
+    HValue* boilerplate_elements = AddLoadElements(boilerplate);
     HValue* object_elements =
         AddInstruction(new(zone) HInnerAllocatedObject(object, elems_offset));
     AddInstruction(new(zone) HStoreNamedField(object,
                                               factory->elements_field_string(),
-                                              object_elements,
-                                              true, JSObject::kElementsOffset));
+                                              object_elements, true,
+                                              Representation::Tagged(),
+                                              JSObject::kElementsOffset));
 
     // Copy the elements array header.
     for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
       HInstruction* value =
-          AddInstruction(new(zone) HLoadNamedField(boilerplate_elements,
-                                                   true, i));
+          AddInstruction(new(zone) HLoadNamedField(
+              boilerplate_elements, true, Representation::Tagged(), i));
       AddInstruction(new(zone) HStoreNamedField(object_elements,
                                                 factory->empty_string(),
-                                                value,
-                                                true, i));
+                                                value, true,
+                                                Representation::Tagged(), i));
     }
 
     // Copy the elements array contents.
@@ -1834,11 +1857,32 @@ HValue* HGraphBuilder::BuildCreateAllocationSiteInfo(HValue* previous_object,
       isolate()->factory()->payload_string(),
       payload,
       true,
+      Representation::Tagged(),
       AllocationSiteInfo::kPayloadOffset));
   return alloc_site;
 }
 
 
+HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* context) {
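+  // The native context is loaded off the global object of the given context.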
+  HInstruction* global_object = AddInstruction(new(zone())
+                                               HGlobalObject(context));
+  HInstruction* native_context = AddInstruction(new(zone())
+      HLoadNamedField(global_object, true, Representation::Tagged(),
+                      GlobalObject::kNativeContextOffset));
+  return native_context;
+}
+
+
+HInstruction* HGraphBuilder::BuildGetArrayFunction(HValue* context) {
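+  // The Array function lives in the native context at ARRAY_FUNCTION_INDEX.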
+  HInstruction* native_context = BuildGetNativeContext(context);
+  int offset = Context::kHeaderSize +
+      kPointerSize * Context::ARRAY_FUNCTION_INDEX;
+  HInstruction* array_function = AddInstruction(new(zone())
+      HLoadNamedField(native_context, true, Representation::Tagged(), offset));
+  return array_function;
+}
+
+
 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                               ElementsKind kind,
                                               HValue* allocation_site_payload,
@@ -1855,17 +1899,14 @@ HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
 
 
 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
-  // Get the global context, the native context, the map array
-  HInstruction* global_object = AddInstruction(new(zone())
-      HGlobalObject(context));
-  HInstruction* native_context = AddInstruction(new(zone())
-      HLoadNamedField(global_object, true, GlobalObject::kNativeContextOffset));
+  HInstruction* native_context = builder()->BuildGetNativeContext(context);
   int offset = Context::kHeaderSize +
       kPointerSize * Context::JS_ARRAY_MAPS_INDEX;
   HInstruction* map_array = AddInstruction(new(zone())
-      HLoadNamedField(native_context, true, offset));
+      HLoadNamedField(native_context, true, Representation::Tagged(), offset));
   offset = kind_ * kPointerSize + FixedArrayBase::kHeaderSize;
-  return AddInstruction(new(zone()) HLoadNamedField(map_array, true, offset));
+  return AddInstruction(new(zone()) HLoadNamedField(
+      map_array, true, Representation::Tagged(), offset));
 }
 
 
@@ -3747,7 +3788,39 @@ void HInferRepresentation::Analyze() {
     }
   }
 
+  // Set truncation flags for groups of connected phis. This is a conservative
+  // approximation; the flag will be properly re-computed after representations
+  // have been determined.
+  if (phi_count > 0) {
+    BitVector* done = new(zone()) BitVector(phi_count, graph_->zone());
+    for (int i = 0; i < phi_count; ++i) {
+      if (done->Contains(i)) continue;
+
+      // Check if all uses of all connected phis in this group are truncating.
+      bool all_uses_everywhere_truncating = true;
+      for (BitVector::Iterator it(connected_phis.at(i));
+           !it.Done();
+           it.Advance()) {
+        int index = it.Current();
+        all_uses_everywhere_truncating &=
+            phi_list->at(index)->CheckFlag(HInstruction::kTruncatingToInt32);
+        done->Add(index);
+      }
+      if (all_uses_everywhere_truncating) {
+        continue;  // Great, nothing to do.
+      }
+      // Clear truncation flag of this group of connected phis.
+      for (BitVector::Iterator it(connected_phis.at(i));
+           !it.Done();
+           it.Advance()) {
+        int index = it.Current();
+        phi_list->at(index)->ClearFlag(HInstruction::kTruncatingToInt32);
+      }
+    }
+  }
+
   // Simplify constant phi inputs where possible.
+  // This step uses the kTruncatingToInt32 flags of the phis.
   for (int i = 0; i < phi_count; ++i) {
     phi_list->at(i)->SimplifyConstantInputs();
   }
@@ -4027,36 +4100,50 @@ void HGraph::InsertRepresentationChanges() {
   // int32-phis allow truncation and iteratively remove the ones that
   // are used in an operation that does not allow a truncating
   // conversion.
-  // TODO(fschneider): Replace this with a worklist-based iteration.
+  ZoneList<HPhi*> worklist(8, zone());
+
   for (int i = 0; i < phi_list()->length(); i++) {
     HPhi* phi = phi_list()->at(i);
     if (phi->representation().IsInteger32()) {
       phi->SetFlag(HValue::kTruncatingToInt32);
     }
   }
-  bool change = true;
-  while (change) {
-    change = false;
-    for (int i = 0; i < phi_list()->length(); i++) {
-      HPhi* phi = phi_list()->at(i);
-      if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
-      for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
-        // If a Phi is used as a non-truncating int32 or as a double,
-        // clear its "truncating" flag.
-        HValue* use = it.value();
-        Representation input_representation =
-            use->RequiredInputRepresentation(it.index());
-        if ((input_representation.IsInteger32() &&
-             !use->CheckFlag(HValue::kTruncatingToInt32)) ||
-            input_representation.IsDouble()) {
-          if (FLAG_trace_representation) {
-            PrintF("#%d Phi is not truncating because of #%d %s\n",
-                   phi->id(), it.value()->id(), it.value()->Mnemonic());
-          }
-          phi->ClearFlag(HValue::kTruncatingToInt32);
-          change = true;
-          break;
+
+  for (int i = 0; i < phi_list()->length(); i++) {
+    HPhi* phi = phi_list()->at(i);
+    for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
+      // If a Phi is used as a non-truncating int32 or as a double,
+      // clear its "truncating" flag.
+      HValue* use = it.value();
+      Representation input_representation =
+          use->RequiredInputRepresentation(it.index());
+      if ((input_representation.IsInteger32() &&
+           !use->CheckFlag(HValue::kTruncatingToInt32)) ||
+          input_representation.IsDouble()) {
+        if (FLAG_trace_representation) {
+          PrintF("#%d Phi is not truncating because of #%d %s\n",
+                 phi->id(), it.value()->id(), it.value()->Mnemonic());
+        }
+        phi->ClearFlag(HValue::kTruncatingToInt32);
+        worklist.Add(phi, zone());
+        break;
+      }
+    }
+  }
+
+  while (!worklist.is_empty()) {
+    HPhi* current = worklist.RemoveLast();
+    for (int i = 0; i < current->OperandCount(); ++i) {
+      HValue* input = current->OperandAt(i);
+      if (input->IsPhi() &&
+          input->representation().IsInteger32() &&
+          input->CheckFlag(HValue::kTruncatingToInt32)) {
+        if (FLAG_trace_representation) {
+          PrintF("#%d Phi is not truncating because of #%d %s\n",
+                 input->id(), current->id(), current->Mnemonic());
         }
+        input->ClearFlag(HValue::kTruncatingToInt32);
+        worklist.Add(HPhi::cast(input), zone());
       }
     }
   }
@@ -5364,6 +5451,9 @@ void HGraph::DeadCodeElimination() {
 
   while (!worklist.is_empty()) {
     HInstruction* instr = worklist.RemoveLast();
+    // This happens when an instruction is used multiple times as an operand,
+    // which in turn can happen through GVN.
+    if (!instr->IsLinked()) continue;
     if (FLAG_trace_dead_code_elimination) {
       HeapStringAllocator allocator;
       StringStream stream(&allocator);
@@ -6604,10 +6694,16 @@ static bool IsFastLiteral(Handle<JSObject> boilerplate,
   if (properties->length() > 0) {
     return false;
   } else {
-    int nof = boilerplate->map()->inobject_properties();
-    for (int i = 0; i < nof; i++) {
+    Handle<DescriptorArray> descriptors(
+        boilerplate->map()->instance_descriptors());
+    int limit = boilerplate->map()->NumberOfOwnDescriptors();
+    for (int i = 0; i < limit; i++) {
+      PropertyDetails details = descriptors->GetDetails(i);
+      if (details.type() != FIELD) continue;
+      Representation representation = details.representation();
+      int index = descriptors->GetFieldIndex(i);
       if ((*max_properties)-- == 0) return false;
-      Handle<Object> value(boilerplate->InObjectPropertyAt(i), isolate);
+      Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
       if (value->IsJSObject()) {
         Handle<JSObject> value_object = Handle<JSObject>::cast(value);
         if (!IsFastLiteral(value_object,
@@ -6617,6 +6713,8 @@ static bool IsFastLiteral(Handle<JSObject> boilerplate,
                            pointer_size)) {
           return false;
         }
+      } else if (representation.IsDouble()) {
+        *data_size += HeapNumber::kSize;
       }
     }
   }
@@ -6666,6 +6764,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
                                    expr->fast_elements(),
                                    expr->literal_index(),
                                    expr->depth(),
+                                   expr->may_store_doubles(),
                                    expr->has_function()));
   }
 
@@ -6814,7 +6913,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
   // of the property values and is the value of the entire expression.
   Push(literal);
 
-  HLoadElements* elements = NULL;
+  HInstruction* elements = NULL;
 
   for (int i = 0; i < length; i++) {
     Expression* subexpr = subexprs->at(i);
@@ -6826,10 +6925,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
     HValue* value = Pop();
     if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
 
-    // Pass in literal as dummy depedency, since  the receiver always has
-    // elements.
-    elements = new(zone()) HLoadElements(literal, literal);
-    AddInstruction(elements);
+    elements = AddLoadElements(literal);
 
     HValue* key = AddInstruction(
         new(zone()) HConstant(Handle<Object>(Smi::FromInt(i), isolate()),
@@ -6887,14 +6983,29 @@ static bool ComputeLoadStoreField(Handle<Map> type,
 
 
 static int ComputeLoadStoreFieldIndex(Handle<Map> type,
-                                      Handle<String> name,
                                       LookupResult* lookup) {
   ASSERT(lookup->IsField() || lookup->IsTransitionToField(*type));
   if (lookup->IsField()) {
     return lookup->GetLocalFieldIndexFromMap(*type);
   } else {
     Map* transition = lookup->GetTransitionMapFromMap(*type);
-    return transition->PropertyIndexFor(*name) - type->inobject_properties();
+    int descriptor = transition->LastAdded();
+    int index = transition->instance_descriptors()->GetFieldIndex(descriptor);
+    return index - type->inobject_properties();
+  }
+}
+
+
+static Representation ComputeLoadStoreRepresentation(Handle<Map> type,
+                                                     LookupResult* lookup) {
+  if (lookup->IsField()) {
+    return lookup->representation();
+  } else {
+    Map* transition = lookup->GetTransitionMapFromMap(*type);
+    int descriptor = transition->LastAdded();
+    PropertyDetails details =
+        transition->instance_descriptors()->GetDetails(descriptor);
+    return details.representation();
   }
 }
 
@@ -6949,8 +7060,9 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
         zone()));
   }
 
-  int index = ComputeLoadStoreFieldIndex(map, name, lookup);
+  int index = ComputeLoadStoreFieldIndex(map, lookup);
   bool is_in_object = index < 0;
+  Representation representation = ComputeLoadStoreRepresentation(map, lookup);
   int offset = index * kPointerSize;
   if (index < 0) {
     // Negative property indices are in-object properties, indexed
@@ -6959,9 +7071,33 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
   } else {
     offset += FixedArray::kHeaderSize;
   }
-  HStoreNamedField* instr =
-      new(zone()) HStoreNamedField(object, name, value, is_in_object, offset);
-  if (lookup->IsTransitionToField(*map)) {
+  bool transition_to_field = lookup->IsTransitionToField(*map);
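+  // Double fields are stored in boxed HeapNumbers: allocate a fresh box when
+  // transitioning to the field, otherwise store into the existing box.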
+  if (FLAG_track_double_fields && representation.IsDouble()) {
+    if (transition_to_field) {
+      NoObservableSideEffectsScope no_side_effects(this);
+      HInstruction* heap_number_size = AddInstruction(new(zone()) HConstant(
+          HeapNumber::kSize, Representation::Integer32()));
+      HInstruction* double_box = AddInstruction(new(zone()) HAllocate(
+          environment()->LookupContext(), heap_number_size,
+          HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE));
+      BuildStoreMap(double_box, isolate()->factory()->heap_number_map());
+      AddInstruction(new(zone()) HStoreNamedField(
+          double_box, name, value, true,
+          Representation::Double(), HeapNumber::kValueOffset));
+      value = double_box;
+      representation = Representation::Tagged();
+    } else {
+      HInstruction* double_box = AddInstruction(new(zone()) HLoadNamedField(
+          object, is_in_object, Representation::Tagged(), offset));
+      double_box->set_type(HType::HeapNumber());
+      return new(zone()) HStoreNamedField(
+          double_box, name, value, true,
+          Representation::Double(), HeapNumber::kValueOffset);
+    }
+  }
+  HStoreNamedField* instr = new(zone()) HStoreNamedField(
+      object, name, value, is_in_object, representation, offset);
+  if (transition_to_field) {
     Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
     instr->set_transition(transition);
     // TODO(fschneider): Record the new map type of the object in the IR to
@@ -7043,22 +7179,31 @@ void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
     HValue* object,
     SmallMapList* types,
     Handle<String> name) {
-  int count = 0;
-  int previous_field_offset = 0;
-  bool previous_field_is_in_object = false;
-  bool is_monomorphic_field = true;
 
   if (HandlePolymorphicArrayLengthLoad(expr, object, types, name))
     return;
 
-  Handle<Map> map;
-  LookupResult lookup(isolate());
-  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
-    map = types->at(i);
-    if (ComputeLoadStoreField(map, name, &lookup, false)) {
-      int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
+  AddInstruction(new(zone()) HCheckNonSmi(object));
+
+  // Use monomorphic load if property lookup results in the same field index
+  // for all maps. Requires special map check on the set of all handled maps.
+  HInstruction* instr = NULL;
+  if (types->length() > 0 && types->length() <= kMaxLoadPolymorphism) {
+    LookupResult lookup(isolate());
+    int previous_field_offset = 0;
+    bool previous_field_is_in_object = false;
+    Representation representation = Representation::None();
+    int count;
+    for (count = 0; count < types->length(); ++count) {
+      Handle<Map> map = types->at(count);
+      if (!ComputeLoadStoreField(map, name, &lookup, false)) break;
+
+      int index = ComputeLoadStoreFieldIndex(map, &lookup);
+      Representation new_representation =
+          ComputeLoadStoreRepresentation(map, &lookup);
       bool is_in_object = index < 0;
       int offset = index * kPointerSize;
+
       if (index < 0) {
         // Negative property indices are in-object properties, indexed
         // from the end of the fixed part of the object.
@@ -7066,31 +7211,33 @@ void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
       } else {
         offset += FixedArray::kHeaderSize;
       }
+
       if (count == 0) {
         previous_field_offset = offset;
         previous_field_is_in_object = is_in_object;
-      } else if (is_monomorphic_field) {
-        is_monomorphic_field = (offset == previous_field_offset) &&
-                               (is_in_object == previous_field_is_in_object);
+        representation = new_representation;
+      } else if (offset != previous_field_offset ||
+                 is_in_object != previous_field_is_in_object ||
+                 (FLAG_track_fields &&
+                  !representation.IsCompatibleForLoad(new_representation))) {
+        break;
       }
-      ++count;
+
+      representation = representation.generalize(new_representation);
+    }
+
+    if (count == types->length()) {
+      AddInstruction(HCheckMaps::New(object, types, zone()));
+      instr = DoBuildLoadNamedField(
+          object, previous_field_is_in_object,
+          representation, previous_field_offset);
     }
   }
 
-  // Use monomorphic load if property lookup results in the same field index
-  // for all maps.  Requires special map check on the set of all handled maps.
-  AddInstruction(new(zone()) HCheckNonSmi(object));
-  HInstruction* instr;
-  if (count == types->length() && is_monomorphic_field) {
-    AddInstruction(HCheckMaps::New(object, types, zone()));
-    instr = BuildLoadNamedField(object, map, &lookup);
-  } else {
+  if (instr == NULL) {
     HValue* context = environment()->LookupContext();
-    instr = new(zone()) HLoadNamedFieldPolymorphic(context,
-                                                   object,
-                                                   types,
-                                                   name,
-                                                   zone());
+    instr = new(zone()) HLoadNamedFieldPolymorphic(
+        context, object, types, name, zone());
   }
 
   instr->set_position(expr->position());
@@ -7239,14 +7386,15 @@ void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
     // Keyed store.
     CHECK_ALIVE(VisitForValue(prop->key()));
     CHECK_ALIVE(VisitForValue(expr->value()));
-    HValue* value = Pop();
-    HValue* key = Pop();
-    HValue* object = Pop();
+    HValue* value = environment()->ExpressionStackAt(0);
+    HValue* key = environment()->ExpressionStackAt(1);
+    HValue* object = environment()->ExpressionStackAt(2);
     bool has_side_effects = false;
     HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
                              expr->position(),
                              true,  // is_store
                              &has_side_effects);
+    Drop(3);
     Push(value);
     AddSimulate(expr->AssignmentId(), REMOVABLE_SIMULATE);
     return ast_context()->ReturnValue(Pop());
@@ -7648,16 +7796,37 @@ HLoadNamedField* HOptimizedGraphBuilder::BuildLoadNamedField(
     Handle<Map> map,
     LookupResult* lookup) {
   int index = lookup->GetLocalFieldIndexFromMap(*map);
-  if (index < 0) {
-    // Negative property indices are in-object properties, indexed
-    // from the end of the fixed part of the object.
-    int offset = (index * kPointerSize) + map->instance_size();
-    return new(zone()) HLoadNamedField(object, true, offset);
-  } else {
-    // Non-negative property indices are in the properties array.
-    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
-    return new(zone()) HLoadNamedField(object, false, offset);
+  // Negative property indices are in-object properties, indexed from the end
+  // of the fixed part of the object. Non-negative property indices are in the
+  // properties array.
+  bool inobject = index < 0;
+  Representation representation = lookup->representation();
+  int offset = inobject
+      ? index * kPointerSize + map->instance_size()
+      : index * kPointerSize + FixedArray::kHeaderSize;
+  return DoBuildLoadNamedField(object, inobject, representation, offset);
+}
+
+
+HLoadNamedField* HGraphBuilder::DoBuildLoadNamedField(
+    HValue* object,
+    bool inobject,
+    Representation representation,
+    int offset) {
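+  // Double fields are stored as boxed HeapNumbers: load the tagged box first,
+  // then the double value out of it.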
+  bool load_double = false;
+  if (representation.IsDouble()) {
+    representation = Representation::Tagged();
+    load_double = FLAG_track_double_fields;
   }
+  HLoadNamedField* field =
+      new(zone()) HLoadNamedField(object, inobject, representation, offset);
+  if (load_double) {
+    AddInstruction(field);
+    field->set_type(HType::HeapNumber());
+    return new(zone()) HLoadNamedField(
+        field, true, Representation::Double(), HeapNumber::kValueOffset);
+  }
+  return field;
 }
 
 
@@ -7927,8 +8096,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
 
   HInstruction* elements_kind_instr =
       AddInstruction(new(zone()) HElementsKind(object));
-  HInstruction* elements =
-      AddInstruction(new(zone()) HLoadElements(object, checkspec));
+  HInstruction* elements = AddLoadElements(object, checkspec);
   HLoadExternalArrayPointer* external_elements = NULL;
   HInstruction* checked_key = NULL;
 
@@ -9555,16 +9723,13 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
     CHECK_ALIVE(VisitArgumentList(expr->arguments()));
     HCallNew* call;
     if (use_call_new_array) {
-      AddInstruction(new(zone()) HCheckFunction(constructor,
-          Handle<JSFunction>(isolate()->global_context()->array_function())));
-      Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
-      ASSERT(feedback->IsSmi());
-
       // TODO(mvstanton): It would be better to use the already created global
       // property cell that is shared by full code gen. That way, any transition
       // information that happened after crankshaft won't be lost.  The right
       // way to do that is to begin passing the cell to the type feedback oracle
       // instead of just the value in the cell. Do this in a follow-up checkin.
+      Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
+      ASSERT(feedback->IsSmi());
       Handle<JSGlobalPropertyCell> cell =
           isolate()->factory()->NewJSGlobalPropertyCell(feedback);
 
@@ -9977,7 +10142,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
       if (has_side_effects) AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
 
       after = BuildIncrement(returns_original_input, expr);
-      input = Pop();
+      input = environment()->ExpressionStackAt(0);
 
       expr->RecordTypeFeedback(oracle(), zone());
       HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
@@ -9985,10 +10150,10 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
                                true,  // is_store
                                &has_side_effects);
 
-      // Drop the key from the bailout environment.  Overwrite the receiver
-      // with the result of the operation, and the placeholder with the
-      // original value if necessary.
-      Drop(1);
+      // Drop the key and the original value from the bailout environment.
+      // Overwrite the receiver with the result of the operation, and the
+      // placeholder with the original value if necessary.
+      Drop(2);
       environment()->SetExpressionStackAt(0, after);
       if (returns_original_input) environment()->SetExpressionStackAt(1, input);
       ASSERT(has_side_effects);  // Stores always have side effects.
@@ -10660,7 +10825,6 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
       elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
           elements->Size() : 0;
   int elements_offset = *offset + object_size;
-  int inobject_properties = boilerplate_object->map()->inobject_properties();
   if (create_allocation_site_info) {
     elements_offset += AllocationSiteInfo::kSize;
     *offset += AllocationSiteInfo::kSize;
@@ -10674,28 +10838,49 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
   // Copy in-object properties.
   HValue* object_properties =
       AddInstruction(new(zone) HInnerAllocatedObject(target, object_offset));
-  for (int i = 0; i < inobject_properties; i++) {
+
+  Handle<DescriptorArray> descriptors(
+      boilerplate_object->map()->instance_descriptors());
+  int limit = boilerplate_object->map()->NumberOfOwnDescriptors();
+
+  for (int i = 0; i < limit; i++) {
+    PropertyDetails details = descriptors->GetDetails(i);
+    if (details.type() != FIELD) continue;
+    int index = descriptors->GetFieldIndex(i);
+    int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
+    Handle<Name> name(descriptors->GetKey(i));
     Handle<Object> value =
-        Handle<Object>(boilerplate_object->InObjectPropertyAt(i),
+        Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
         isolate());
     if (value->IsJSObject()) {
       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
       Handle<JSObject> original_value_object = Handle<JSObject>::cast(
-          Handle<Object>(original_boilerplate_object->InObjectPropertyAt(i),
+          Handle<Object>(original_boilerplate_object->InObjectPropertyAt(index),
               isolate()));
       HInstruction* value_instruction =
           AddInstruction(new(zone) HInnerAllocatedObject(target, *offset));
       AddInstruction(new(zone) HStoreNamedField(
-          object_properties, factory->unknown_field_string(), value_instruction,
-          true, boilerplate_object->GetInObjectPropertyOffset(i)));
+          object_properties, name, value_instruction, true,
+          Representation::Tagged(), property_offset));
       BuildEmitDeepCopy(value_object, original_value_object, target,
                         offset, DONT_TRACK_ALLOCATION_SITE);
     } else {
+      Representation representation = details.representation();
       HInstruction* value_instruction = AddInstruction(new(zone) HConstant(
           value, Representation::Tagged()));
+      if (representation.IsDouble()) {
+        HInstruction* double_box =
+            AddInstruction(new(zone) HInnerAllocatedObject(target, *offset));
+        BuildStoreMap(double_box, factory->heap_number_map());
+        AddInstruction(new(zone) HStoreNamedField(
+            double_box, name, value_instruction, true,
+            Representation::Double(), HeapNumber::kValueOffset));
+        value_instruction = double_box;
+        *offset += HeapNumber::kSize;
+      }
       AddInstruction(new(zone) HStoreNamedField(
-          object_properties, factory->unknown_field_string(), value_instruction,
-          true, boilerplate_object->GetInObjectPropertyOffset(i)));
+          object_properties, name, value_instruction, true,
+          Representation::Tagged(), property_offset));
     }
   }
 
@@ -10790,7 +10975,7 @@ HValue* HOptimizedGraphBuilder::BuildCopyObjectHeader(
       object_header,
       factory->elements_field_string(),
       elements,
-      true, JSObject::kElementsOffset));
+      true, Representation::Tagged(), JSObject::kElementsOffset));
   elements_store->SetGVNFlag(kChangesElementsPointer);
 
   Handle<Object> properties_field =
@@ -10800,8 +10985,9 @@ HValue* HOptimizedGraphBuilder::BuildCopyObjectHeader(
       properties_field, Representation::None()));
   AddInstruction(new(zone) HStoreNamedField(object_header,
                                             factory->empty_string(),
-                                            properties,
-                                            true, JSObject::kPropertiesOffset));
+                                            properties, true,
+                                            Representation::Tagged(),
+                                            JSObject::kPropertiesOffset));
 
   if (boilerplate_object->IsJSArray()) {
     Handle<JSArray> boilerplate_array =
@@ -10810,11 +10996,15 @@ HValue* HOptimizedGraphBuilder::BuildCopyObjectHeader(
         Handle<Object>(boilerplate_array->length(), isolate());
     HInstruction* length = AddInstruction(new(zone) HConstant(
         length_field, Representation::None()));
+    ASSERT(boilerplate_array->length()->IsSmi());
+    Representation representation =
+        IsFastElementsKind(boilerplate_array->GetElementsKind())
+        ? Representation::Smi() : Representation::Tagged();
     HInstruction* length_store = AddInstruction(new(zone) HStoreNamedField(
         object_header,
         factory->length_field_string(),
         length,
-        true, JSArray::kLengthOffset));
+        true, representation, JSArray::kLengthOffset));
     length_store->SetGVNFlag(kChangesArrayLengths);
   }
 
@@ -11207,6 +11397,7 @@ void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
                                               name,
                                               value,
                                               true,  // in-object store.
+                                              Representation::Tagged(),
                                               JSValue::kValueOffset));
   if_js_value->Goto(join);
   join->SetJoinId(call->id());
index ab721bd..a95424a 100644 (file)
@@ -982,6 +982,11 @@ class HGraphBuilder {
   HValue* BuildCheckMap(HValue* obj, Handle<Map> map);
 
   // Building common constructs
+  HLoadNamedField* DoBuildLoadNamedField(HValue* object,
+                                         bool inobject,
+                                         Representation representation,
+                                         int offset);
+
   HInstruction* BuildExternalArrayElementAccess(
       HValue* external_elements,
       HValue* checked_key,
@@ -1025,6 +1030,8 @@ class HGraphBuilder {
   HInstruction* BuildStoreMap(HValue* object, HValue* map);
   HInstruction* BuildStoreMap(HValue* object, Handle<Map> map);
 
+  HLoadNamedField* AddLoadElements(HValue* object, HValue* typecheck = NULL);
+
   class IfBuilder {
    public:
     explicit IfBuilder(HGraphBuilder* builder,
@@ -1317,6 +1324,9 @@ class HGraphBuilder {
                                         int previous_object_size,
                                         HValue* payload);
 
+  HInstruction* BuildGetNativeContext(HValue* context);
+  HInstruction* BuildGetArrayFunction(HValue* context);
+
  private:
   HGraphBuilder();
   CompilationInfo* info_;
index 96d2411..2897234 100644 (file)
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_IA32)
 
 #include "bootstrapper.h"
+#include "builtins-decls.h"
 #include "code-stubs.h"
 #include "isolate.h"
 #include "jsregexp.h"
@@ -79,6 +80,28 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
 }
 
 
+void LoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -107,9 +130,10 @@ static void InitializeArrayConstructorDescriptor(
     int constant_stack_parameter_count) {
   // register state
   // eax -- number of arguments
+  // edi -- function
   // ebx -- type info cell with elements kind
-  static Register registers[] = { ebx };
-  descriptor->register_param_count_ = 1;
+  static Register registers[] = { edi, ebx };
+  descriptor->register_param_count_ = 2;
 
   if (constant_stack_parameter_count != 0) {
     // stack param count needs (constructor pointer, and single argument)
@@ -3298,12 +3322,6 @@ void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
 }
 
 
-void LoadFieldStub::Generate(MacroAssembler* masm) {
-  StubCompiler::DoGenerateFastPropertyLoad(masm, eax, reg_, inobject_, index_);
-  __ ret(0);
-}
-
-
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The key is in edx and the parameter count is in eax.
 
@@ -4758,6 +4776,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   Handle<Object> terminal_kind_sentinel =
       TypeFeedbackCells::MonomorphicArraySentinel(isolate,
                                                   LAST_FAST_ELEMENTS_KIND);
+  __ JumpIfNotSmi(ecx, &miss);
   __ cmp(ecx, Immediate(terminal_kind_sentinel));
   __ j(above, &miss);
   // Load the global or builtins object from the current context
@@ -5821,8 +5840,33 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
   __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
          Immediate(String::kEmptyHashField));
+
+  Label skip_write_barrier, after_writing;
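+  // With new-space high promotion mode active, write barriers are needed for
+  // the first and second string fields; otherwise plain stores suffice.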
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(masm->isolate());
+  __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+  __ j(zero, &skip_write_barrier);
+
   __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
+  __ RecordWriteField(ecx,
+                     ConsString::kFirstOffset,
+                     eax,
+                     ebx,
+                     kDontSaveFPRegs);
   __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+  __ RecordWriteField(ecx,
+                     ConsString::kSecondOffset,
+                     edx,
+                     ebx,
+                     kDontSaveFPRegs);
+  __ jmp(&after_writing);
+
+  __ bind(&skip_write_barrier);
+  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
+  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+
+  __ bind(&after_writing);
+
   __ mov(eax, ecx);
   __ IncrementCounter(counters->string_add_native(), 1);
   __ ret(2 * kPointerSize);
@@ -7354,8 +7398,10 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
   { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
   // StoreArrayLiteralElementStub::Generate
   { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub
+  // FastNewClosureStub and StringAddStub::Generate
   { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
+  // StringAddStub::Generate
+  { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET},
   // Null termination.
   { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
 };
@@ -7876,15 +7922,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     // Get the elements kind and case on that.
     __ cmp(ebx, Immediate(undefined_sentinel));
     __ j(equal, &no_info);
-    __ mov(edx, FieldOperand(ebx, kPointerSize));
-
-    // There is no info if the call site went megamorphic either
-
-    // TODO(mvstanton): Really? I thought if it was the array function that
-    // the cell wouldn't get stamped as megamorphic.
-    __ cmp(edx, Immediate(TypeFeedbackCells::MegamorphicSentinel(
-        masm->isolate())));
-    __ j(equal, &no_info);
+    __ mov(edx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
+    __ JumpIfNotSmi(edx, &no_info);
     __ SmiUntag(edx);
     __ jmp(&switch_ready);
     __ bind(&no_info);
index d153e18..a4c6bcc 100644 (file)
@@ -240,6 +240,15 @@ void Debug::GenerateKeyedStoreICDebugBreak(MacroAssembler* masm) {
 }
 
 
+void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
+  // Register state for CompareNil IC
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, eax.bit(), 0, false);
+}
+
+
 void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
   // Register state for keyed IC call (from ic-ia32.cc)
   // ----------- S t a t e -------------
index f71a76d..5a78019 100644 (file)
@@ -1529,7 +1529,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
   int properties_count = constant_properties->length() / 2;
-  if (expr->depth() > 1) {
+  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
+      expr->depth() > 1) {
     __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
     __ push(Immediate(Smi::FromInt(expr->literal_index())));
@@ -1900,11 +1901,12 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       Label resume;
       __ CompareRoot(result_register(), Heap::kTheHoleValueRootIndex);
       __ j(not_equal, &resume);
-      __ pop(result_register());
       if (expr->yield_kind() == Yield::SUSPEND) {
-        // TODO(wingo): Box into { value: VALUE, done: false }.
+        EmitReturnIteratorResult(false);
+      } else {
+        __ pop(result_register());
+        EmitReturnSequence();
       }
-      EmitReturnSequence();
 
       __ bind(&resume);
       context()->Plug(result_register());
@@ -1916,18 +1918,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ mov(FieldOperand(result_register(),
                           JSGeneratorObject::kContinuationOffset),
              Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
-      __ pop(result_register());
-      // TODO(wingo): Box into { value: VALUE, done: true }.
-
-      // Exit all nested statements.
-      NestedStatement* current = nesting_stack_;
-      int stack_depth = 0;
-      int context_length = 0;
-      while (current != NULL) {
-        current = current->Exit(&stack_depth, &context_length);
-      }
-      __ Drop(stack_depth);
-      EmitReturnSequence();
+      EmitReturnIteratorResult(true);
       break;
     }
 
@@ -2033,6 +2024,54 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
 }
 
 
+void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
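+  // Allocate a { value, done } iterator result object for the value on top of
+  // the stack, then emit the return sequence.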
+  Label gc_required;
+  Label allocated;
+
+  Handle<Map> map(isolate()->native_context()->generator_result_map());
+
+  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
+
+  __ bind(&allocated);
+  __ mov(ebx, map);
+  __ pop(ecx);
+  __ mov(edx, isolate()->factory()->ToBoolean(done));
+  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
+  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
+  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
+         isolate()->factory()->empty_fixed_array());
+  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
+         isolate()->factory()->empty_fixed_array());
+  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
+  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
+
+  // Only the value field needs a write barrier, as the other values are in the
+  // root set.
+  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
+                      ecx, edx, kDontSaveFPRegs);
+
+  if (done) {
+    // Exit all nested statements.
+    NestedStatement* current = nesting_stack_;
+    int stack_depth = 0;
+    int context_length = 0;
+    while (current != NULL) {
+      current = current->Exit(&stack_depth, &context_length);
+    }
+    __ Drop(stack_depth);
+  }
+
+  EmitReturnSequence();
+
+  __ bind(&gc_required);
+  __ Push(Smi::FromInt(map->instance_size()));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  __ mov(context_register(),
+         Operand(ebp, StandardFrameConstants::kContextOffset));
+  __ jmp(&allocated);
+}
+
+
 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
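For reference, the EmitReturnIteratorResult helper introduced above builds the generator's iterator result object entirely in generated code: map, empty properties and elements arrays, then the value and done fields, which is why the map's instance size is asserted to be 5 * kPointerSize and only the value slot needs a write barrier. A minimal standalone C++ sketch of that layout (field names here are illustrative, not V8's actual offsets):

// Five pointer-sized slots, mirroring ASSERT_EQ(map->instance_size(),
// 5 * kPointerSize) in the code above.
struct IteratorResultLayout {
  void* map;         // set from the native context's generator_result_map()
  void* properties;  // empty_fixed_array()
  void* elements;    // empty_fixed_array()
  void* value;       // the yielded value; the only slot needing a write barrier
  void* done;        // ToBoolean(done), a root-set value
};

static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(void*),
              "layout matches the 5 * kPointerSize instance size assumed above");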
index 964db0e..e05031b 100644
@@ -1530,6 +1530,26 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
 }
 
 
+void StoreIC::GenerateSlow(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  //  -- ecx    : key
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+
+  __ pop(ebx);
+  __ push(edx);
+  __ push(ecx);
+  __ push(eax);
+  __ push(ebx);   // return address
+
+  // Do tail-call to runtime routine.
+  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
+  __ TailCallExternalReference(ref, 3, 1);
+}
+
+
 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : value
index d93c27a..1d9e942 100644
@@ -113,6 +113,10 @@ void LCodeGen::FinishCode(Handle<Code> code) {
     prototype_maps_.at(i)->AddDependentCode(
         DependentCode::kPrototypeCheckGroup, code);
   }
+  for (int i = 0; i < transition_maps_.length(); i++) {
+    transition_maps_.at(i)->AddDependentCode(
+        DependentCode::kTransitionGroup, code);
+  }
 }
 
 
@@ -1230,7 +1234,7 @@ void LCodeGen::DoModI(LModI* instr) {
     __ and_(dividend, divisor - 1);
     __ bind(&done);
   } else {
-    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
+    Label done, remainder_eq_dividend, slow, both_positive;
     Register left_reg = ToRegister(instr->left());
     Register right_reg = ToRegister(instr->right());
     Register result_reg = ToRegister(instr->result());
@@ -1266,23 +1270,10 @@ void LCodeGen::DoModI(LModI* instr) {
     __ mov(scratch, right_reg);
     __ sub(Operand(scratch), Immediate(1));
     __ test(scratch, Operand(right_reg));
-    __ j(not_zero, &do_subtraction, Label::kNear);
+    __ j(not_zero, &slow, Label::kNear);
     __ and_(left_reg, Operand(scratch));
     __ jmp(&remainder_eq_dividend, Label::kNear);
 
-    __ bind(&do_subtraction);
-    const int kUnfolds = 3;
-    // Try a few subtractions of the dividend.
-    __ mov(scratch, left_reg);
-    for (int i = 0; i < kUnfolds; i++) {
-      // Reduce the dividend by the divisor.
-      __ sub(left_reg, Operand(right_reg));
-      // Check if the dividend is less than the divisor.
-      __ cmp(left_reg, Operand(right_reg));
-      __ j(less, &remainder_eq_dividend, Label::kNear);
-    }
-    __ mov(left_reg, scratch);
-
     // Slow case, using idiv instruction.
     __ bind(&slow);
 
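The rewritten DoModI above drops the unrolled-subtraction path and keeps a single fast case before falling back to idiv: when the divisor is a power of two (divisor & (divisor - 1) == 0, the same test the emitted scratch/test sequence performs) and execution is on the both_positive path, the remainder is a simple mask. A minimal standalone C++ sketch of that arithmetic (hypothetical helper, not V8 code):

#include <cassert>
#include <cstdint>

// Fast remainder for a non-negative dividend and a power-of-two divisor,
// matching the and_(left_reg, scratch) fast path emitted above.
int32_t FastModPowerOfTwo(int32_t dividend, int32_t divisor) {
  assert(dividend >= 0 && divisor > 0);
  assert((divisor & (divisor - 1)) == 0);  // power-of-two check, as in the stub
  return dividend & (divisor - 1);
}

int main() {
  assert(FastModPowerOfTwo(13, 8) == 13 % 8);
  assert(FastModPowerOfTwo(64, 16) == 64 % 16);
  return 0;
}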
@@ -1915,16 +1906,24 @@ void LCodeGen::DoThrow(LThrow* instr) {
 void LCodeGen::DoAddI(LAddI* instr) {
   LOperand* left = instr->left();
   LOperand* right = instr->right();
-  ASSERT(left->Equals(instr->result()));
 
-  if (right->IsConstantOperand()) {
-    __ add(ToOperand(left), ToInteger32Immediate(right));
+  if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
+    if (right->IsConstantOperand()) {
+      int32_t offset = ToInteger32(LConstantOperand::cast(right));
+      __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset));
+    } else {
+      Operand address(ToRegister(left), ToRegister(right), times_1, 0);
+      __ lea(ToRegister(instr->result()), address);
+    }
   } else {
-    __ add(ToRegister(left), ToOperand(right));
-  }
-
-  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
-    DeoptimizeIf(overflow, instr->environment());
+    if (right->IsConstantOperand()) {
+      __ add(ToOperand(left), ToInteger32Immediate(right));
+    } else {
+      __ add(ToRegister(left), ToOperand(right));
+    }
+    if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+      DeoptimizeIf(overflow, instr->environment());
+    }
   }
 }
 
@@ -2956,13 +2955,27 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
 
 
 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
+  int offset = instr->hydrogen()->offset();
   Register object = ToRegister(instr->object());
+  if (FLAG_track_double_fields &&
+      instr->hydrogen()->representation().IsDouble()) {
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope scope(masm(), SSE2);
+      XMMRegister result = ToDoubleRegister(instr->result());
+      __ movdbl(result, FieldOperand(object, offset));
+    } else {
+      PushX87DoubleOperand(FieldOperand(object, offset));
+      CurrentInstructionReturnsX87Result();
+    }
+    return;
+  }
+
   Register result = ToRegister(instr->result());
   if (instr->hydrogen()->is_in_object()) {
-    __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
+    __ mov(result, FieldOperand(object, offset));
   } else {
     __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
-    __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
+    __ mov(result, FieldOperand(result, offset));
   }
 }
 
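DoLoadNamedField now branches on the field's representation: double fields are loaded with movdbl under SSE2 (or through the x87 stack otherwise), while tagged fields are read either directly from the object or through one extra indirection via the properties array. A minimal standalone C++ sketch of that two-level addressing (the struct is an illustration, not V8's real heap layout):

// In-object fields live inside the object itself; overflow fields live in a
// separate properties array, reached through one extra load, which are the two
// cases handled by the is_in_object() branch above.
struct FakeHeapObject {
  void* map;
  void** properties;     // out-of-object backing store
  void* in_object[4];    // in-object property slots
};

void* LoadNamedField(const FakeHeapObject* obj, int index, bool in_object) {
  return in_object ? obj->in_object[index] : obj->properties[index];
}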
@@ -3146,41 +3159,6 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
 }
 
 
-void LCodeGen::DoLoadElements(LLoadElements* instr) {
-  Register result = ToRegister(instr->result());
-  Register input = ToRegister(instr->object());
-  __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
-  if (FLAG_debug_code) {
-    Label done, ok, fail;
-    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
-           Immediate(factory()->fixed_array_map()));
-    __ j(equal, &done, Label::kNear);
-    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
-           Immediate(factory()->fixed_cow_array_map()));
-    __ j(equal, &done, Label::kNear);
-    Register temp((result.is(eax)) ? ebx : eax);
-    __ push(temp);
-    __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
-    __ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
-    __ and_(temp, Map::kElementsKindMask);
-    __ shr(temp, Map::kElementsKindShift);
-    __ cmp(temp, GetInitialFastElementsKind());
-    __ j(less, &fail, Label::kNear);
-    __ cmp(temp, TERMINAL_FAST_ELEMENTS_KIND);
-    __ j(less_equal, &ok, Label::kNear);
-    __ cmp(temp, FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
-    __ j(less, &fail, Label::kNear);
-    __ cmp(temp, LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
-    __ j(less_equal, &ok, Label::kNear);
-    __ bind(&fail);
-    __ Abort("Check for fast or external elements failed.");
-    __ bind(&ok);
-    __ pop(temp);
-    __ bind(&done);
-  }
-}
-
-
 void LCodeGen::DoLoadExternalArrayPointer(
     LLoadExternalArrayPointer* instr) {
   Register result = ToRegister(instr->result());
@@ -4213,8 +4191,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
 
   __ Set(eax, Immediate(instr->arity()));
   __ mov(ebx, instr->hydrogen()->property_cell());
-  Object* cell_value = instr->hydrogen()->property_cell()->value();
-  ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+  ElementsKind kind = instr->hydrogen()->elements_kind();
   if (instr->arity() == 0) {
     ArrayNoArgumentConstructorStub stub(kind);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
@@ -4241,16 +4218,51 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
+  Representation representation = instr->representation();
+
   Register object = ToRegister(instr->object());
+
   int offset = instr->offset();
 
-  if (!instr->transition().is_null()) {
+  Handle<Map> transition = instr->transition();
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    if (instr->value()->IsConstantOperand()) {
+      LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
+      if (!IsInteger32(operand_value)) {
+        DeoptimizeIf(no_condition, instr->environment());
+      }
+    } else {
+      Register value = ToRegister(instr->value());
+      __ SmiTag(value);
+      if (!instr->hydrogen()->value()->range()->IsInSmiRange()) {
+        DeoptimizeIf(overflow, instr->environment());
+      }
+    }
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    ASSERT(transition.is_null());
+    ASSERT(instr->is_in_object());
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope scope(masm(), SSE2);
+      XMMRegister value = ToDoubleRegister(instr->value());
+      __ movdbl(FieldOperand(object, offset), value);
+    } else {
+      __ fstp_d(FieldOperand(object, offset));
+    }
+    return;
+  }
+
+  if (!transition.is_null()) {
+    if (transition->CanBeDeprecated()) {
+      transition_maps_.Add(transition, info()->zone());
+    }
     if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
-      __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+      __ mov(FieldOperand(object, HeapObject::kMapOffset), transition);
     } else {
       Register temp = ToRegister(instr->temp());
       Register temp_map = ToRegister(instr->temp_map());
-      __ mov(temp_map, instr->transition());
+      __ mov(temp_map, transition);
       __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map);
       // Update the write barrier for the map field.
       __ RecordWriteField(object,
@@ -4286,6 +4298,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
       __ mov(FieldOperand(write_register, offset), ToRegister(operand_value));
     } else {
       Handle<Object> handle_value = ToHandle(operand_value);
+      ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
       __ mov(FieldOperand(write_register, offset), handle_value);
     }
   } else {
@@ -5459,6 +5472,8 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
       } else {
         mode = NUMBER_CANDIDATE_IS_SMI;
       }
+    } else {
+      mode = NUMBER_CANDIDATE_IS_SMI;
     }
   }
 
@@ -6025,18 +6040,24 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
 
 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
 
-  __ SmiTag(size);
-  PushSafepointRegistersScope scope(this);
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
-  if (!size.is(result)) {
-    __ StoreToSafepointRegisterSlot(result, size);
+  __ mov(result, Immediate(Smi::FromInt(0)));
+
+  PushSafepointRegistersScope scope(this);
+  if (instr->size()->IsRegister()) {
+    Register size = ToRegister(instr->size());
+    ASSERT(!size.is(result));
+    __ SmiTag(ToRegister(instr->size()));
+    __ push(size);
+  } else {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ push(Immediate(Smi::FromInt(size)));
   }
-  __ push(size);
+
   if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context());
@@ -6125,7 +6146,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
   // Set up the parameters to the stub/runtime call and pick the right
   // runtime function or stub to call.
   int properties_count = instr->hydrogen()->constant_properties_length() / 2;
-  if (instr->hydrogen()->depth() > 1) {
+  if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) ||
+      instr->hydrogen()->depth() > 1) {
     __ PushHeapObject(literals);
     __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
     __ push(Immediate(constant_properties));
index 5b44d87..1fea25b 100644
@@ -59,6 +59,7 @@ class LCodeGen BASE_EMBEDDED {
         jump_table_(4, info->zone()),
         deoptimization_literals_(8, info->zone()),
         prototype_maps_(0, info->zone()),
+        transition_maps_(0, info->zone()),
         inlined_function_count_(0),
         scope_(info->scope()),
         status_(UNUSED),
@@ -415,6 +416,7 @@ class LCodeGen BASE_EMBEDDED {
   ZoneList<JumpTableEntry> jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   ZoneList<Handle<Map> > prototype_maps_;
+  ZoneList<Handle<Map> > transition_maps_;
   int inlined_function_count_;
   Scope* const scope_;
   Status status_;
index c023fd1..6c9098e 100644
@@ -601,6 +601,11 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
 }
 
 
+LOperand* LChunkBuilder::UseConstant(HValue* value) {
+  return chunk_->DefineConstantOperand(HConstant::cast(value));
+}
+
+
 LOperand* LChunkBuilder::UseAny(HValue* value) {
   return value->IsConstant()
       ? chunk_->DefineConstantOperand(HConstant::cast(value))
@@ -731,7 +736,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
   int vreg = allocator_->GetVirtualRegister();
   if (!allocator_->AllocationOk()) {
     Abort("Out of virtual registers while trying to allocate temp register.");
-    return NULL;
+    vreg = 0;
   }
   operand->set_virtual_register(vreg);
   return operand;
@@ -829,8 +834,8 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
   ASSERT(instr->left()->representation().IsDouble());
   ASSERT(instr->right()->representation().IsDouble());
   ASSERT(op != Token::MOD);
-  LOperand* left = UseRegisterAtStart(instr->left());
-  LOperand* right = UseRegisterAtStart(instr->right());
+  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
   LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
   return DefineSameAsFirst(result);
 }
@@ -1387,8 +1392,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
 
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     return DefineSameAsFirst(new(zone()) LBitI(left, right));
   } else {
     ASSERT(instr->representation().IsTagged());
@@ -1555,8 +1560,8 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstant(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstant(instr->BetterRightOperand());
     LOperand* temp = NULL;
     if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
       temp = TempRegister();
@@ -1599,13 +1604,24 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
 
 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
   if (instr->representation().IsInteger32()) {
+    // Check to see if it would be advantageous to use an lea instruction rather
+    // than an add. This is the case when no overflow check is needed and there
+    // are multiple uses of the add's inputs, so using a 3-register add will
+    // preserve all input values for later uses.
+    bool use_lea = LAddI::UseLea(instr);
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    HValue* right_candidate = instr->BetterRightOperand();
+    LOperand* right = use_lea
+        ? UseRegisterOrConstantAtStart(right_candidate)
+        : UseOrConstantAtStart(right_candidate);
     LAddI* add = new(zone()) LAddI(left, right);
-    LInstruction* result = DefineSameAsFirst(add);
-    if (instr->CheckFlag(HValue::kCanOverflow)) {
+    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
+    LInstruction* result = use_lea
+        ? DefineAsRegister(add)
+        : DefineSameAsFirst(add);
+    if (can_overflow) {
       result = AssignEnvironment(result);
     }
     return result;
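The comment above describes the heuristic now captured by LAddI::UseLea (shown later in lithium-ia32.h): a two-operand add clobbers its left input, so when no overflow check is needed (lea sets no flags, so overflow could not be detected anyway) and the left operand has other uses, a three-operand lea keeps both inputs live and avoids an extra move. A one-line sketch of the predicate (hypothetical names):

// Prefer lea when overflow need not be checked and clobbering the left input
// with a two-operand add would force the register allocator to insert a move.
bool ShouldUseLea(bool can_overflow, int left_operand_use_count) {
  return !can_overflow && left_operand_use_count > 1;
}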
@@ -1624,8 +1640,8 @@ LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    left = UseRegisterAtStart(instr->LeastConstantOperand());
-    right = UseOrConstantAtStart(instr->MostConstantOperand());
+    left = UseRegisterAtStart(instr->BetterLeftOperand());
+    right = UseOrConstantAtStart(instr->BetterRightOperand());
   } else {
     ASSERT(instr->representation().IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
@@ -2166,7 +2182,6 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
 
 
 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
-  ASSERT(instr->representation().IsTagged());
   LOperand* obj = UseRegisterAtStart(instr->object());
   return DefineAsRegister(new(zone()) LLoadNamedField(obj));
 }
@@ -2207,12 +2222,6 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
 }
 
 
-LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
-  LOperand* input = UseRegisterAtStart(instr->value());
-  return DefineAsRegister(new(zone()) LLoadElements(input));
-}
-
-
 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
     HLoadExternalArrayPointer* instr) {
   LOperand* input = UseRegisterAtStart(instr->value());
@@ -2287,19 +2296,6 @@ LOperand* LChunkBuilder::GetStoreKeyedValueOperand(HStoreKeyed* instr) {
 }
 
 
-// DoStoreKeyed and DoStoreNamedField have special considerations for allowing
-// use of a constant instead of a register.
-static bool StoreConstantValueAllowed(HValue* value) {
-  if (value->IsConstant()) {
-    HConstant* constant_value = HConstant::cast(value);
-    return constant_value->HasSmiValue()
-        || constant_value->HasDoubleValue()
-        || constant_value->ImmortalImmovable();
-  }
-  return false;
-}
-
-
 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
   if (!instr->is_external()) {
     ASSERT(instr->elements()->representation().IsTagged());
@@ -2327,17 +2323,8 @@ LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
         val = UseTempRegister(instr->value());
         key = UseTempRegister(instr->key());
       } else {
-        if (StoreConstantValueAllowed(instr->value())) {
-          val = UseRegisterOrConstantAtStart(instr->value());
-        } else {
-          val = UseRegisterAtStart(instr->value());
-        }
-
-        if (StoreConstantValueAllowed(instr->key())) {
-          key = UseRegisterOrConstantAtStart(instr->key());
-        } else {
-          key = UseRegisterAtStart(instr->key());
-        }
+        val = UseRegisterOrConstantAtStart(instr->value());
+        key = UseRegisterOrConstantAtStart(instr->key());
       }
       return new(zone()) LStoreKeyed(obj, key, val);
     }
@@ -2438,11 +2425,24 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
         : UseRegisterAtStart(instr->object());
   }
 
+  bool can_be_constant = instr->value()->IsConstant() &&
+      HConstant::cast(instr->value())->NotInNewSpace() &&
+      !(FLAG_track_double_fields && instr->field_representation().IsDouble());
+
   LOperand* val;
   if (needs_write_barrier) {
     val = UseTempRegister(instr->value());
-  } else if (StoreConstantValueAllowed(instr->value())) {
+  } else if (can_be_constant) {
     val = UseRegisterOrConstant(instr->value());
+  } else if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    val = UseTempRegister(instr->value());
+  } else if (FLAG_track_double_fields &&
+             instr->field_representation().IsDouble()) {
+    if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
+      val = UseRegisterAtStart(instr->value());
+    } else {
+      val = UseX87TopOfStack(instr->value());
+    }
   } else {
     val = UseRegister(instr->value());
   }
@@ -2450,12 +2450,17 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   // We only need a scratch register if we have a write barrier or we
   // have a store into the properties array (not in-object-property).
   LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
-      needs_write_barrier_for_map) ? TempRegister() : NULL;
+                    needs_write_barrier_for_map) ? TempRegister() : NULL;
 
   // We need a temporary register for write barrier of the map field.
   LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
 
-  return new(zone()) LStoreNamedField(obj, val, temp, temp_map);
+  LStoreNamedField* result =
+      new(zone()) LStoreNamedField(obj, val, temp, temp_map);
+  if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    return AssignEnvironment(result);
+  }
+  return result;
 }
 
 
@@ -2516,8 +2521,9 @@ LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
   info()->MarkAsDeferredCalling();
   LOperand* context = UseAny(instr->context());
-  // TODO(mvstanton): why can't size be a constant if possible?
-  LOperand* size = UseTempRegister(instr->size());
+  LOperand* size = instr->size()->IsConstant()
+      ? UseConstant(instr->size())
+      : UseTempRegister(instr->size());
   LOperand* temp = TempRegister();
   LAllocate* result = new(zone()) LAllocate(context, size, temp);
   return AssignPointerMap(DefineAsRegister(result));
@@ -2587,7 +2593,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
 
 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
-  if (spill_index > LUnallocated::kMaxFixedIndex) {
+  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
     Abort("Too many spill slots needed for OSR");
     spill_index = 0;
   }
index 49462cb..8202418 100644
@@ -117,7 +117,6 @@ class LCodeGen;
   V(Label)                                      \
   V(LazyBailout)                                \
   V(LoadContextSlot)                            \
-  V(LoadElements)                               \
   V(LoadExternalArrayPointer)                   \
   V(LoadFunctionPrototype)                      \
   V(LoadGlobalCell)                             \
@@ -1370,6 +1369,11 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
   LOperand* left() { return inputs_[0]; }
   LOperand* right() { return inputs_[1]; }
 
+  static bool UseLea(HAdd* add) {
+    return !add->CheckFlag(HValue::kCanOverflow) &&
+        add->BetterLeftOperand()->UseCount() > 1;
+  }
+
   DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
   DECLARE_HYDROGEN_ACCESSOR(Add)
 };
@@ -1496,6 +1500,11 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
     inputs_[0] = object;
   }
 
+  virtual bool ClobbersDoubleRegisters() const {
+    return !CpuFeatures::IsSupported(SSE2) &&
+        !hydrogen()->representation().IsDouble();
+  }
+
   LOperand* object() { return inputs_[0]; }
 
   DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field")
@@ -1550,18 +1559,6 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 1> {
 };
 
 
-class LLoadElements: public LTemplateInstruction<1, 1, 0> {
- public:
-  explicit LLoadElements(LOperand* object) {
-    inputs_[0] = object;
-  }
-
-  LOperand* object() { return inputs_[0]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
-};
-
-
 class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LLoadExternalArrayPointer(LOperand* object) {
@@ -2206,6 +2203,9 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 2> {
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
+  Representation representation() const {
+    return hydrogen()->field_representation();
+  }
 };
 
 
@@ -2908,6 +2908,9 @@ class LChunkBuilder BASE_EMBEDDED {
   MUST_USE_RESULT LOperand* UseRegisterOrConstant(HValue* value);
   MUST_USE_RESULT LOperand* UseRegisterOrConstantAtStart(HValue* value);
 
+  // An input operand that is a constant operand.
+  MUST_USE_RESULT LOperand* UseConstant(HValue* value);
+
   // An input operand in register, stack slot or a constant operand.
   // Will not be moved to a register even if one is freely available.
   MUST_USE_RESULT LOperand* UseAny(HValue* value);
index 733dbdb..175b1ca 100644
@@ -1603,10 +1603,32 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  // Allocate heap number in new space.
-  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
-           TAG_OBJECT);
+  Label allocate_new_space, install_map;
+  AllocationFlags flags = TAG_OBJECT;
+
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(isolate());
+
+  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+  j(zero, &allocate_new_space);
+
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+  jmp(&install_map);
 
+  bind(&allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           flags);
+
+  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
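AllocateAsciiConsString now consults the new-space high-promotion-mode flag and, when it is active, pretenures cons strings straight into old pointer space; otherwise it allocates in new space as before. A minimal sketch of that decision (the enum and flag are illustrative stand-ins, not V8 API):

enum class AllocationSpace { kNewSpace, kOldPointerSpace };

// When the high-promotion heuristic is active, new-space objects are likely
// to be promoted immediately, so allocate the cons string tenured up front.
AllocationSpace ChooseConsStringSpace(bool high_promotion_mode_active) {
  return high_promotion_mode_active ? AllocationSpace::kOldPointerSpace
                                    : AllocationSpace::kNewSpace;
}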
@@ -2889,6 +2911,18 @@ void MacroAssembler::CheckPageFlagForMap(
 }
 
 
+void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
+                                        Register scratch,
+                                        Label* if_deprecated) {
+  if (map->CanBeDeprecated()) {
+    mov(scratch, map);
+    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
+    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
+    j(not_zero, if_deprecated);
+  }
+}
+
+
 void MacroAssembler::JumpIfBlack(Register object,
                                  Register scratch0,
                                  Register scratch1,
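The new CheckMapDeprecated helper loads the map's bit_field3 word, masks it with the Smi-encoded Map::Deprecated mask, and branches to the caller's slow path when the bit is set. A standalone sketch of that packed-bitfield test (the bit position below is an assumption chosen purely for illustration):

#include <cstdint>

// Hypothetical layout: a single "deprecated" flag packed into a 32-bit word,
// analogous to the Map::Deprecated bit inside bit_field3.
constexpr uint32_t kDeprecatedBit = 1u << 5;  // illustrative position only

bool IsDeprecated(uint32_t bit_field3) {
  return (bit_field3 & kDeprecatedBit) != 0;  // the and_/j(not_zero) pattern above
}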
index 2b7641c..519652a 100644
@@ -91,6 +91,10 @@ class MacroAssembler: public Assembler {
       Label* condition_met,
       Label::Distance condition_met_distance = Label::kFar);
 
+  void CheckMapDeprecated(Handle<Map> map,
+                          Register scratch,
+                          Label* if_deprecated);
+
   // Check if object is in new space.  Jumps if the object is not in new space.
   // The register scratch can be object itself, but scratch will be clobbered.
   void JumpIfNotInNewSpace(Register object,
index a44beec..9623b9a 100644
@@ -369,11 +369,13 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
 }
 
 
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -763,8 +765,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register value_reg,
                                            Register scratch1,
                                            Register scratch2,
+                                           Register unused,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // Check that the map of the object hasn't changed.
   __ CheckMap(receiver_reg, Handle<Map>(object->map()),
               miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP);
@@ -774,6 +778,15 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ CheckAccessGlobalProxy(receiver_reg, scratch1, scratch2, miss_label);
   }
 
+  int descriptor = transition->LastAdded();
+  DescriptorArray* descriptors = transition->instance_descriptors();
+  PropertyDetails details = descriptors->GetDetails(descriptor);
+  Representation representation = details.representation();
+  ASSERT(!representation.IsNone());
+
+  // Ensure no transitions to deprecated maps are followed.
+  __ CheckMapDeprecated(transition, scratch1, miss_label);
+
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -790,7 +803,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     // We need an extra register, push
     Register holder_reg = CheckPrototypes(
         object, receiver_reg, Handle<JSObject>(holder), name_reg,
-        scratch1, scratch2, name, miss_restore_name);
+        scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -809,6 +822,46 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
 
+  Register storage_reg = name_reg;
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    Label do_store, heap_number;
+    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);
+
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(value_reg);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ cvtsi2sd(xmm0, value_reg);
+    } else {
+      __ push(value_reg);
+      __ fild_s(Operand(esp, 0));
+      __ pop(value_reg);
+    }
+    __ SmiTag(value_reg);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
+                miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+    } else {
+      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
+    }
+
+    __ bind(&do_store);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
+    } else {
+      __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
+    }
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
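When the transition target's field representation is Double, the code above boxes the incoming value into a freshly allocated HeapNumber: a Smi is untagged and converted with cvtsi2sd (or fild_s without SSE2), a HeapNumber has its 64-bit payload copied, and the result lands in storage_reg. A standalone sketch of the conversion step; the 32-bit Smi encoding below (value shifted left by one, tag bit zero) is stated as an assumption for illustration:

#include <cstdint>

// Assumed 32-bit smi tagging: integer << 1 with a zero tag bit, so SmiUntag
// is an arithmetic shift right by one before the int-to-double conversion.
double SmiToDouble(int32_t tagged_smi) {
  int32_t untagged = tagged_smi >> 1;    // SmiUntag
  return static_cast<double>(untagged);  // cvtsi2sd / fild_s
}

// For a heap number, the generated code just copies the double payload at
// HeapNumber::kValueOffset into the newly allocated storage heap number.
double LoadHeapNumberValue(const double* value_field) {
  return *value_field;                   // movdbl / fld_d
}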
@@ -820,7 +873,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ pop(scratch1);  // Return address.
     __ push(receiver_reg);
     __ push(Immediate(transition));
-    __ push(eax);
+    __ push(value_reg);
     __ push(scratch1);
     __ TailCallExternalReference(
         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
@@ -834,12 +887,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ mov(scratch1, Immediate(transition));
   __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
 
-  // Update the write barrier for the map field and pass the now unused
-  // name_reg as scratch register.
+  // Update the write barrier for the map field.
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
                       OMIT_SMI_CHECK);
@@ -856,31 +908,51 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
+    } else {
+      __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    }
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array (optimistically).
     __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ mov(FieldOperand(scratch1, offset), eax);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(scratch1, offset), storage_reg);
+    } else {
+      __ mov(FieldOperand(scratch1, offset), value_reg);
+    }
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register eax).
@@ -920,35 +992,91 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();
 
+  Representation representation = lookup->representation();
+  ASSERT(!representation.IsNone());
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_label);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ mov(scratch1, FieldOperand(receiver_reg, offset));
+    } else {
+      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ mov(scratch1, FieldOperand(scratch1, offset));
+    }
+
+    // Store the value into the storage.
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(value_reg);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ cvtsi2sd(xmm0, value_reg);
+    } else {
+      __ push(value_reg);
+      __ fild_s(Operand(esp, 0));
+      __ pop(value_reg);
+    }
+    __ SmiTag(value_reg);
+    __ jmp(&do_store);
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
+                miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+    } else {
+      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
+    }
+    __ bind(&do_store);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
+    } else {
+      __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
+    }
+    // Return the value (register eax).
+    ASSERT(value_reg.is(eax));
+    __ ret(0);
+    return;
+  }
+
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   // TODO(verwaest): Share this code as a code stub.
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     __ mov(FieldOperand(receiver_reg, offset), value_reg);
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array (optimistically).
     __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ mov(FieldOperand(scratch1, offset), eax);
-
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kDontSaveFPRegs);
+    __ mov(FieldOperand(scratch1, offset), value_reg);
+
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register eax).
@@ -1197,10 +1325,20 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex index) {
-  // Get the value from the properties.
-  GenerateFastPropertyLoad(masm(), eax, reg, holder, index);
-  __ ret(0);
+                                             PropertyIndex field,
+                                             Representation representation) {
+  if (!reg.is(receiver())) __ mov(receiver(), reg);
+  if (kind() == Code::LOAD_IC) {
+    LoadFieldStub stub(field.is_inobject(holder),
+                       field.translate(holder),
+                       representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  } else {
+    KeyedLoadFieldStub stub(field.is_inobject(holder),
+                            field.translate(holder),
+                            representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  }
 }
 
 
@@ -1455,7 +1593,9 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
                                  name, &miss);
 
-  GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
+  GenerateFastPropertyLoad(
+      masm(), edi, reg, index.is_inobject(holder),
+      index.translate(holder), Representation::Tagged());
 
   // Check that the function really is a function.
   __ JumpIfSmi(edi, &miss);
@@ -2984,17 +3124,23 @@ Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC(
   Register map_reg = scratch1();
   __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
   int receiver_count = receiver_maps->length();
+  int number_of_handled_maps = 0;
   for (int current = 0; current < receiver_count; ++current) {
-    __ cmp(map_reg, receiver_maps->at(current));
-    __ j(equal, handlers->at(current));
+    Handle<Map> map = receiver_maps->at(current);
+    if (!map->is_deprecated()) {
+      number_of_handled_maps++;
+      __ cmp(map_reg, map);
+      __ j(equal, handlers->at(current));
+    }
   }
+  ASSERT(number_of_handled_maps != 0);
 
   __ bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
   // Return the generated code.
   InlineCacheState state =
-      receiver_maps->length() > 1 ? POLYMORPHIC : MONOMORPHIC;
+      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   return GetICCode(kind(), type, name, state);
 }
 
index 40676ab..31845f2 100644
@@ -182,6 +182,15 @@ Address IC::OriginalCodeAddress() const {
 static bool TryRemoveInvalidPrototypeDependentStub(Code* target,
                                                    Object* receiver,
                                                    Object* name) {
+  if (target->is_keyed_load_stub() ||
+      target->is_keyed_call_stub() ||
+      target->is_keyed_store_stub()) {
+    // Determine whether the failure is due to a name failure.
+    if (!name->IsName()) return false;
+    Name* stub_name = target->FindFirstName();
+    if (Name::cast(name) != stub_name) return false;
+  }
+
   InlineCacheHolderFlag cache_holder =
       Code::ExtractCacheHolderFromFlags(target->flags());
 
@@ -208,10 +217,30 @@ static bool TryRemoveInvalidPrototypeDependentStub(Code* target,
   int index = map->IndexInCodeCache(name, target);
   if (index >= 0) {
     map->RemoveFromCodeCache(String::cast(name), target, index);
+    // For loads, handlers are stored in addition to the ICs on the map. Remove
+    // those, too.
+    if (target->is_load_stub() || target->is_keyed_load_stub()) {
+      Code* handler = target->FindFirstCode();
+      index = map->IndexInCodeCache(name, handler);
+      if (index >= 0) {
+        map->RemoveFromCodeCache(String::cast(name), handler, index);
+      }
+    }
     return true;
   }
 
-  return false;
+  // If the IC is shared between multiple receivers (slow dictionary mode), then
+  // the map cannot be deprecated and the stub invalidated.
+  if (cache_holder != OWN_MAP) return false;
+
+  // The stub is not in the cache. We've ruled out all other kinds of failure
+  // except for prototype chain changes, a deprecated map, or a map that's
+  // different from the one that the stub expects. If the map hasn't changed,
+  // assume it's a prototype failure. Treat deprecated maps in the same way as
+  // prototype failures (stay monomorphic if possible).
+  Map* old_map = target->FindFirstMap();
+  if (old_map == NULL) return false;
+  return old_map == map || old_map->is_deprecated();
 }
 
 
@@ -221,22 +250,13 @@ IC::State IC::StateFrom(Code* target, Object* receiver, Object* name) {
   if (state != MONOMORPHIC || !name->IsString()) return state;
   if (receiver->IsUndefined() || receiver->IsNull()) return state;
 
-  // For keyed load/store/call, the most likely cause of cache failure is
-  // that the key has changed.  We do not distinguish between
-  // prototype and non-prototype failures for keyed access.
   Code::Kind kind = target->kind();
-  if (kind == Code::KEYED_LOAD_IC ||
-      kind == Code::KEYED_STORE_IC ||
-      kind == Code::KEYED_CALL_IC) {
-    return MONOMORPHIC;
-  }
-
   // Remove the target from the code cache if it became invalid
   // because of changes in the prototype chain to avoid hitting it
   // again.
   // Call stubs handle this later to allow extra IC state
   // transitions.
-  if (kind != Code::CALL_IC &&
+  if (kind != Code::CALL_IC && kind != Code::KEYED_CALL_IC &&
       TryRemoveInvalidPrototypeDependentStub(target, receiver, name)) {
     return MONOMORPHIC_PROTOTYPE_FAILURE;
   }
@@ -506,6 +526,13 @@ MaybeObject* CallICBase::LoadFunction(State state,
                                       Code::ExtraICState extra_ic_state,
                                       Handle<Object> object,
                                       Handle<String> name) {
+  if (object->IsJSObject()) {
+    Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+    if (receiver->map()->is_deprecated()) {
+      JSObject::MigrateInstance(receiver);
+    }
+  }
+
   // If the object is undefined or null it's illegal to try to get any
   // of its properties; throw a TypeError in that case.
   if (object->IsUndefined() || object->IsNull()) {
@@ -708,8 +735,7 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
         TryUpdateExtraICState(lookup, object, &extra_ic_state)) {
       code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
                                     object, name);
-    } else if (kind_ == Code::CALL_IC &&
-               TryRemoveInvalidPrototypeDependentStub(target(),
+    } else if (TryRemoveInvalidPrototypeDependentStub(target(),
                                                       *object,
                                                       *name)) {
       state = MONOMORPHIC_PROTOTYPE_FAILURE;
@@ -732,15 +758,7 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
     case UNINITIALIZED:
     case MONOMORPHIC_PROTOTYPE_FAILURE:
     case PREMONOMORPHIC:
-      set_target(*code);
-      break;
     case MONOMORPHIC:
-      if (code->ic_state() != MONOMORPHIC) {
-        Map* map = target()->FindFirstMap();
-        if (map != NULL) {
-          UpdateMegamorphicCache(map, *name, target());
-        }
-      }
       set_target(*code);
       break;
     case MEGAMORPHIC: {
@@ -777,6 +795,13 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
                                     Handle<String>::cast(key));
   }
 
+  if (object->IsJSObject()) {
+    Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+    if (receiver->map()->is_deprecated()) {
+      JSObject::MigrateInstance(receiver);
+    }
+  }
+
   if (object->IsUndefined() || object->IsNull()) {
     return TypeError("non_object_property_call", object, key);
   }
@@ -891,6 +916,13 @@ MaybeObject* LoadIC::Load(State state,
     return Runtime::GetElementOrCharAtOrFail(isolate(), object, index);
   }
 
+  if (object->IsJSObject()) {
+    Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+    if (receiver->map()->is_deprecated()) {
+      JSObject::MigrateInstance(receiver);
+    }
+  }
+
   // Named lookup in the object.
   LookupResult lookup(isolate());
   LookupForRead(object, name, &lookup);
@@ -955,11 +987,30 @@ bool IC::UpdatePolymorphicIC(State state,
   MapHandleList receiver_maps;
   CodeHandleList handlers;
 
+  int number_of_valid_maps;
+  int handler_to_overwrite = -1;
+  Handle<Map> new_receiver_map(receiver->map());
   {
     AssertNoAllocation no_gc;
     target()->FindAllMaps(&receiver_maps);
     int number_of_maps = receiver_maps.length();
-    if (number_of_maps >= 4) return false;
+    number_of_valid_maps = number_of_maps;
+
+    for (int i = 0; i < number_of_maps; i++) {
+      Handle<Map> map = receiver_maps.at(i);
+      // Filter out deprecated maps to ensure its instances get migrated.
+      if (map->is_deprecated()) {
+        number_of_valid_maps--;
+      // If the receiver map is already in the polymorphic IC, this indicates
+      // there was a prototype chain failure. In that case, just overwrite the
+      // handler.
+      } else if (map.is_identical_to(new_receiver_map)) {
+        number_of_valid_maps--;
+        handler_to_overwrite = i;
+      }
+    }
+
+    if (number_of_valid_maps >= 4) return false;
 
     // Only allow 0 maps in case target() was reset to UNINITIALIZED by the GC.
     // In that case, allow the IC to go back monomorphic.
@@ -969,14 +1020,16 @@ bool IC::UpdatePolymorphicIC(State state,
     target()->FindAllCode(&handlers, receiver_maps.length());
   }
 
-  if (!AddOneReceiverMapIfMissing(&receiver_maps,
-                                  Handle<Map>(receiver->map()))) {
-    return false;
+  number_of_valid_maps++;
+  if (handler_to_overwrite >= 0) {
+    handlers.Set(handler_to_overwrite, code);
+  } else {
+    receiver_maps.Add(new_receiver_map);
+    handlers.Add(code);
   }
 
-  handlers.Add(code);
   Handle<Code> ic = isolate()->stub_cache()->ComputePolymorphicIC(
-      &receiver_maps, &handlers, name);
+      &receiver_maps, &handlers, number_of_valid_maps, name);
   set_target(*ic);
   return true;
 }
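UpdatePolymorphicIC now inspects the maps already embedded in the polymorphic stub: deprecated maps are ignored (their instances will be migrated), a map identical to the receiver's means the existing handler is simply overwritten (a prototype-chain failure), and the IC only gives up and goes megamorphic once more than four valid maps would result. A compact standalone sketch of that bookkeeping (the containers and types are stand-ins, not V8's MapHandleList/CodeHandleList):

#include <cstddef>
#include <utility>
#include <vector>

struct MapInfo { int id; bool deprecated; };
using Handler = int;  // stand-in for a Handle<Code>

// Returns false when the IC should go megamorphic, mirroring the
// number_of_valid_maps >= 4 cutoff above.
bool UpdatePolymorphic(std::vector<std::pair<MapInfo, Handler> >* entries,
                       const MapInfo& receiver_map, Handler new_handler) {
  int valid = 0;
  int overwrite = -1;
  for (std::size_t i = 0; i < entries->size(); ++i) {
    const MapInfo& map = (*entries)[i].first;
    if (map.deprecated) continue;           // filtered out, instances migrate
    if (map.id == receiver_map.id) {
      overwrite = static_cast<int>(i);      // prototype failure: reuse the slot
    } else {
      ++valid;
    }
  }
  if (valid >= 4) return false;             // too polymorphic
  if (overwrite >= 0) {
    (*entries)[overwrite].second = new_handler;
  } else {
    entries->push_back(std::make_pair(receiver_map, new_handler));
  }
  return true;
}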
@@ -1063,16 +1116,9 @@ void IC::PatchCache(State state,
           if (UpdatePolymorphicIC(state, strict_mode, receiver, name, code)) {
             break;
           }
-        }
-        if (target()->type() != Code::NORMAL) {
-          if (target()->is_load_stub()) {
+
+          if (target()->type() != Code::NORMAL) {
             CopyICToMegamorphicCache(name);
-          } else {
-            Code* handler = target();
-            Map* map = handler->FindFirstMap();
-            if (map != NULL) {
-              UpdateMegamorphicCache(map, *name, handler);
-            }
           }
         }
 
@@ -1175,7 +1221,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
 }
 
 
-void IC::UpdateMegamorphicCache(Map* map, String* name, Code* code) {
+void IC::UpdateMegamorphicCache(Map* map, Name* name, Code* code) {
   // Cache code holding map should be consistent with
   // GenerateMonomorphicCacheProbe.
   isolate()->stub_cache()->Set(name, map, code);
@@ -1195,7 +1241,8 @@ Handle<Code> LoadIC::ComputeLoadHandler(LookupResult* lookup,
   switch (lookup->type()) {
     case FIELD:
       return isolate()->stub_cache()->ComputeLoadField(
-          name, receiver, holder, lookup->GetFieldIndex());
+          name, receiver, holder,
+          lookup->GetFieldIndex(), lookup->representation());
     case CONSTANT_FUNCTION: {
       Handle<JSFunction> constant(lookup->GetConstantFunction());
       return isolate()->stub_cache()->ComputeLoadConstant(
@@ -1237,7 +1284,7 @@ Handle<Code> LoadIC::ComputeLoadHandler(LookupResult* lookup,
         PropertyIndex lengthIndex =
           PropertyIndex::NewHeaderIndex(JSArray::kLengthOffset / kPointerSize);
         return isolate()->stub_cache()->ComputeLoadField(
-            name, receiver, holder, lengthIndex);
+            name, receiver, holder, lengthIndex, Representation::Tagged());
       }
       // TODO(dcarney): Handle correctly.
       if (callback->IsDeclaredAccessorInfo()) break;
@@ -1366,6 +1413,10 @@ MaybeObject* KeyedLoadIC::Load(State state,
         }
       } else if (object->IsJSObject()) {
         Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        if (receiver->map()->is_deprecated()) {
+          JSObject::MigrateInstance(receiver);
+        }
+
         if (receiver->elements()->map() ==
             isolate()->heap()->non_strict_arguments_elements_map()) {
           stub = non_strict_arguments_stub();
@@ -1400,7 +1451,8 @@ Handle<Code> KeyedLoadIC::ComputeLoadHandler(LookupResult* lookup,
   switch (lookup->type()) {
     case FIELD:
       return isolate()->stub_cache()->ComputeKeyedLoadField(
-          name, receiver, holder, lookup->GetFieldIndex());
+          name, receiver, holder,
+          lookup->GetFieldIndex(), lookup->representation());
     case CONSTANT_FUNCTION: {
       Handle<JSFunction> constant(lookup->GetConstantFunction(), isolate());
       return isolate()->stub_cache()->ComputeKeyedLoadConstant(
@@ -1432,7 +1484,9 @@ Handle<Code> KeyedLoadIC::ComputeLoadHandler(LookupResult* lookup,
 
 static bool LookupForWrite(Handle<JSObject> receiver,
                            Handle<String> name,
-                           LookupResult* lookup) {
+                           Handle<Object> value,
+                           LookupResult* lookup,
+                           IC::State* state) {
   Handle<JSObject> holder = receiver;
   receiver->Lookup(*name, lookup);
   if (lookup->IsFound()) {
@@ -1444,9 +1498,10 @@ static bool LookupForWrite(Handle<JSObject> receiver,
         receiver->LocalLookupRealNamedProperty(*name, lookup);
         return lookup->IsFound() &&
             !lookup->IsReadOnly() &&
+            lookup->CanHoldValue(value) &&
             lookup->IsCacheable();
       }
-      return true;
+      return lookup->CanHoldValue(value);
     }
 
     if (lookup->IsPropertyCallbacks()) return true;
@@ -1464,8 +1519,25 @@ static bool LookupForWrite(Handle<JSObject> receiver,
   // chain check. This avoids a double lookup, but requires us to pass in the
   // receiver when trying to fetch extra information from the transition.
   receiver->map()->LookupTransition(*holder, *name, lookup);
-  return lookup->IsTransition() &&
-      !lookup->GetTransitionDetails(receiver->map()).IsReadOnly();
+  if (!lookup->IsTransition()) return false;
+  PropertyDetails target_details =
+      lookup->GetTransitionDetails(receiver->map());
+  if (target_details.IsReadOnly()) return false;
+
+  // If the value that's being stored does not fit in the field that the
+  // instance would transition to, create a new transition that fits the value.
+  // This has to be done before generating the IC, since that IC will embed the
+  // transition target.
+  // Ensure the instance and its map were migrated before trying to update the
+  // transition target.
+  ASSERT(!receiver->map()->is_deprecated());
+  if (!value->FitsRepresentation(target_details.representation())) {
+    Handle<Map> target(lookup->GetTransitionMapFromMap(receiver->map()));
+    Map::GeneralizeRepresentation(
+        target, target->LastAdded(), value->OptimalRepresentation());
+    *state = MONOMORPHIC_PROTOTYPE_FAILURE;
+  }
+  return true;
 }
 
 
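LookupForWrite now also verifies that the stored value fits the representation recorded for the transition's target field; when it does not (for example a heap number written to a field tracked as Smi), Map::GeneralizeRepresentation widens the target before the IC is compiled, so the stub never embeds a representation the value violates. A tiny sketch of the "widen just enough" idea (the three-level lattice below is a deliberate simplification, not V8's Representation class):

// Simplified lattice: Smi is the narrowest, Tagged the most general.
enum class Rep { kSmi = 0, kDouble = 1, kTagged = 2 };

bool Fits(Rep field, Rep value) {
  return static_cast<int>(value) <= static_cast<int>(field);
}

// Widen the field's representation just enough to hold the incoming value,
// mirroring the GeneralizeRepresentation call made before the IC is built.
Rep Generalize(Rep field, Rep value) {
  return Fits(field, value) ? field : value;
}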
@@ -1499,6 +1571,10 @@ MaybeObject* StoreIC::Store(State state,
 
   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
 
+  if (receiver->map()->is_deprecated()) {
+    JSObject::MigrateInstance(receiver);
+  }
+
   // Check if the given name is an array index.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
@@ -1545,7 +1621,7 @@ MaybeObject* StoreIC::Store(State state,
   }
 
   LookupResult lookup(isolate());
-  if (LookupForWrite(receiver, name, &lookup)) {
+  if (LookupForWrite(receiver, name, value, &lookup, &state)) {
     if (FLAG_use_ic) {
       UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
     }
@@ -1954,6 +2030,9 @@ MaybeObject* KeyedStoreIC::Store(State state,
     if (miss_mode != MISS_FORCE_GENERIC) {
       if (object->IsJSObject()) {
         Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        if (receiver->map()->is_deprecated()) {
+          JSObject::MigrateInstance(receiver);
+        }
         bool key_is_smi_like = key->IsSmi() ||
             (FLAG_compiled_keyed_stores && !key->ToSmi()->IsFailure());
         if (receiver->elements()->map() ==
@@ -2183,11 +2262,24 @@ RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) {
   int new_unused = transition->unused_property_fields();
   int new_size = old_storage->length() + new_unused + 1;
   Object* result;
-  { MaybeObject* maybe_result = old_storage->CopySize(new_size);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = old_storage->CopySize(new_size);
+  if (!maybe_result->ToObject(&result)) return maybe_result;
+
   FixedArray* new_storage = FixedArray::cast(result);
-  new_storage->set(old_storage->length(), value);
+
+  Object* to_store = value;
+
+  if (FLAG_track_double_fields) {
+    DescriptorArray* descriptors = transition->instance_descriptors();
+    PropertyDetails details = descriptors->GetDetails(transition->LastAdded());
+    if (details.representation().IsDouble()) {
+      MaybeObject* maybe_storage =
+          isolate->heap()->AllocateHeapNumber(value->Number());
+      if (!maybe_storage->To(&to_store)) return maybe_storage;
+    }
+  }
+
+  new_storage->set(old_storage->length(), to_store);
 
   // Set the new property value and do the map transition.
   object->set_properties(new_storage);
@@ -2229,6 +2321,24 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, StoreIC_Slow) {
+  NoHandleAllocation na(isolate);
+  ASSERT(args.length() == 3);
+  StoreIC ic(IC::NO_EXTRA_FRAME, isolate);
+  Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
+  Handle<Object> object = args.at<Object>(0);
+  Handle<Object> key = args.at<Object>(1);
+  Handle<Object> value = args.at<Object>(2);
+  StrictModeFlag strict_mode = Code::GetStrictMode(extra_ic_state);
+  return Runtime::SetObjectProperty(isolate,
+                                    object,
+                                    key,
+                                    value,
+                                    NONE,
+                                    strict_mode);
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) {
   NoHandleAllocation na(isolate);
   ASSERT(args.length() == 3);
index 4bf259a..739f34c 100644 (file)
@@ -45,6 +45,7 @@ namespace internal {
   ICU(KeyedCallIC_Miss)                               \
   ICU(StoreIC_Miss)                                   \
   ICU(StoreIC_ArrayLength)                            \
+  ICU(StoreIC_Slow)                                   \
   ICU(SharedStoreIC_ExtendStorage)                    \
   ICU(KeyedStoreIC_Miss)                              \
   ICU(KeyedStoreIC_MissForceGeneric)                  \
@@ -184,7 +185,7 @@ class IC {
                   Handle<JSObject> receiver,
                   Handle<String> name,
                   Handle<Code> code);
-  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code);
+  virtual void UpdateMegamorphicCache(Map* map, Name* name, Code* code);
   virtual Handle<Code> megamorphic_stub() {
     UNREACHABLE();
     return Handle<Code>::null();
@@ -471,7 +472,7 @@ class KeyedLoadIC: public LoadIC {
   virtual Handle<Code> ComputeLoadHandler(LookupResult* lookup,
                                           Handle<JSObject> receiver,
                                           Handle<String> name);
-  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code) { }
+  virtual void UpdateMegamorphicCache(Map* map, Name* name, Code* code) { }
 
  private:
   // Stub accessors.
@@ -504,6 +505,7 @@ class StoreIC: public IC {
   }
 
   // Code generators for stub routines. Only called once at startup.
+  static void GenerateSlow(MacroAssembler* masm);
   static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
   static void GenerateMiss(MacroAssembler* masm);
   static void GenerateMegamorphic(MacroAssembler* masm,
@@ -620,7 +622,7 @@ class KeyedStoreIC: public StoreIC {
                                                StrictModeFlag strict_mode,
                                                Handle<JSObject> receiver,
                                                Handle<String> name);
-  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code) { }
+  virtual void UpdateMegamorphicCache(Map* map, Name* name, Code* code) { }
 
   virtual Handle<Code> megamorphic_stub() {
     return isolate()->builtins()->KeyedStoreIC_Generic();
index 658a34c..bacbb93 100644 (file)
@@ -490,10 +490,16 @@ bool IncrementalMarking::WorthActivating() {
   // debug tests run with incremental marking and some without.
   static const intptr_t kActivationThreshold = 0;
 #endif
-
+  // Only start incremental marking in a safe state: 1) when expose GC is
+  // deactivated, 2) when incremental marking is turned on, 3) when we are
+  // currently not in a GC, and 4) when we are currently not serializing
+  // or deserializing the heap.
   return !FLAG_expose_gc &&
       FLAG_incremental_marking &&
+      FLAG_incremental_marking_steps &&
+      heap_->gc_state() == Heap::NOT_IN_GC &&
       !Serializer::enabled() &&
+      heap_->isolate()->IsInitialized() &&
       heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
 }
 
@@ -561,17 +567,21 @@ void IncrementalMarking::UncommitMarkingDeque() {
 }
 
 
-void IncrementalMarking::Start() {
+void IncrementalMarking::Start(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start\n");
   }
   ASSERT(FLAG_incremental_marking);
+  ASSERT(FLAG_incremental_marking_steps);
   ASSERT(state_ == STOPPED);
+  ASSERT(heap_->gc_state() == Heap::NOT_IN_GC);
+  ASSERT(!Serializer::enabled());
+  ASSERT(heap_->isolate()->IsInitialized());
 
   ResetStepCounters();
 
   if (heap_->IsSweepingComplete()) {
-    StartMarking(ALLOW_COMPACTION);
+    StartMarking(flag);
   } else {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Start sweeping.\n");
@@ -860,6 +870,17 @@ void IncrementalMarking::MarkingComplete(CompletionAction action) {
 }
 
 
+void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
+  if (IsStopped() && WorthActivating() && heap_->NextGCIsLikelyToBeFull()) {
+    // TODO(hpayer): Let's play safe for now, but compaction should be
+    // in principle possible.
+    Start(PREVENT_COMPACTION);
+  } else {
+    Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD);
+  }
+}
+
+
 void IncrementalMarking::Step(intptr_t allocated_bytes,
                               CompletionAction action) {
   if (heap_->gc_state() != Heap::NOT_IN_GC ||
@@ -965,7 +986,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
         PrintPID("Postponing speeding up marking until marking starts\n");
       }
     } else {
-      marking_speed_ += kMarkingSpeedAccellerationInterval;
+      marking_speed_ += kMarkingSpeedAccelleration;
       marking_speed_ = static_cast<int>(
           Min(kMaxMarkingSpeed,
               static_cast<intptr_t>(marking_speed_ * 1.3)));
index fc5a978..47d5a51 100644 (file)
@@ -75,7 +75,9 @@ class IncrementalMarking {
 
   bool WorthActivating();
 
-  void Start();
+  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
+
+  void Start(CompactionFlag flag = ALLOW_COMPACTION);
 
   void Stop();
 
@@ -110,10 +112,7 @@ class IncrementalMarking {
   static const intptr_t kMarkingSpeedAccelleration = 2;
   static const intptr_t kMaxMarkingSpeed = 1000;
 
-  void OldSpaceStep(intptr_t allocated) {
-    Step(allocated * kFastMarking / kInitialMarkingSpeed,
-         GC_VIA_STACK_GUARD);
-  }
+  void OldSpaceStep(intptr_t allocated);
 
   void Step(intptr_t allocated, CompletionAction action);
 
@@ -226,8 +225,6 @@ class IncrementalMarking {
 
   void ResetStepCounters();
 
-  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
-
   void StartMarking(CompactionFlag flag);
 
   void ActivateIncrementalWriteBarrier(PagedSpace* space);
index 74850ca..ddc3b73 100644 (file)
@@ -381,33 +381,23 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
         // First check whether there is a single expected transition. If so, try
         // to parse it first.
         bool follow_expected = false;
+        Handle<Map> target;
         if (seq_ascii) {
           key = JSObject::ExpectedTransitionKey(map);
           follow_expected = !key.is_null() && ParseJsonString(key);
         }
         // If the expected transition hits, follow it.
         if (follow_expected) {
-          map = JSObject::ExpectedTransitionTarget(map);
+          target = JSObject::ExpectedTransitionTarget(map);
         } else {
           // If the expected transition failed, parse an internalized string and
           // try to find a matching transition.
           key = ParseJsonInternalizedString();
           if (key.is_null()) return ReportUnexpectedCharacter();
 
-          Handle<Map> target = JSObject::FindTransitionToField(map, key);
+          target = JSObject::FindTransitionToField(map, key);
           // If a transition was found, follow it and continue.
-          if (!target.is_null()) {
-            map = target;
-          } else {
-            // If no transition was found, commit the intermediate state to the
-            // object and stop transitioning.
-            JSObject::TransitionToMap(json_object, map);
-            int length = properties.length();
-            for (int i = 0; i < length; i++) {
-              json_object->FastPropertyAtPut(i, *properties[i]);
-            }
-            transitioning = false;
-          }
+          transitioning = !target.is_null();
         }
         if (c0_ != ':') return ReportUnexpectedCharacter();
 
@@ -415,8 +405,36 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
         value = ParseJsonValue();
         if (value.is_null()) return ReportUnexpectedCharacter();
 
-        properties.Add(value, zone());
-        if (transitioning) continue;
+        if (transitioning) {
+          int descriptor = map->NumberOfOwnDescriptors();
+          PropertyDetails details =
+              target->instance_descriptors()->GetDetails(descriptor);
+          Representation expected_representation = details.representation();
+
+          if (value->FitsRepresentation(expected_representation)) {
+            // If the target representation is double and the value is already
+            // double, use the existing box.
+            if (FLAG_track_double_fields &&
+                value->IsSmi() &&
+                expected_representation.IsDouble()) {
+              value = factory()->NewHeapNumber(
+                  Handle<Smi>::cast(value)->value());
+            }
+            properties.Add(value, zone());
+            map = target;
+            continue;
+          } else {
+            transitioning = false;
+          }
+        }
+
+        // Commit the intermediate state to the object and stop transitioning.
+        JSObject::AllocateStorageForMap(json_object, map);
+        int length = properties.length();
+        for (int i = 0; i < length; i++) {
+          Handle<Object> value = properties[i];
+          json_object->FastPropertyAtPut(i, *value);
+        }
       } else {
         key = ParseJsonInternalizedString();
         if (key.is_null() || c0_ != ':') return ReportUnexpectedCharacter();
@@ -435,10 +453,21 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
 
     // If we transitioned until the very end, transition the map now.
     if (transitioning) {
-      JSObject::TransitionToMap(json_object, map);
+      JSObject::AllocateStorageForMap(json_object, map);
       int length = properties.length();
       for (int i = 0; i < length; i++) {
-        json_object->FastPropertyAtPut(i, *properties[i]);
+        Handle<Object> value = properties[i];
+        // If the target representation is double and the value is already
+        // double, use the existing box.
+        if (FLAG_track_double_fields && value->IsSmi()) {
+          Representation representation =
+              map->instance_descriptors()->GetDetails(i).representation();
+          if (representation.IsDouble()) {
+            value = factory()->NewHeapNumber(
+                Handle<Smi>::cast(value)->value());
+          }
+        }
+        json_object->FastPropertyAtPut(i, *value);
       }
     }
   }
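
Both fast paths above may need to box a parsed Smi into a heap number when the map's descriptor tracks the field as Double, so the object's storage always matches what its map promises. A toy model of that conversion (std::variant stands in for tagged values; none of this is the real V8 API):

#include <cassert>
#include <memory>
#include <variant>

struct HeapNumber { double value = 0; };
enum class Representation { kSmi, kDouble, kTagged };

// A tagged slot either holds a small integer directly or points to a box.
using Tagged = std::variant<int, std::shared_ptr<HeapNumber>>;

static Tagged PrepareForField(Tagged v, Representation field_rep) {
  if (field_rep == Representation::kDouble && std::holds_alternative<int>(v)) {
    auto boxed = std::make_shared<HeapNumber>();
    boxed->value = static_cast<double>(std::get<int>(v));
    return boxed;  // store the box, not the raw small integer
  }
  return v;
}

int main() {
  Tagged v = PrepareForField(42, Representation::kDouble);
  assert(std::get<std::shared_ptr<HeapNumber>>(v)->value == 42.0);
  return 0;
}
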
index 47a0129..b67a9f6 100644 (file)
@@ -644,7 +644,7 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSObject(
       Handle<Object> property;
       if (details.type() == FIELD && *map == object->map()) {
         property = Handle<Object>(
-                       object->FastPropertyAt(
+                       object->RawFastPropertyAt(
                            map->instance_descriptors()->GetFieldIndex(i)),
                        isolate_);
       } else {
index d815a7e..143c830 100644 (file)
@@ -104,6 +104,13 @@ Vector<T> List<T, P>::AddBlock(T value, int count, P alloc) {
 
 
 template<typename T, class P>
+void List<T, P>::Set(int index, const T& elm) {
+  ASSERT(index >= 0 && index <= length_);
+  data_[index] = elm;
+}
+
+
+template<typename T, class P>
 void List<T, P>::InsertAt(int index, const T& elm, P alloc) {
   ASSERT(index >= 0 && index <= length_);
   Add(elm, alloc);
index 43d982f..0e4e35b 100644 (file)
@@ -115,6 +115,9 @@ class List {
   void InsertAt(int index, const T& element,
                 AllocationPolicy allocator = AllocationPolicy());
 
+  // Overwrites the element at the specific index.
+  void Set(int index, const T& element);
+
   // Added 'count' elements with the value 'value' and returns a
   // vector that allows access to the elements.  The vector is valid
   // until the next change is made to this list.
index 7bddef7..74132b3 100644 (file)
@@ -56,9 +56,11 @@ static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
 }
 
 
-UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
+UsePosition::UsePosition(LifetimePosition pos,
+                         LOperand* operand,
+                         LOperand* hint)
     : operand_(operand),
-      hint_(NULL),
+      hint_(hint),
       pos_(pos),
       next_(NULL),
       requires_reg_(false),
@@ -138,6 +140,7 @@ LiveRange::LiveRange(int id, Zone* zone)
       next_(NULL),
       current_interval_(NULL),
       last_processed_use_(NULL),
+      current_hint_operand_(NULL),
       spill_operand_(new(zone) LOperand()),
       spill_start_index_(kMaxInt) { }
 
@@ -227,13 +230,6 @@ bool LiveRange::CanBeSpilled(LifetimePosition pos) {
 }
 
 
-UsePosition* LiveRange::FirstPosWithHint() const {
-  UsePosition* pos = first_pos_;
-  while (pos != NULL && !pos->HasHint()) pos = pos->next();
-  return pos;
-}
-
-
 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
   LOperand* op = NULL;
   if (HasRegisterAssigned()) {
@@ -375,7 +371,7 @@ bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
   LifetimePosition start = Start();
   LifetimePosition other_start = other->Start();
   if (start.Value() == other_start.Value()) {
-    UsePosition* pos = FirstPosWithHint();
+    UsePosition* pos = first_pos();
     if (pos == NULL) return false;
     UsePosition* other_pos = other->first_pos();
     if (other_pos == NULL) return true;
@@ -449,16 +445,19 @@ void LiveRange::AddUseInterval(LifetimePosition start,
 }
 
 
-UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
-                                       LOperand* operand,
-                                       Zone* zone) {
+void LiveRange::AddUsePosition(LifetimePosition pos,
+                               LOperand* operand,
+                               LOperand* hint,
+                               Zone* zone) {
   LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                          id_,
                          pos.Value());
-  UsePosition* use_pos = new(zone) UsePosition(pos, operand);
+  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
+  UsePosition* prev_hint = NULL;
   UsePosition* prev = NULL;
   UsePosition* current = first_pos_;
   while (current != NULL && current->pos().Value() < pos.Value()) {
+    prev_hint = current->HasHint() ? current : prev_hint;
     prev = current;
     current = current->next();
   }
@@ -471,7 +470,9 @@ UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
     prev->next_ = use_pos;
   }
 
-  return use_pos;
+  if (prev_hint == NULL && use_pos->HasHint()) {
+    current_hint_operand_ = hint;
+  }
 }
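
AddUsePosition now keeps use positions sorted and maintains the earliest hint in current_hint_operand_ while inserting, replacing the removed FirstPosWithHint() scan. A compact standalone model of that bookkeeping, with plain pointers and strings instead of LOperand:

#include <cassert>
#include <cstring>

struct UsePos {
  int pos;
  const char* hint;  // may be null
  UsePos* next;
};

struct RangeModel {
  UsePos* first = nullptr;
  const char* current_hint = nullptr;

  // Insert sorted by position; only a use that becomes the earliest hinted
  // one is allowed to update the cached hint.
  void AddUsePosition(UsePos* use) {
    UsePos* prev = nullptr;
    UsePos* prev_hint = nullptr;
    for (UsePos* cur = first; cur != nullptr && cur->pos < use->pos;
         cur = cur->next) {
      if (cur->hint != nullptr) prev_hint = cur;
      prev = cur;
    }
    if (prev == nullptr) {
      use->next = first;
      first = use;
    } else {
      use->next = prev->next;
      prev->next = use;
    }
    if (prev_hint == nullptr && use->hint != nullptr) current_hint = use->hint;
  }
};

int main() {
  RangeModel r;
  UsePos a{10, nullptr, nullptr}, b{20, "r2", nullptr}, c{5, "r1", nullptr};
  r.AddUsePosition(&a);
  r.AddUsePosition(&b);  // first hint seen: "r2"
  r.AddUsePosition(&c);  // inserted earlier with a hint: cache becomes "r1"
  assert(std::strcmp(r.current_hint, "r1") == 0);
  return 0;
}
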
 
 
@@ -624,13 +625,13 @@ LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                     bool is_tagged) {
   TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
   ASSERT(operand->HasFixedPolicy());
-  if (operand->policy() == LUnallocated::FIXED_SLOT) {
-    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
-  } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
-    int reg_index = operand->fixed_index();
+  if (operand->HasFixedSlotPolicy()) {
+    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
+  } else if (operand->HasFixedRegisterPolicy()) {
+    int reg_index = operand->fixed_register_index();
     operand->ConvertTo(LOperand::REGISTER, reg_index);
-  } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
-    int reg_index = operand->fixed_index();
+  } else if (operand->HasFixedDoubleRegisterPolicy()) {
+    int reg_index = operand->fixed_register_index();
     operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
   } else {
     UNREACHABLE();
@@ -725,14 +726,14 @@ void LAllocator::Define(LifetimePosition position,
   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
     // Can happen if there is a definition without use.
     range->AddUseInterval(position, position.NextInstruction(), zone_);
-    range->AddUsePosition(position.NextInstruction(), NULL, zone_);
+    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone_);
   } else {
     range->ShortenTo(position);
   }
 
   if (operand->IsUnallocated()) {
     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
-    range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
+    range->AddUsePosition(position, unalloc_operand, hint, zone_);
   }
 }
 
@@ -745,7 +746,7 @@ void LAllocator::Use(LifetimePosition block_start,
   if (range == NULL) return;
   if (operand->IsUnallocated()) {
     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
-    range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
+    range->AddUsePosition(position, unalloc_operand, hint, zone_);
   }
   range->AddUseInterval(block_start, position, zone_);
 }
@@ -845,7 +846,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
         AllocateFixed(cur_input, gap_index + 1, is_tagged);
         AddConstraintsGapMove(gap_index, input_copy, cur_input);
-      } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
+      } else if (cur_input->HasWritableRegisterPolicy()) {
         // The live range of writable input registers always goes until the end
         // of the instruction.
         ASSERT(!cur_input->IsUsedAtStart());
@@ -924,7 +925,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
         if (phi != NULL) {
           // This is a phi resolving move.
           if (!phi->block()->IsLoopHeader()) {
-            hint = LiveRangeFor(phi->id())->FirstHint();
+            hint = LiveRangeFor(phi->id())->current_hint_operand();
           }
         } else {
           if (to->IsUnallocated()) {
@@ -1812,26 +1813,23 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
   }
 
-  UsePosition* hinted_use = current->FirstPosWithHint();
-  if (hinted_use != NULL) {
-    LOperand* hint = hinted_use->hint();
-    if (hint->IsRegister() || hint->IsDoubleRegister()) {
-      int register_index = hint->index();
-      TraceAlloc(
-          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
-          RegisterName(register_index),
-          free_until_pos[register_index].Value(),
-          current->id(),
-          current->End().Value());
-
-      // The desired register is free until the end of the current live range.
-      if (free_until_pos[register_index].Value() >= current->End().Value()) {
-        TraceAlloc("Assigning preferred reg %s to live range %d\n",
-                   RegisterName(register_index),
-                   current->id());
-        SetLiveRangeAssignedRegister(current, register_index, mode_, zone_);
-        return true;
-      }
+  LOperand* hint = current->FirstHint();
+  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
+    int register_index = hint->index();
+    TraceAlloc(
+        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
+        RegisterName(register_index),
+        free_until_pos[register_index].Value(),
+        current->id(),
+        current->End().Value());
+
+    // The desired register is free until the end of the current live range.
+    if (free_until_pos[register_index].Value() >= current->End().Value()) {
+      TraceAlloc("Assigning preferred reg %s to live range %d\n",
+                 RegisterName(register_index),
+                 current->id());
+      SetLiveRangeAssignedRegister(current, register_index, mode_, zone_);
+      return true;
     }
   }
 
index 8b45531..552ebdd 100644 (file)
@@ -244,13 +244,12 @@ class UseInterval: public ZoneObject {
 // Representation of a use position.
 class UsePosition: public ZoneObject {
  public:
-  UsePosition(LifetimePosition pos, LOperand* operand);
+  UsePosition(LifetimePosition pos, LOperand* operand, LOperand* hint);
 
   LOperand* operand() const { return operand_; }
   bool HasOperand() const { return operand_ != NULL; }
 
   LOperand* hint() const { return hint_; }
-  void set_hint(LOperand* hint) { hint_ = hint; }
   bool HasHint() const;
   bool RequiresRegister() const;
   bool RegisterIsBeneficial() const;
@@ -261,9 +260,9 @@ class UsePosition: public ZoneObject {
  private:
   void set_next(UsePosition* next) { next_ = next; }
 
-  LOperand* operand_;
-  LOperand* hint_;
-  LifetimePosition pos_;
+  LOperand* const operand_;
+  LOperand* const hint_;
+  LifetimePosition const pos_;
   UsePosition* next_;
   bool requires_reg_;
   bool register_beneficial_;
@@ -329,10 +328,14 @@ class LiveRange: public ZoneObject {
     return assigned_register_ != kInvalidAssignment;
   }
   bool IsSpilled() const { return spilled_; }
-  UsePosition* FirstPosWithHint() const;
 
+  LOperand* current_hint_operand() const {
+    ASSERT(current_hint_operand_ == FirstHint());
+    return current_hint_operand_;
+  }
   LOperand* FirstHint() const {
-    UsePosition* pos = FirstPosWithHint();
+    UsePosition* pos = first_pos_;
+    while (pos != NULL && !pos->HasHint()) pos = pos->next();
     if (pos != NULL) return pos->hint();
     return NULL;
   }
@@ -367,9 +370,10 @@ class LiveRange: public ZoneObject {
   void AddUseInterval(LifetimePosition start,
                       LifetimePosition end,
                       Zone* zone);
-  UsePosition* AddUsePosition(LifetimePosition pos,
-                              LOperand* operand,
-                              Zone* zone);
+  void AddUsePosition(LifetimePosition pos,
+                      LOperand* operand,
+                      LOperand* hint,
+                      Zone* zone);
 
   // Shorten the most recently added interval by setting a new start.
   void ShortenTo(LifetimePosition start);
@@ -398,6 +402,8 @@ class LiveRange: public ZoneObject {
   // This is used as a cache, it doesn't affect correctness.
   mutable UseInterval* current_interval_;
   UsePosition* last_processed_use_;
+  // This is used as a cache, it's invalid outside of BuildLiveRanges.
+  LOperand* current_hint_operand_;
   LOperand* spill_operand_;
   int spill_start_index_;
 };
index 10d7f71..539f4ee 100644 (file)
@@ -58,24 +58,27 @@ void LOperand::PrintTo(StringStream* stream) {
     case UNALLOCATED:
       unalloc = LUnallocated::cast(this);
       stream->Add("v%d", unalloc->virtual_register());
-      switch (unalloc->policy()) {
+      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
+        stream->Add("(=%dS)", unalloc->fixed_slot_index());
+        break;
+      }
+      switch (unalloc->extended_policy()) {
         case LUnallocated::NONE:
           break;
         case LUnallocated::FIXED_REGISTER: {
+          int reg_index = unalloc->fixed_register_index();
           const char* register_name =
-              Register::AllocationIndexToString(unalloc->fixed_index());
+              Register::AllocationIndexToString(reg_index);
           stream->Add("(=%s)", register_name);
           break;
         }
         case LUnallocated::FIXED_DOUBLE_REGISTER: {
+          int reg_index = unalloc->fixed_register_index();
           const char* double_register_name =
-              DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
+              DoubleRegister::AllocationIndexToString(reg_index);
           stream->Add("(=%s)", double_register_name);
           break;
         }
-        case LUnallocated::FIXED_SLOT:
-          stream->Add("(=%dS)", unalloc->fixed_index());
-          break;
         case LUnallocated::MUST_HAVE_REGISTER:
           stream->Add("(R)");
           break;
index 2418274..388f565 100644 (file)
@@ -92,12 +92,16 @@ class LOperand: public ZoneObject {
 
 class LUnallocated: public LOperand {
  public:
-  enum Policy {
+  enum BasicPolicy {
+    FIXED_SLOT,
+    EXTENDED_POLICY
+  };
+
+  enum ExtendedPolicy {
     NONE,
     ANY,
     FIXED_REGISTER,
     FIXED_DOUBLE_REGISTER,
-    FIXED_SLOT,
     MUST_HAVE_REGISTER,
     WRITABLE_REGISTER,
     SAME_AS_FIRST_INPUT
@@ -117,99 +121,152 @@ class LUnallocated: public LOperand {
     USED_AT_END
   };
 
-  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
-    Initialize(policy, 0, USED_AT_END);
+  explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
+    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
+    value_ |= ExtendedPolicyField::encode(policy);
+    value_ |= LifetimeField::encode(USED_AT_END);
   }
 
-  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
-    Initialize(policy, fixed_index, USED_AT_END);
+  LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
+    ASSERT(policy == FIXED_SLOT);
+    value_ |= BasicPolicyField::encode(policy);
+    value_ |= index << FixedSlotIndexField::kShift;
+    ASSERT(this->fixed_slot_index() == index);
   }
 
-  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
-    Initialize(policy, 0, lifetime);
+  LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
+    ASSERT(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
+    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
+    value_ |= ExtendedPolicyField::encode(policy);
+    value_ |= LifetimeField::encode(USED_AT_END);
+    value_ |= FixedRegisterField::encode(index);
   }
 
-  // The superclass has a KindField.  Some policies have a signed fixed
-  // index in the upper bits.
-  static const int kPolicyWidth = 3;
-  static const int kLifetimeWidth = 1;
-  static const int kVirtualRegisterWidth = 15;
-
-  static const int kPolicyShift = kKindFieldWidth;
-  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
-  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
-  static const int kFixedIndexShift =
-      kVirtualRegisterShift + kVirtualRegisterWidth;
-  static const int kFixedIndexWidth = 32 - kFixedIndexShift;
-  STATIC_ASSERT(kFixedIndexWidth > 5);
-
-  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
-
-  class LifetimeField
-      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
-  };
+  LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
+      : LOperand(UNALLOCATED, 0) {
+    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
+    value_ |= ExtendedPolicyField::encode(policy);
+    value_ |= LifetimeField::encode(lifetime);
+  }
 
-  class VirtualRegisterField
-      : public BitField<unsigned,
-                        kVirtualRegisterShift,
-                        kVirtualRegisterWidth> {
-  };
+  LUnallocated* CopyUnconstrained(Zone* zone) {
+    LUnallocated* result = new(zone) LUnallocated(ANY);
+    result->set_virtual_register(virtual_register());
+    return result;
+  }
 
-  static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
-  static const int kMaxFixedIndex = (1 << (kFixedIndexWidth - 1)) - 1;
-  static const int kMinFixedIndex = -(1 << (kFixedIndexWidth - 1));
+  static LUnallocated* cast(LOperand* op) {
+    ASSERT(op->IsUnallocated());
+    return reinterpret_cast<LUnallocated*>(op);
+  }
 
+  // The encoding used for LUnallocated operands depends on the policy that is
+  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
+  // because it accommodates a larger pay-load.
+  //
+  // For FIXED_SLOT policy:
+  //     +------------------------------------------+
+  //     |       slot_index      |  vreg  | 0 | 001 |
+  //     +------------------------------------------+
+  //
+  // For all other (extended) policies:
+  //     +------------------------------------------+
+  //     |  reg_index  | L | PPP |  vreg  | 1 | 001 |    L ... Lifetime
+  //     +------------------------------------------+    P ... Policy
+  //
+  // The slot index is a signed value which requires us to decode it manually
+  // instead of using the BitField utility class.
+
+  // The superclass has a KindField.
+  STATIC_ASSERT(kKindFieldWidth == 3);
+
+  // BitFields for all unallocated operands.
+  class BasicPolicyField     : public BitField<BasicPolicy,     3,  1> {};
+  class VirtualRegisterField : public BitField<unsigned,        4, 18> {};
+
+  // BitFields specific to BasicPolicy::FIXED_SLOT.
+  class FixedSlotIndexField  : public BitField<int,            22, 10> {};
+
+  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
+  class ExtendedPolicyField  : public BitField<ExtendedPolicy, 22,  3> {};
+  class LifetimeField        : public BitField<Lifetime,       25,  1> {};
+  class FixedRegisterField   : public BitField<int,            26,  6> {};
+
+  static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
+  static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
+  static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
+  static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
+
+  // Predicates for the operand policy.
   bool HasAnyPolicy() const {
-    return policy() == ANY;
+    return basic_policy() == EXTENDED_POLICY &&
+        extended_policy() == ANY;
   }
   bool HasFixedPolicy() const {
-    return policy() == FIXED_REGISTER ||
-        policy() == FIXED_DOUBLE_REGISTER ||
-        policy() == FIXED_SLOT;
+    return basic_policy() == FIXED_SLOT ||
+        extended_policy() == FIXED_REGISTER ||
+        extended_policy() == FIXED_DOUBLE_REGISTER;
   }
   bool HasRegisterPolicy() const {
-    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
+    return basic_policy() == EXTENDED_POLICY && (
+        extended_policy() == WRITABLE_REGISTER ||
+        extended_policy() == MUST_HAVE_REGISTER);
   }
   bool HasSameAsInputPolicy() const {
-    return policy() == SAME_AS_FIRST_INPUT;
+    return basic_policy() == EXTENDED_POLICY &&
+        extended_policy() == SAME_AS_FIRST_INPUT;
+  }
+  bool HasFixedSlotPolicy() const {
+    return basic_policy() == FIXED_SLOT;
+  }
+  bool HasFixedRegisterPolicy() const {
+    return basic_policy() == EXTENDED_POLICY &&
+        extended_policy() == FIXED_REGISTER;
   }
-  Policy policy() const { return PolicyField::decode(value_); }
-  void set_policy(Policy policy) {
-    value_ = PolicyField::update(value_, policy);
+  bool HasFixedDoubleRegisterPolicy() const {
+    return basic_policy() == EXTENDED_POLICY &&
+        extended_policy() == FIXED_DOUBLE_REGISTER;
   }
-  int fixed_index() const {
-    return static_cast<int>(value_) >> kFixedIndexShift;
+  bool HasWritableRegisterPolicy() const {
+    return basic_policy() == EXTENDED_POLICY &&
+        extended_policy() == WRITABLE_REGISTER;
   }
 
-  int virtual_register() const {
-    return VirtualRegisterField::decode(value_);
+  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
+  BasicPolicy basic_policy() const {
+    return BasicPolicyField::decode(value_);
   }
 
-  void set_virtual_register(unsigned id) {
-    value_ = VirtualRegisterField::update(value_, id);
+  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
+  ExtendedPolicy extended_policy() const {
+    ASSERT(basic_policy() == EXTENDED_POLICY);
+    return ExtendedPolicyField::decode(value_);
   }
 
-  LUnallocated* CopyUnconstrained(Zone* zone) {
-    LUnallocated* result = new(zone) LUnallocated(ANY);
-    result->set_virtual_register(virtual_register());
-    return result;
+  // [fixed_slot_index]: Only for FIXED_SLOT.
+  int fixed_slot_index() const {
+    ASSERT(HasFixedSlotPolicy());
+    return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
   }
 
-  static LUnallocated* cast(LOperand* op) {
-    ASSERT(op->IsUnallocated());
-    return reinterpret_cast<LUnallocated*>(op);
+  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
+  int fixed_register_index() const {
+    ASSERT(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
+    return FixedRegisterField::decode(value_);
   }
 
-  bool IsUsedAtStart() {
-    return LifetimeField::decode(value_) == USED_AT_START;
+  // [virtual_register]: The virtual register ID for this operand.
+  int virtual_register() const {
+    return VirtualRegisterField::decode(value_);
+  }
+  void set_virtual_register(unsigned id) {
+    value_ = VirtualRegisterField::update(value_, id);
   }
 
- private:
-  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
-    value_ |= PolicyField::encode(policy);
-    value_ |= LifetimeField::encode(lifetime);
-    value_ |= fixed_index << kFixedIndexShift;
-    ASSERT(this->fixed_index() == fixed_index);
+  // [lifetime]: Only for non-FIXED_SLOT.
+  bool IsUsedAtStart() {
+    ASSERT(basic_policy() == EXTENDED_POLICY);
+    return LifetimeField::decode(value_) == USED_AT_START;
   }
 };
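
The layout comment above is easier to follow with a concrete round-trip. The sketch below re-creates a minimal BitField helper using the bit positions from that comment (it is not V8's BitField class) and shows why the signed slot index is decoded with a manual arithmetic shift rather than the masked decode() used for the unsigned fields:

#include <cassert>
#include <cstdint>

template <typename T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((1u << size) - 1) << shift;
  static constexpr int kShift = shift;
  static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
  static T decode(uint32_t value) { return static_cast<T>((value & kMask) >> shift); }
};

enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

typedef BitField<BasicPolicy, 3, 1> BasicPolicyField;      // after 3 kind bits
typedef BitField<unsigned, 4, 18>   VirtualRegisterField;
typedef BitField<int, 22, 10>       FixedSlotIndexField;

int main() {
  // Pack a FIXED_SLOT operand with virtual register 7 and slot index -3.
  uint32_t value = 0;
  value |= BasicPolicyField::encode(FIXED_SLOT);
  value |= VirtualRegisterField::encode(7);
  value |= static_cast<uint32_t>(-3) << FixedSlotIndexField::kShift;

  // The slot index is signed, so it is recovered with an arithmetic shift
  // instead of the masked decode() used for the unsigned fields.
  int slot = static_cast<int32_t>(value) >> FixedSlotIndexField::kShift;
  assert(slot == -3);
  assert(VirtualRegisterField::decode(value) == 7u);
  return 0;
}
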
 
index 0c52f38..643d6c7 100644 (file)
@@ -116,7 +116,7 @@ macro IS_ERROR(arg)             = (%_ClassOf(arg) === 'Error');
 macro IS_SCRIPT(arg)            = (%_ClassOf(arg) === 'Script');
 macro IS_ARGUMENTS(arg)         = (%_ClassOf(arg) === 'Arguments');
 macro IS_GLOBAL(arg)            = (%_ClassOf(arg) === 'global');
-macro IS_ARRAYBUFFER(arg)       = (%_ClassOf(arg) === '__ArrayBuffer');
+macro IS_ARRAYBUFFER(arg)       = (%_ClassOf(arg) === 'ArrayBuffer');
 macro IS_GENERATOR(arg)         = (%_ClassOf(arg) === 'Generator');
 macro IS_UNDETECTABLE(arg)      = (%_IsUndetectableObject(arg));
 macro FLOOR(arg)                = $floor(arg);
index 15a39b7..b9bce1e 100644 (file)
@@ -101,16 +101,24 @@ var kMessages = {
   observe_type_non_string:       ["Invalid changeRecord with non-string 'type' property"],
   observe_notify_non_notifier:   ["notify called on non-notifier object"],
   proto_poison_pill:             ["Generic use of __proto__ accessor not allowed"],
+  parameterless_typed_array_constr:
+                                 ["%0"," constructor should have at least one argument."],
+  not_typed_array:               ["this is not a typed array."],
+  invalid_argument:              ["invalid_argument"],
   // RangeError
   invalid_array_length:          ["Invalid array length"],
   invalid_array_buffer_length:   ["Invalid array buffer length"],
   invalid_typed_array_offset:    ["Start offset is too large"],
   invalid_typed_array_length:    ["Length is too large"],
   invalid_typed_array_alignment: ["%0", "of", "%1", "should be a multiple of", "%3"],
+  typed_array_set_source_too_large:
+                                 ["Source is too large"],
   stack_overflow:                ["Maximum call stack size exceeded"],
   invalid_time_value:            ["Invalid time value"],
   // SyntaxError
-  unable_to_parse:               ["Parse error"],
+  paren_in_arg_string:           ["Function arg string contains parenthesis"],
+  not_isvar:                     ["builtin %IS_VAR: not a variable"],
+  single_function_literal:       ["Single function literal required"],
   invalid_regexp_flags:          ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
   invalid_regexp:                ["Invalid RegExp pattern /", "%0", "/"],
   illegal_break:                 ["Illegal break statement"],
index 733c369..f1c2553 100644 (file)
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_MIPS)
 
 #include "bootstrapper.h"
+#include "builtins-decls.h"
 #include "code-stubs.h"
 #include "codegen.h"
 #include "regexp-macro-assembler.h"
@@ -74,6 +75,28 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
 }
 
 
+void LoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { a0 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { a1 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -116,9 +139,10 @@ static void InitializeArrayConstructorDescriptor(
     int constant_stack_parameter_count) {
   // register state
   // a0 -- number of arguments
+  // a1 -- function
   // a2 -- type info cell with elements kind
-  static Register registers[] = { a2 };
-  descriptor->register_param_count_ = 1;
+  static Register registers[] = { a1, a2 };
+  descriptor->register_param_count_ = 2;
   if (constant_stack_parameter_count != 0) {
     // stack param count needs (constructor pointer, and single argument)
     descriptor->stack_parameter_count_ = &a0;
@@ -4099,12 +4123,6 @@ Register InstanceofStub::left() { return a0; }
 Register InstanceofStub::right() { return a1; }
 
 
-void LoadFieldStub::Generate(MacroAssembler* masm) {
-  StubCompiler::DoGenerateFastPropertyLoad(masm, v0, reg_, inobject_, index_);
-  __ Ret();
-}
-
-
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The displacement is the offset of the last parameter (if any)
   // relative to the frame pointer.
@@ -5103,6 +5121,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   Handle<Object> terminal_kind_sentinel =
       TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
                                                   LAST_FAST_ELEMENTS_KIND);
+  __ JumpIfNotSmi(a3, &miss);
   __ Branch(&miss, gt, a3, Operand(terminal_kind_sentinel));
   // Make sure the function is the Array() function
   __ LoadArrayFunction(a3);
@@ -6300,25 +6319,53 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime);
   __ bind(&allocated);
   // Fill the fields of the cons string.
+  Label skip_write_barrier, after_writing;
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(masm->isolate());
+  __ li(t0, Operand(high_promotion_mode));
+  __ lw(t0, MemOperand(t0, 0));
+  __ Branch(&skip_write_barrier, eq, t0, Operand(zero_reg));
+
+  __ mov(t3, v0);
+  __ sw(a0, FieldMemOperand(t3, ConsString::kFirstOffset));
+  __ RecordWriteField(t3,
+                      ConsString::kFirstOffset,
+                      a0,
+                      t0,
+                      kRAHasNotBeenSaved,
+                      kDontSaveFPRegs);
+  __ sw(a1, FieldMemOperand(t3, ConsString::kSecondOffset));
+  __ RecordWriteField(t3,
+                      ConsString::kSecondOffset,
+                      a1,
+                      t0,
+                      kRAHasNotBeenSaved,
+                      kDontSaveFPRegs);
+  __ jmp(&after_writing);
+
+  __ bind(&skip_write_barrier);
   __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset));
   __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset));
+
+  __ bind(&after_writing);
+
   __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
   __ DropAndRet(2);
 
   __ bind(&non_ascii);
   // At least one of the strings is two-byte. Check whether it happens
-  // to contain only ASCII characters.
+  // to contain only one byte characters.
   // t0: first instance type.
   // t1: second instance type.
-  // Branch to if _both_ instances have kAsciiDataHintMask set.
-  __ And(at, t0, Operand(kAsciiDataHintMask));
+  // Branch to if _both_ instances have kOneByteDataHintMask set.
+  __ And(at, t0, Operand(kOneByteDataHintMask));
   __ and_(at, at, t1);
   __ Branch(&ascii_data, ne, at, Operand(zero_reg));
   __ Xor(t0, t0, Operand(t1));
-  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
-  __ And(t0, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
+  STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
+  __ And(t0, t0, Operand(kOneByteStringTag | kOneByteDataHintTag));
   __ Branch(&ascii_data, eq, t0,
-      Operand(kOneByteStringTag | kAsciiDataHintTag));
+      Operand(kOneByteStringTag | kOneByteDataHintTag));
 
   // Allocate a two byte cons string.
   __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
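
The renamed kOneByteDataHint checks above boil down to two bit tests: either both instance types carry the one-byte data hint, or (via the XOR) the one-byte string tag and the data hint each appear on exactly one of the two types. A small sketch of the same predicate with illustrative tag values, not V8's real instance-type constants:

#include <cassert>

// Illustrative bit values only.
const int kOneByteStringTag   = 0x4;
const int kOneByteDataHintTag = 0x8;

// First test: both instance types advertise one-byte data.
static bool BothHaveOneByteDataHint(int t1, int t2) {
  return (t1 & kOneByteDataHintTag) != 0 && (t2 & kOneByteDataHintTag) != 0;
}

// Second test (the XOR trick): the one-byte string tag and the one-byte data
// hint each occur on exactly one of the two types; the stub treats that
// combination as safe for a one-byte (ASCII) cons string.
static bool MixedOneByteCase(int t1, int t2) {
  int diff = t1 ^ t2;
  return (diff & (kOneByteStringTag | kOneByteDataHintTag)) ==
         (kOneByteStringTag | kOneByteDataHintTag);
}

int main() {
  assert(BothHaveOneByteDataHint(kOneByteDataHintTag, kOneByteDataHintTag));
  assert(MixedOneByteCase(kOneByteStringTag, kOneByteDataHintTag));
  assert(!MixedOneByteCase(kOneByteStringTag, kOneByteStringTag));
  return 0;
}
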
@@ -7181,6 +7228,9 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
   { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
   // FastNewClosureStub::Generate
   { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
+  // StringAddStub::Generate
+  { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET },
+  { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET },
   // Null termination.
   { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
 };
@@ -7703,13 +7753,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     Label no_info, switch_ready;
     // Get the elements kind and case on that.
     __ Branch(&no_info, eq, a2, Operand(undefined_sentinel));
-    __ lw(a3, FieldMemOperand(a2, kPointerSize));
-
-    // There is no info if the call site went megamorphic either
-    // TODO(mvstanton): Really? I thought if it was the array function that
-    // the cell wouldn't get stamped as megamorphic.
-    __ Branch(&no_info, eq, a3,
-        Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
+    __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+    __ JumpIfNotSmi(a3, &no_info);
     __ SmiUntag(a3);
     __ jmp(&switch_ready);
     __ bind(&no_info);
index 3be1e4d..0ae0187 100644 (file)
@@ -236,6 +236,15 @@ void Debug::GenerateKeyedStoreICDebugBreak(MacroAssembler* masm) {
 }
 
 
+void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
+  // Register state for CompareNil IC
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, a0.bit(), 0);
+}
+
+
 void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC call (from ic-mips.cc).
   // ----------- S t a t e -------------
index a6fd39a..872af86 100644 (file)
@@ -1592,7 +1592,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
       : ObjectLiteral::kNoFlags;
   __ li(a0, Operand(Smi::FromInt(flags)));
   int properties_count = constant_properties->length() / 2;
-  if (expr->depth() > 1) {
+  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
+      expr->depth() > 1) {
     __ Push(a3, a2, a1, a0);
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
@@ -1943,11 +1944,12 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       Label resume;
       __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
       __ Branch(&resume, ne, result_register(), Operand(at));
-      __ pop(result_register());
       if (expr->yield_kind() == Yield::SUSPEND) {
-        // TODO(wingo): Box into { value: VALUE, done: false }.
+        EmitReturnIteratorResult(false);
+      } else {
+        __ pop(result_register());
+        EmitReturnSequence();
       }
-      EmitReturnSequence();
 
       __ bind(&resume);
       context()->Plug(result_register());
@@ -1959,18 +1961,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
       __ sw(a1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
-      __ pop(result_register());
-      // TODO(wingo): Box into { value: VALUE, done: true }.
-
-      // Exit all nested statements.
-      NestedStatement* current = nesting_stack_;
-      int stack_depth = 0;
-      int context_length = 0;
-      while (current != NULL) {
-        current = current->Exit(&stack_depth, &context_length);
-      }
-      __ Drop(stack_depth);
-      EmitReturnSequence();
+      EmitReturnIteratorResult(true);
       break;
     }
 
@@ -2057,7 +2048,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
   __ Subu(a3, a3, Operand(1));
   __ Branch(&call_resume, lt, a3, Operand(zero_reg));
   __ push(a2);
-  __ b(&push_operand_holes);
+  __ Branch(&push_operand_holes);
   __ bind(&call_resume);
   __ push(a1);
   __ push(result_register());
@@ -2076,6 +2067,56 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
 }
 
 
+void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
+  Label gc_required;
+  Label allocated;
+
+  Handle<Map> map(isolate()->native_context()->generator_result_map());
+
+  __ Allocate(map->instance_size(), a0, a2, a3, &gc_required, TAG_OBJECT);
+
+  __ bind(&allocated);
+  __ li(a1, Operand(map));
+  __ pop(a2);
+  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
+  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
+  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
+  __ sw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
+  __ sw(t0, FieldMemOperand(a0, JSObject::kPropertiesOffset));
+  __ sw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
+  __ sw(a2,
+        FieldMemOperand(a0, JSGeneratorObject::kResultValuePropertyOffset));
+  __ sw(a3,
+        FieldMemOperand(a0, JSGeneratorObject::kResultDonePropertyOffset));
+
+  // Only the value field needs a write barrier, as the other values are in the
+  // root set.
+  __ RecordWriteField(a0, JSGeneratorObject::kResultValuePropertyOffset,
+                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
+
+  if (done) {
+    // Exit all nested statements.
+    NestedStatement* current = nesting_stack_;
+    int stack_depth = 0;
+    int context_length = 0;
+    while (current != NULL) {
+      current = current->Exit(&stack_depth, &context_length);
+    }
+    __ Drop(stack_depth);
+  }
+
+  __ mov(result_register(), a0);
+  EmitReturnSequence();
+
+  __ bind(&gc_required);
+  __ Push(Smi::FromInt(map->instance_size()));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  __ lw(context_register(),
+        MemOperand(fp, StandardFrameConstants::kContextOffset));
+  __ jmp(&allocated);
+}
+
+
 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
index e434fdb..02055a4 100644 (file)
@@ -1440,6 +1440,25 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
 }
 
 
+void StoreIC::GenerateSlow(MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- a0     : value
+  //  -- a2     : key
+  //  -- a1     : receiver
+  //  -- ra     : return address
+  // -----------------------------------
+
+  // Push receiver, key and value for runtime call.
+  __ Push(a1, a2, a0);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
+  __ TailCallExternalReference(ref, 3, 1);
+}
+
+
 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   // ---------- S t a t e --------------
   //  -- a0     : value
index 0c2983f..77e4216 100644 (file)
@@ -91,6 +91,10 @@ void LCodeGen::FinishCode(Handle<Code> code) {
     prototype_maps_.at(i)->AddDependentCode(
         DependentCode::kPrototypeCheckGroup, code);
   }
+  for (int i = 0 ; i < transition_maps_.length(); i++) {
+    transition_maps_.at(i)->AddDependentCode(
+        DependentCode::kTransitionGroup, code);
+  }
 }
 
 
@@ -2684,13 +2688,20 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
 
 
 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
+  int offset = instr->hydrogen()->offset();
   Register object = ToRegister(instr->object());
+  if (instr->hydrogen()->representation().IsDouble()) {
+    DoubleRegister result = ToDoubleRegister(instr->result());
+    __ ldc1(result, FieldMemOperand(object, offset));
+    return;
+  }
+
   Register result = ToRegister(instr->result());
   if (instr->hydrogen()->is_in_object()) {
-    __ lw(result, FieldMemOperand(object, instr->hydrogen()->offset()));
+    __ lw(result, FieldMemOperand(object, offset));
   } else {
     __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
-    __ lw(result, FieldMemOperand(result, instr->hydrogen()->offset()));
+    __ lw(result, FieldMemOperand(result, offset));
   }
 }
 
@@ -2835,38 +2846,6 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
 }
 
 
-void LCodeGen::DoLoadElements(LLoadElements* instr) {
-  Register result = ToRegister(instr->result());
-  Register input = ToRegister(instr->object());
-  Register scratch = scratch0();
-
-  __ lw(result, FieldMemOperand(input, JSObject::kElementsOffset));
-  if (FLAG_debug_code) {
-    Label done, fail;
-    __ lw(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
-    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
-    __ Branch(USE_DELAY_SLOT, &done, eq, scratch, Operand(at));
-    __ LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);  // In the delay slot.
-    __ Branch(&done, eq, scratch, Operand(at));
-    // |scratch| still contains |input|'s map.
-    __ lbu(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
-    __ Ext(scratch, scratch, Map::kElementsKindShift,
-           Map::kElementsKindBitCount);
-    __ Branch(&fail, lt, scratch,
-              Operand(GetInitialFastElementsKind()));
-    __ Branch(&done, le, scratch,
-              Operand(TERMINAL_FAST_ELEMENTS_KIND));
-    __ Branch(&fail, lt, scratch,
-              Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ Branch(&done, le, scratch,
-              Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ bind(&fail);
-    __ Abort("Check for fast or external elements failed.");
-    __ bind(&done);
-  }
-}
-
-
 void LCodeGen::DoLoadExternalArrayPointer(
     LLoadExternalArrayPointer* instr) {
   Register to_reg = ToRegister(instr->result());
@@ -3891,8 +3870,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
 
   __ li(a0, Operand(instr->arity()));
   __ li(a2, Operand(instr->hydrogen()->property_cell()));
-  Object* cell_value = instr->hydrogen()->property_cell()->value();
-  ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+  ElementsKind kind = instr->hydrogen()->elements_kind();
   if (instr->arity() == 0) {
     ArrayNoArgumentConstructorStub stub(kind);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
@@ -3919,15 +3897,34 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
+  Representation representation = instr->representation();
+
   Register object = ToRegister(instr->object());
-  Register value = ToRegister(instr->value());
   Register scratch = scratch0();
   int offset = instr->offset();
 
-  ASSERT(!object.is(value));
+  Handle<Map> transition = instr->transition();
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    Register value = ToRegister(instr->value());
+    __ SmiTagCheckOverflow(value, value, scratch);
+    if (!instr->hydrogen()->value()->range()->IsInSmiRange()) {
+      DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg));
+    }
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    ASSERT(transition.is_null());
+    ASSERT(instr->is_in_object());
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    DoubleRegister value = ToDoubleRegister(instr->value());
+    __ sdc1(value, FieldMemOperand(object, offset));
+    return;
+  }
 
-  if (!instr->transition().is_null()) {
-    __ li(scratch, Operand(instr->transition()));
+  if (!transition.is_null()) {
+    if (transition->CanBeDeprecated()) {
+      transition_maps_.Add(transition, info()->zone());
+    }
+    __ li(scratch, Operand(transition));
     __ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
     if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
       Register temp = ToRegister(instr->temp());
@@ -3944,6 +3941,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   }
 
   // Do the store.
+  Register value = ToRegister(instr->value());
+  ASSERT(!object.is(value));
   HType type = instr->hydrogen()->value()->type();
   SmiCheck check_needed =
       type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
@@ -4811,6 +4810,8 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
       } else {
         mode = NUMBER_CANDIDATE_IS_SMI;
       }
+    } else {
+      mode = NUMBER_CANDIDATE_IS_SMI;
     }
   }
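
For fields tracked as Smi, the DoStoreNamedField hunk above tags the 32-bit value before storing it and deoptimizes when tagging overflows. The sign-flip trick behind the SmiTagCheckOverflow/DeoptimizeIf(lt, ...) pair can be sketched in portable C++, assuming the 32-bit scheme where a Smi is the value shifted left by one bit:

#include <cassert>
#include <cstdint>

// Returns true and writes the tagged value when v fits in a 31-bit Smi;
// returns false (the deopt case) when shifting would change the sign.
static bool SmiTagCheckOverflow(int32_t v, int32_t* tagged) {
  int32_t result = static_cast<int32_t>(static_cast<uint32_t>(v) << 1);
  // The XOR of the input and the shifted result is negative exactly when the
  // sign bit changed, i.e. when v does not fit in 31 bits.
  if ((result ^ v) < 0) return false;
  *tagged = result;
  return true;
}

int main() {
  int32_t t;
  assert(SmiTagCheckOverflow(1000, &t) && t == 2000);
  assert(!SmiTagCheckOverflow(INT32_MAX, &t));  // would overflow: deoptimize
  return 0;
}
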
 
@@ -5150,7 +5151,6 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
 
 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
 
   // TODO(3095996): Get rid of this. For now, we need to make the
@@ -5159,8 +5159,16 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   __ mov(result, zero_reg);
 
   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
-  __ SmiTag(size, size);
-  __ push(size);
+  if (instr->size()->IsRegister()) {
+    Register size = ToRegister(instr->size());
+    ASSERT(!size.is(result));
+    __ SmiTag(size);
+    __ push(size);
+  } else {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ Push(Smi::FromInt(size));
+  }
+
   if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldPointerSpace, 1, instr);
@@ -5246,7 +5254,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
 
   // Pick the right runtime function or stub to call.
   int properties_count = instr->hydrogen()->constant_properties_length() / 2;
-  if (instr->hydrogen()->depth() > 1) {
+  if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) ||
+      instr->hydrogen()->depth() > 1) {
     __ Push(a3, a2, a1, a0);
     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else if (flags != ObjectLiteral::kFastElements ||
index f082c01..a36059b 100644 (file)
@@ -56,6 +56,7 @@ class LCodeGen BASE_EMBEDDED {
         deopt_jump_table_(4, info->zone()),
         deoptimization_literals_(8, info->zone()),
         prototype_maps_(0, info->zone()),
+        transition_maps_(0, info->zone()),
         inlined_function_count_(0),
         scope_(info->scope()),
         status_(UNUSED),
@@ -416,6 +417,7 @@ class LCodeGen BASE_EMBEDDED {
   ZoneList<JumpTableEntry> deopt_jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   ZoneList<Handle<Map> > prototype_maps_;
+  ZoneList<Handle<Map> > transition_maps_;
   int inlined_function_count_;
   Scope* const scope_;
   Status status_;
index c2f8986..28309e2 100644 (file)
@@ -552,6 +552,11 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
 }
 
 
+LOperand* LChunkBuilder::UseConstant(HValue* value) {
+  return chunk_->DefineConstantOperand(HConstant::cast(value));
+}
+
+
 LOperand* LChunkBuilder::UseAny(HValue* value) {
   return value->IsConstant()
       ? chunk_->DefineConstantOperand(HConstant::cast(value))
@@ -672,7 +677,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
   int vreg = allocator_->GetVirtualRegister();
   if (!allocator_->AllocationOk()) {
     Abort("Out of virtual registers while trying to allocate temp register.");
-    return NULL;
+    vreg = 0;
   }
   operand->set_virtual_register(vreg);
   return operand;
@@ -1301,8 +1306,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
 
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     return DefineAsRegister(new(zone()) LBitI(left, right));
   } else {
     ASSERT(instr->representation().IsTagged());
@@ -1397,15 +1402,15 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
     LOperand* left;
-    LOperand* right = UseOrConstant(instr->MostConstantOperand());
+    LOperand* right = UseOrConstant(instr->BetterRightOperand());
     LOperand* temp = NULL;
     if (instr->CheckFlag(HValue::kBailoutOnMinusZero) &&
         (instr->CheckFlag(HValue::kCanOverflow) ||
         !right->IsConstantOperand())) {
-      left = UseRegister(instr->LeastConstantOperand());
+      left = UseRegister(instr->BetterLeftOperand());
       temp = TempRegister();
     } else {
-      left = UseRegisterAtStart(instr->LeastConstantOperand());
+      left = UseRegisterAtStart(instr->BetterLeftOperand());
     }
     LMulI* mul = new(zone()) LMulI(left, right, temp);
     if (instr->CheckFlag(HValue::kCanOverflow) ||
@@ -1470,8 +1475,8 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     LAddI* add = new(zone()) LAddI(left, right);
     LInstruction* result = DefineAsRegister(add);
     if (instr->CheckFlag(HValue::kCanOverflow)) {
@@ -1502,8 +1507,8 @@ LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    left = UseRegisterAtStart(instr->LeastConstantOperand());
-    right = UseOrConstantAtStart(instr->MostConstantOperand());
+    left = UseRegisterAtStart(instr->BetterLeftOperand());
+    right = UseOrConstantAtStart(instr->BetterRightOperand());
   } else {
     ASSERT(instr->representation().IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
@@ -1988,8 +1993,8 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
 
 
 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
-  return DefineAsRegister(
-      new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
+  LOperand* obj = UseRegisterAtStart(instr->object());
+  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
 }
 
 
@@ -2024,12 +2029,6 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
 }
 
 
-LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
-  LOperand* input = UseRegisterAtStart(instr->value());
-  return DefineAsRegister(new(zone()) LLoadElements(input));
-}
-
-
 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
     HLoadExternalArrayPointer* instr) {
   LOperand* input = UseRegisterAtStart(instr->value());
@@ -2194,14 +2193,25 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
         : UseRegisterAtStart(instr->object());
   }
 
-  LOperand* val = needs_write_barrier
-      ? UseTempRegister(instr->value())
-      : UseRegister(instr->value());
+  LOperand* val;
+  if (needs_write_barrier ||
+      (FLAG_track_fields && instr->field_representation().IsSmi())) {
+    val = UseTempRegister(instr->value());
+  } else if (FLAG_track_double_fields &&
+             instr->field_representation().IsDouble()) {
+    val = UseRegisterAtStart(instr->value());
+  } else {
+    val = UseRegister(instr->value());
+  }
 
   // We need a temporary register for write barrier of the map field.
   LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
 
-  return new(zone()) LStoreNamedField(obj, val, temp);
+  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
+  if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    return AssignEnvironment(result);
+  }
+  return result;
 }
 
 
@@ -2253,7 +2263,9 @@ LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
 
 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
   info()->MarkAsDeferredCalling();
-  LOperand* size = UseTempRegister(instr->size());
+  LOperand* size = instr->size()->IsConstant()
+      ? UseConstant(instr->size())
+      : UseTempRegister(instr->size());
   LOperand* temp1 = TempRegister();
   LOperand* temp2 = TempRegister();
   LAllocate* result = new(zone()) LAllocate(size, temp1, temp2);
@@ -2315,7 +2327,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
 
 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
-  if (spill_index > LUnallocated::kMaxFixedIndex) {
+  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
     Abort("Too many spill slots needed for OSR");
     spill_index = 0;
   }
index cfca644..bb8c993 100644 (file)
@@ -122,7 +122,6 @@ class LCodeGen;
   V(Label)                                      \
   V(LazyBailout)                                \
   V(LoadContextSlot)                            \
-  V(LoadElements)                               \
   V(LoadExternalArrayPointer)                   \
   V(LoadFunctionPrototype)                      \
   V(LoadGlobalCell)                             \
@@ -1521,18 +1520,6 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
 };
 
 
-class LLoadElements: public LTemplateInstruction<1, 1, 0> {
- public:
-  explicit LLoadElements(LOperand* object) {
-    inputs_[0] = object;
-  }
-
-  LOperand* object() { return inputs_[0]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
-};
-
-
 class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LLoadExternalArrayPointer(LOperand* object) {
@@ -2089,6 +2076,9 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
+  Representation representation() const {
+    return hydrogen()->field_representation();
+  }
 };
 
 
@@ -2722,6 +2712,9 @@ class LChunkBuilder BASE_EMBEDDED {
   MUST_USE_RESULT LOperand* UseRegisterOrConstant(HValue* value);
   MUST_USE_RESULT LOperand* UseRegisterOrConstantAtStart(HValue* value);
 
+  // An input operand in a constant operand.
+  MUST_USE_RESULT LOperand* UseConstant(HValue* value);
+
   // An input operand in register, stack slot or a constant operand.
   // Will not be moved to a register even if one is freely available.
   MUST_USE_RESULT LOperand* UseAny(HValue* value);
index 220d9fe..81e9ec9 100644 (file)
@@ -3135,8 +3135,34 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
-           TAG_OBJECT);
+  Label allocate_new_space, install_map;
+  AllocationFlags flags = TAG_OBJECT;
+
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(isolate());
+  li(scratch1, Operand(high_promotion_mode));
+  lw(scratch1, MemOperand(scratch1, 0));
+  Branch(&allocate_new_space, eq, scratch1, Operand(zero_reg));
+
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+  jmp(&install_map);
+
+  bind(&allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           flags);
+
+  bind(&install_map);
+
   InitializeNewString(result,
                       length,
                       Heap::kConsAsciiStringMapRootIndex,
@@ -5135,6 +5161,18 @@ void MacroAssembler::CheckPageFlag(
 }
 
 
+void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
+                                        Register scratch,
+                                        Label* if_deprecated) {
+  if (map->CanBeDeprecated()) {
+    li(scratch, Operand(map));
+    lw(scratch, FieldMemOperand(scratch, Map::kBitField3Offset));
+    And(scratch, scratch, Operand(Smi::FromInt(Map::Deprecated::kMask)));
+    Branch(if_deprecated, ne, scratch, Operand(zero_reg));
+  }
+}
+
+
 void MacroAssembler::JumpIfBlack(Register object,
                                  Register scratch0,
                                  Register scratch1,
index e914f24..248e5b4 100644 (file)
@@ -325,6 +325,10 @@ class MacroAssembler: public Assembler {
                      Condition cc,
                      Label* condition_met);
 
+  void CheckMapDeprecated(Handle<Map> map,
+                          Register scratch,
+                          Label* if_deprecated);
+
   // Check if object is in new space.  Jumps if the object is not in new space.
   // The register scratch can be object itself, but it will be clobbered.
   void JumpIfNotInNewSpace(Register object,
index e110c47..80ab31a 100644 (file)
@@ -307,11 +307,13 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
 }
 
 
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -442,8 +444,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register value_reg,
                                            Register scratch1,
                                            Register scratch2,
+                                           Register scratch3,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // a0 : value.
   Label exit;
 
@@ -456,6 +460,15 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
   }
 
+  int descriptor = transition->LastAdded();
+  DescriptorArray* descriptors = transition->instance_descriptors();
+  PropertyDetails details = descriptors->GetDetails(descriptor);
+  Representation representation = details.representation();
+  ASSERT(!representation.IsNone());
+
+  // Ensure no transitions to deprecated maps are followed.
+  __ CheckMapDeprecated(transition, scratch1, miss_label);
+
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -471,7 +484,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
     Register holder_reg = CheckPrototypes(
         object, receiver_reg, Handle<JSObject>(holder), name_reg,
-        scratch1, scratch2, name, miss_restore_name);
+        scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -490,6 +503,30 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
 
+  Register storage_reg = name_reg;
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    Label do_store, heap_number;
+    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
+    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
+
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch1, value_reg);
+    __ mtc1(scratch1, f6);
+    __ cvt_d_w(f4, f6);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
+                miss_restore_name, DONT_DO_SMI_CHECK);
+    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
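When the transitioned field has double representation, the code above allocates a HeapNumber as storage and converts the incoming value: a smi is untagged and converted via mtc1/cvt_d_w, while an existing heap number has its payload loaded with ldc1. A rough C++ sketch of the same decision, using a hypothetical tagged-value struct rather than V8's real object model:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical tagged value: either a 31-bit smi or a boxed double
    // (stand-in for a HeapNumber payload). Not V8's actual layout.
    struct TaggedValue {
      bool is_smi;
      int32_t smi;          // valid when is_smi is true
      double heap_number;   // valid otherwise
    };

    // Mirrors the store-transition path above: smis are widened to double,
    // heap numbers are read directly; the result is what gets stored into the
    // freshly allocated storage heap number via sdc1.
    static double UnboxForDoubleField(const TaggedValue& v) {
      return v.is_smi ? static_cast<double>(v.smi) : v.heap_number;
    }

    int main() {
      TaggedValue a = { true, 42, 0.0 };
      TaggedValue b = { false, 0, 3.25 };
      printf("%g %g\n", UnboxForDoubleField(a), UnboxForDoubleField(b));
      return 0;
    }
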
@@ -517,7 +554,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kRAHasNotBeenSaved,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
@@ -535,40 +572,61 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
+    }
+
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Pass the now unused name_reg as a scratch register.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kRAHasNotBeenSaved,
-                        kDontSaveFPRegs);
+      // Update the write barrier for the array address.
+      // Pass the now unused name_reg as a scratch register.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kRAHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array
     __ lw(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ sw(value_reg, FieldMemOperand(scratch1, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
+    } else {
+      __ sw(value_reg, FieldMemOperand(scratch1, offset));
+    }
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
 
-    // Update the write barrier for the array address.
-    // Ok to clobber receiver_reg and name_reg, since we return.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kRAHasNotBeenSaved,
-                        kDontSaveFPRegs);
+      // Update the write barrier for the array address.
+      // Ok to clobber receiver_reg and name_reg, since we return.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kRAHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register v0).
@@ -615,24 +673,64 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();
 
+  Representation representation = lookup->representation();
+  ASSERT(!representation.IsNone());
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_label);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ lw(scratch1,
+            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ lw(scratch1, FieldMemOperand(scratch1, offset));
+    }
+
+    // Store the value into the storage.
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch2, value_reg);
+    __ mtc1(scratch2, f6);
+    __ cvt_d_w(f4, f6);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
+                miss_label, DONT_DO_SMI_CHECK);
+    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
+    // Return the value (register v0).
+    ASSERT(value_reg.is(a0));
+    __ mov(v0, a0);
+    __ Ret();
+    return;
+  }
+
   // TODO(verwaest): Share this code as a code stub.
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Pass the now unused name_reg as a scratch register.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(receiver_reg,
-                        offset,
-                        name_reg,
-                        scratch1,
-                        kRAHasNotBeenSaved,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
+
+      // Update the write barrier for the array address.
+      // Pass the now unused name_reg as a scratch register.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(receiver_reg,
+                          offset,
+                          name_reg,
+                          scratch1,
+                          kRAHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
@@ -641,18 +739,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
     __ sw(value_reg, FieldMemOperand(scratch1, offset));
 
-    // Skip updating write barrier if storing a smi.
-    __ JumpIfSmi(value_reg, &exit);
-
-    // Update the write barrier for the array address.
-    // Ok to clobber receiver_reg and name_reg, since we return.
-    __ mov(name_reg, value_reg);
-    __ RecordWriteField(scratch1,
-                        offset,
-                        name_reg,
-                        receiver_reg,
-                        kRAHasNotBeenSaved,
-                        kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Skip updating write barrier if storing a smi.
+      __ JumpIfSmi(value_reg, &exit);
+
+      // Update the write barrier for the array address.
+      // Ok to clobber receiver_reg and name_reg, since we return.
+      __ mov(name_reg, value_reg);
+      __ RecordWriteField(scratch1,
+                          offset,
+                          name_reg,
+                          receiver_reg,
+                          kRAHasNotBeenSaved,
+                          kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register v0).
@@ -1269,9 +1369,20 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex index) {
-  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
-  __ Ret();
+                                             PropertyIndex field,
+                                             Representation representation) {
+  if (!reg.is(receiver())) __ mov(receiver(), reg);
+  if (kind() == Code::LOAD_IC) {
+    LoadFieldStub stub(field.is_inobject(holder),
+                       field.translate(holder),
+                       representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  } else {
+    KeyedLoadFieldStub stub(field.is_inobject(holder),
+                            field.translate(holder),
+                            representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  }
 }
 
 
@@ -1501,7 +1612,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 
   // Do the right check and compute the holder register.
   Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
-  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
+  GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
+                           index.translate(holder), Representation::Tagged());
 
   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
 
@@ -2935,18 +3047,24 @@ Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC(
   Register map_reg = scratch1();
 
   int receiver_count = receiver_maps->length();
+  int number_of_handled_maps = 0;
   __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
   for (int current = 0; current < receiver_count; ++current) {
-    __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
-        eq, map_reg, Operand(receiver_maps->at(current)));
+    Handle<Map> map = receiver_maps->at(current);
+    if (!map->is_deprecated()) {
+      number_of_handled_maps++;
+      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
+          eq, map_reg, Operand(receiver_maps->at(current)));
+    }
   }
+  ASSERT(number_of_handled_maps != 0);
 
   __ bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
   // Return the generated code.
   InlineCacheState state =
-      receiver_maps->length() > 1 ? POLYMORPHIC : MONOMORPHIC;
+      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   return GetICCode(kind(), type, name, state);
 }
 
index abfe693..7592a89 100644 (file)
 #endif
 #include <signal.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "bootstrapper.h"
@@ -291,6 +295,18 @@ class BZip2Decompressor : public StartupDataDecompressor {
 #endif
 
 
+void DumpException(Handle<Message> message) {
+  String::Utf8Value message_string(message->Get());
+  String::Utf8Value message_line(message->GetSourceLine());
+  fprintf(stderr, "%s at line %d\n", *message_string, message->GetLineNumber());
+  fprintf(stderr, "%s\n", *message_line);
+  for (int i = 0; i <= message->GetEndColumn(); ++i) {
+    fprintf(stderr, "%c", i < message->GetStartColumn() ? ' ' : '^');
+  }
+  fprintf(stderr, "\n");
+}
+
+
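The new DumpException helper prints the offending source line and then underlines the error range, padding with spaces up to the start column and printing carets through the end column. A standalone sketch of that loop with hypothetical column values (start 8, end 12):

    #include <cstdio>

    int main() {
      const char* source_line = "var x = brokenCall();";
      int start_column = 8;   // hypothetical; DumpException reads these from
      int end_column = 12;    // the v8::Message it is given
      fprintf(stderr, "%s\n", source_line);
      for (int i = 0; i <= end_column; ++i) {
        fprintf(stderr, "%c", i < start_column ? ' ' : '^');
      }
      fprintf(stderr, "\n");
      return 0;
    }
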
 int main(int argc, char** argv) {
   // By default, log code create information in the snapshot.
   i::FLAG_log_code = true;
@@ -312,13 +328,18 @@ int main(int argc, char** argv) {
   }
 #endif
   i::Serializer::Enable();
-  Persistent<Context> context = v8::Context::New();
+  Isolate* isolate = Isolate::GetCurrent();
+  Persistent<Context> context;
+  {
+    HandleScope handle_scope(isolate);
+    context.Reset(isolate, Context::New(isolate));
+  }
+
   if (context.IsEmpty()) {
     fprintf(stderr,
             "\nException thrown while compiling natives - see above.\n\n");
     exit(1);
   }
-  Isolate* isolate = context->GetIsolate();
   if (i::FLAG_extra_code != NULL) {
     context->Enter();
     // Capture 100 frames if anything happens.
@@ -350,27 +371,14 @@ int main(int argc, char** argv) {
     TryCatch try_catch;
     Local<Script> script = Script::Compile(source);
     if (try_catch.HasCaught()) {
-      fprintf(stderr, "Failure compiling '%s' (see above)\n", name);
+      fprintf(stderr, "Failure compiling '%s'\n", name);
+      DumpException(try_catch.Message());
       exit(1);
     }
     script->Run();
     if (try_catch.HasCaught()) {
       fprintf(stderr, "Failure running '%s'\n", name);
-      Local<Message> message = try_catch.Message();
-      Local<String> message_string = message->Get();
-      Local<String> message_line = message->GetSourceLine();
-      int len = 2 + message_string->Utf8Length() + message_line->Utf8Length();
-      char* buf = new char(len);
-      message_string->WriteUtf8(buf);
-      fprintf(stderr, "%s at line %d\n", buf, message->GetLineNumber());
-      message_line->WriteUtf8(buf);
-      fprintf(stderr, "%s\n", buf);
-      int from = message->GetStartColumn();
-      int to = message->GetEndColumn();
-      int i;
-      for (i = 0; i < from; i++) fprintf(stderr, " ");
-      for ( ; i <= to; i++) fprintf(stderr, "^");
-      fprintf(stderr, "\n");
+      DumpException(try_catch.Message());
       exit(1);
     }
     context->Exit();
index bfb4a65..77409b9 100644 (file)
@@ -138,7 +138,9 @@ function NotifyChange(type, object, name, oldValue) {
   var changeRecord = (arguments.length < 4) ?
       { type: type, object: object, name: name } :
       { type: type, object: object, name: name, oldValue: oldValue };
-  ObjectFreeze(changeRecord);
+  // TODO(rafaelw): This breaks spec-compliance. Re-enable when freezing isn't
+  // slow.
+  // ObjectFreeze(changeRecord);
   EnqueueChangeRecord(changeRecord, objectInfo.changeObservers);
 }
 
@@ -164,7 +166,9 @@ function ObjectNotifierNotify(changeRecord) {
     %DefineOrRedefineDataProperty(newRecord, prop, changeRecord[prop],
         READ_ONLY + DONT_DELETE);
   }
-  ObjectFreeze(newRecord);
+  // TODO(rafaelw): This breaks spec-compliance. Re-enable when freezing isn't
+  // slow.
+  // ObjectFreeze(newRecord);
 
   EnqueueChangeRecord(newRecord, objectInfo.changeObservers);
 }
index ee6df1d..ecbf9d6 100644 (file)
@@ -323,10 +323,6 @@ void Map::MapVerify() {
           instance_size() < HEAP->Capacity()));
   VerifyHeapPointer(prototype());
   VerifyHeapPointer(instance_descriptors());
-  DescriptorArray* descriptors = instance_descriptors();
-  for (int i = 0; i < NumberOfOwnDescriptors(); ++i) {
-    CHECK_EQ(i, descriptors->GetDetails(i).descriptor_index() - 1);
-  }
   SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
   if (HasTransitionArray()) {
     SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
@@ -419,6 +415,7 @@ void JSGeneratorObject::JSGeneratorObjectVerify() {
   VerifyObjectField(kReceiverOffset);
   VerifyObjectField(kOperandStackOffset);
   VerifyObjectField(kContinuationOffset);
+  VerifyObjectField(kStackHandlerIndexOffset);
 }
 
 
index 08378f1..06a13df 100644 (file)
@@ -283,6 +283,16 @@ bool Object::HasValidElements() {
   return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
 }
 
+
+MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
+                                           Representation representation,
+                                           PretenureFlag tenure) {
+  if (!FLAG_track_double_fields) return this;
+  if (!representation.IsDouble()) return this;
+  return heap->AllocateHeapNumber(Number(), tenure);
+}
+
+
 StringShape::StringShape(String* str)
   : type_(str->map()->instance_type()) {
   set_valid();
@@ -357,12 +367,8 @@ bool String::IsTwoByteRepresentationUnderneath() {
 
 bool String::HasOnlyOneByteChars() {
   uint32_t type = map()->instance_type();
-  return (type & kOneByteDataHintMask) == kOneByteDataHintTag;
-}
-
-
-bool String::IsOneByteConvertible() {
-  return HasOnlyOneByteChars() || IsOneByteRepresentation();
+  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
+         IsOneByteRepresentation();
 }
 
 
@@ -1513,22 +1519,7 @@ MaybeObject* JSObject::ResetElements() {
 }
 
 
-MaybeObject* JSObject::AddFastPropertyUsingMap(Map* map) {
-  ASSERT(this->map()->NumberOfOwnDescriptors() + 1 ==
-         map->NumberOfOwnDescriptors());
-  if (this->map()->unused_property_fields() == 0) {
-    int new_size = properties()->length() + map->unused_property_fields() + 1;
-    FixedArray* new_properties;
-    MaybeObject* maybe_properties = properties()->CopySize(new_size);
-    if (!maybe_properties->To(&new_properties)) return maybe_properties;
-    set_properties(new_properties);
-  }
-  set_map(map);
-  return this;
-}
-
-
-MaybeObject* JSObject::TransitionToMap(Map* map) {
+MaybeObject* JSObject::AllocateStorageForMap(Map* map) {
   ASSERT(this->map()->inobject_properties() == map->inobject_properties());
   ElementsKind expected_kind = this->map()->elements_kind();
   if (map->elements_kind() != expected_kind) {
@@ -1549,6 +1540,14 @@ MaybeObject* JSObject::TransitionToMap(Map* map) {
 }
 
 
+MaybeObject* JSObject::MigrateInstance() {
+  // Converting any field to the most specific type will cause the
+  // GeneralizeFieldRepresentation algorithm to create the most general existing
+  // transition that matches the object, i.e. the map that the instance
+  // should be migrated to.
+  return GeneralizeFieldRepresentation(0, Representation::Smi());
+}
+
+
 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
   AssertNoAllocation no_gc;
   if (!map->HasTransitionArray()) return Handle<String>::null();
@@ -1710,10 +1709,17 @@ void JSObject::SetInternalField(int index, Smi* value) {
 }
 
 
+MaybeObject* JSObject::FastPropertyAt(Representation representation,
+                                      int index) {
+  Object* raw_value = RawFastPropertyAt(index);
+  return raw_value->AllocateNewStorageFor(GetHeap(), representation);
+}
+
+
 // Access fast-case object properties at index. The use of these routines
 // is needed to correctly distinguish between properties stored in-object and
 // properties stored in the properties array.
-Object* JSObject::FastPropertyAt(int index) {
+Object* JSObject::RawFastPropertyAt(int index) {
   // Adjust for the number of properties stored in the object.
   index -= map()->inobject_properties();
   if (index < 0) {
@@ -1726,7 +1732,7 @@ Object* JSObject::FastPropertyAt(int index) {
 }
 
 
-Object* JSObject::FastPropertyAtPut(int index, Object* value) {
+void JSObject::FastPropertyAtPut(int index, Object* value) {
   // Adjust for the number of properties stored in the object.
   index -= map()->inobject_properties();
   if (index < 0) {
@@ -1737,7 +1743,6 @@ Object* JSObject::FastPropertyAtPut(int index, Object* value) {
     ASSERT(index < properties()->length());
     properties()->set(index, value);
   }
-  return value;
 }
 
 
@@ -2277,6 +2282,23 @@ void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
 }
 
 
+void DescriptorArray::SetRepresentation(int descriptor_index,
+                                        Representation representation) {
+  ASSERT(!representation.IsNone());
+  PropertyDetails details = GetDetails(descriptor_index);
+  set(ToDetailsIndex(descriptor_index),
+      details.CopyWithRepresentation(representation).AsSmi());
+}
+
+
+void DescriptorArray::InitializeRepresentations(Representation representation) {
+  int length = number_of_descriptors();
+  for (int i = 0; i < length; i++) {
+    SetRepresentation(i, representation);
+  }
+}
+
+
 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
   ASSERT(descriptor_number < number_of_descriptors());
   return HeapObject::RawField(
@@ -2338,10 +2360,8 @@ void DescriptorArray::Set(int descriptor_number,
                           const WhitenessWitness&) {
   // Range check.
   ASSERT(descriptor_number < number_of_descriptors());
-  ASSERT(desc->GetDetails().descriptor_index() <=
-         number_of_descriptors());
-  ASSERT(desc->GetDetails().descriptor_index() > 0);
 
+  ASSERT(!desc->GetDetails().representation().IsNone());
   NoIncrementalWriteBarrierSet(this,
                                ToKeyIndex(descriptor_number),
                                desc->GetKey());
@@ -2357,9 +2377,7 @@ void DescriptorArray::Set(int descriptor_number,
 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
   // Range check.
   ASSERT(descriptor_number < number_of_descriptors());
-  ASSERT(desc->GetDetails().descriptor_index() <=
-         number_of_descriptors());
-  ASSERT(desc->GetDetails().descriptor_index() > 0);
+  ASSERT(!desc->GetDetails().representation().IsNone());
 
   set(ToKeyIndex(descriptor_number), desc->GetKey());
   set(ToValueIndex(descriptor_number), desc->GetValue());
@@ -2370,9 +2388,7 @@ void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
 void DescriptorArray::Append(Descriptor* desc,
                              const WhitenessWitness& witness) {
   int descriptor_number = number_of_descriptors();
-  int enumeration_index = descriptor_number + 1;
   SetNumberOfDescriptors(descriptor_number + 1);
-  desc->SetEnumerationIndex(enumeration_index);
   Set(descriptor_number, desc, witness);
 
   uint32_t hash = desc->GetKey()->Hash();
@@ -2391,9 +2407,7 @@ void DescriptorArray::Append(Descriptor* desc,
 
 void DescriptorArray::Append(Descriptor* desc) {
   int descriptor_number = number_of_descriptors();
-  int enumeration_index = descriptor_number + 1;
   SetNumberOfDescriptors(descriptor_number + 1);
-  desc->SetEnumerationIndex(enumeration_index);
   Set(descriptor_number, desc);
 
   uint32_t hash = desc->GetKey()->Hash();
@@ -3573,6 +3587,38 @@ bool Map::is_observed() {
 }
 
 
+void Map::deprecate() {
+  set_bit_field3(Deprecated::update(bit_field3(), true));
+}
+
+
+bool Map::is_deprecated() {
+  if (!FLAG_track_fields) return false;
+  return Deprecated::decode(bit_field3());
+}
+
+
+bool Map::CanBeDeprecated() {
+  int descriptor = LastAdded();
+  for (int i = 0; i <= descriptor; i++) {
+    PropertyDetails details = instance_descriptors()->GetDetails(i);
+    if (FLAG_track_fields && details.representation().IsSmi()) {
+      return true;
+    }
+    if (FLAG_track_double_fields && details.representation().IsDouble()) {
+      return true;
+    }
+  }
+  return false;
+}
+
+
+Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
+  if (!map->is_deprecated()) return map;
+  return GeneralizeRepresentation(map, 0, Representation::Smi());
+}
+
+
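CanBeDeprecated and CurrentMapForDeprecated above rely on field representations being generalized over time: a map that tracks smi or double fields can go stale, at which point instances migrate to a map with more general representations. A toy sketch of the ordering this implies; the lattice None < Smi < Double < Tagged is an assumption read off the surrounding checks, not V8's actual Representation class:

    #include <cstdio>

    // Toy lattice implied by the field-tracking checks above: None marks an
    // uninitialized representation, a Smi fits into a Double field, and
    // anything else falls back to Tagged. Illustrative only.
    enum Representation { kNone = 0, kSmi = 1, kDouble = 2, kTagged = 3 };

    static Representation Generalize(Representation a, Representation b) {
      if (a == kNone) return b;
      if (b == kNone) return a;
      return a > b ? a : b;  // Higher enum value == more general.
    }

    int main() {
      printf("%d\n", Generalize(kSmi, kDouble));    // 2: smi fields widen to double
      printf("%d\n", Generalize(kDouble, kTagged)); // 3: everything else goes tagged
      return 0;
    }
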
 void Map::NotifyLeafMapLayoutChange() {
   dependent_code()->DeoptimizeDependentCodeGroup(
       GetIsolate(),
@@ -4109,23 +4155,6 @@ static MaybeObject* EnsureHasTransitionArray(Map* map) {
 
 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
   int len = descriptors->number_of_descriptors();
-#ifdef DEBUG
-  ASSERT(len <= DescriptorArray::kMaxNumberOfDescriptors);
-
-  bool used_indices[DescriptorArray::kMaxNumberOfDescriptors];
-  for (int i = 0; i < len; ++i) used_indices[i] = false;
-
-  // Ensure that all enumeration indexes between 1 and length occur uniquely in
-  // the descriptor array.
-  for (int i = 0; i < len; ++i) {
-    int enum_index = descriptors->GetDetails(i).descriptor_index() -
-                     PropertyDetails::kInitialIndex;
-    ASSERT(0 <= enum_index && enum_index < len);
-    ASSERT(!used_indices[enum_index]);
-    used_indices[enum_index] = true;
-  }
-#endif
-
   set_instance_descriptors(descriptors);
   SetNumberOfOwnDescriptors(len);
 }
@@ -5095,6 +5124,7 @@ ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
+SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
 
 
 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
index 5aeeec6..0849a63 100644 (file)
@@ -271,7 +271,7 @@ void JSObject::PrintProperties(FILE* out) {
       switch (descs->GetType(i)) {
         case FIELD: {
           int index = descs->GetFieldIndex(i);
-          FastPropertyAt(index)->ShortPrint(out);
+          RawFastPropertyAt(index)->ShortPrint(out);
           PrintF(out, " (field at offset %d)\n", index);
           break;
         }
index 6a64cbf..add247e 100644 (file)
@@ -571,6 +571,12 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
     return false;
   }
 
+  // If this is a native function we do not flush the code because %SetCode
+  // breaks the one-to-one relation between SharedFunctionInfo and Code.
+  if (shared_info->native()) {
+    return false;
+  }
+
   if (FLAG_age_code) {
     return shared_info->code()->IsOld();
   } else {
index 128c04d..d127d1b 100644 (file)
@@ -815,11 +815,14 @@ MaybeObject* Object::GetProperty(Object* receiver,
       value = result->holder()->GetNormalizedProperty(result);
       ASSERT(!value->IsTheHole() || result->IsReadOnly());
       return value->IsTheHole() ? heap->undefined_value() : value;
-    case FIELD:
-      value = result->holder()->FastPropertyAt(
+    case FIELD: {
+      MaybeObject* maybe_result = result->holder()->FastPropertyAt(
+          result->representation(),
           result->GetFieldIndex().field_index());
+      if (!maybe_result->To(&value)) return maybe_result;
       ASSERT(!value->IsTheHole() || result->IsReadOnly());
       return value->IsTheHole() ? heap->undefined_value() : value;
+    }
     case CONSTANT_FUNCTION:
       return result->GetConstantFunction();
     case CALLBACKS:
@@ -1711,18 +1714,29 @@ String* JSReceiver::constructor_name() {
 MaybeObject* JSObject::AddFastPropertyUsingMap(Map* new_map,
                                                Name* name,
                                                Object* value,
-                                               int field_index) {
+                                               int field_index,
+                                               Representation representation) {
+  // This method is used to transition to a field. If we are transitioning to a
+  // double field, allocate new storage.
+  Object* storage;
+  MaybeObject* maybe_storage =
+      value->AllocateNewStorageFor(GetHeap(), representation);
+  if (!maybe_storage->To(&storage)) return maybe_storage;
+
   if (map()->unused_property_fields() == 0) {
     int new_unused = new_map->unused_property_fields();
     FixedArray* values;
-    MaybeObject* maybe_values =
-          properties()->CopySize(properties()->length() + new_unused + 1);
-      if (!maybe_values->To(&values)) return maybe_values;
-    }
+    MaybeObject* maybe_values =
+        properties()->CopySize(properties()->length() + new_unused + 1);
+    if (!maybe_values->To(&values)) return maybe_values;
+
     set_properties(values);
   }
+
   set_map(new_map);
-  return FastPropertyAtPut(field_index, value);
+
+  FastPropertyAtPut(field_index, storage);
+  return value;
 }
 
 
@@ -1774,7 +1788,8 @@ MaybeObject* JSObject::AddFastProperty(Name* name,
   int index = map()->NextFreePropertyIndex();
 
   // Allocate new instance descriptors with (name, index) added
-  FieldDescriptor new_field(name, index, attributes, 0);
+  Representation representation = value->OptimalRepresentation();
+  FieldDescriptor new_field(name, index, attributes, representation);
 
   ASSERT(index < map()->inobject_properties() ||
          (index - map()->inobject_properties()) < properties()->length() ||
@@ -1782,6 +1797,7 @@ MaybeObject* JSObject::AddFastProperty(Name* name,
 
   FixedArray* values = NULL;
 
+  // TODO(verwaest): Merge with AddFastPropertyUsingMap.
   if (map()->unused_property_fields() == 0) {
     // Make room for the new value
     MaybeObject* maybe_values =
@@ -1791,10 +1807,17 @@ MaybeObject* JSObject::AddFastProperty(Name* name,
 
   TransitionFlag flag = INSERT_TRANSITION;
 
+  Heap* heap = isolate->heap();
+
   Map* new_map;
   MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&new_field, flag);
   if (!maybe_new_map->To(&new_map)) return maybe_new_map;
 
+  Object* storage;
+  MaybeObject* maybe_storage =
+      value->AllocateNewStorageFor(heap, representation);
+  if (!maybe_storage->To(&storage)) return maybe_storage;
+
   if (map()->unused_property_fields() == 0) {
     ASSERT(values != NULL);
     set_properties(values);
@@ -1804,7 +1827,9 @@ MaybeObject* JSObject::AddFastProperty(Name* name,
   }
 
   set_map(new_map);
-  return FastPropertyAtPut(index, value);
+
+  FastPropertyAtPut(index, storage);
+  return value;
 }
 
 
@@ -1813,7 +1838,7 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
     JSFunction* function,
     PropertyAttributes attributes) {
   // Allocate new instance descriptors with (name, function) added
-  ConstantFunctionDescriptor d(name, function, attributes, 0);
+  ConstantFunctionDescriptor d(name, function, attributes);
 
   TransitionFlag flag =
       // Do not add transitions to  global objects.
@@ -1861,7 +1886,7 @@ MaybeObject* JSObject::AddSlowProperty(Name* name,
     }
     JSGlobalPropertyCell::cast(store_value)->set_value(value);
   }
-  PropertyDetails details = PropertyDetails(attributes, NORMAL);
+  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
   Object* result;
   { MaybeObject* maybe_result = dict->Add(name, store_value, details);
     if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -2028,7 +2053,6 @@ MaybeObject* JSObject::ConvertTransitionToMapTransition(
   // TODO(verwaest): From here on we lose existing map transitions, causing
   // invalid back pointers. This will change once we can store multiple
   // transitions with the same key.
-
   bool owned_descriptors = old_map->owns_descriptors();
   if (owned_descriptors ||
       old_target->instance_descriptors() == old_map->instance_descriptors()) {
@@ -2049,6 +2073,8 @@ MaybeObject* JSObject::ConvertTransitionToMapTransition(
     old_map->set_owns_descriptors(false);
   }
 
+  old_target->DeprecateTransitionTree();
+
   old_map->SetTransition(transition_index, new_map);
   new_map->SetBackPointer(old_map);
   return result;
@@ -2066,8 +2092,9 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name,
     return ReplaceSlowProperty(name, new_value, attributes);
   }
 
+  Representation representation = new_value->OptimalRepresentation();
   int index = map()->NextFreePropertyIndex();
-  FieldDescriptor new_field(name, index, attributes, 0);
+  FieldDescriptor new_field(name, index, attributes, representation);
 
   // Make a new map for the object.
   Map* new_map;
@@ -2085,6 +2112,12 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name,
     if (!maybe_new_properties->To(&new_properties)) return maybe_new_properties;
   }
 
+  Heap* heap = GetHeap();
+  Object* storage;
+  MaybeObject* maybe_storage =
+      new_value->AllocateNewStorageFor(heap, representation);
+  if (!maybe_storage->To(&storage)) return maybe_storage;
+
   // Update pointers to commit changes.
   // Object points to the new map.
   new_map->set_unused_property_fields(new_unused_property_fields);
@@ -2092,10 +2125,463 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name,
   if (new_properties != NULL) {
     set_properties(new_properties);
   }
-  return FastPropertyAtPut(index, new_value);
+  FastPropertyAtPut(index, new_value);
+  return new_value;
+}
+
+
+const char* Representation::Mnemonic() const {
+  switch (kind_) {
+    case kNone: return "v";
+    case kTagged: return "t";
+    case kSmi: return "s";
+    case kDouble: return "d";
+    case kInteger32: return "i";
+    case kExternal: return "x";
+    default:
+      UNREACHABLE();
+      return NULL;
+  }
+}
+
+
+enum RightTrimMode { FROM_GC, FROM_MUTATOR };
+
+
+static void ZapEndOfFixedArray(Address new_end, int to_trim) {
+  // If we are doing a big trim in old space then we zap the space.
+  Object** zap = reinterpret_cast<Object**>(new_end);
+  zap++;  // Header of filler must be at least one word so skip that.
+  for (int i = 1; i < to_trim; i++) {
+    *zap++ = Smi::FromInt(0);
+  }
+}
+
+
+template<RightTrimMode trim_mode>
+static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
+  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
+  // For now this trick is only applied to fixed arrays in new and paged space.
+  ASSERT(!HEAP->lo_space()->Contains(elms));
+
+  const int len = elms->length();
+
+  ASSERT(to_trim < len);
+
+  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
+
+  if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
+    ZapEndOfFixedArray(new_end, to_trim);
+  }
+
+  int size_delta = to_trim * kPointerSize;
+
+  // Technically in new space this write might be omitted (except for
+  // debug mode which iterates through the heap), but to play safer
+  // we still do it.
+  heap->CreateFillerObjectAt(new_end, size_delta);
+
+  elms->set_length(len - to_trim);
+
+  // Maintain marking consistency for IncrementalMarking.
+  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
+    if (trim_mode == FROM_GC) {
+      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
+    } else {
+      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
+    }
+  }
+}
+
+
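RightTrimFixedArray above shrinks a FixedArray in place: it zaps the freed tail (outside GC, or whenever garbage zapping is enabled), writes a filler over it so the heap stays iterable, lowers the stored length, and keeps incremental-marking live-byte counts in sync. A simplified sketch of the same in-place shrink on a plain struct, with the filler modeled as zeroing and the live-byte accounting omitted:

    #include <cstdio>
    #include <cstring>

    // Toy fixed array: a length field followed by inline storage, shrunk in place.
    struct ToyFixedArray {
      int length;
      int elements[8];
    };

    // Shrinks |arr| by |to_trim| elements; the freed tail is zeroed here the way
    // RightTrimFixedArray zaps it and then covers it with a filler object.
    static void RightTrim(ToyFixedArray* arr, int to_trim) {
      int new_length = arr->length - to_trim;
      memset(arr->elements + new_length, 0, to_trim * sizeof(int));
      arr->length = new_length;
    }

    int main() {
      ToyFixedArray arr = { 8, {1, 2, 3, 4, 5, 6, 7, 8} };
      RightTrim(&arr, 3);
      printf("length=%d last=%d\n", arr.length, arr.elements[arr.length - 1]);
      return 0;
    }
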
+bool Map::InstancesNeedRewriting(Map* target,
+                                 int target_number_of_fields,
+                                 int target_inobject,
+                                 int target_unused) {
+  // If fields were added (or removed), rewrite the instance.
+  int number_of_fields = NumberOfFields();
+  ASSERT(target_number_of_fields >= number_of_fields);
+  if (target_number_of_fields != number_of_fields) return true;
+
+  if (FLAG_track_double_fields) {
+    // If smi descriptors were replaced by double descriptors, rewrite.
+    DescriptorArray* old_desc = instance_descriptors();
+    DescriptorArray* new_desc = target->instance_descriptors();
+    int limit = NumberOfOwnDescriptors();
+    for (int i = 0; i < limit; i++) {
+      if (new_desc->GetDetails(i).representation().IsDouble() &&
+          old_desc->GetDetails(i).representation().IsSmi()) {
+        return true;
+      }
+    }
+  }
+
+  // If no fields were added, and no inobject properties were removed, setting
+  // the map is sufficient.
+  if (target_inobject == inobject_properties()) return false;
+  // In-object slack tracking may have reduced the object size of the new map.
+  // In that case, succeed if all existing fields were inobject, and they still
+  // fit within the new inobject size.
+  ASSERT(target_inobject < inobject_properties());
+  if (target_number_of_fields <= target_inobject) {
+    ASSERT(target_number_of_fields + target_unused == target_inobject);
+    return false;
+  }
+  // Otherwise, properties will need to be moved to the backing store.
+  return true;
+}
+
+
+// To migrate an instance to a map:
+// - First check whether the instance needs to be rewritten. If not, simply
+//   change the map.
+// - Otherwise, allocate a fixed array large enough to hold all fields, in
+//   addition to unused space.
+// - Copy all existing properties in, in the following order: backing store
+//   properties, unused fields, inobject properties.
+// - If all allocations succeeded, commit the state atomically:
+//   * Copy inobject properties from the backing store back into the object.
+//   * Trim the difference in instance size of the object. This also cleanly
+//     frees inobject properties that moved to the backing store.
+//   * If there are properties left in the backing store, trim off the space
+//     used to temporarily store the inobject properties.
+//   * If there are properties left in the backing store, install the backing
+//     store.
+MaybeObject* JSObject::MigrateToMap(Map* new_map) {
+  Heap* heap = GetHeap();
+  Map* old_map = map();
+  int number_of_fields = new_map->NumberOfFields();
+  int inobject = new_map->inobject_properties();
+  int unused = new_map->unused_property_fields();
+
+  // Nothing to do if no functions were converted to fields.
+  if (!old_map->InstancesNeedRewriting(
+          new_map, number_of_fields, inobject, unused)) {
+    set_map(new_map);
+    return this;
+  }
+
+  int total_size = number_of_fields + unused;
+  int external = total_size - inobject;
+  FixedArray* array;
+  MaybeObject* maybe_array = heap->AllocateFixedArray(total_size);
+  if (!maybe_array->To(&array)) return maybe_array;
+
+  DescriptorArray* old_descriptors = old_map->instance_descriptors();
+  DescriptorArray* new_descriptors = new_map->instance_descriptors();
+  int descriptors = new_map->NumberOfOwnDescriptors();
+
+  for (int i = 0; i < descriptors; i++) {
+    PropertyDetails details = new_descriptors->GetDetails(i);
+    if (details.type() != FIELD) continue;
+    PropertyDetails old_details = old_descriptors->GetDetails(i);
+    ASSERT(old_details.type() == CONSTANT_FUNCTION ||
+           old_details.type() == FIELD);
+    Object* value = old_details.type() == CONSTANT_FUNCTION
+        ? old_descriptors->GetValue(i)
+        : RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
+    if (FLAG_track_double_fields &&
+        old_details.representation().IsSmi() &&
+        details.representation().IsDouble()) {
+      // Objects must be allocated in the old object space, since the
+      // overall number of HeapNumbers needed for the conversion might
+      // exceed the capacity of new space, and we would fail repeatedly
+      // trying to migrate the instance.
+      MaybeObject* maybe_storage =
+          value->AllocateNewStorageFor(heap, details.representation(), TENURED);
+      if (!maybe_storage->To(&value)) return maybe_storage;
+    }
+    ASSERT(!(FLAG_track_double_fields &&
+             details.representation().IsDouble() &&
+             value->IsSmi()));
+    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
+    if (target_index < 0) target_index += total_size;
+    array->set(target_index, value);
+  }
+
+  // From here on we cannot fail anymore.
+
+  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
+  // avoid overwriting |one_pointer_filler_map|.
+  int limit = Min(inobject, number_of_fields);
+  for (int i = 0; i < limit; i++) {
+    FastPropertyAtPut(i, array->get(external + i));
+  }
+
+  // Create filler object past the new instance size.
+  int new_instance_size = new_map->instance_size();
+  int instance_size_delta = old_map->instance_size() - new_instance_size;
+  ASSERT(instance_size_delta >= 0);
+  Address address = this->address() + new_instance_size;
+  heap->CreateFillerObjectAt(address, instance_size_delta);
+
+  // If there are properties in the new backing store, trim it to the correct
+  // size and install the backing store into the object.
+  if (external > 0) {
+    RightTrimFixedArray<FROM_MUTATOR>(heap, array, inobject);
+    set_properties(array);
+  }
+
+  set_map(new_map);
+
+  return this;
+}
+
+
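The copy loop in MigrateToMap above gathers every field into one temporary array laid out as [backing-store fields | unused slots | in-object fields]: field indices below inobject denote in-object slots and are rotated to the tail, so that trimming the last inobject entries later yields the new properties backing store. A small worked sketch of that index mapping, using hypothetical counts (2 in-object slots, 4 fields, 2 unused):

    #include <cstdio>

    int main() {
      // Hypothetical target-map layout, mirroring the variables in MigrateToMap.
      const int inobject = 2;                            // in-object slots
      const int number_of_fields = 4;                    // fields with values
      const int unused = 2;                              // unused property fields
      const int total_size = number_of_fields + unused;  // temp array length
      const int external = total_size - inobject;        // future backing store size

      // Descriptor field indices: 0..inobject-1 are in-object, the rest live in
      // the properties backing store.
      for (int field_index = 0; field_index < number_of_fields; ++field_index) {
        int target_index = field_index - inobject;
        if (target_index < 0) target_index += total_size;  // rotate to the tail
        printf("field %d -> temp slot %d (%s)\n", field_index, target_index,
               field_index < inobject ? "in-object, copied back afterwards"
                                      : "stays in the backing store");
      }
      printf("slots [0..%d) become the properties array once the last %d are trimmed\n",
             external, inobject);
      return 0;
    }
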
+MaybeObject* JSObject::GeneralizeFieldRepresentation(
+    int modify_index,
+    Representation new_representation) {
+  Map* new_map;
+  MaybeObject* maybe_new_map =
+      map()->GeneralizeRepresentation(modify_index, new_representation);
+  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+  ASSERT(map() != new_map || new_map->FindRootMap()->is_deprecated());
+
+  return MigrateToMap(new_map);
+}
+
+
+int Map::NumberOfFields() {
+  DescriptorArray* descriptors = instance_descriptors();
+  int result = 0;
+  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
+    if (descriptors->GetDetails(i).type() == FIELD) result++;
+  }
+  return result;
+}
+
+
+MaybeObject* Map::CopyGeneralizeAllRepresentations() {
+  Map* new_map;
+  MaybeObject* maybe_map = this->Copy();
+  if (!maybe_map->To(&new_map)) return maybe_map;
+
+  new_map->instance_descriptors()->InitializeRepresentations(
+      Representation::Tagged());
+  if (FLAG_trace_generalization) {
+    PrintF("failed generalization %p -> %p\n",
+           static_cast<void*>(this), static_cast<void*>(new_map));
+  }
+  return new_map;
+}
+
+
+void Map::DeprecateTransitionTree() {
+  if (!FLAG_track_fields) return;
+  if (is_deprecated()) return;
+  if (HasTransitionArray()) {
+    TransitionArray* transitions = this->transitions();
+    for (int i = 0; i < transitions->number_of_transitions(); i++) {
+      transitions->GetTarget(i)->DeprecateTransitionTree();
+    }
+  }
+  deprecate();
+  dependent_code()->DeoptimizeDependentCodeGroup(
+      GetIsolate(), DependentCode::kTransitionGroup);
+  dependent_code()->DeoptimizeDependentCodeGroup(
+      GetIsolate(), DependentCode::kPrototypeCheckGroup);
+}
+
+
+// Invalidates a transition target at |key|, and installs |new_descriptors| over
+// the current instance_descriptors to ensure proper sharing of descriptor
+// arrays.
+void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
+  if (HasTransitionArray()) {
+    TransitionArray* transitions = this->transitions();
+    int transition = transitions->Search(key);
+    if (transition != TransitionArray::kNotFound) {
+      transitions->GetTarget(transition)->DeprecateTransitionTree();
+    }
+  }
+
+  // Don't overwrite the empty descriptor array.
+  if (NumberOfOwnDescriptors() == 0) return;
+
+  DescriptorArray* to_replace = instance_descriptors();
+  Map* current = this;
+  while (current->instance_descriptors() == to_replace) {
+    current->SetEnumLength(Map::kInvalidEnumCache);
+    current->set_instance_descriptors(new_descriptors);
+    Object* next = current->GetBackPointer();
+    if (next->IsUndefined()) break;
+    current = Map::cast(next);
+  }
+
+  set_owns_descriptors(false);
+}
+
+
+Map* Map::FindRootMap() {
+  Map* result = this;
+  while (true) {
+    Object* back = result->GetBackPointer();
+    if (back->IsUndefined()) return result;
+    result = Map::cast(back);
+  }
+}
+
+
+Map* Map::FindUpdatedMap(int verbatim,
+                         int length,
+                         DescriptorArray* descriptors) {
+  // This can only be called on roots of transition trees.
+  ASSERT(GetBackPointer()->IsUndefined());
+
+  Map* current = this;
+
+  for (int i = verbatim; i < length; i++) {
+    if (!current->HasTransitionArray()) break;
+    Name* name = descriptors->GetKey(i);
+    TransitionArray* transitions = current->transitions();
+    int transition = transitions->Search(name);
+    if (transition == TransitionArray::kNotFound) break;
+    current = transitions->GetTarget(transition);
+  }
+
+  return current;
+}
+
+
+Map* Map::FindLastMatchMap(int verbatim,
+                           int length,
+                           DescriptorArray* descriptors) {
+  // This can only be called on roots of transition trees.
+  ASSERT(GetBackPointer()->IsUndefined());
+
+  Map* current = this;
+
+  for (int i = verbatim; i < length; i++) {
+    if (!current->HasTransitionArray()) break;
+    Name* name = descriptors->GetKey(i);
+    TransitionArray* transitions = current->transitions();
+    int transition = transitions->Search(name);
+    if (transition == TransitionArray::kNotFound) break;
+
+    Map* next = transitions->GetTarget(transition);
+    DescriptorArray* next_descriptors = next->instance_descriptors();
+
+    if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break;
+
+    PropertyDetails details = descriptors->GetDetails(i);
+    PropertyDetails next_details = next_descriptors->GetDetails(i);
+    if (details.type() != next_details.type()) break;
+    if (details.attributes() != next_details.attributes()) break;
+    if (!details.representation().Equals(next_details.representation())) break;
+    ASSERT(!details.IsDeleted());
+    ASSERT(!next_details.IsDeleted());
+
+    current = next;
+  }
+  return current;
 }
 
 
+// Generalize the representation of the descriptor at |modify_index|.
+// This method rewrites the transition tree to reflect the new change. To avoid
+// high degrees of polymorphism, and to stabilize quickly, on every rewrite
+// the new type is deduced by merging the current type with any potential new
+// (partial) version of the type in the transition tree.
+// To do this, on each rewrite:
+// - Search the root of the transition tree using FindRootMap.
+// - Find |updated|, the newest matching version of this map using
+//   FindUpdatedMap. This uses the keys in the own map's descriptor array to
+//   walk the transition tree.
+// - Merge/generalize the descriptor array of the current map and |updated|.
+// - Generalize the |modify_index| descriptor using |new_representation|.
+// - Walk the tree again starting from the root towards |updated|. Stop at
+//   |split_map|, the first map whose descriptor array does not match the merged
+//   descriptor array.
+// - If |updated| == |split_map|, |updated| is in the expected state. Return it.
+// - Otherwise, invalidate the outdated transition target from |updated|, and
+//   replace its transition tree with a new branch for the updated descriptors.
+MaybeObject* Map::GeneralizeRepresentation(int modify_index,
+                                           Representation new_representation) {
+  Map* old_map = this;
+  DescriptorArray* old_descriptors = old_map->instance_descriptors();
+  Representation old_representation =
+      old_descriptors->GetDetails(modify_index).representation();
+
+  if (old_representation.IsNone()) {
+    UNREACHABLE();
+    old_descriptors->SetRepresentation(modify_index, new_representation);
+    return this;
+  }
+
+  int descriptors = old_map->NumberOfOwnDescriptors();
+  Map* root_map = old_map->FindRootMap();
+
+  if (!old_map->EquivalentToForTransition(root_map)) {
+    return CopyGeneralizeAllRepresentations();
+  }
+
+  int verbatim = root_map->NumberOfOwnDescriptors();
+
+  Map* updated = root_map->FindUpdatedMap(
+      verbatim, descriptors, old_descriptors);
+  // Check the state of the root map.
+  DescriptorArray* updated_descriptors = updated->instance_descriptors();
+
+  int valid = updated->NumberOfOwnDescriptors();
+  if (updated_descriptors->IsMoreGeneralThan(
+          verbatim, valid, descriptors, old_descriptors)) {
+    Representation updated_representation =
+        updated_descriptors->GetDetails(modify_index).representation();
+    if (new_representation.fits_into(updated_representation)) {
+      if (FLAG_trace_generalization) {
+        PrintF("migrating to existing map %p -> %p\n",
+               static_cast<void*>(this), static_cast<void*>(updated));
+      }
+      return updated;
+    }
+  }
+
+  DescriptorArray* new_descriptors;
+  MaybeObject* maybe_descriptors = updated_descriptors->Merge(
+      verbatim, valid, descriptors, old_descriptors);
+  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+
+  old_representation =
+      new_descriptors->GetDetails(modify_index).representation();
+  new_representation = new_representation.generalize(old_representation);
+  new_descriptors->SetRepresentation(modify_index, new_representation);
+
+  Map* split_map = root_map->FindLastMatchMap(
+      verbatim, descriptors, new_descriptors);
+
+  int split_descriptors = split_map->NumberOfOwnDescriptors();
+  // This is shadowed by |updated_descriptors| being more general than
+  // |old_descriptors|.
+  ASSERT(descriptors != split_descriptors);
+
+  int descriptor = split_descriptors;
+  split_map->DeprecateTarget(
+      old_descriptors->GetKey(descriptor), new_descriptors);
+
+  if (FLAG_trace_generalization) {
+    PrintF("migrating to new map %p -> %p (%i steps)\n",
+           static_cast<void*>(this),
+           static_cast<void*>(new_descriptors),
+           descriptors - descriptor);
+  }
+
+  Map* new_map = split_map;
+  // Add missing transitions.
+  for (; descriptor < descriptors; descriptor++) {
+    MaybeObject* maybe_map = new_map->CopyInstallDescriptors(
+        descriptor, new_descriptors);
+    if (!maybe_map->To(&new_map)) {
+      // Create a handle for the last created map to ensure it stays alive
+      // during GC. Its descriptor array is too large, but it will be
+      // overwritten during retry anyway.
+      Handle<Map>(new_map);
+      return maybe_map;
+    }
+  }
+
+  new_map->set_owns_descriptors(true);
+  return new_map;
+}
+
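
For intuition, here is a minimal standalone sketch of the representation lattice
that the generalization walk above relies on (Smi fits into Double, and
everything fits into Tagged). Rep, FitsInto and Generalize are simplified
stand-ins, not V8's Representation class:

    #include <cassert>

    enum class Rep { kSmi, kDouble, kTagged };

    // True if a value stored with representation |from| can also be stored
    // with representation |to| without losing information.
    bool FitsInto(Rep from, Rep to) {
      if (from == to) return true;
      if (from == Rep::kSmi) return true;   // Smis fit into Double and Tagged.
      return to == Rep::kTagged;            // Everything fits into Tagged.
    }

    // The least general representation that both inputs fit into.
    Rep Generalize(Rep a, Rep b) {
      if (FitsInto(a, b)) return b;
      if (FitsInto(b, a)) return a;
      return Rep::kTagged;
    }

    int main() {
      assert(Generalize(Rep::kSmi, Rep::kDouble) == Rep::kDouble);
      assert(Generalize(Rep::kDouble, Rep::kSmi) == Rep::kDouble);
      assert(!FitsInto(Rep::kDouble, Rep::kSmi));
      return 0;
    }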
 
 MaybeObject* JSObject::SetPropertyWithInterceptor(
     Name* name,
@@ -2391,55 +2877,6 @@ MaybeObject* JSObject::SetPropertyViaPrototypes(
 }
 
 
-enum RightTrimMode { FROM_GC, FROM_MUTATOR };
-
-
-static void ZapEndOfFixedArray(Address new_end, int to_trim) {
-  // If we are doing a big trim in old space then we zap the space.
-  Object** zap = reinterpret_cast<Object**>(new_end);
-  zap++;  // Header of filler must be at least one word so skip that.
-  for (int i = 1; i < to_trim; i++) {
-    *zap++ = Smi::FromInt(0);
-  }
-}
-
-
-template<RightTrimMode trim_mode>
-static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
-  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
-  // For now this trick is only applied to fixed arrays in new and paged space.
-  ASSERT(!HEAP->lo_space()->Contains(elms));
-
-  const int len = elms->length();
-
-  ASSERT(to_trim < len);
-
-  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
-
-  if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
-      ZapEndOfFixedArray(new_end, to_trim);
-  }
-
-  int size_delta = to_trim * kPointerSize;
-
-  // Technically in new space this write might be omitted (except for
-  // debug mode which iterates through the heap), but to play safer
-  // we still do it.
-  heap->CreateFillerObjectAt(new_end, size_delta);
-
-  elms->set_length(len - to_trim);
-
-  // Maintain marking consistency for IncrementalMarking.
-  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
-    if (trim_mode == FROM_GC) {
-      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
-    } else {
-      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
-    }
-  }
-}
-
-
 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
   Handle<DescriptorArray> descriptors(map->instance_descriptors());
   if (slack <= descriptors->NumberOfSlackDescriptors()) return;
@@ -2670,7 +3107,7 @@ void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) {
     // occur as fields.
     if (result->IsField() &&
         result->IsReadOnly() &&
-        FastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
+        RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
       result->DisallowCaching();
     }
     return;
@@ -3103,18 +3540,32 @@ MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
 }
 
 
-void JSObject::AddFastPropertyUsingMap(Handle<JSObject> object,
-                                       Handle<Map> map) {
+void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
   CALL_HEAP_FUNCTION_VOID(
       object->GetIsolate(),
-      object->AddFastPropertyUsingMap(*map));
+      object->AllocateStorageForMap(*map));
 }
 
 
-void JSObject::TransitionToMap(Handle<JSObject> object, Handle<Map> map) {
+void JSObject::MigrateInstance(Handle<JSObject> object) {
+  if (FLAG_trace_migration) {
+    PrintF("migrating instance %p (%p)\n",
+           static_cast<void*>(*object),
+           static_cast<void*>(object->map()));
+  }
   CALL_HEAP_FUNCTION_VOID(
       object->GetIsolate(),
-      object->TransitionToMap(*map));
+      object->MigrateInstance());
+}
+
+
+Handle<Map> Map::GeneralizeRepresentation(Handle<Map> map,
+                                          int modify_index,
+                                          Representation representation) {
+  CALL_HEAP_FUNCTION(
+      map->GetIsolate(),
+      map->GeneralizeRepresentation(modify_index, representation),
+      Map);
 }
 
 
@@ -3206,10 +3657,30 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
     case NORMAL:
       result = lookup->holder()->SetNormalizedProperty(lookup, *value);
       break;
-    case FIELD:
-      result = lookup->holder()->FastPropertyAtPut(
+    case FIELD: {
+      Representation representation = lookup->representation();
+      if (!value->FitsRepresentation(representation)) {
+        MaybeObject* maybe_failure =
+            lookup->holder()->GeneralizeFieldRepresentation(
+                lookup->GetDescriptorIndex(), value->OptimalRepresentation());
+        if (maybe_failure->IsFailure()) return maybe_failure;
+        DescriptorArray* desc = lookup->holder()->map()->instance_descriptors();
+        int descriptor = lookup->GetDescriptorIndex();
+        representation = desc->GetDetails(descriptor).representation();
+      }
+      if (FLAG_track_double_fields && representation.IsDouble()) {
+        HeapNumber* storage =
+            HeapNumber::cast(lookup->holder()->RawFastPropertyAt(
+                lookup->GetFieldIndex().field_index()));
+        storage->set_value(value->Number());
+        result = *value;
+        break;
+      }
+      lookup->holder()->FastPropertyAtPut(
           lookup->GetFieldIndex().field_index(), *value);
+      result = *value;
       break;
+    }
     case CONSTANT_FUNCTION:
       // Only replace the function if necessary.
       if (*value == lookup->GetConstantFunction()) return *value;
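
The FIELD case above is the interesting one: when double field tracking is on
and the field's representation is Double, the property slot holds a heap number
box and the store writes through the box rather than replacing the slot. Below
is a simplified standalone model of that store path; DoubleBox and Slot are
illustrative types, and the generalize-on-mismatch step is omitted:

    #include <cstdint>
    #include <memory>
    #include <variant>
    #include <vector>

    struct DoubleBox { double value; };  // Stand-in for the boxed heap number.
    using Slot = std::variant<int64_t, std::shared_ptr<DoubleBox>>;

    // Store |number| into field |index|: mutate the existing box for double
    // fields so the slot itself never changes, otherwise overwrite the slot.
    void StoreField(std::vector<Slot>* fields, int index, double number,
                    bool field_tracked_as_double) {
      if (field_tracked_as_double) {
        std::get<std::shared_ptr<DoubleBox>>((*fields)[index])->value = number;
      } else {
        (*fields)[index] = static_cast<int64_t>(number);
      }
    }

    int main() {
      std::vector<Slot> fields;
      fields.emplace_back(int64_t{0});
      fields.emplace_back(std::make_shared<DoubleBox>(DoubleBox{0.0}));
      StoreField(&fields, 0, 42, /*field_tracked_as_double=*/false);
      StoreField(&fields, 1, 1.5, /*field_tracked_as_double=*/true);
      return 0;
    }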
@@ -3236,9 +3707,24 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
 
       if (details.type() == FIELD) {
         if (attributes == details.attributes()) {
+          Representation representation = details.representation();
+          if (!value->FitsRepresentation(representation)) {
+            MaybeObject* maybe_map = transition_map->GeneralizeRepresentation(
+                descriptor, value->OptimalRepresentation());
+            if (!maybe_map->To(&transition_map)) return maybe_map;
+            Object* back = transition_map->GetBackPointer();
+            if (back->IsMap()) {
+              MaybeObject* maybe_failure =
+                  lookup->holder()->MigrateToMap(Map::cast(back));
+              if (maybe_failure->IsFailure()) return maybe_failure;
+            }
+            DescriptorArray* desc = transition_map->instance_descriptors();
+            int descriptor = transition_map->LastAdded();
+            representation = desc->GetDetails(descriptor).representation();
+          }
           int field_index = descriptors->GetFieldIndex(descriptor);
           result = lookup->holder()->AddFastPropertyUsingMap(
-              transition_map, *name, *value, field_index);
+              transition_map, *name, *value, field_index, representation);
         } else {
           result = lookup->holder()->ConvertDescriptorToField(
               *name, *value, attributes);
@@ -3368,14 +3854,32 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
   MaybeObject* result = *value;
   switch (lookup.type()) {
     case NORMAL: {
-      PropertyDetails details = PropertyDetails(attributes, NORMAL);
+      PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
       result = self->SetNormalizedProperty(*name, *value, details);
       break;
     }
-    case FIELD:
-      result = self->FastPropertyAtPut(
-          lookup.GetFieldIndex().field_index(), *value);
+    case FIELD: {
+      Representation representation = lookup.representation();
+      if (!value->FitsRepresentation(representation)) {
+        MaybeObject* maybe_failure = self->GeneralizeFieldRepresentation(
+            lookup.GetDescriptorIndex(), value->OptimalRepresentation());
+        if (maybe_failure->IsFailure()) return maybe_failure;
+        DescriptorArray* desc = self->map()->instance_descriptors();
+        int descriptor = lookup.GetDescriptorIndex();
+        representation = desc->GetDetails(descriptor).representation();
+      }
+      if (FLAG_track_double_fields && representation.IsDouble()) {
+        HeapNumber* storage =
+            HeapNumber::cast(self->RawFastPropertyAt(
+                lookup.GetFieldIndex().field_index()));
+        storage->set_value(value->Number());
+        result = *value;
+        break;
+      }
+      self->FastPropertyAtPut(lookup.GetFieldIndex().field_index(), *value);
+      result = *value;
       break;
+    }
     case CONSTANT_FUNCTION:
       // Only replace the function if necessary.
       if (*value != lookup.GetConstantFunction()) {
@@ -3398,9 +3902,23 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
 
       if (details.type() == FIELD) {
         if (attributes == details.attributes()) {
+          Representation representation = details.representation();
+          if (!value->FitsRepresentation(representation)) {
+            MaybeObject* maybe_map = transition_map->GeneralizeRepresentation(
+                descriptor, value->OptimalRepresentation());
+            if (!maybe_map->To(&transition_map)) return maybe_map;
+            Object* back = transition_map->GetBackPointer();
+            if (back->IsMap()) {
+              MaybeObject* maybe_failure = self->MigrateToMap(Map::cast(back));
+              if (maybe_failure->IsFailure()) return maybe_failure;
+            }
+            DescriptorArray* desc = transition_map->instance_descriptors();
+            int descriptor = transition_map->LastAdded();
+            representation = desc->GetDetails(descriptor).representation();
+          }
           int field_index = descriptors->GetFieldIndex(descriptor);
           result = self->AddFastPropertyUsingMap(
-              transition_map, *name, *value, field_index);
+              transition_map, *name, *value, field_index, representation);
         } else {
           result = self->ConvertDescriptorToField(*name, *value, attributes);
         }
@@ -3810,9 +4328,8 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
     PropertyDetails details = descs->GetDetails(i);
     switch (details.type()) {
       case CONSTANT_FUNCTION: {
-        PropertyDetails d = PropertyDetails(details.attributes(),
-                                            NORMAL,
-                                            details.descriptor_index());
+        PropertyDetails d = PropertyDetails(
+            details.attributes(), NORMAL, i + 1);
         Object* value = descs->GetConstantFunction(i);
         MaybeObject* maybe_dictionary =
             dictionary->Add(descs->GetKey(i), value, d);
@@ -3820,10 +4337,9 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
         break;
       }
       case FIELD: {
-        PropertyDetails d = PropertyDetails(details.attributes(),
-                                            NORMAL,
-                                            details.descriptor_index());
-        Object* value = FastPropertyAt(descs->GetFieldIndex(i));
+        PropertyDetails d =
+            PropertyDetails(details.attributes(), NORMAL, i + 1);
+        Object* value = RawFastPropertyAt(descs->GetFieldIndex(i));
         MaybeObject* maybe_dictionary =
             dictionary->Add(descs->GetKey(i), value, d);
         if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
@@ -3831,9 +4347,10 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
       }
       case CALLBACKS: {
         Object* value = descs->GetCallbacksObject(i);
-        details = details.set_pointer(0);
+        PropertyDetails d = PropertyDetails(
+            details.attributes(), CALLBACKS, i + 1);
         MaybeObject* maybe_dictionary =
-            dictionary->Add(descs->GetKey(i), value, details);
+            dictionary->Add(descs->GetKey(i), value, d);
         if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
         break;
       }
@@ -3967,7 +4484,7 @@ MaybeObject* JSObject::NormalizeElements() {
       ASSERT(old_map->has_fast_smi_or_object_elements());
       value = FixedArray::cast(array)->get(i);
     }
-    PropertyDetails details = PropertyDetails(NONE, NORMAL);
+    PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
     if (!value->IsTheHole()) {
       Object* result;
       MaybeObject* maybe_result =
@@ -4200,8 +4717,10 @@ MaybeObject* JSObject::GetHiddenPropertiesHashTable(
       if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
           sorted_index < map()->NumberOfOwnDescriptors()) {
         ASSERT(descriptors->GetType(sorted_index) == FIELD);
-        inline_value =
-            this->FastPropertyAt(descriptors->GetFieldIndex(sorted_index));
+        MaybeObject* maybe_value = this->FastPropertyAt(
+            descriptors->GetDetails(sorted_index).representation(),
+            descriptors->GetFieldIndex(sorted_index));
+        if (!maybe_value->To(&inline_value)) return maybe_value;
       } else {
         inline_value = GetHeap()->undefined_value();
       }
@@ -4270,8 +4789,7 @@ MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) {
       if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
           sorted_index < map()->NumberOfOwnDescriptors()) {
         ASSERT(descriptors->GetType(sorted_index) == FIELD);
-        this->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
-                                value);
+        FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value);
         return this;
       }
     }
@@ -4747,6 +5265,11 @@ MUST_USE_RESULT MaybeObject* JSObject::DeepCopy(Isolate* isolate) {
   StackLimitCheck check(isolate);
   if (check.HasOverflowed()) return isolate->StackOverflow();
 
+  if (map()->is_deprecated()) {
+    MaybeObject* maybe_failure = MigrateInstance();
+    if (maybe_failure->IsFailure()) return maybe_failure;
+  }
+
   Heap* heap = isolate->heap();
   Object* result;
   { MaybeObject* maybe_result = heap->CopyJSObject(this);
@@ -4756,27 +5279,24 @@ MUST_USE_RESULT MaybeObject* JSObject::DeepCopy(Isolate* isolate) {
 
   // Deep copy local properties.
   if (copy->HasFastProperties()) {
-    FixedArray* properties = copy->properties();
-    for (int i = 0; i < properties->length(); i++) {
-      Object* value = properties->get(i);
-      if (value->IsJSObject()) {
-        JSObject* js_object = JSObject::cast(value);
-        { MaybeObject* maybe_result = js_object->DeepCopy(isolate);
-          if (!maybe_result->ToObject(&result)) return maybe_result;
-        }
-        properties->set(i, result);
-      }
-    }
-    int nof = copy->map()->inobject_properties();
-    for (int i = 0; i < nof; i++) {
-      Object* value = copy->InObjectPropertyAt(i);
+    DescriptorArray* descriptors = copy->map()->instance_descriptors();
+    int limit = copy->map()->NumberOfOwnDescriptors();
+    for (int i = 0; i < limit; i++) {
+      PropertyDetails details = descriptors->GetDetails(i);
+      if (details.type() != FIELD) continue;
+      int index = descriptors->GetFieldIndex(i);
+      Object* value = RawFastPropertyAt(index);
       if (value->IsJSObject()) {
         JSObject* js_object = JSObject::cast(value);
-        { MaybeObject* maybe_result = js_object->DeepCopy(isolate);
-          if (!maybe_result->ToObject(&result)) return maybe_result;
-        }
-        copy->InObjectPropertyAtPut(i, result);
+        MaybeObject* maybe_copy = js_object->DeepCopy(isolate);
+        if (!maybe_copy->To(&value)) return maybe_copy;
+      } else {
+        Representation representation = details.representation();
+        MaybeObject* maybe_storage =
+            value->AllocateNewStorageFor(heap, representation);
+        if (!maybe_storage->To(&value)) return maybe_storage;
       }
+      copy->FastPropertyAtPut(index, value);
     }
   } else {
     { MaybeObject* maybe_result =
@@ -4923,16 +5443,6 @@ int Map::NumberOfDescribedProperties(DescriptorFlag which,
 }
 
 
-int Map::PropertyIndexFor(Name* name) {
-  DescriptorArray* descs = instance_descriptors();
-  int limit = NumberOfOwnDescriptors();
-  for (int i = 0; i < limit; i++) {
-    if (name->Equals(descs->GetKey(i))) return descs->GetFieldIndex(i);
-  }
-  return -1;
-}
-
-
 int Map::NextFreePropertyIndex() {
   int max_index = -1;
   int number_of_own_descriptors = NumberOfOwnDescriptors();
@@ -5046,8 +5556,9 @@ static bool UpdateGetterSetterInDictionary(
     if (details.type() == CALLBACKS && result->IsAccessorPair()) {
       ASSERT(!details.IsDontDelete());
       if (details.attributes() != attributes) {
-        dictionary->DetailsAtPut(entry,
-                                 PropertyDetails(attributes, CALLBACKS, index));
+        dictionary->DetailsAtPut(
+            entry,
+            PropertyDetails(attributes, CALLBACKS, index));
       }
       AccessorPair::cast(result)->SetComponents(getter, setter);
       return true;
@@ -5208,7 +5719,7 @@ bool JSObject::CanSetCallback(Name* name) {
 MaybeObject* JSObject::SetElementCallback(uint32_t index,
                                           Object* structure,
                                           PropertyAttributes attributes) {
-  PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
+  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
 
   // Normalize elements to make this operation simple.
   SeededNumberDictionary* dictionary;
@@ -5266,7 +5777,7 @@ MaybeObject* JSObject::SetPropertyCallback(Name* name,
   }
 
   // Update the dictionary with the new CALLBACKS property.
-  PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
+  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
   maybe_ok = SetNormalizedProperty(name, structure, details);
   if (maybe_ok->IsFailure()) return maybe_ok;
 
@@ -5610,7 +6121,14 @@ Object* JSObject::SlowReverseLookup(Object* value) {
     DescriptorArray* descs = map()->instance_descriptors();
     for (int i = 0; i < number_of_own_descriptors; i++) {
       if (descs->GetType(i) == FIELD) {
-        if (FastPropertyAt(descs->GetFieldIndex(i)) == value) {
+        Object* property = RawFastPropertyAt(descs->GetFieldIndex(i));
+        if (FLAG_track_double_fields &&
+            descs->GetDetails(i).representation().IsDouble()) {
+          ASSERT(property->IsHeapNumber());
+          if (value->IsNumber() && property->Number() == value->Number()) {
+            return descs->GetKey(i);
+          }
+        } else if (property == value) {
           return descs->GetKey(i);
         }
       } else if (descs->GetType(i) == CONSTANT_FUNCTION) {
@@ -5640,6 +6158,7 @@ MaybeObject* Map::RawCopy(int instance_size) {
   new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
   new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
   new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCache);
+  new_bit_field3 = Deprecated::update(new_bit_field3, false);
   result->set_bit_field3(new_bit_field3);
   return result;
 }
@@ -5783,11 +6302,53 @@ MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors,
         (descriptor_index == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_TRANSITION
          : FULL_TRANSITION;
+    ASSERT(name == descriptors->GetKey(descriptor_index));
     MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag);
     if (!maybe_transitions->To(&transitions)) return maybe_transitions;
 
     set_transitions(transitions);
     result->SetBackPointer(this);
+  } else {
+    descriptors->InitializeRepresentations(Representation::Tagged());
+  }
+
+  return result;
+}
+
+
+MaybeObject* Map::CopyInstallDescriptors(int new_descriptor,
+                                         DescriptorArray* descriptors) {
+  ASSERT(descriptors->IsSortedNoDuplicates());
+
+  Map* result;
+  MaybeObject* maybe_result = CopyDropDescriptors();
+  if (!maybe_result->To(&result)) return maybe_result;
+
+  result->InitializeDescriptors(descriptors);
+  result->SetNumberOfOwnDescriptors(new_descriptor + 1);
+
+  int unused_property_fields = this->unused_property_fields();
+  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
+    unused_property_fields = this->unused_property_fields() - 1;
+    if (unused_property_fields < 0) {
+      unused_property_fields += JSObject::kFieldsAdded;
+    }
+  }
+
+  result->set_unused_property_fields(unused_property_fields);
+  result->set_owns_descriptors(false);
+
+  if (CanHaveMoreTransitions()) {
+    Name* name = descriptors->GetKey(new_descriptor);
+    TransitionArray* transitions;
+    MaybeObject* maybe_transitions =
+        AddTransition(name, result, SIMPLE_TRANSITION);
+    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+
+    set_transitions(transitions);
+    result->SetBackPointer(this);
+  } else {
+    descriptors->InitializeRepresentations(Representation::Tagged());
   }
 
   return result;
@@ -5888,7 +6449,6 @@ MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
 
   int old_size = NumberOfOwnDescriptors();
   int new_size = old_size + 1;
-  descriptor->SetEnumerationIndex(new_size);
 
   if (flag == INSERT_TRANSITION &&
       owns_descriptors() &&
@@ -5973,9 +6533,7 @@ MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors,
   int new_size = NumberOfOwnDescriptors();
   ASSERT(0 <= insertion_index && insertion_index < new_size);
 
-  PropertyDetails details = descriptors->GetDetails(insertion_index);
-  ASSERT_LE(details.descriptor_index(), new_size);
-  descriptor->SetEnumerationIndex(details.descriptor_index());
+  ASSERT_LT(insertion_index, new_size);
 
   DescriptorArray* new_descriptors;
   MaybeObject* maybe_descriptors = DescriptorArray::Allocate(new_size);
@@ -6817,6 +7375,117 @@ void DescriptorArray::CopyFrom(int dst_index,
 }
 
 
+// Generalize the |other| descriptor array by merging it into the (at least
+// partly) updated |this| descriptor array.
+// The method merges two descriptor arrays in three parts. Both descriptor arrays
+// are identical up to |verbatim|. They also overlap in keys up to |valid|.
+// Between |verbatim| and |valid|, the resulting descriptor type as well as the
+// representation are generalized from both |this| and |other|. Beyond |valid|,
+// the descriptors are copied verbatim from |other| up to |new_size|.
+// In case of incompatible types, the type and representation of |other| are
+// used.
+MaybeObject* DescriptorArray::Merge(int verbatim,
+                                    int valid,
+                                    int new_size,
+                                    DescriptorArray* other) {
+  ASSERT(verbatim <= valid);
+  ASSERT(valid <= new_size);
+
+  DescriptorArray* result;
+  // Allocate a new descriptor array large enough to hold the required
+  // descriptors, and at least as large as this descriptor array.
+  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
+      new_size, Max(new_size, other->number_of_descriptors()) - new_size);
+  if (!maybe_descriptors->To(&result)) return maybe_descriptors;
+  ASSERT(result->length() > length() ||
+         result->NumberOfSlackDescriptors() > 0 ||
+         result->number_of_descriptors() == other->number_of_descriptors());
+  ASSERT(result->number_of_descriptors() == new_size);
+
+  DescriptorArray::WhitenessWitness witness(result);
+
+  int descriptor;
+
+  // 0 -> |verbatim|
+  int current_offset = 0;
+  for (descriptor = 0; descriptor < verbatim; descriptor++) {
+    if (GetDetails(descriptor).type() == FIELD) current_offset++;
+    result->CopyFrom(descriptor, this, descriptor, witness);
+  }
+
+  // |verbatim| -> |valid|
+  for (; descriptor < valid; descriptor++) {
+    Name* key = GetKey(descriptor);
+    PropertyDetails details = GetDetails(descriptor);
+    PropertyDetails other_details = other->GetDetails(descriptor);
+
+    if (details.type() == FIELD || other_details.type() == FIELD ||
+        (details.type() == CONSTANT_FUNCTION &&
+         other_details.type() == CONSTANT_FUNCTION &&
+         GetValue(descriptor) != other->GetValue(descriptor))) {
+      Representation representation =
+          details.representation().generalize(other_details.representation());
+      FieldDescriptor d(key,
+                        current_offset++,
+                        other_details.attributes(),
+                        representation);
+      result->Set(descriptor, &d, witness);
+    } else {
+      result->CopyFrom(descriptor, other, descriptor, witness);
+    }
+  }
+
+  // |valid| -> |new_size|
+  for (; descriptor < new_size; descriptor++) {
+    PropertyDetails details = other->GetDetails(descriptor);
+    if (details.type() == FIELD) {
+      Name* key = other->GetKey(descriptor);
+      FieldDescriptor d(key,
+                        current_offset++,
+                        details.attributes(),
+                        details.representation());
+      result->Set(descriptor, &d, witness);
+    } else {
+      result->CopyFrom(descriptor, other, descriptor, witness);
+    }
+  }
+
+  result->Sort();
+  return result;
+}
+
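
The three segments described in the comment above can be illustrated with plain
vectors. Representations are modelled as ints where larger means more general,
so generalizing is simply std::max; this sketches only the segmentation, not the
DescriptorArray machinery:

    #include <algorithm>
    #include <vector>

    std::vector<int> MergeReps(const std::vector<int>& self,
                               const std::vector<int>& other,
                               int verbatim, int valid, int new_size) {
      std::vector<int> result(static_cast<size_t>(new_size));
      // 0 -> |verbatim|: both inputs are identical; copy from |self|.
      for (int i = 0; i < verbatim; ++i) result[i] = self[i];
      // |verbatim| -> |valid|: keys overlap; take the more general of the two.
      for (int i = verbatim; i < valid; ++i)
        result[i] = std::max(self[i], other[i]);
      // |valid| -> |new_size|: the tail exists only in |other|; copy it as-is.
      for (int i = valid; i < new_size; ++i) result[i] = other[i];
      return result;
    }

    int main() {
      std::vector<int> merged =
          MergeReps({0, 1, 2}, {0, 2, 1, 2}, /*verbatim=*/1, /*valid=*/3,
                    /*new_size=*/4);
      return merged.size() == 4 ? 0 : 1;
    }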
+
+// Checks whether a merge of |other| into |this| would return a copy of |this|.
+bool DescriptorArray::IsMoreGeneralThan(int verbatim,
+                                        int valid,
+                                        int new_size,
+                                        DescriptorArray* other) {
+  ASSERT(verbatim <= valid);
+  ASSERT(valid <= new_size);
+  if (valid != new_size) return false;
+
+  for (int descriptor = verbatim; descriptor < valid; descriptor++) {
+    PropertyDetails details = GetDetails(descriptor);
+    PropertyDetails other_details = other->GetDetails(descriptor);
+    if (details.type() != other_details.type()) {
+      if (details.type() != FIELD ||
+          other_details.type() != CONSTANT_FUNCTION) {
+        return false;
+      }
+    } else if (details.type() == CONSTANT_FUNCTION) {
+      if (GetValue(descriptor) != other->GetValue(descriptor)) {
+        return false;
+      }
+    } else if (!other_details.representation().fits_into(
+                   details.representation())) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+
 // We need the whiteness witness since sort will reshuffle the entries in the
 // descriptor array. If the descriptor array were to be black, the shuffling
 // would move a slot that was already recorded as pointing into an evacuation
@@ -8153,19 +8822,28 @@ int Map::Hash() {
 }
 
 
+static bool CheckEquivalent(Map* first, Map* second) {
+  return
+    first->constructor() == second->constructor() &&
+    first->prototype() == second->prototype() &&
+    first->instance_type() == second->instance_type() &&
+    first->bit_field() == second->bit_field() &&
+    first->bit_field2() == second->bit_field2() &&
+    first->is_observed() == second->is_observed() &&
+    first->function_with_prototype() == second->function_with_prototype();
+}
+
+
+bool Map::EquivalentToForTransition(Map* other) {
+  return CheckEquivalent(this, other);
+}
+
+
 bool Map::EquivalentToForNormalization(Map* other,
                                        PropertyNormalizationMode mode) {
-  return
-    constructor() == other->constructor() &&
-    prototype() == other->prototype() &&
-    inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ?
-                              0 :
-                              other->inobject_properties()) &&
-    instance_type() == other->instance_type() &&
-    bit_field() == other->bit_field() &&
-    bit_field2() == other->bit_field2() &&
-    is_observed() == other->is_observed() &&
-    function_with_prototype() == other->function_with_prototype();
+  int properties = mode == CLEAR_INOBJECT_PROPERTIES
+      ? 0 : other->inobject_properties();
+  return CheckEquivalent(this, other) && inobject_properties() == properties;
 }
 
 
@@ -8387,9 +9065,14 @@ static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
        i < kFastElementsKindCount; ++i) {
     Map* new_map;
     ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
-    MaybeObject* maybe_new_map =
-        current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
-    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+    if (current_map->HasElementsTransition()) {
+      new_map = current_map->elements_transition_map();
+      ASSERT(new_map->elements_kind() == next_kind);
+    } else {
+      MaybeObject* maybe_new_map =
+          current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
+      if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+    }
     maps->set(next_kind, new_map);
     current_map = new_map;
   }
@@ -9207,6 +9890,19 @@ void Code::FindAllCode(CodeHandleList* code_list, int length) {
 }
 
 
+Name* Code::FindFirstName() {
+  ASSERT(is_inline_cache_stub());
+  AssertNoAllocation no_allocation;
+  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
+  for (RelocIterator it(this, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    Object* object = info->target_object();
+    if (object->IsName()) return Name::cast(object);
+  }
+  return NULL;
+}
+
+
 void Code::ClearInlineCaches() {
   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
              RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
@@ -10665,7 +11361,7 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
       }
     }
     FixedArrayBase* new_dictionary;
-    PropertyDetails details = PropertyDetails(attributes, NORMAL);
+    PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
     MaybeObject* maybe = dictionary->AddNumberEntry(index, *value, details);
     if (!maybe->To(&new_dictionary)) return maybe;
     if (*dictionary != SeededNumberDictionary::cast(new_dictionary)) {
@@ -12659,7 +13355,7 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
   }
 
   uint32_t result = pos;
-  PropertyDetails no_details = PropertyDetails(NONE, NORMAL);
+  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
   Heap* heap = GetHeap();
   while (undefs > 0) {
     if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
@@ -12835,6 +13531,58 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
   return result_double;
 }
 
+ExternalArrayType JSTypedArray::type() {
+  switch (elements()->map()->instance_type()) {
+    case EXTERNAL_BYTE_ARRAY_TYPE:
+      return kExternalByteArray;
+    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+      return kExternalUnsignedByteArray;
+    case EXTERNAL_SHORT_ARRAY_TYPE:
+      return kExternalShortArray;
+    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+      return kExternalUnsignedShortArray;
+    case EXTERNAL_INT_ARRAY_TYPE:
+      return kExternalIntArray;
+    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+      return kExternalUnsignedIntArray;
+    case EXTERNAL_FLOAT_ARRAY_TYPE:
+      return kExternalFloatArray;
+    case EXTERNAL_DOUBLE_ARRAY_TYPE:
+      return kExternalDoubleArray;
+    case EXTERNAL_PIXEL_ARRAY_TYPE:
+      return kExternalPixelArray;
+    default:
+      return static_cast<ExternalArrayType>(-1);
+  }
+}
+
+
+size_t JSTypedArray::element_size() {
+  switch (elements()->map()->instance_type()) {
+    case EXTERNAL_BYTE_ARRAY_TYPE:
+      return 1;
+    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+      return 1;
+    case EXTERNAL_SHORT_ARRAY_TYPE:
+      return 2;
+    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+      return 2;
+    case EXTERNAL_INT_ARRAY_TYPE:
+      return 4;
+    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+      return 4;
+    case EXTERNAL_FLOAT_ARRAY_TYPE:
+      return 4;
+    case EXTERNAL_DOUBLE_ARRAY_TYPE:
+      return 8;
+    case EXTERNAL_PIXEL_ARRAY_TYPE:
+      return 1;
+    default:
+      UNREACHABLE();
+      return 0;
+  }
+}
+
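
For reference, the element widths returned above collapse into a small table,
and they typically feed a byte-length computation of the form
length * element_size. The enumerators below are shortened stand-ins, not V8's
instance types:

    #include <cstddef>

    enum class ExtArray { kI8, kU8, kPixel, kI16, kU16, kI32, kU32, kF32, kF64 };

    constexpr std::size_t ElementSize(ExtArray type) {
      switch (type) {
        case ExtArray::kI8:
        case ExtArray::kU8:
        case ExtArray::kPixel: return 1;  // Byte, unsigned byte, pixel.
        case ExtArray::kI16:
        case ExtArray::kU16:   return 2;  // Short, unsigned short.
        case ExtArray::kI32:
        case ExtArray::kU32:
        case ExtArray::kF32:   return 4;  // Int, unsigned int, float.
        case ExtArray::kF64:   return 8;  // Double.
      }
      return 0;  // Unreachable; keeps compilers quiet.
    }

    constexpr std::size_t ByteLength(ExtArray type, std::size_t length) {
      return length * ElementSize(type);
    }

    static_assert(ByteLength(ExtArray::kF64, 16) == 128,
                  "16 doubles occupy 128 bytes");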
 
 Object* ExternalPixelArray::SetValue(uint32_t index, Object* value) {
   uint8_t clamped_value = 0;
@@ -13017,7 +13765,7 @@ MaybeObject* GlobalObject::EnsurePropertyCell(Name* name) {
           heap->AllocateJSGlobalPropertyCell(heap->the_hole_value());
       if (!maybe_cell->ToObject(&cell)) return maybe_cell;
     }
-    PropertyDetails details(NONE, NORMAL);
+    PropertyDetails details(NONE, NORMAL, 0);
     details = details.AsDeleted();
     Object* dictionary;
     { MaybeObject* maybe_dictionary =
@@ -13459,8 +14207,8 @@ MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
     if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
       int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
       PropertyDetails details = DetailsAt(i);
-      PropertyDetails new_details =
-          PropertyDetails(details.attributes(), details.type(), enum_index);
+      PropertyDetails new_details = PropertyDetails(
+          details.attributes(), details.type(), enum_index);
       DetailsAtPut(i, new_details);
     }
   }
@@ -13526,7 +14274,7 @@ MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
   { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
     if (!maybe_k->ToObject(&k)) return maybe_k;
   }
-  PropertyDetails details = PropertyDetails(NONE, NORMAL);
+  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
 
   return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
       Dictionary<Shape, Key>::Hash(key));
@@ -13537,8 +14285,6 @@ template<typename Shape, typename Key>
 MaybeObject* Dictionary<Shape, Key>::Add(Key key,
                                          Object* value,
                                          PropertyDetails details) {
-  ASSERT(details.dictionary_index() == details.descriptor_index());
-
   // Validate key is absent.
   SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
   // Check whether the dictionary should be extended.
@@ -13614,7 +14360,7 @@ MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
 MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
                                                       Object* value) {
   SLOW_ASSERT(this->FindEntry(key) == kNotFound);
-  return Add(key, value, PropertyDetails(NONE, NORMAL));
+  return Add(key, value, PropertyDetails(NONE, NORMAL, 0));
 }
 
 
@@ -13911,15 +14657,13 @@ MaybeObject* NameDictionary::TransformPropertiesToFastFor(
       }
 
       PropertyDetails details = DetailsAt(i);
-      ASSERT(details.descriptor_index() == details.dictionary_index());
-      int enumeration_index = details.descriptor_index();
+      int enumeration_index = details.dictionary_index();
       PropertyType type = details.type();
 
       if (value->IsJSFunction()) {
         ConstantFunctionDescriptor d(key,
                                      JSFunction::cast(value),
-                                     details.attributes(),
-                                     enumeration_index);
+                                     details.attributes());
         descriptors->Set(enumeration_index - 1, &d, witness);
       } else if (type == NORMAL) {
         if (current_offset < inobject_props) {
@@ -13933,13 +14677,13 @@ MaybeObject* NameDictionary::TransformPropertiesToFastFor(
         FieldDescriptor d(key,
                           current_offset++,
                           details.attributes(),
-                          enumeration_index);
+                          // TODO(verwaest): value->OptimalRepresentation();
+                          Representation::Tagged());
         descriptors->Set(enumeration_index - 1, &d, witness);
       } else if (type == CALLBACKS) {
         CallbacksDescriptor d(key,
                               value,
-                              details.attributes(),
-                              enumeration_index);
+                              details.attributes());
         descriptors->Set(enumeration_index - 1, &d, witness);
       } else {
         UNREACHABLE();
index e32c41b..1b4ed5b 100644 (file)
@@ -1061,6 +1061,29 @@ class Object : public MaybeObject {
   inline double Number();
   inline bool IsNaN();
 
+  inline Representation OptimalRepresentation() {
+    if (FLAG_track_fields && IsSmi()) {
+      return Representation::Smi();
+    } else if (FLAG_track_double_fields && IsHeapNumber()) {
+      return Representation::Double();
+    } else {
+      return Representation::Tagged();
+    }
+  }
+
+  inline bool FitsRepresentation(Representation representation) {
+    if (FLAG_track_fields && representation.IsSmi()) {
+      return IsSmi();
+    } else if (FLAG_track_double_fields && representation.IsDouble()) {
+      return IsNumber();
+    }
+    return true;
+  }
+
+  inline MaybeObject* AllocateNewStorageFor(Heap* heap,
+                                            Representation representation,
+                                            PretenureFlag tenure = NOT_TENURED);
+
   // Returns true if the object is of the correct type to be used as a
   // implementation of a JSObject's elements.
   inline bool HasValidElements();
@@ -1809,10 +1832,11 @@ class JSObject: public JSReceiver {
 
   // Extend the receiver with a single fast property that appeared first in
   // the passed map. This also extends the property backing store if necessary.
-  static void AddFastPropertyUsingMap(Handle<JSObject> object, Handle<Map> map);
-  inline MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(Map* map);
-  static void TransitionToMap(Handle<JSObject> object, Handle<Map> map);
-  inline MUST_USE_RESULT MaybeObject* TransitionToMap(Map* map);
+  static void AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map);
+  inline MUST_USE_RESULT MaybeObject* AllocateStorageForMap(Map* map);
+
+  static void MigrateInstance(Handle<JSObject> instance);
+  inline MUST_USE_RESULT MaybeObject* MigrateInstance();
 
   // Can cause GC.
   MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributes(
@@ -2115,10 +2139,12 @@ class JSObject: public JSReceiver {
 
   // Add a property to a fast-case object using a map transition to
   // new_map.
-  MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(Map* new_map,
-                                                       Name* name,
-                                                       Object* value,
-                                                       int field_index);
+  MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(
+      Map* new_map,
+      Name* name,
+      Object* value,
+      int field_index,
+      Representation representation);
 
   // Add a constant function property to a fast-case object.
   // This leaves a CONSTANT_TRANSITION in the old map, and
@@ -2167,6 +2193,11 @@ class JSObject: public JSReceiver {
       Object* new_value,
       PropertyAttributes attributes);
 
+  MUST_USE_RESULT MaybeObject* MigrateToMap(Map* new_map);
+  MUST_USE_RESULT MaybeObject* GeneralizeFieldRepresentation(
+      int modify_index,
+      Representation new_representation);
+
   // Add a property to a fast-case object.
   MUST_USE_RESULT MaybeObject* AddFastProperty(
       Name* name,
@@ -2222,8 +2253,11 @@ class JSObject: public JSReceiver {
       int unused_property_fields);
 
   // Access fast-case object properties at index.
-  inline Object* FastPropertyAt(int index);
-  inline Object* FastPropertyAtPut(int index, Object* value);
+  MUST_USE_RESULT inline MaybeObject* FastPropertyAt(
+      Representation representation,
+      int index);
+  inline Object* RawFastPropertyAt(int index);
+  inline void FastPropertyAtPut(int index, Object* value);
 
   // Access to in object properties.
   inline int GetInObjectPropertyOffset(int index);
@@ -2756,6 +2790,9 @@ class DescriptorArray: public FixedArray {
   inline Name* GetSortedKey(int descriptor_number);
   inline int GetSortedKeyIndex(int descriptor_number);
   inline void SetSortedKey(int pointer, int descriptor_number);
+  inline void InitializeRepresentations(Representation representation);
+  inline void SetRepresentation(int descriptor_number,
+                                Representation representation);
 
   // Accessor for complete descriptor.
   inline void Get(int descriptor_number, Descriptor* desc);
@@ -2776,6 +2813,15 @@ class DescriptorArray: public FixedArray {
                 DescriptorArray* src,
                 int src_index,
                 const WhitenessWitness&);
+  MUST_USE_RESULT MaybeObject* Merge(int verbatim,
+                                     int valid,
+                                     int new_size,
+                                     DescriptorArray* other);
+
+  bool IsMoreGeneralThan(int verbatim,
+                         int valid,
+                         int new_size,
+                         DescriptorArray* other);
 
   MUST_USE_RESULT MaybeObject* CopyUpTo(int enumeration_index);
 
@@ -4592,6 +4638,9 @@ class Code: public HeapObject {
   Code* FindFirstCode();
   void FindAllCode(CodeHandleList* code_list, int length);
 
+  // Find the first name in an IC stub.
+  Name* FindFirstName();
+
   class ExtraICStateStrictMode: public BitField<StrictModeFlag, 0, 1> {};
   class ExtraICStateKeyedAccessStoreMode:
       public BitField<KeyedAccessStoreMode, 1, 4> {};  // NOLINT
@@ -4911,6 +4960,9 @@ class DependentCode: public FixedArray {
     // Group of code that weakly embed this map and depend on being
     // deoptimized when the map is garbage collected.
     kWeaklyEmbeddedGroup,
+    // Group of code that embed a transition to this map, and depend on being
+    // deoptimized when the transition is replaced by a new version.
+    kTransitionGroup,
     // Group of code that omit run-time prototype checks for prototypes
     // described by this map. The group is deoptimized whenever an object
     // described by this map changes shape (and transitions to a new map),
@@ -5004,6 +5056,7 @@ class Map: public HeapObject {
   class DictionaryMap:              public BitField<bool, 24,  1> {};
   class OwnsDescriptors:            public BitField<bool, 25,  1> {};
   class IsObserved:                 public BitField<bool, 26,  1> {};
+  class Deprecated:                 public BitField<bool, 27,  1> {};
 
   // Tells whether the object in the prototype property will be used
   // for instances created from this function.  If the prototype
@@ -5146,6 +5199,28 @@ class Map: public HeapObject {
   inline void ClearTransitions(Heap* heap,
                                WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
 
+  void DeprecateTransitionTree();
+  void DeprecateTarget(Name* key, DescriptorArray* new_descriptors);
+
+  Map* FindRootMap();
+  Map* FindUpdatedMap(int verbatim, int length, DescriptorArray* descriptors);
+  Map* FindLastMatchMap(int verbatim, int length, DescriptorArray* descriptors);
+
+  int NumberOfFields();
+
+  bool InstancesNeedRewriting(Map* target,
+                              int target_number_of_fields,
+                              int target_inobject,
+                              int target_unused);
+  static Handle<Map> GeneralizeRepresentation(
+      Handle<Map> map,
+      int modify_index,
+      Representation new_representation);
+  MUST_USE_RESULT MaybeObject* GeneralizeRepresentation(
+      int modify_index,
+      Representation representation);
+  MUST_USE_RESULT MaybeObject* CopyGeneralizeAllRepresentations();
+
   // Tells whether the map is attached to SharedFunctionInfo
   // (for inobject slack tracking).
   inline void set_attached_to_shared_function_info(bool value);
@@ -5284,6 +5359,15 @@ class Map: public HeapObject {
   inline void set_owns_descriptors(bool is_shared);
   inline bool is_observed();
   inline void set_is_observed(bool is_observed);
+  inline void deprecate();
+  inline bool is_deprecated();
+  inline bool CanBeDeprecated();
+  // Returns a non-deprecated version of the input. If the input was not
+  // deprecated, it is directly returned. Otherwise, the non-deprecated version
+  // is found by re-transitioning from the root of the transition tree using the
+  // descriptor array of the map. New maps (and transitions) may be created if
+  // no new (more general) version exists.
+  static inline Handle<Map> CurrentMapForDeprecated(Handle<Map> map);
 
   MUST_USE_RESULT MaybeObject* RawCopy(int instance_size);
   MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors();
@@ -5293,6 +5377,9 @@ class Map: public HeapObject {
       Name* name,
       TransitionFlag flag,
       int descriptor_index);
+  MUST_USE_RESULT MaybeObject* CopyInstallDescriptors(
+      int new_descriptor,
+      DescriptorArray* descriptors);
   MUST_USE_RESULT MaybeObject* ShareDescriptor(DescriptorArray* descriptors,
                                                Descriptor* descriptor);
   MUST_USE_RESULT MaybeObject* CopyAddDescriptor(Descriptor* descriptor,
@@ -5318,9 +5405,6 @@ class Map: public HeapObject {
   // instance descriptors.
   MUST_USE_RESULT MaybeObject* Copy();
 
-  // Returns the property index for name (only valid for FAST MODE).
-  int PropertyIndexFor(Name* name);
-
   // Returns the next free property index (only valid for FAST MODE).
   int NextFreePropertyIndex();
 
@@ -5371,6 +5455,8 @@ class Map: public HeapObject {
   // Computes a hash value for this map, to be used in HashTables and such.
   int Hash();
 
+  bool EquivalentToForTransition(Map* other);
+
   // Compares this map to another to see if they describe equivalent objects.
   // If |mode| is set to CLEAR_INOBJECT_PROPERTIES, |other| is treated as if
   // it had exactly zero inobject properties.
@@ -6334,9 +6420,14 @@ class JSGeneratorObject: public JSObject {
   inline int continuation();
   inline void set_continuation(int continuation);
 
-  // [operands]: Saved operand stack.
+  // [operand_stack]: Saved operand stack.
   DECL_ACCESSORS(operand_stack, FixedArray)
 
+  // [stack_handler_index]: Index of first stack handler in operand_stack, or -1
+  // if the captured activation had no stack handler.
+  inline int stack_handler_index();
+  inline void set_stack_handler_index(int stack_handler_index);
+
   // Casting.
   static inline JSGeneratorObject* cast(Object* obj);
 
@@ -6354,11 +6445,24 @@ class JSGeneratorObject: public JSObject {
   static const int kReceiverOffset = kContextOffset + kPointerSize;
   static const int kContinuationOffset = kReceiverOffset + kPointerSize;
   static const int kOperandStackOffset = kContinuationOffset + kPointerSize;
-  static const int kSize = kOperandStackOffset + kPointerSize;
+  static const int kStackHandlerIndexOffset =
+      kOperandStackOffset + kPointerSize;
+  static const int kSize = kStackHandlerIndexOffset + kPointerSize;
 
   // Resume mode, for use by runtime functions.
   enum ResumeMode { SEND, THROW };
 
+  // Yielding from a generator returns an object with the following inobject
+  // properties.  See Context::generator_result_map() for the map.
+  static const int kResultValuePropertyIndex = 0;
+  static const int kResultDonePropertyIndex = 1;
+  static const int kResultPropertyCount = 2;
+
+  static const int kResultValuePropertyOffset = JSObject::kHeaderSize;
+  static const int kResultDonePropertyOffset =
+      kResultValuePropertyOffset + kPointerSize;
+  static const int kResultSize = kResultDonePropertyOffset + kPointerSize;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSGeneratorObject);
 };
@@ -7638,8 +7742,6 @@ class String: public Name {
   // possible.
   inline bool HasOnlyOneByteChars();
 
-  inline bool IsOneByteConvertible();
-
   // Get and set individual two byte chars in the string.
   inline void Set(int index, uint16_t value);
   // Get individual two byte char in the string.  Repeated calls
@@ -8647,6 +8749,9 @@ class JSTypedArray: public JSObject {
   // Casting.
   static inline JSTypedArray* cast(Object* obj);
 
+  ExternalArrayType type();
+  size_t element_size();
+
   // Dispatched behavior.
   DECLARE_PRINTER(JSTypedArray)
   DECLARE_VERIFIER(JSTypedArray)
index 267b872..cff51bc 100644 (file)
@@ -662,7 +662,7 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
           !body->at(0)->IsExpressionStatement() ||
           !body->at(0)->AsExpressionStatement()->
               expression()->IsFunctionLiteral()) {
-        ReportMessage("unable_to_parse", Vector<const char*>::empty());
+        ReportMessage("single_function_literal", Vector<const char*>::empty());
         ok = false;
       }
     }
@@ -3738,33 +3738,6 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
 }
 
 
-void Parser::BuildArrayLiteralBoilerplateLiterals(ZoneList<Expression*>* values,
-                                                  Handle<FixedArray> literals,
-                                                  bool* is_simple,
-                                                  int* depth) {
-  // Fill in the literals.
-  // Accumulate output values in local variables.
-  bool is_simple_acc = true;
-  int depth_acc = 1;
-  for (int i = 0; i < values->length(); i++) {
-    MaterializedLiteral* m_literal = values->at(i)->AsMaterializedLiteral();
-    if (m_literal != NULL && m_literal->depth() >= depth_acc) {
-      depth_acc = m_literal->depth() + 1;
-    }
-    Handle<Object> boilerplate_value = GetBoilerplateValue(values->at(i));
-    if (boilerplate_value->IsUndefined()) {
-      literals->set_the_hole(i);
-      is_simple_acc = false;
-    } else {
-      literals->set(i, *boilerplate_value);
-    }
-  }
-
-  *is_simple = is_simple_acc;
-  *depth = depth_acc;
-}
-
-
 Expression* Parser::ParseArrayLiteral(bool* ok) {
   // ArrayLiteral ::
   //   '[' Expression? (',' Expression?)* ']'
@@ -3991,7 +3964,8 @@ void Parser::BuildObjectLiteralConstantProperties(
     Handle<FixedArray> constant_properties,
     bool* is_simple,
     bool* fast_elements,
-    int* depth) {
+    int* depth,
+    bool* may_store_doubles) {
   int position = 0;
   // Accumulate the value in local variables and store it at the end.
   bool is_simple_acc = true;
@@ -4014,6 +3988,13 @@ void Parser::BuildObjectLiteralConstantProperties(
     // runtime. The enumeration order is maintained.
     Handle<Object> key = property->key()->handle();
     Handle<Object> value = GetBoilerplateValue(property->value());
+
+    // Ensure objects with doubles are always treated as nested objects.
+    // TODO(verwaest): Remove once we can store them inline.
+    if (FLAG_track_double_fields && value->IsNumber()) {
+      *may_store_doubles = true;
+    }
+
     is_simple_acc = is_simple_acc && !value->IsUndefined();
 
     // Keep track of the number of elements in the object literal and
@@ -4215,17 +4196,20 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
   bool is_simple = true;
   bool fast_elements = true;
   int depth = 1;
+  bool may_store_doubles = false;
   BuildObjectLiteralConstantProperties(properties,
                                        constant_properties,
                                        &is_simple,
                                        &fast_elements,
-                                       &depth);
+                                       &depth,
+                                       &may_store_doubles);
   return factory()->NewObjectLiteral(constant_properties,
                                      properties,
                                      literal_index,
                                      is_simple,
                                      fast_elements,
                                      depth,
+                                     may_store_doubles,
                                      has_function);
 }
 
@@ -4752,7 +4736,7 @@ Expression* Parser::ParseV8Intrinsic(bool* ok) {
     if (args->length() == 1 && args->at(0)->AsVariableProxy() != NULL) {
       return args->at(0);
     } else {
-      ReportMessage("unable_to_parse", Vector<const char*>::empty());
+      ReportMessage("not_isvar", Vector<const char*>::empty());
       *ok = false;
       return NULL;
     }
index acf47bb..1defbf2 100644 (file)
@@ -692,13 +692,8 @@ class Parser BASE_EMBEDDED {
       Handle<FixedArray> constants,
       bool* is_simple,
       bool* fast_elements,
-      int* depth);
-
-  // Populate the literals fixed array for a materialized array literal.
-  void BuildArrayLiteralBoilerplateLiterals(ZoneList<Expression*>* properties,
-                                            Handle<FixedArray> constants,
-                                            bool* is_simple,
-                                            int* depth);
+      int* depth,
+      bool* may_store_doubles);
 
   // Decide if a property should be in the object boilerplate.
   bool IsBoilerplateProperty(ObjectLiteral::Property* property);
index 054d5b5..f76ec44 100644 (file)
@@ -115,26 +115,11 @@ void* OS::GetRandomMmapAddr() {
     raw_addr &= V8_UINT64_C(0x3ffffffff000);
 #else
     uint32_t raw_addr = V8::RandomPrivate(isolate);
-
-    raw_addr &= 0x3ffff000;
-
-# ifdef __sun
-    // For our Solaris/illumos mmap hint, we pick a random address in the bottom
-    // half of the top half of the address space (that is, the third quarter).
-    // Because we do not MAP_FIXED, this will be treated only as a hint -- the
-    // system will not fail to mmap() because something else happens to already
-    // be mapped at our random address. We deliberately set the hint high enough
-    // to get well above the system's break (that is, the heap); Solaris and
-    // illumos will try the hint and if that fails allocate as if there were
-    // no hint at all. The high hint prevents the break from getting hemmed in
-    // at low values, ceding half of the address space to the system heap.
-    raw_addr += 0x80000000;
-# else
     // The range 0x20000000 - 0x60000000 is relatively unpopulated across a
     // variety of ASLR modes (PAE kernel, NX compat mode, etc) and on macos
     // 10.6 and 10.7.
+    raw_addr &= 0x3ffff000;
     raw_addr += 0x20000000;
-# endif
 #endif
     return reinterpret_cast<void*>(raw_addr);
   }
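
The Solaris special case is gone, so every 32-bit POSIX target now computes its hint the same way. A small standalone sketch of the arithmetic (illustrative names, not V8 code): masking with 0x3ffff000 keeps a page-aligned offset below 1 GB, and adding the fixed 0x20000000 base lands the hint in the 0x20000000 - 0x60000000 window named in the comment.

    #include <cassert>
    #include <cstdint>

    // Illustrative only: derive a page-aligned mmap hint from random bits.
    uint32_t MmapHintFromRandom(uint32_t random_bits) {
      uint32_t addr = random_bits & 0x3ffff000u;  // page-aligned, < 0x40000000
      addr += 0x20000000u;                        // shift into the quiet window
      assert(addr >= 0x20000000u && addr <= 0x5ffff000u);
      return addr;
    }
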
index 4660c0f..3a1eca7 100644 (file)
@@ -628,11 +628,7 @@ void PrettyPrinter::PrintCaseClause(CaseClause* clause) {
 
 class IndentedScope BASE_EMBEDDED {
  public:
-  explicit IndentedScope(AstPrinter* printer) : ast_printer_(printer) {
-    ast_printer_->inc_indent();
-  }
-
-  IndentedScope(AstPrinter* printer, const char* txt, AstNode* node = NULL)
+  IndentedScope(AstPrinter* printer, const char* txt)
       : ast_printer_(printer) {
     ast_printer_->PrintIndented(txt);
     ast_printer_->Print("\n");
@@ -693,21 +689,16 @@ void AstPrinter::PrintLiteralWithModeIndented(const char* info,
 }
 
 
-void AstPrinter::PrintLabelsIndented(const char* info, ZoneStringList* labels) {
-  if (labels != NULL && labels->length() > 0) {
-    PrintIndented(info == NULL ? "LABELS" : info);
-    Print(" ");
-    PrintLabels(labels);
-    Print("\n");
-  } else if (info != NULL) {
-    PrintIndented(info);
-    Print("\n");
-  }
+void AstPrinter::PrintLabelsIndented(ZoneStringList* labels) {
+  if (labels == NULL || labels->length() == 0) return;
+  PrintIndented("LABELS ");
+  PrintLabels(labels);
+  Print("\n");
 }
 
 
 void AstPrinter::PrintIndentedVisit(const char* s, AstNode* node) {
-  IndentedScope indent(this, s, node);
+  IndentedScope indent(this, s);
   Visit(node);
 }
 
@@ -779,6 +770,7 @@ void AstPrinter::VisitBlock(Block* node) {
 }
 
 
+// TODO(svenpanne) Start with IndentedScope.
 void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
   PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
                                node->proxy()->var(),
@@ -786,6 +778,7 @@ void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
 }
 
 
+// TODO(svenpanne) Start with IndentedScope.
 void AstPrinter::VisitFunctionDeclaration(FunctionDeclaration* node) {
   PrintIndented("FUNCTION ");
   PrintLiteral(node->proxy()->name(), true);
@@ -816,19 +809,21 @@ void AstPrinter::VisitExportDeclaration(ExportDeclaration* node) {
 
 
 void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+  IndentedScope indent(this, "MODULE LITERAL");
   VisitBlock(node->body());
 }
 
 
 void AstPrinter::VisitModuleVariable(ModuleVariable* node) {
+  IndentedScope indent(this, "MODULE VARIABLE");
   Visit(node->proxy());
 }
 
 
 void AstPrinter::VisitModulePath(ModulePath* node) {
-  IndentedScope indent(this, "PATH");
-  PrintIndentedVisit("MODULE", node->module());
-  PrintLiteralIndented("NAME", node->name(), false);
+  IndentedScope indent(this, "MODULE PATH");
+  PrintIndentedVisit("MODULE PATH PARENT", node->module());
+  PrintLiteralIndented("NAME", node->name(), true);
 }
 
 
@@ -838,24 +833,26 @@ void AstPrinter::VisitModuleUrl(ModuleUrl* node) {
 
 
 void AstPrinter::VisitModuleStatement(ModuleStatement* node) {
-  IndentedScope indent(this, "MODULE");
+  IndentedScope indent(this, "MODULE STATEMENT");
   PrintLiteralIndented("NAME", node->proxy()->name(), true);
   PrintStatements(node->body()->statements());
 }
 
 
 void AstPrinter::VisitExpressionStatement(ExpressionStatement* node) {
+  IndentedScope indent(this, "EXPRESSION STATEMENT");
   Visit(node->expression());
 }
 
 
 void AstPrinter::VisitEmptyStatement(EmptyStatement* node) {
-  PrintIndented("EMPTY\n");
+  IndentedScope indent(this, "EMPTY");
 }
 
 
 void AstPrinter::VisitIfStatement(IfStatement* node) {
-  PrintIndentedVisit("IF", node->condition());
+  IndentedScope indent(this, "IF");
+  PrintIndentedVisit("CONDITION", node->condition());
   PrintIndentedVisit("THEN", node->then_statement());
   if (node->HasElseStatement()) {
     PrintIndentedVisit("ELSE", node->else_statement());
@@ -864,17 +861,20 @@ void AstPrinter::VisitIfStatement(IfStatement* node) {
 
 
 void AstPrinter::VisitContinueStatement(ContinueStatement* node) {
-  PrintLabelsIndented("CONTINUE", node->target()->labels());
+  IndentedScope indent(this, "CONTINUE");
+  PrintLabelsIndented(node->target()->labels());
 }
 
 
 void AstPrinter::VisitBreakStatement(BreakStatement* node) {
-  PrintLabelsIndented("BREAK", node->target()->labels());
+  IndentedScope indent(this, "BREAK");
+  PrintLabelsIndented(node->target()->labels());
 }
 
 
 void AstPrinter::VisitReturnStatement(ReturnStatement* node) {
-  PrintIndentedVisit("RETURN", node->expression());
+  IndentedScope indent(this, "RETURN");
+  Visit(node->expression());
 }
 
 
@@ -887,7 +887,7 @@ void AstPrinter::VisitWithStatement(WithStatement* node) {
 
 void AstPrinter::VisitSwitchStatement(SwitchStatement* node) {
   IndentedScope indent(this, "SWITCH");
-  PrintLabelsIndented(NULL, node->labels());
+  PrintLabelsIndented(node->labels());
   PrintIndentedVisit("TAG", node->tag());
   for (int i = 0; i < node->cases()->length(); i++) {
     PrintCaseClause(node->cases()->at(i));
@@ -897,7 +897,7 @@ void AstPrinter::VisitSwitchStatement(SwitchStatement* node) {
 
 void AstPrinter::VisitDoWhileStatement(DoWhileStatement* node) {
   IndentedScope indent(this, "DO");
-  PrintLabelsIndented(NULL, node->labels());
+  PrintLabelsIndented(node->labels());
   PrintIndentedVisit("BODY", node->body());
   PrintIndentedVisit("COND", node->cond());
 }
@@ -905,7 +905,7 @@ void AstPrinter::VisitDoWhileStatement(DoWhileStatement* node) {
 
 void AstPrinter::VisitWhileStatement(WhileStatement* node) {
   IndentedScope indent(this, "WHILE");
-  PrintLabelsIndented(NULL, node->labels());
+  PrintLabelsIndented(node->labels());
   PrintIndentedVisit("COND", node->cond());
   PrintIndentedVisit("BODY", node->body());
 }
@@ -913,7 +913,7 @@ void AstPrinter::VisitWhileStatement(WhileStatement* node) {
 
 void AstPrinter::VisitForStatement(ForStatement* node) {
   IndentedScope indent(this, "FOR");
-  PrintLabelsIndented(NULL, node->labels());
+  PrintLabelsIndented(node->labels());
   if (node->init()) PrintIndentedVisit("INIT", node->init());
   if (node->cond()) PrintIndentedVisit("COND", node->cond());
   PrintIndentedVisit("BODY", node->body());
@@ -972,12 +972,13 @@ void AstPrinter::VisitSharedFunctionInfoLiteral(
 
 void AstPrinter::VisitConditional(Conditional* node) {
   IndentedScope indent(this, "CONDITIONAL");
-  PrintIndentedVisit("?", node->condition());
+  PrintIndentedVisit("CONDITION", node->condition());
   PrintIndentedVisit("THEN", node->then_expression());
   PrintIndentedVisit("ELSE", node->else_expression());
 }
 
 
+// TODO(svenpanne) Start with IndentedScope.
 void AstPrinter::VisitLiteral(Literal* node) {
   PrintLiteralIndented("LITERAL", node->handle(), true);
 }
@@ -1034,6 +1035,7 @@ void AstPrinter::VisitArrayLiteral(ArrayLiteral* node) {
 }
 
 
+// TODO(svenpanne) Start with IndentedScope.
 void AstPrinter::VisitVariableProxy(VariableProxy* node) {
   Variable* var = node->var();
   EmbeddedVector<char, 128> buf;
@@ -1059,24 +1061,26 @@ void AstPrinter::VisitVariableProxy(VariableProxy* node) {
 
 
 void AstPrinter::VisitAssignment(Assignment* node) {
-  IndentedScope indent(this, Token::Name(node->op()), node);
+  IndentedScope indent(this, Token::Name(node->op()));
   Visit(node->target());
   Visit(node->value());
 }
 
 
 void AstPrinter::VisitYield(Yield* node) {
-  PrintIndentedVisit("YIELD", node->expression());
+  IndentedScope indent(this, "YIELD");
+  Visit(node->expression());
 }
 
 
 void AstPrinter::VisitThrow(Throw* node) {
-  PrintIndentedVisit("THROW", node->exception());
+  IndentedScope indent(this, "THROW");
+  Visit(node->exception());
 }
 
 
 void AstPrinter::VisitProperty(Property* node) {
-  IndentedScope indent(this, "PROPERTY", node);
+  IndentedScope indent(this, "PROPERTY");
   Visit(node->obj());
   Literal* literal = node->key()->AsLiteral();
   if (literal != NULL && literal->handle()->IsInternalizedString()) {
@@ -1102,14 +1106,15 @@ void AstPrinter::VisitCallNew(CallNew* node) {
 
 
 void AstPrinter::VisitCallRuntime(CallRuntime* node) {
-  PrintLiteralIndented("CALL RUNTIME ", node->name(), false);
-  IndentedScope indent(this);
+  IndentedScope indent(this, "CALL RUNTIME");
+  PrintLiteralIndented("NAME", node->name(), false);
   PrintArguments(node->arguments());
 }
 
 
 void AstPrinter::VisitUnaryOperation(UnaryOperation* node) {
-  PrintIndentedVisit(Token::Name(node->op()), node->expression());
+  IndentedScope indent(this, Token::Name(node->op()));
+  Visit(node->expression());
 }
 
 
@@ -1117,19 +1122,20 @@ void AstPrinter::VisitCountOperation(CountOperation* node) {
   EmbeddedVector<char, 128> buf;
   OS::SNPrintF(buf, "%s %s", (node->is_prefix() ? "PRE" : "POST"),
                Token::Name(node->op()));
-  PrintIndentedVisit(buf.start(), node->expression());
+  IndentedScope indent(this, buf.start());
+  Visit(node->expression());
 }
 
 
 void AstPrinter::VisitBinaryOperation(BinaryOperation* node) {
-  IndentedScope indent(this, Token::Name(node->op()), node);
+  IndentedScope indent(this, Token::Name(node->op()));
   Visit(node->left());
   Visit(node->right());
 }
 
 
 void AstPrinter::VisitCompareOperation(CompareOperation* node) {
-  IndentedScope indent(this, Token::Name(node->op()), node);
+  IndentedScope indent(this, Token::Name(node->op()));
   Visit(node->left());
   Visit(node->right());
 }
index 41175ab..6657ecd 100644 (file)
@@ -106,7 +106,7 @@ class AstPrinter: public PrettyPrinter {
   void PrintLiteralWithModeIndented(const char* info,
                                     Variable* var,
                                     Handle<Object> value);
-  void PrintLabelsIndented(const char* info, ZoneStringList* labels);
+  void PrintLabelsIndented(ZoneStringList* labels);
 
   void inc_indent() { indent_++; }
   void dec_indent() { indent_--; }
index 2aa6dcf..62140fe 100644 (file)
@@ -76,30 +76,121 @@ enum PropertyType {
 };
 
 
+class Representation {
+ public:
+  enum Kind {
+    kNone,
+    kSmi,
+    kInteger32,
+    kDouble,
+    kTagged,
+    kExternal,
+    kNumRepresentations
+  };
+
+  Representation() : kind_(kNone) { }
+
+  static Representation None() { return Representation(kNone); }
+  static Representation Tagged() { return Representation(kTagged); }
+  static Representation Smi() { return Representation(kSmi); }
+  static Representation Integer32() { return Representation(kInteger32); }
+  static Representation Double() { return Representation(kDouble); }
+  static Representation External() { return Representation(kExternal); }
+
+  static Representation FromKind(Kind kind) { return Representation(kind); }
+
+  bool Equals(const Representation& other) const {
+    return kind_ == other.kind_;
+  }
+
+  bool IsCompatibleForLoad(const Representation& other) const {
+    return (IsDouble() && other.IsDouble()) ||
+        (!IsDouble() && !other.IsDouble());
+  }
+
+  bool is_more_general_than(const Representation& other) const {
+    ASSERT(kind_ != kExternal);
+    ASSERT(other.kind_ != kExternal);
+    return kind_ > other.kind_;
+  }
+
+  bool fits_into(const Representation& other) const {
+    return other.is_more_general_than(*this) || other.Equals(*this);
+  }
+
+  Representation generalize(Representation other) {
+    if (is_more_general_than(other)) {
+      return *this;
+    } else {
+      return other;
+    }
+  }
+
+  Kind kind() const { return static_cast<Kind>(kind_); }
+  bool IsNone() const { return kind_ == kNone; }
+  bool IsTagged() const { return kind_ == kTagged; }
+  bool IsSmi() const { return kind_ == kSmi; }
+  bool IsInteger32() const { return kind_ == kInteger32; }
+  bool IsDouble() const { return kind_ == kDouble; }
+  bool IsExternal() const { return kind_ == kExternal; }
+  bool IsSpecialization() const {
+    return kind_ == kInteger32 || kind_ == kDouble;
+  }
+  const char* Mnemonic() const;
+
+ private:
+  explicit Representation(Kind k) : kind_(k) { }
+
+  // Make sure kind fits in int8.
+  STATIC_ASSERT(kNumRepresentations <= (1 << kBitsPerByte));
+
+  int8_t kind_;
+};
+
+
 // PropertyDetails captures type and attributes for a property.
 // They are used both in property dictionaries and instance descriptors.
 class PropertyDetails BASE_EMBEDDED {
  public:
   PropertyDetails(PropertyAttributes attributes,
                   PropertyType type,
-                  int index = 0) {
+                  int index) {
     value_ = TypeField::encode(type)
         | AttributesField::encode(attributes)
         | DictionaryStorageField::encode(index);
 
     ASSERT(type == this->type());
     ASSERT(attributes == this->attributes());
-    ASSERT(index == this->dictionary_index());
+  }
+
+  PropertyDetails(PropertyAttributes attributes,
+                  PropertyType type,
+                  Representation representation) {
+    value_ = TypeField::encode(type)
+        | AttributesField::encode(attributes)
+        | RepresentationField::encode(EncodeRepresentation(representation));
   }
 
   int pointer() { return DescriptorPointer::decode(value_); }
 
   PropertyDetails set_pointer(int i) { return PropertyDetails(value_, i); }
 
+  PropertyDetails CopyWithRepresentation(Representation representation) {
+    return PropertyDetails(value_, representation);
+  }
+
   // Conversion for storing details as Object*.
   explicit inline PropertyDetails(Smi* smi);
   inline Smi* AsSmi();
 
+  static uint8_t EncodeRepresentation(Representation representation) {
+    return representation.kind();
+  }
+
+  static Representation DecodeRepresentation(uint32_t bits) {
+    return Representation::FromKind(static_cast<Representation::Kind>(bits));
+  }
+
   PropertyType type() { return TypeField::decode(value_); }
 
   PropertyAttributes attributes() const {
@@ -110,8 +201,8 @@ class PropertyDetails BASE_EMBEDDED {
     return DictionaryStorageField::decode(value_);
   }
 
-  int descriptor_index() {
-    return DescriptorStorageField::decode(value_);
+  Representation representation() {
+    return DecodeRepresentation(RepresentationField::decode(value_));
   }
 
   inline PropertyDetails AsDeleted();
@@ -131,14 +222,18 @@ class PropertyDetails BASE_EMBEDDED {
   class AttributesField:          public BitField<PropertyAttributes, 3,  3> {};
   class DeletedField:             public BitField<uint32_t,           6,  1> {};
   class DictionaryStorageField:   public BitField<uint32_t,           7, 24> {};
-  class DescriptorStorageField:   public BitField<uint32_t,           7, 11> {};
-  class DescriptorPointer:        public BitField<uint32_t,          18, 11> {};
+  class DescriptorPointer:        public BitField<uint32_t,           7, 11> {};
+  class RepresentationField:      public BitField<uint32_t,          18,  3> {};
 
   static const int kInitialIndex = 1;
 
  private:
   PropertyDetails(int value, int pointer) {
-      value_ = DescriptorPointer::update(value, pointer);
+    value_ = DescriptorPointer::update(value, pointer);
+  }
+  PropertyDetails(int value, Representation representation) {
+    value_ = RepresentationField::update(
+        value, EncodeRepresentation(representation));
   }
 
   uint32_t value_;
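
The Kind enum above is ordered from least to most general, and is_more_general_than, fits_into, and generalize all lean on that ordering. A minimal standalone sketch of the lattice (illustrative, not the V8 class; kExternal is omitted because the real code asserts it never takes part in generalization):

    #include <algorithm>
    #include <cassert>

    enum Kind { kNone, kSmi, kInteger32, kDouble, kTagged };

    // With the kinds declared in increasing generality, generalization is just
    // a max over the enum values, and "fits into" is <=.
    Kind Generalize(Kind a, Kind b) { return std::max(a, b); }
    bool FitsInto(Kind a, Kind b) { return a <= b; }

    int main() {
      // A field that has only held small integers fits an integer32 or double
      // field; seeing a fractional value generalizes smi to double, and a
      // non-numeric value would force the fully general tagged representation.
      assert(Generalize(kSmi, kDouble) == kDouble);
      assert(FitsInto(kSmi, kInteger32));
      assert(!FitsInto(kTagged, kDouble));
      return 0;
    }
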
index cbf2fc8..80a06cb 100644 (file)
@@ -112,7 +112,6 @@ void Descriptor::Print(FILE* out) {
   GetKey()->ShortPrint(out);
   PrintF(out, " @ ");
   GetValue()->ShortPrint(out);
-  PrintF(out, " %d\n", GetDetails().descriptor_index());
 }
 
 
index bbba8ae..606f111 100644 (file)
@@ -64,10 +64,6 @@ class Descriptor BASE_EMBEDDED {
   void Print(FILE* out);
 #endif
 
-  void SetEnumerationIndex(int index) {
-    details_ = PropertyDetails(details_.attributes(), details_.type(), index);
-  }
-
   void SetSortedKeyIndex(int index) { details_ = details_.set_pointer(index); }
 
  private:
@@ -93,10 +89,10 @@ class Descriptor BASE_EMBEDDED {
              Object* value,
              PropertyAttributes attributes,
              PropertyType type,
-             int index)
+             Representation representation)
       : key_(key),
         value_(value),
-        details_(attributes, type, index) { }
+        details_(attributes, type, representation) { }
 
   friend class DescriptorArray;
 };
@@ -107,8 +103,9 @@ class FieldDescriptor: public Descriptor {
   FieldDescriptor(Name* key,
                   int field_index,
                   PropertyAttributes attributes,
-                  int index = 0)
-      : Descriptor(key, Smi::FromInt(field_index), attributes, FIELD, index) {}
+                  Representation representation)
+      : Descriptor(key, Smi::FromInt(field_index), attributes,
+                   FIELD, representation) {}
 };
 
 
@@ -116,9 +113,9 @@ class ConstantFunctionDescriptor: public Descriptor {
  public:
   ConstantFunctionDescriptor(Name* key,
                              JSFunction* function,
-                             PropertyAttributes attributes,
-                             int index)
-      : Descriptor(key, function, attributes, CONSTANT_FUNCTION, index) {}
+                             PropertyAttributes attributes)
+      : Descriptor(key, function, attributes, CONSTANT_FUNCTION,
+                   Representation::Tagged()) {}
 };
 
 
@@ -126,9 +123,9 @@ class CallbacksDescriptor:  public Descriptor {
  public:
   CallbacksDescriptor(Name* key,
                       Object* foreign,
-                      PropertyAttributes attributes,
-                      int index = 0)
-      : Descriptor(key, foreign, attributes, CALLBACKS, index) {}
+                      PropertyAttributes attributes)
+      : Descriptor(key, foreign, attributes, CALLBACKS,
+                   Representation::Tagged()) {}
 };
 
 
@@ -190,7 +187,7 @@ class LookupResult BASE_EMBEDDED {
         lookup_type_(NOT_FOUND),
         holder_(NULL),
         cacheable_(true),
-        details_(NONE, NONEXISTENT) {
+        details_(NONE, NONEXISTENT, Representation::None()) {
     isolate->SetTopLookupResult(this);
   }
 
@@ -208,9 +205,13 @@ class LookupResult BASE_EMBEDDED {
     number_ = number;
   }
 
+  bool CanHoldValue(Handle<Object> value) {
+    return value->FitsRepresentation(details_.representation());
+  }
+
   void TransitionResult(JSObject* holder, int number) {
     lookup_type_ = TRANSITION_TYPE;
-    details_ = PropertyDetails(NONE, TRANSITION);
+    details_ = PropertyDetails(NONE, TRANSITION, Representation::None());
     holder_ = holder;
     number_ = number;
   }
@@ -225,19 +226,19 @@ class LookupResult BASE_EMBEDDED {
   void HandlerResult(JSProxy* proxy) {
     lookup_type_ = HANDLER_TYPE;
     holder_ = proxy;
-    details_ = PropertyDetails(NONE, HANDLER);
+    details_ = PropertyDetails(NONE, HANDLER, Representation::None());
     cacheable_ = false;
   }
 
   void InterceptorResult(JSObject* holder) {
     lookup_type_ = INTERCEPTOR_TYPE;
     holder_ = holder;
-    details_ = PropertyDetails(NONE, INTERCEPTOR);
+    details_ = PropertyDetails(NONE, INTERCEPTOR, Representation::None());
   }
 
   void NotFound() {
     lookup_type_ = NOT_FOUND;
-    details_ = PropertyDetails(NONE, NONEXISTENT);
+    details_ = PropertyDetails(NONE, NONEXISTENT, Representation::None());
     holder_ = NULL;
   }
 
@@ -256,6 +257,13 @@ class LookupResult BASE_EMBEDDED {
     return details_.type();
   }
 
+  Representation representation() {
+    ASSERT(IsFound());
+    ASSERT(!IsTransition());
+    ASSERT(details_.type() != NONEXISTENT);
+    return details_.representation();
+  }
+
   PropertyAttributes GetAttributes() {
     ASSERT(!IsTransition());
     ASSERT(IsFound());
@@ -340,7 +348,7 @@ class LookupResult BASE_EMBEDDED {
   Object* GetLazyValue() {
     switch (type()) {
       case FIELD:
-        return holder()->FastPropertyAt(GetFieldIndex().field_index());
+        return holder()->RawFastPropertyAt(GetFieldIndex().field_index());
       case NORMAL: {
         Object* value;
         value = holder()->property_dictionary()->ValueAt(GetDictionaryEntry());
index ebe88fe..4e00b29 100644 (file)
@@ -28,6 +28,8 @@
 #include <stdlib.h>
 #include <limits>
 
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "accessors.h"
@@ -232,7 +234,9 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
                                 constant_properties,
                                 &is_result_from_cache);
 
-  Handle<JSObject> boilerplate = isolate->factory()->NewJSObjectFromMap(map);
+  Handle<JSObject> boilerplate =
+      isolate->factory()->NewJSObjectFromMap(
+          map, isolate->heap()->GetPretenureMode());
 
   // Normalize the elements of the boilerplate to save space if needed.
   if (!should_have_fast_elements) JSObject::NormalizeElements(boilerplate);
@@ -249,6 +253,7 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
         boilerplate, KEEP_INOBJECT_PROPERTIES, length / 2);
   }
 
+  // TODO(verwaest): Support tracking representations in the boilerplate.
   for (int index = 0; index < length; index +=2) {
     Handle<Object> key(constant_properties->get(index+0), isolate);
     Handle<Object> value(constant_properties->get(index+1), isolate);
@@ -335,8 +340,10 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
   // Create the JSArray.
   Handle<JSFunction> constructor(
       JSFunction::NativeContextFromLiterals(*literals)->array_function());
-  Handle<JSArray> object =
-      Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor));
+
+  Handle<JSArray> object = Handle<JSArray>::cast(
+      isolate->factory()->NewJSObject(
+          constructor, isolate->heap()->GetPretenureMode()));
 
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(elements->get(0))->value());
@@ -646,11 +653,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) {
 
 
 static void ArrayBufferWeakCallback(v8::Isolate* external_isolate,
-                                    Persistent<Value> object,
+                                    Persistent<Value>* object,
                                     void* data) {
   Isolate* isolate = reinterpret_cast<Isolate*>(external_isolate);
   HandleScope scope(isolate);
-  Handle<Object> internal_object = Utils::OpenHandle(*object);
+  Handle<Object> internal_object = Utils::OpenHandle(**object);
 
   size_t allocated_length = NumberToSize(
       isolate, JSArrayBuffer::cast(*internal_object)->byte_length());
@@ -658,7 +665,7 @@ static void ArrayBufferWeakCallback(v8::Isolate* external_isolate,
       -static_cast<intptr_t>(allocated_length));
   if (data != NULL)
     free(data);
-  object.Dispose(external_isolate);
+  object->Dispose(external_isolate);
 }
 
 
@@ -773,7 +780,8 @@ enum TypedArrayId {
   ARRAY_ID_UINT32 = 5,
   ARRAY_ID_INT32 = 6,
   ARRAY_ID_FLOAT32 = 7,
-  ARRAY_ID_FLOAT64 = 8
+  ARRAY_ID_FLOAT64 = 8,
+  ARRAY_ID_UINT8C = 9
 };
 
 
@@ -830,6 +838,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
       arrayType = kExternalDoubleArray;
       elementSize = 8;
       break;
+    case ARRAY_ID_UINT8C:
+      elementsKind = EXTERNAL_PIXEL_ELEMENTS;
+      arrayType = kExternalPixelArray;
+      elementSize = 1;
+      break;
     default:
       UNREACHABLE();
       return NULL;
@@ -860,11 +873,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
 
 
 #define TYPED_ARRAY_GETTER(getter, accessor) \
-  RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayGet##getter) { \
-    HandleScope scope(isolate);                                   \
-    ASSERT(args.length() == 1);                                   \
-    CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0);          \
-    return holder->accessor();                                    \
+  RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayGet##getter) {             \
+    HandleScope scope(isolate);                                               \
+    ASSERT(args.length() == 1);                                               \
+    CONVERT_ARG_HANDLE_CHECKED(Object, holder, 0);                            \
+    if (!holder->IsJSTypedArray())                                            \
+      return isolate->Throw(*isolate->factory()->NewTypeError(                \
+          "not_typed_array", HandleVector<Object>(NULL, 0)));                 \
+    Handle<JSTypedArray> typed_array(JSTypedArray::cast(*holder));            \
+    return typed_array->accessor();                                           \
   }
 
 TYPED_ARRAY_GETTER(Buffer, buffer)
@@ -874,6 +891,128 @@ TYPED_ARRAY_GETTER(Length, length)
 
 #undef TYPED_ARRAY_GETTER
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) {
+  HandleScope scope(isolate);
+  CONVERT_ARG_HANDLE_CHECKED(Object, target_obj, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Object, source_obj, 1);
+  CONVERT_ARG_HANDLE_CHECKED(Object, offset_obj, 2);
+
+  if (!target_obj->IsJSTypedArray())
+    return isolate->Throw(*isolate->factory()->NewTypeError(
+        "not_typed_array", HandleVector<Object>(NULL, 0)));
+
+  if (!source_obj->IsJSTypedArray())
+    return isolate->heap()->false_value();
+
+  Handle<JSTypedArray> target(JSTypedArray::cast(*target_obj));
+  Handle<JSTypedArray> source(JSTypedArray::cast(*source_obj));
+  size_t offset = NumberToSize(isolate, *offset_obj);
+  size_t target_length = NumberToSize(isolate, target->length());
+  size_t source_length = NumberToSize(isolate, source->length());
+  size_t target_byte_length = NumberToSize(isolate, target->byte_length());
+  size_t source_byte_length = NumberToSize(isolate, source->byte_length());
+  if (offset > target_length ||
+      offset + source_length > target_length ||
+      offset + source_length < offset)  // overflow
+    return isolate->Throw(*isolate->factory()->NewRangeError(
+          "typed_array_set_source_too_large", HandleVector<Object>(NULL, 0)));
+
+  Handle<JSArrayBuffer> target_buffer(JSArrayBuffer::cast(target->buffer()));
+  Handle<JSArrayBuffer> source_buffer(JSArrayBuffer::cast(source->buffer()));
+  size_t target_offset = NumberToSize(isolate, target->byte_offset());
+  size_t source_offset = NumberToSize(isolate, source->byte_offset());
+  uint8_t* target_base =
+      static_cast<uint8_t*>(target_buffer->backing_store()) + target_offset;
+  uint8_t* source_base =
+      static_cast<uint8_t*>(source_buffer->backing_store()) + source_offset;
+
+  // Typed arrays of the same type: use memmove.
+  if (target->type() == source->type()) {
+    memmove(target_base + offset * target->element_size(),
+        source_base, source_byte_length);
+    return isolate->heap()->true_value();
+  }
+
+  // Typed arrays of different types over the same backing store
+  if ((source_base <= target_base &&
+        source_base + source_byte_length > target_base) ||
+      (target_base <= source_base &&
+        target_base + target_byte_length > source_base)) {
+    size_t target_element_size = target->element_size();
+    size_t source_element_size = source->element_size();
+
+    size_t source_length = NumberToSize(isolate, source->length());
+
+    // Copy left part
+    size_t left_index;
+    {
+      // First un-mutated byte after the next write
+      uint8_t* target_ptr = target_base + (offset + 1) * target_element_size;
+      // Next read at source_ptr. We do not care for memory changing before
+      // source_ptr - we have already copied it.
+      uint8_t* source_ptr = source_base;
+      for (left_index = 0;
+           left_index < source_length && target_ptr <= source_ptr;
+           left_index++) {
+        Handle<Object> v = Object::GetElement(
+            source, static_cast<uint32_t>(left_index));
+        JSObject::SetElement(
+            target, static_cast<uint32_t>(offset + left_index), v,
+            NONE, kNonStrictMode);
+        target_ptr += target_element_size;
+        source_ptr += source_element_size;
+      }
+    }
+    // Copy right part
+    size_t right_index;
+    {
+      // First unmutated byte before the next write
+      uint8_t* target_ptr =
+        target_base + (offset + source_length - 1) * target_element_size;
+      // Next read before source_ptr. We do not care for memory changing after
+      // source_ptr - we have already copied it.
+      uint8_t* source_ptr =
+        source_base + source_length * source_element_size;
+      for (right_index = source_length - 1;
+           right_index >= left_index && target_ptr >= source_ptr;
+           right_index--) {
+        Handle<Object> v = Object::GetElement(
+            source, static_cast<uint32_t>(right_index));
+        JSObject::SetElement(
+            target, static_cast<uint32_t>(offset + right_index), v,
+            NONE, kNonStrictMode);
+        target_ptr -= target_element_size;
+        source_ptr -= source_element_size;
+      }
+    }
+    // There can be at most 8 entries left in the middle that need buffering
+    // (because the largest element_size is 8 times the smallest).
+    ASSERT((right_index + 1) - left_index <= 8);
+    Handle<Object> temp[8];
+    size_t idx;
+    for (idx = left_index; idx <= right_index; idx++) {
+      temp[idx - left_index] = Object::GetElement(
+          source, static_cast<uint32_t>(idx));
+    }
+    for (idx = left_index; idx <= right_index; idx++) {
+      JSObject::SetElement(
+          target, static_cast<uint32_t>(offset + idx), temp[idx-left_index],
+          NONE, kNonStrictMode);
+    }
+  } else {  // Non-overlapping typed arrays
+    for (size_t idx = 0; idx < source_length; idx++) {
+      Handle<Object> value = Object::GetElement(
+          source, static_cast<uint32_t>(idx));
+      JSObject::SetElement(
+          target, static_cast<uint32_t>(offset + idx), value,
+          NONE, kNonStrictMode);
+    }
+  }
+
+  return isolate->heap()->true_value();
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
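
The overlapping branch of Runtime_TypedArraySetFastCases above is the delicate part: when source and target have different element types over the same backing store, it copies from the left while each write stays behind the unread source bytes, then from the right, and buffers only the few elements squeezed in the middle (at most 8, the worst-case ratio of element sizes). A standalone sketch of the same three-phase idea for a double source aliasing a byte target, illustrative rather than the V8 code:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Convert n doubles from src into the possibly overlapping byte array dst.
    void ConvertingOverlappingCopy(const double* src, uint8_t* dst, size_t n) {
      const uint8_t* src_bytes = reinterpret_cast<const uint8_t*>(src);
      // Phase 1: left to right while the end of each write stays at or before
      // the first byte of the element being read.
      size_t left = 0;
      while (left < n &&
             dst + left + 1 <= src_bytes + left * sizeof(double)) {
        dst[left] = static_cast<uint8_t>(src[left]);
        ++left;
      }
      // Phase 2: right to left while each write lands at or after the end of
      // the element being read.
      size_t right = n;
      while (right > left &&
             dst + right - 1 >= src_bytes + right * sizeof(double)) {
        --right;
        dst[right] = static_cast<uint8_t>(src[right]);
      }
      // Phase 3: the untouched middle is small; buffer it, then write it out.
      std::vector<double> middle(src + left, src + right);
      for (size_t i = left; i < right; ++i) {
        dst[i] = static_cast<uint8_t>(middle[i - left]);
      }
    }
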
@@ -2180,6 +2319,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsGenerator) {
+  NoHandleAllocation ha(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_CHECKED(JSFunction, f, 0);
+  return isolate->heap()->ToBoolean(f->shared()->is_generator());
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) {
   NoHandleAllocation ha(isolate);
   ASSERT(args.length() == 1);
@@ -2291,8 +2438,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
 
     CallbacksDescriptor new_desc(name,
         instance_desc->GetValue(index),
-        static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
-        details.descriptor_index());
+        static_cast<PropertyAttributes>(details.attributes() | READ_ONLY));
 
     // Create a new map featuring the new field descriptors array.
     Map* new_map;
@@ -2431,57 +2577,53 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSGeneratorObject) {
   generator->set_receiver(frame->receiver());
   generator->set_continuation(0);
   generator->set_operand_stack(isolate->heap()->empty_fixed_array());
+  generator->set_stack_handler_index(-1);
 
   return generator;
 }
 
 
 RUNTIME_FUNCTION(MaybeObject*, Runtime_SuspendJSGeneratorObject) {
-  HandleScope scope(isolate);
+  NoHandleAllocation ha(isolate);
   ASSERT(args.length() == 1);
-  CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator_object, 0);
+  CONVERT_ARG_CHECKED(JSGeneratorObject, generator_object, 0);
 
   JavaScriptFrameIterator stack_iterator(isolate);
-  JavaScriptFrame *frame = stack_iterator.frame();
-  Handle<JSFunction> function(JSFunction::cast(frame->function()));
+  JavaScriptFrame* frame = stack_iterator.frame();
+  JSFunction* function = JSFunction::cast(frame->function());
   RUNTIME_ASSERT(function->shared()->is_generator());
+  ASSERT_EQ(function, generator_object->function());
 
-  intptr_t offset = frame->pc() - function->code()->instruction_start();
-  ASSERT(*function == generator_object->function());
-  ASSERT(offset > 0 && Smi::IsValid(offset));
-  generator_object->set_continuation(static_cast<int>(offset));
+  // We expect there to be at least two values on the operand stack: the return
+  // value of the yield expression, and the argument to this runtime call.
+  // Neither of those should be saved.
+  int operands_count = frame->ComputeOperandsCount();
+  ASSERT(operands_count >= 2);
+  operands_count -= 2;
 
-  // Generator functions force context allocation for locals, so Local0 points
-  // to the bottom of the operand stack.  Assume the stack grows down.
-  //
-  // TODO(wingo): Move these magical calculations to frames.h when the
-  // generators implementation has stabilized.
-  intptr_t stack_size_in_bytes =
-      (frame->fp() + JavaScriptFrameConstants::kLocal0Offset) -
-      (frame->sp() - kPointerSize);
-  ASSERT(IsAddressAligned(frame->fp(), kPointerSize));
-  ASSERT(IsAligned(stack_size_in_bytes, kPointerSize));
-  ASSERT(stack_size_in_bytes >= 0);
-  ASSERT(Smi::IsValid(stack_size_in_bytes));
-  intptr_t stack_size = stack_size_in_bytes >> kPointerSizeLog2;
-
-  // We expect there to be at least two values on the stack: the return value of
-  // the yield expression, and the argument to this runtime call.  Neither of
-  // those should be saved.
-  ASSERT(stack_size >= 2);
-  stack_size -= 2;
-
-  if (stack_size == 0) {
+  if (operands_count == 0) {
     ASSERT_EQ(generator_object->operand_stack(),
               isolate->heap()->empty_fixed_array());
+    ASSERT_EQ(generator_object->stack_handler_index(), -1);
     // If there are no operands on the stack, there shouldn't be a handler
     // active either.
     ASSERT(!frame->HasHandler());
   } else {
-    // TODO(wingo): Save the operand stack and/or the stack handlers.
-    UNIMPLEMENTED();
+    int stack_handler_index = -1;
+    MaybeObject* alloc = isolate->heap()->AllocateFixedArray(operands_count);
+    FixedArray* operand_stack;
+    if (!alloc->To(&operand_stack)) return alloc;
+    frame->SaveOperandStack(operand_stack, &stack_handler_index);
+    generator_object->set_operand_stack(operand_stack);
+    generator_object->set_stack_handler_index(stack_handler_index);
   }
 
+  // Set continuation down here to avoid side effects if the operand stack
+  // allocation fails.
+  intptr_t offset = frame->pc() - function->code()->instruction_start();
+  ASSERT(offset > 0 && Smi::IsValid(offset));
+  generator_object->set_continuation(static_cast<int>(offset));
+
   // It's possible for the context to be other than the initial context even if
   // there is no stack handler active.  For example, this is the case in the
   // body of a "with" statement.  Therefore we always save the context.
@@ -2501,13 +2643,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SuspendJSGeneratorObject) {
 // EmitGeneratorResume is called in any case, as it needs to reconstruct

 // the stack frame and make space for arguments and operands.
 RUNTIME_FUNCTION(MaybeObject*, Runtime_ResumeJSGeneratorObject) {
-  HandleScope scope(isolate);
+  NoHandleAllocation ha(isolate);
   ASSERT(args.length() == 3);
-  CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator_object, 0);
-  CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
+  CONVERT_ARG_CHECKED(JSGeneratorObject, generator_object, 0);
+  CONVERT_ARG_CHECKED(Object, value, 1);
   CONVERT_SMI_ARG_CHECKED(resume_mode_int, 2);
   JavaScriptFrameIterator stack_iterator(isolate);
-  JavaScriptFrame *frame = stack_iterator.frame();
+  JavaScriptFrame* frame = stack_iterator.frame();
 
   ASSERT_EQ(frame->function(), generator_object->function());
 
@@ -2520,18 +2662,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ResumeJSGeneratorObject) {
   frame->set_pc(pc + offset);
   generator_object->set_continuation(JSGeneratorObject::kGeneratorExecuting);
 
-  if (generator_object->operand_stack()->length() != 0) {
-    // TODO(wingo): Copy operand stack.  Rewind handlers.
-    UNIMPLEMENTED();
+  FixedArray* operand_stack = generator_object->operand_stack();
+  int operands_count = operand_stack->length();
+  if (operands_count != 0) {
+    frame->RestoreOperandStack(operand_stack,
+                               generator_object->stack_handler_index());
+    generator_object->set_operand_stack(isolate->heap()->empty_fixed_array());
+    generator_object->set_stack_handler_index(-1);
   }
 
   JSGeneratorObject::ResumeMode resume_mode =
       static_cast<JSGeneratorObject::ResumeMode>(resume_mode_int);
   switch (resume_mode) {
     case JSGeneratorObject::SEND:
-      return *value;
+      return value;
     case JSGeneratorObject::THROW:
-      return isolate->Throw(*value);
+      return isolate->Throw(value);
   }
 
   UNREACHABLE();
@@ -2544,7 +2690,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ThrowGeneratorStateError) {
   ASSERT(args.length() == 1);
   CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, generator, 0);
   int continuation = generator->continuation();
-  const char *message = continuation == JSGeneratorObject::kGeneratorClosed ?
+  const char* message = continuation == JSGeneratorObject::kGeneratorClosed ?
       "generator_finished" : "generator_running";
   Vector< Handle<Object> > argv = HandleVector<Object>(NULL, 0);
   Handle<Object> error = isolate->factory()->NewError(message, argv);
@@ -3339,8 +3485,8 @@ MUST_USE_RESULT static MaybeObject* StringReplaceGlobalRegExpWithString(
 
   // Shortcut for simple non-regexp global replacements
   if (regexp->TypeTag() == JSRegExp::ATOM && simple_replace) {
-    if (subject->IsOneByteConvertible() &&
-        replacement->IsOneByteConvertible()) {
+    if (subject->HasOnlyOneByteChars() &&
+        replacement->HasOnlyOneByteChars()) {
       return StringReplaceGlobalAtomRegExpWithString<SeqOneByteString>(
           isolate, subject, regexp, replacement, last_match_info);
     } else {
@@ -3522,7 +3668,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceGlobalRegExpWithString) {
   if (!subject->IsFlat()) subject = FlattenGetString(subject);
 
   if (replacement->length() == 0) {
-    if (subject->IsOneByteConvertible()) {
+    if (subject->HasOnlyOneByteChars()) {
       return StringReplaceGlobalRegExpWithEmptyString<SeqOneByteString>(
           isolate, subject, regexp, last_match_info);
     } else {
@@ -4327,7 +4473,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
         KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache();
         int offset = keyed_lookup_cache->Lookup(receiver_map, key);
         if (offset != -1) {
-          Object* value = receiver->FastPropertyAt(offset);
+          // Doubles are not cached, so raw read the value.
+          Object* value = receiver->RawFastPropertyAt(offset);
           return value->IsTheHole()
               ? isolate->heap()->undefined_value()
               : value;
@@ -4338,8 +4485,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
         receiver->LocalLookup(key, &result);
         if (result.IsField()) {
           int offset = result.GetFieldIndex().field_index();
-          keyed_lookup_cache->Update(receiver_map, key, offset);
-          return receiver->FastPropertyAt(offset);
+          // Do not track double fields in the keyed lookup cache. Reading
+          // double values requires boxing.
+          if (!FLAG_track_double_fields ||
+              !result.representation().IsDouble()) {
+            keyed_lookup_cache->Update(receiver_map, key, offset);
+          }
+          return receiver->FastPropertyAt(result.representation(), offset);
         }
       } else {
         // Attempt dictionary lookup.
@@ -4515,6 +4667,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDataProperty) {
       return lookup.holder()->GetNormalizedProperty(&lookup);
     case FIELD:
       return lookup.holder()->FastPropertyAt(
+          lookup.representation(),
           lookup.GetFieldIndex().field_index());
     case CONSTANT_FUNCTION:
       return lookup.GetConstantFunction();
@@ -6113,7 +6266,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToStringSkipCache) {
   Object* number = args[0];
   RUNTIME_ASSERT(number->IsNumber());
 
-  return isolate->heap()->NumberToString(number, false);
+  return isolate->heap()->NumberToString(
+      number, false, isolate->heap()->GetPretenureMode());
 }
 
 
@@ -6381,7 +6535,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
     if (first->IsString()) return first;
   }
 
-  bool one_byte = special->IsOneByteConvertible();
+  bool one_byte = special->HasOnlyOneByteChars();
   int position = 0;
   for (int i = 0; i < array_length; i++) {
     int increment = 0;
@@ -6422,7 +6576,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
       String* element = String::cast(elt);
       int element_length = element->length();
       increment = element_length;
-      if (one_byte && !element->IsOneByteConvertible()) {
+      if (one_byte && !element->HasOnlyOneByteChars()) {
         one_byte = false;
       }
     } else {
@@ -7570,20 +7724,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) {
 }
 
 
-static void TrySettingInlineConstructStub(Isolate* isolate,
-                                          Handle<JSFunction> function) {
-  Handle<Object> prototype = isolate->factory()->null_value();
-  if (function->has_instance_prototype()) {
-    prototype = Handle<Object>(function->instance_prototype(), isolate);
-  }
-  if (function->shared()->CanGenerateInlineConstructor(*prototype)) {
-    ConstructStubCompiler compiler(isolate);
-    Handle<Code> code = compiler.CompileConstructStub(function);
-    function->shared()->set_construct_stub(*code);
-  }
-}
-
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
@@ -7647,13 +7787,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
     shared->CompleteInobjectSlackTracking();
   }
 
-  bool first_allocation = !shared->live_objects_may_exist();
   Handle<JSObject> result = isolate->factory()->NewJSObject(function);
   RETURN_IF_EMPTY_HANDLE(isolate, result);
-  // Delay setting the stub if inobject slack tracking is in progress.
-  if (first_allocation && !shared->IsInobjectSlackTrackingInProgress()) {
-    TrySettingInlineConstructStub(isolate, function);
-  }
 
   isolate->counters()->constructed_objects()->Increment();
   isolate->counters()->constructed_objects_runtime()->Increment();
@@ -7668,7 +7803,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
 
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
   function->shared()->CompleteInobjectSlackTracking();
-  TrySettingInlineConstructStub(isolate, function);
 
   return isolate->heap()->undefined_value();
 }
@@ -10050,14 +10184,18 @@ static MaybeObject* DebugLookupResultValue(Heap* heap,
         return heap->undefined_value();
       }
       return value;
-    case FIELD:
-      value =
+    case FIELD: {
+      Object* value;
+      MaybeObject* maybe_value =
           JSObject::cast(result->holder())->FastPropertyAt(
+              result->representation(),
               result->GetFieldIndex().field_index());
+      if (!maybe_value->To(&value)) return maybe_value;
       if (value->IsTheHole()) {
         return heap->undefined_value();
       }
       return value;
+    }
     case CONSTANT_FUNCTION:
       return result->GetConstantFunction();
     case CALLBACKS: {
@@ -10144,7 +10282,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
       }
     }
     details->set(0, element_or_char);
-    details->set(1, PropertyDetails(NONE, NORMAL).AsSmi());
+    details->set(
+        1, PropertyDetails(NONE, NORMAL, Representation::None()).AsSmi());
     return *isolate->factory()->NewJSArrayWithElements(details);
   }
 
@@ -13333,7 +13472,7 @@ MaybeObject* Runtime::InitializeIntrinsicFunctionNames(Heap* heap,
     { MaybeObject* maybe_dictionary = name_dictionary->Add(
           String::cast(name_string),
           Smi::FromInt(i),
-          PropertyDetails(NONE, NORMAL));
+          PropertyDetails(NONE, NORMAL, Representation::None()));
       if (!maybe_dictionary->ToObject(&dictionary)) {
         // Non-recoverable failure.  Calling code must restart heap
         // initialization.
index 2a102e1..c91fee6 100644 (file)
@@ -231,6 +231,7 @@ namespace internal {
   F(FunctionSetName, 2, 1) \
   F(FunctionNameShouldPrintAsAnonymous, 1, 1) \
   F(FunctionMarkNameShouldPrintAsAnonymous, 1, 1) \
+  F(FunctionIsGenerator, 1, 1) \
   F(FunctionBindArguments, 4, 1) \
   F(BoundFunctionGetBindings, 1, 1) \
   F(FunctionRemovePrototype, 1, 1) \
@@ -360,6 +361,7 @@ namespace internal {
   F(TypedArrayGetByteLength, 1, 1) \
   F(TypedArrayGetByteOffset, 1, 1) \
   F(TypedArrayGetLength, 1, 1) \
+  F(TypedArraySetFastCases, 3, 1) \
   \
   /* Statements */ \
   F(NewClosure, 3, 1) \
index e271470..da186b6 100644 (file)
@@ -26,7 +26,8 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #if defined(__linux__) || defined(__FreeBSD__) || defined(__OpenBSD__) \
-    || defined(__NetBSD__) || defined(__sun) || defined(__ANDROID__)
+    || defined(__NetBSD__) || defined(__sun) || defined(__ANDROID__) \
+    || defined(__native_client__)
 
 #define USE_SIGNALS
 
index 5ad970a..208dc76 100644 (file)
@@ -727,8 +727,9 @@ int Scope::ContextChainLength(Scope* scope) {
   for (Scope* s = this; s != scope; s = s->outer_scope_) {
     ASSERT(s != NULL);  // scope must be in the scope chain
     if (s->is_with_scope() || s->num_heap_slots() > 0) n++;
-    // Catch scopes always have heap slots.
+    // Catch and module scopes always have heap slots.
     ASSERT(!s->is_catch_scope() || s->num_heap_slots() > 0);
+    ASSERT(!s->is_module_scope() || s->num_heap_slots() > 0);
   }
   return n;
 }
index d4f31c1..3e70edc 100644 (file)
@@ -558,6 +558,11 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
       UNCLASSIFIED,
       58,
       "Runtime::AllocateInOldPointerSpace");
+  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
+      address(),
+      UNCLASSIFIED,
+      59,
+      "Heap::NewSpaceAllocationLimitAddress");
 
   // Add a small set of deopt entry addresses to encoder without generating the
   // deopt table code, which isn't possible at deserialization time.
@@ -568,7 +573,7 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
         entry,
         Deoptimizer::LAZY,
         Deoptimizer::CALCULATE_ENTRY_ADDRESS);
-    Add(address, LAZY_DEOPTIMIZATION, 59 + entry, "lazy_deopt");
+    Add(address, LAZY_DEOPTIMIZATION, 60 + entry, "lazy_deopt");
   }
 }
 
index df1c3ef..099ad93 100644 (file)
@@ -2304,14 +2304,6 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
   // Don't free list allocate if there is linear space available.
   ASSERT(owner_->limit() - owner_->top() < size_in_bytes);
 
-  int new_node_size = 0;
-  FreeListNode* new_node = FindNodeFor(size_in_bytes, &new_node_size);
-  if (new_node == NULL) return NULL;
-
-
-  int bytes_left = new_node_size - size_in_bytes;
-  ASSERT(bytes_left >= 0);
-
   int old_linear_size = static_cast<int>(owner_->limit() - owner_->top());
   // Mark the old linear allocation area with a free space map so it can be
   // skipped when scanning the heap.  This also puts it back in the free list
@@ -2321,6 +2313,16 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
   owner_->heap()->incremental_marking()->OldSpaceStep(
       size_in_bytes - old_linear_size);
 
+  int new_node_size = 0;
+  FreeListNode* new_node = FindNodeFor(size_in_bytes, &new_node_size);
+  if (new_node == NULL) {
+    owner_->SetTop(NULL, NULL);
+    return NULL;
+  }
+
+  int bytes_left = new_node_size - size_in_bytes;
+  ASSERT(bytes_left >= 0);
+
 #ifdef DEBUG
   for (int i = 0; i < size_in_bytes / kPointerSize; i++) {
     reinterpret_cast<Object**>(new_node->address())[i] =
index 61eec0d..ebe1b5b 100644 (file)
@@ -350,9 +350,8 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
   }
   int real_size = map->NumberOfOwnDescriptors();
   DescriptorArray* descs = map->instance_descriptors();
-  for (int i = 0; i < descs->number_of_descriptors(); i++) {
+  for (int i = 0; i < real_size; i++) {
     PropertyDetails details = descs->GetDetails(i);
-    if (details.descriptor_index() > real_size) continue;
     if (details.type() == FIELD) {
       Object* key = descs->GetKey(i);
       if (key->IsString() || key->IsNumber()) {
@@ -368,7 +367,7 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
           key->ShortPrint();
         }
         Add(": ");
-        Object* value = js_object->FastPropertyAt(descs->GetFieldIndex(i));
+        Object* value = js_object->RawFastPropertyAt(descs->GetFieldIndex(i));
         Add("%o\n", value);
       }
     }
index f928cf6..f43c9ac 100644 (file)
@@ -221,11 +221,12 @@ Handle<Code> StubCache::ComputeLoadNonexistent(Handle<Name> name,
 Handle<Code> StubCache::ComputeLoadField(Handle<Name> name,
                                          Handle<JSObject> receiver,
                                          Handle<JSObject> holder,
-                                         PropertyIndex field) {
+                                         PropertyIndex field,
+                                         Representation representation) {
   if (receiver.is_identical_to(holder)) {
-    LoadFieldStub stub(LoadStubCompiler::receiver(),
-                       field.is_inobject(holder),
-                       field.translate(holder));
+    LoadFieldStub stub(field.is_inobject(holder),
+                       field.translate(holder),
+                       representation);
     return stub.GetCode(isolate());
   }
 
@@ -236,7 +237,7 @@ Handle<Code> StubCache::ComputeLoadField(Handle<Name> name,
 
   LoadStubCompiler compiler(isolate_);
   Handle<Code> handler =
-      compiler.CompileLoadField(receiver, holder, name, field);
+      compiler.CompileLoadField(receiver, holder, name, field, representation);
   JSObject::UpdateMapCodeCache(stub_holder, name, handler);
   return handler;
 }
@@ -337,11 +338,12 @@ Handle<Code> StubCache::ComputeLoadGlobal(Handle<Name> name,
 Handle<Code> StubCache::ComputeKeyedLoadField(Handle<Name> name,
                                               Handle<JSObject> receiver,
                                               Handle<JSObject> holder,
-                                              PropertyIndex field) {
+                                              PropertyIndex field,
+                                              Representation representation) {
   if (receiver.is_identical_to(holder)) {
-    LoadFieldStub stub(KeyedLoadStubCompiler::receiver(),
-                       field.is_inobject(holder),
-                       field.translate(holder));
+    KeyedLoadFieldStub stub(field.is_inobject(holder),
+                            field.translate(holder),
+                            representation);
     return stub.GetCode(isolate());
   }
 
@@ -352,7 +354,7 @@ Handle<Code> StubCache::ComputeKeyedLoadField(Handle<Name> name,
 
   KeyedLoadStubCompiler compiler(isolate_);
   Handle<Code> handler =
-      compiler.CompileLoadField(receiver, holder, name, field);
+      compiler.CompileLoadField(receiver, holder, name, field, representation);
   JSObject::UpdateMapCodeCache(stub_holder, name, handler);
   return handler;
 }
@@ -954,10 +956,11 @@ Handle<Code> StubCache::ComputeLoadElementPolymorphic(
 
 Handle<Code> StubCache::ComputePolymorphicIC(MapHandleList* receiver_maps,
                                              CodeHandleList* handlers,
+                                             int number_of_valid_maps,
                                              Handle<Name> name) {
   LoadStubCompiler ic_compiler(isolate_);
-  Code::StubType type = handlers->length() == 1 ? handlers->at(0)->type()
-                                                : Code::NORMAL;
+  Code::StubType type = number_of_valid_maps == 1 ? handlers->at(0)->type()
+                                                  : Code::NORMAL;
   Handle<Code> ic = ic_compiler.CompilePolymorphicIC(
       receiver_maps, handlers, name, type, PROPERTY);
   return ic;
@@ -1042,45 +1045,40 @@ void StubCache::Clear() {
 
 
 void StubCache::CollectMatchingMaps(SmallMapList* types,
-                                    Name* name,
+                                    Handle<Name> name,
                                     Code::Flags flags,
                                     Handle<Context> native_context,
                                     Zone* zone) {
   for (int i = 0; i < kPrimaryTableSize; i++) {
-    if (primary_[i].key == name) {
+    if (primary_[i].key == *name) {
       Map* map = primary_[i].map;
       // Map can be NULL, if the stub is constant function call
       // with a primitive receiver.
       if (map == NULL) continue;
 
-      int offset = PrimaryOffset(name, flags, map);
+      int offset = PrimaryOffset(*name, flags, map);
       if (entry(primary_, offset) == &primary_[i] &&
           !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
-        types->Add(Handle<Map>(map), zone);
+        types->AddMapIfMissing(Handle<Map>(map), zone);
       }
     }
   }
 
   for (int i = 0; i < kSecondaryTableSize; i++) {
-    if (secondary_[i].key == name) {
+    if (secondary_[i].key == *name) {
       Map* map = secondary_[i].map;
       // Map can be NULL, if the stub is constant function call
       // with a primitive receiver.
       if (map == NULL) continue;
 
       // Lookup in primary table and skip duplicates.
-      int primary_offset = PrimaryOffset(name, flags, map);
-      Entry* primary_entry = entry(primary_, primary_offset);
-      if (primary_entry->key == name) {
-        Map* primary_map = primary_entry->map;
-        if (map == primary_map) continue;
-      }
+      int primary_offset = PrimaryOffset(*name, flags, map);
 
       // Lookup in secondary table and add matches.
-      int offset = SecondaryOffset(name, flags, primary_offset);
+      int offset = SecondaryOffset(*name, flags, primary_offset);
       if (entry(secondary_, offset) == &secondary_[i] &&
           !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
-        types->Add(Handle<Map>(map), zone);
+        types->AddMapIfMissing(Handle<Map>(map), zone);
       }
     }
   }
@@ -1500,16 +1498,17 @@ Register BaseLoadStubCompiler::HandlerFrontend(Handle<JSObject> object,
 }
 
 
-Handle<Code> BaseLoadStubCompiler::CompileLoadField(Handle<JSObject> object,
-                                                    Handle<JSObject> holder,
-                                                    Handle<Name> name,
-                                                    PropertyIndex field) {
+Handle<Code> BaseLoadStubCompiler::CompileLoadField(
+    Handle<JSObject> object,
+    Handle<JSObject> holder,
+    Handle<Name> name,
+    PropertyIndex field,
+    Representation representation) {
   Label miss;
 
   Register reg = HandlerFrontendHeader(object, receiver(), holder, name, &miss);
 
-  LoadFieldStub stub(reg, field.is_inobject(holder), field.translate(holder));
-  GenerateTailCall(masm(), stub.GetCode(isolate()));
+  GenerateLoadField(reg, holder, field, representation);
 
   __ bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
@@ -1519,19 +1518,6 @@ Handle<Code> BaseLoadStubCompiler::CompileLoadField(Handle<JSObject> object,
 }
 
 
-// Load a fast property out of a holder object (src). In-object properties
-// are loaded directly otherwise the property is loaded from the properties
-// fixed array.
-void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
-                                            Register dst,
-                                            Register src,
-                                            Handle<JSObject> holder,
-                                            PropertyIndex index) {
-  DoGenerateFastPropertyLoad(
-      masm, dst, src, index.is_inobject(holder), index.translate(holder));
-}
-
-
 Handle<Code> BaseLoadStubCompiler::CompileLoadConstant(
     Handle<JSObject> object,
     Handle<JSObject> holder,
@@ -1594,17 +1580,16 @@ void BaseLoadStubCompiler::GenerateLoadPostInterceptor(
   if (lookup->IsField()) {
     PropertyIndex field = lookup->GetFieldIndex();
     if (interceptor_holder.is_identical_to(holder)) {
-      LoadFieldStub stub(interceptor_reg,
-                         field.is_inobject(holder),
-                         field.translate(holder));
-      GenerateTailCall(masm(), stub.GetCode(isolate()));
+      GenerateLoadField(
+          interceptor_reg, holder, field, lookup->representation());
     } else {
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       Register reg = HandlerFrontend(
           interceptor_holder, interceptor_reg, holder, name, &success);
       __ bind(&success);
-      GenerateLoadField(reg, holder, field);
+      GenerateLoadField(
+          reg, holder, field, lookup->representation());
     }
   } else {
     // We found CALLBACKS property in prototype chain of interceptor's
@@ -1656,7 +1641,7 @@ Handle<Code> BaseStoreStubCompiler::CompileStoreTransition(
     LookupResult* lookup,
     Handle<Map> transition,
     Handle<Name> name) {
-  Label miss, miss_restore_name;
+  Label miss, miss_restore_name, slow;
 
   GenerateNameCheck(name, this->name(), &miss);
 
@@ -1666,15 +1651,19 @@ Handle<Code> BaseStoreStubCompiler::CompileStoreTransition(
                           transition,
                           name,
                           receiver(), this->name(), value(),
-                          scratch1(), scratch2(),
+                          scratch1(), scratch2(), scratch3(),
                           &miss,
-                          &miss_restore_name);
+                          &miss_restore_name,
+                          &slow);
 
   // Handle store cache miss.
   GenerateRestoreName(masm(), &miss_restore_name, name);
   __ bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
+  GenerateRestoreName(masm(), &slow, name);
+  TailCallBuiltin(masm(), SlowBuiltin(kind()));
+
   // Return the generated code.
   return GetICCode(kind(), Code::MAP_TRANSITION, name);
 }
index dbb5e90..cbaeace 100644
@@ -112,7 +112,8 @@ class StubCache {
   Handle<Code> ComputeLoadField(Handle<Name> name,
                                 Handle<JSObject> object,
                                 Handle<JSObject> holder,
-                                PropertyIndex field_index);
+                                PropertyIndex field_index,
+                                Representation representation);
 
   Handle<Code> ComputeLoadCallback(Handle<Name> name,
                                    Handle<JSObject> object,
@@ -147,7 +148,8 @@ class StubCache {
   Handle<Code> ComputeKeyedLoadField(Handle<Name> name,
                                      Handle<JSObject> object,
                                      Handle<JSObject> holder,
-                                     PropertyIndex field_index);
+                                     PropertyIndex field_index,
+                                     Representation representation);
 
   Handle<Code> ComputeKeyedLoadCallback(
       Handle<Name> name,
@@ -291,6 +293,7 @@ class StubCache {
 
   Handle<Code> ComputePolymorphicIC(MapHandleList* receiver_maps,
                                     CodeHandleList* handlers,
+                                    int number_of_valid_maps,
                                     Handle<Name> name);
 
   // Finds the Code object stored in the Heap::non_monomorphic_cache().
@@ -310,7 +313,7 @@ class StubCache {
 
   // Collect all maps that match the name and flags.
   void CollectMatchingMaps(SmallMapList* types,
-                           Name* name,
+                           Handle<Name> name,
                            Code::Flags flags,
                            Handle<Context> native_context,
                            Zone* zone);
@@ -505,13 +508,9 @@ class StubCompiler BASE_EMBEDDED {
   static void GenerateFastPropertyLoad(MacroAssembler* masm,
                                        Register dst,
                                        Register src,
-                                       Handle<JSObject> holder,
-                                       PropertyIndex index);
-  static void DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                         Register dst,
-                                         Register src,
-                                         bool inobject,
-                                         int index);
+                                       bool inobject,
+                                       int index,
+                                       Representation representation);
 
   static void GenerateLoadArrayLength(MacroAssembler* masm,
                                       Register receiver,
@@ -541,8 +540,10 @@ class StubCompiler BASE_EMBEDDED {
                                Register value_reg,
                                Register scratch1,
                                Register scratch2,
+                               Register scratch3,
                                Label* miss_label,
-                               Label* miss_restore_name);
+                               Label* miss_restore_name,
+                               Label* slow);
 
   void GenerateStoreField(MacroAssembler* masm,
                           Handle<JSObject> object,
@@ -564,6 +565,14 @@ class StubCompiler BASE_EMBEDDED {
     }
     return Builtins::kLoadIC_Miss;
   }
+  static Builtins::Name SlowBuiltin(Code::Kind kind) {
+    switch (kind) {
+      case Code::STORE_IC: return Builtins::kStoreIC_Slow;
+      case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
+      default: UNREACHABLE();
+    }
+    return Builtins::kStoreIC_Slow;
+  }
   static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);
 
   // Generates code that verifies that the property holder has not changed
@@ -642,7 +651,8 @@ class BaseLoadStubCompiler: public StubCompiler {
   Handle<Code> CompileLoadField(Handle<JSObject> object,
                                 Handle<JSObject> holder,
                                 Handle<Name> name,
-                                PropertyIndex index);
+                                PropertyIndex index,
+                                Representation representation);
 
   Handle<Code> CompileLoadCallback(Handle<JSObject> object,
                                    Handle<JSObject> holder,
@@ -694,7 +704,8 @@ class BaseLoadStubCompiler: public StubCompiler {
 
   void GenerateLoadField(Register reg,
                          Handle<JSObject> holder,
-                         PropertyIndex index);
+                         PropertyIndex field,
+                         Representation representation);
   void GenerateLoadConstant(Handle<JSFunction> value);
   void GenerateLoadCallback(Register reg,
                             Handle<ExecutableAccessorInfo> callback);
index 36c11e6..29ea3ea 100644
@@ -60,7 +60,7 @@
 
 namespace vTune {
 
-void InitilizeVtuneForV8();
+void InitializeVtuneForV8();
 
 }  // namespace vTune
 
index 6ff595f..d3f7a68 100644
@@ -194,8 +194,8 @@ void VTUNEJITInterface::event_handler(const v8::JitCodeEvent* event) {
          if ((*script->GetScriptName())->IsString()) {
             Handle<String> script_name =
                 Handle<String>(String::Cast(*script->GetScriptName()));
-            temp_file_name = new char[script_name->Length() + 1];
-            script_name->WriteAscii(temp_file_name);
+            temp_file_name = new char[script_name->Utf8Length() + 1];
+            script_name->WriteUtf8(temp_file_name);
             jmethod.source_file_name = temp_file_name;
           }
 
@@ -267,7 +267,7 @@ void VTUNEJITInterface::event_handler(const v8::JitCodeEvent* event) {
 
 }  // namespace internal
 
-void InitilizeVtuneForV8() {
+void InitializeVtuneForV8() {
   if (v8::V8::Initialize()) {
     v8::V8::SetFlagsFromString("--nocompact_code_space",
                               (int)strlen("--nocompact_code_space"));
index adbe6a1..df53178 100644
@@ -135,6 +135,7 @@ MaybeObject* TransitionArray::CopyInsert(Name* name, Map* target) {
       }
     }
     result->NoIncrementalWriteBarrierSet(insertion_index, name, target);
+    result->set_back_pointer_storage(back_pointer_storage());
     return result;
   }
 
index 3bc509a..1757bee 100644
@@ -67,7 +67,7 @@ TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
       isolate_(isolate),
       zone_(zone) {
   BuildDictionary(code);
-  ASSERT(reinterpret_cast<Address>(*dictionary_.location()) != kHandleZapValue);
+  ASSERT(dictionary_->IsDictionary());
 }
 
 
@@ -539,15 +539,6 @@ TypeInfo TypeFeedbackOracle::IncrementType(CountOperation* expr) {
 }
 
 
-static void AddMapIfMissing(Handle<Map> map, SmallMapList* list,
-                            Zone* zone) {
-  for (int i = 0; i < list->length(); ++i) {
-    if (list->at(i).is_identical_to(map)) return;
-  }
-  list->Add(map, zone);
-}
-
-
 void TypeFeedbackOracle::CollectPolymorphicMaps(Handle<Code> code,
                                                 SmallMapList* types) {
   MapHandleList maps;
@@ -556,7 +547,7 @@ void TypeFeedbackOracle::CollectPolymorphicMaps(Handle<Code> code,
   for (int i = 0; i < maps.length(); i++) {
     Handle<Map> map(maps.at(i));
     if (!CanRetainOtherContext(*map, *native_context_)) {
-      AddMapIfMissing(map, types, zone());
+      types->AddMapIfMissing(map, zone());
     }
   }
 }
@@ -574,7 +565,7 @@ void TypeFeedbackOracle::CollectReceiverTypes(TypeFeedbackId ast_id,
     // we need a generic store (or load) here.
     ASSERT(Handle<Code>::cast(object)->ic_state() == GENERIC);
   } else if (object->IsMap()) {
-    types->Add(Handle<Map>::cast(object), zone());
+    types->AddMapIfMissing(Handle<Map>::cast(object), zone());
   } else if (Handle<Code>::cast(object)->ic_state() == POLYMORPHIC) {
     CollectPolymorphicMaps(Handle<Code>::cast(object), types);
   } else if (FLAG_collect_megamorphic_maps_from_stub_cache &&
@@ -582,7 +573,7 @@ void TypeFeedbackOracle::CollectReceiverTypes(TypeFeedbackId ast_id,
     types->Reserve(4, zone());
     ASSERT(object->IsCode());
     isolate_->stub_cache()->CollectMatchingMaps(types,
-                                                *name,
+                                                name,
                                                 flags,
                                                 native_context_,
                                                 zone());
index 24fcf1e..4fade00 100644
 // in runtime.js:
 // var $Array = global.Array;
 
-var $ArrayBuffer = global.__ArrayBuffer;
 
-// -------------------------------------------------------------------
 
-function ArrayBufferConstructor(byteLength) { // length = 1
-  if (%_IsConstructCall()) {
-    var l = TO_POSITIVE_INTEGER(byteLength);
-    %ArrayBufferInitialize(this, l);
-  } else {
-    return new $ArrayBuffer(byteLength);
-  }
-}
+// --------------- Typed Arrays ---------------------
 
-function ArrayBufferGetByteLength() {
-  if (!IS_ARRAYBUFFER(this)) {
-    throw MakeTypeError('incompatible_method_receiver',
-                        ['ArrayBuffer.prototype.byteLength', this]);
-  }
-  return %ArrayBufferGetByteLength(this);
-}
+function CreateTypedArrayConstructor(name, elementSize, arrayId, constructor) {
+  function ConstructByArrayBuffer(obj, buffer, byteOffset, length) {
+    var offset = IS_UNDEFINED(byteOffset) ? 0 : TO_POSITIVE_INTEGER(byteOffset);
 
-// ES6 Draft 15.13.5.5.3
-function ArrayBufferSlice(start, end) {
-  if (!IS_ARRAYBUFFER(this)) {
-    throw MakeTypeError('incompatible_method_receiver',
-                        ['ArrayBuffer.prototype.slice', this]);
-  }
+    if (offset % elementSize !== 0) {
+      throw MakeRangeError("invalid_typed_array_alignment",
+          "start offset", name, elementSize);
+    }
+    var bufferByteLength = %ArrayBufferGetByteLength(buffer);
+    if (offset > bufferByteLength) {
+      throw MakeRangeError("invalid_typed_array_offset");
+    }
 
-  var relativeStart = TO_INTEGER(start);
-  var first;
-  if (relativeStart < 0) {
-    first = MathMax(this.byteLength + relativeStart, 0);
-  } else {
-    first = MathMin(relativeStart, this.byteLength);
-  }
-  var relativeEnd = IS_UNDEFINED(end) ? this.byteLength : TO_INTEGER(end);
-  var fin;
-  if (relativeEnd < 0) {
-    fin = MathMax(this.byteLength + relativeEnd, 0);
-  } else {
-    fin = MathMin(relativeEnd, this.byteLength);
+    var newByteLength;
+    var newLength;
+    if (IS_UNDEFINED(length)) {
+      if (bufferByteLength % elementSize !== 0) {
+        throw MakeRangeError("invalid_typed_array_alignment",
+          "byte length", name, elementSize);
+      }
+      newByteLength = bufferByteLength - offset;
+      newLength = newByteLength / elementSize;
+    } else {
+      var newLength = TO_POSITIVE_INTEGER(length);
+      newByteLength = newLength * elementSize;
+    }
+    if (offset + newByteLength > bufferByteLength) {
+      throw MakeRangeError("invalid_typed_array_length");
+    }
+    %TypedArrayInitialize(obj, arrayId, buffer, offset, newByteLength);
   }
 
-  var newLen = fin - first;
-  // TODO(dslomov): implement inheritance
-  var result = new $ArrayBuffer(newLen);
-
-  %ArrayBufferSliceImpl(this, result, first);
-  return result;
-}
+  function ConstructByLength(obj, length) {
+    var l = IS_UNDEFINED(length) ? 0 : TO_POSITIVE_INTEGER(length);
+    var byteLength = l * elementSize;
+    var buffer = new global.ArrayBuffer(byteLength);
+    %TypedArrayInitialize(obj, arrayId, buffer, 0, byteLength);
+  }
 
-// --------------- Typed Arrays ---------------------
+  function ConstructByArrayLike(obj, arrayLike) {
+    var length = arrayLike.length;
+    var l =  IS_UNDEFINED(length) ? 0 : TO_POSITIVE_INTEGER(length);
+    var byteLength = l * elementSize;
+    var buffer = new $ArrayBuffer(byteLength);
+    %TypedArrayInitialize(obj, arrayId, buffer, 0, byteLength);
+    for (var i = 0; i < l; i++) {
+      obj[i] = arrayLike[i];
+    }
+  }
 
-function CreateTypedArrayConstructor(name, elementSize, arrayId, constructor) {
-  return function (buffer, byteOffset, length) {
+  return function (arg1, arg2, arg3) {
     if (%_IsConstructCall()) {
-      if (!IS_ARRAYBUFFER(buffer)) {
-        throw MakeTypeError("Type error!");
-      }
-      var offset = IS_UNDEFINED(byteOffset)
-        ? 0 : offset = TO_POSITIVE_INTEGER(byteOffset);
-
-      if (offset % elementSize !== 0) {
-        throw MakeRangeError("invalid_typed_array_alignment",
-            "start offset", name, elementSize);
-      }
-      var bufferByteLength = %ArrayBufferGetByteLength(buffer);
-      if (offset >= bufferByteLength) {
-        throw MakeRangeError("invalid_typed_array_offset");
-      }
-
-      var newByteLength;
-      var newLength;
-      if (IS_UNDEFINED(length)) {
-        if (bufferByteLength % elementSize !== 0) {
-          throw MakeRangeError("invalid_typed_array_alignment",
-            "byte length", name, elementSize);
-        }
-        newByteLength = bufferByteLength - offset;
-        newLength = newByteLength / elementSize;
+      if (IS_ARRAYBUFFER(arg1)) {
+        ConstructByArrayBuffer(this, arg1, arg2, arg3);
+      } else if (IS_NUMBER(arg1) || IS_STRING(arg1) || IS_BOOLEAN(arg1)) {
+        ConstructByLength(this, arg1);
+      } else if (!IS_UNDEFINED(arg1)){
+        ConstructByArrayLike(this, arg1);
       } else {
-        var newLength = TO_POSITIVE_INTEGER(length);
-        newByteLength = newLength * elementSize;
-      }
-      if (newByteLength > bufferByteLength) {
-        throw MakeRangeError("invalid_typed_array_length");
+        throw MakeTypeError("parameterless_typed_array_constr", name);
       }
-      %TypedArrayInitialize(this, arrayId, buffer, offset, newByteLength);
     } else {
-      return new constructor(buffer, byteOffset, length);
+      return new constructor(arg1, arg2, arg3);
     }
   }
 }
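
The rewritten constructor above dispatches on the type of its first argument: an ArrayBuffer gives a view (with alignment and bounds checks), a primitive gives a freshly allocated buffer, and any other non-undefined value is treated as an array-like to copy from. A rough sketch of the observable behavior (illustrative only, using Int32Array with its 4-byte elements):

    var buf = new ArrayBuffer(16);
    new Int32Array(buf);         // view over the whole buffer, length 4
    new Int32Array(buf, 4, 2);   // view of 2 elements at byte offset 4
    new Int32Array(buf, 2);      // RangeError: offset not a multiple of 4
    new Int32Array(8);           // allocates a fresh 32-byte ArrayBuffer
    new Int32Array([1, 2, 3]);   // copies from an array-like, length 3
    new Int32Array();            // TypeError: parameterless constructor
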
@@ -141,32 +118,57 @@ function TypedArrayGetLength() {
   return %TypedArrayGetLength(this);
 }
 
+function CreateSubArray(elementSize, constructor) {
+  return function(begin, end) {
+    var srcLength = %TypedArrayGetLength(this);
+    var beginInt = TO_INTEGER(begin);
+    if (beginInt < 0) {
+      beginInt = MathMax(0, srcLength + beginInt);
+    } else {
+      beginInt = MathMin(srcLength, beginInt);
+    }
 
-// -------------------------------------------------------------------
-
-function SetUpArrayBuffer() {
-  %CheckIsBootstrapping();
-
-  // Set up the ArrayBuffer constructor function.
-  %SetCode($ArrayBuffer, ArrayBufferConstructor);
-  %FunctionSetPrototype($ArrayBuffer, new $Object());
-
-  // Set up the constructor property on the ArrayBuffer prototype object.
-  %SetProperty($ArrayBuffer.prototype, "constructor", $ArrayBuffer, DONT_ENUM);
+    var endInt = IS_UNDEFINED(end) ? srcLength : TO_INTEGER(end);
+    if (endInt < 0) {
+      endInt = MathMax(0, srcLength + endInt);
+    } else {
+      endInt = MathMin(endInt, srcLength);
+    }
+    if (endInt < beginInt) {
+      endInt = beginInt;
+    }
+    var newLength = endInt - beginInt;
+    var beginByteOffset =
+        %TypedArrayGetByteOffset(this) + beginInt * elementSize;
+    return new constructor(%TypedArrayGetBuffer(this),
+                           beginByteOffset, newLength);
+  }
+}
 
-  InstallGetter($ArrayBuffer.prototype, "byteLength", ArrayBufferGetByteLength);
+function TypedArraySet(obj, offset) {
+  var intOffset = IS_UNDEFINED(offset) ? 0 : TO_POSITIVE_INTEGER(offset);
+  if (%TypedArraySetFastCases(this, obj, intOffset))
+    return;
 
-  InstallFunctions($ArrayBuffer.prototype, DONT_ENUM, $Array(
-      "slice", ArrayBufferSlice
-  ));
+  var l = obj.length;
+  if (IS_UNDEFINED(l)) {
+    throw MakeTypeError("invalid_argument");
+  }
+  if (intOffset + l > this.length) {
+    throw MakeRangeError("typed_array_set_source_too_large");
+  }
+  for (var i = 0; i < l; i++) {
+    this[intOffset + i] = obj[i];
+  }
 }
 
-SetUpArrayBuffer();
+// -------------------------------------------------------------------
 
 function SetupTypedArray(arrayId, name, constructor, elementSize) {
-  var f = CreateTypedArrayConstructor(name, elementSize,
-                                      arrayId, constructor);
-  %SetCode(constructor, f);
+  %CheckIsBootstrapping();
+  var fun = CreateTypedArrayConstructor(name, elementSize,
+                                        arrayId, constructor);
+  %SetCode(constructor, fun);
   %FunctionSetPrototype(constructor, new $Object());
 
   %SetProperty(constructor.prototype,
@@ -178,15 +180,20 @@ function SetupTypedArray(arrayId, name, constructor, elementSize) {
   InstallGetter(constructor.prototype, "byteOffset", TypedArrayGetByteOffset);
   InstallGetter(constructor.prototype, "byteLength", TypedArrayGetByteLength);
   InstallGetter(constructor.prototype, "length", TypedArrayGetLength);
+
+  InstallFunctions(constructor.prototype, DONT_ENUM, $Array(
+        "subarray", CreateSubArray(elementSize, constructor),
+        "set", TypedArraySet
+  ));
 }
 
 // arrayIds below should be synchronized with Runtime_TypedArrayInitialize.
-SetupTypedArray(1, "Uint8Array", global.__Uint8Array, 1);
-SetupTypedArray(2, "Int8Array", global.__Int8Array, 1);
-SetupTypedArray(3, "Uint16Array", global.__Uint16Array, 2);
-SetupTypedArray(4, "Int16Array", global.__Int16Array, 2);
-SetupTypedArray(5, "Uint32Array", global.__Uint32Array, 4);
-SetupTypedArray(6, "Int32Array", global.__Int32Array, 4);
-SetupTypedArray(7, "Float32Array", global.__Float32Array, 4);
-SetupTypedArray(8, "Float64Array", global.__Float64Array, 8);
-
+SetupTypedArray(1, "Uint8Array", global.Uint8Array, 1);
+SetupTypedArray(2, "Int8Array", global.Int8Array, 1);
+SetupTypedArray(3, "Uint16Array", global.Uint16Array, 2);
+SetupTypedArray(4, "Int16Array", global.Int16Array, 2);
+SetupTypedArray(5, "Uint32Array", global.Uint32Array, 4);
+SetupTypedArray(6, "Int32Array", global.Int32Array, 4);
+SetupTypedArray(7, "Float32Array", global.Float32Array, 4);
+SetupTypedArray(8, "Float64Array", global.Float64Array, 8);
+SetupTypedArray(9, "Uint8ClampedArray", global.Uint8ClampedArray, 1);
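
The subarray and set methods installed above can be exercised as in the following sketch (illustrative usage, not part of the patch): subarray clamps negative or out-of-range indices and returns a new view over the same buffer, while set copies element by element after a bounds check.

    var a = new Uint8Array(8);
    var tail = a.subarray(-3);   // view of the last 3 elements
    tail[0] = 42;
    a[5];                        // 42 -- same underlying ArrayBuffer
    a.subarray(6, 2).length;     // 0  -- end is clamped up to begin
    a.set([1, 2, 3], 5);         // copies into indices 5..7
    a.set([1, 2], 7);            // RangeError: source too large
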
index b041fc2..5ead877 100644
@@ -48,6 +48,9 @@
 #error both DEBUG and NDEBUG are set
 #endif
 
+// TODO(dcarney): remove this
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+
 // Basic includes
 #include "../include/v8.h"
 #include "v8globals.h"
index f71de82..c72ce7a 100644
@@ -64,6 +64,14 @@ class Memory {
     return *reinterpret_cast<unsigned*>(addr);
   }
 
+  static intptr_t& intptr_at(Address addr)  {
+    return *reinterpret_cast<intptr_t*>(addr);
+  }
+
+  static uintptr_t& uintptr_at(Address addr) {
+    return *reinterpret_cast<uintptr_t*>(addr);
+  }
+
   static double& double_at(Address addr)  {
     return *reinterpret_cast<double*>(addr);
   }
index 83b5618..9266af6 100644
@@ -1665,7 +1665,6 @@ function FunctionSourceString(func) {
     func = %GetCallTrap(func);
   }
 
-  // TODO(wingo): Print source using function* for generators.
   if (!IS_FUNCTION(func)) {
     throw new $TypeError('Function.prototype.toString is not generic');
   }
@@ -1684,7 +1683,8 @@ function FunctionSourceString(func) {
   var name = %FunctionNameShouldPrintAsAnonymous(func)
       ? 'anonymous'
       : %FunctionGetName(func);
-  return 'function ' + name + source;
+  var head = %FunctionIsGenerator(func) ? 'function* ' : 'function ';
+  return head + name + source;
 }
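
With the change above, Function.prototype.toString reflects generator syntax; a sketch of the expected output (assuming generators are enabled, e.g. via the harmony flag):

    function* gen() { yield 1; }
    gen.toString();                // "function* gen() { yield 1; }"
    (function f() {}).toString();  // "function f() {}" -- unchanged
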
 
 
@@ -1766,7 +1766,7 @@ function NewFunction(arg1) {  // length == 1
     // If the formal parameters string include ) - an illegal
     // character - it may make the combined function expression
     // compile. We avoid this problem by checking for this early on.
-    if (p.indexOf(')') != -1) throw MakeSyntaxError('unable_to_parse',[]);
+    if (p.indexOf(')') != -1) throw MakeSyntaxError('paren_in_arg_string',[]);
     // If the formal parameters include an unbalanced block comment, the
     // function must be rejected. Since JavaScript does not allow nested
     // comments we can include a trailing block comment to catch this.
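
Only the error message key changes here; the guard itself still rejects any ')' in the formal parameter string so that the parameters cannot close the synthesized function literal early. Illustrative sketch:

    new Function('a', 'return a');                     // ok
    new Function('a) { hijack(); } function (', 'x');  // SyntaxError
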
index dac2bf0..baafcf7 100644
@@ -33,8 +33,8 @@
 // NOTE these macros are used by some of the tool scripts and the build
 // system so their names cannot be changed without changing the scripts.
 #define MAJOR_VERSION     3
-#define MINOR_VERSION     18
-#define BUILD_NUMBER      4
+#define MINOR_VERSION     19
+#define BUILD_NUMBER      0
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
index 5bb1292..aec50c9 100644
@@ -202,16 +202,16 @@ const Register no_reg = { kRegister_no_reg_Code };
 
 #ifdef _WIN64
   // Windows calling convention
-  const Register arg_reg_1 = rcx;
-  const Register arg_reg_2 = rdx;
-  const Register arg_reg_3 = r8;
-  const Register arg_reg_4 = r9;
+  const Register arg_reg_1 = { kRegister_rcx_Code };
+  const Register arg_reg_2 = { kRegister_rdx_Code };
+  const Register arg_reg_3 = { kRegister_r8_Code };
+  const Register arg_reg_4 = { kRegister_r9_Code };
 #else
   // AMD64 calling convention
-  const Register arg_reg_1 = rdi;
-  const Register arg_reg_2 = rsi;
-  const Register arg_reg_3 = rdx;
-  const Register arg_reg_4 = rcx;
+  const Register arg_reg_1 = { kRegister_rdi_Code };
+  const Register arg_reg_2 = { kRegister_rsi_Code };
+  const Register arg_reg_3 = { kRegister_rdx_Code };
+  const Register arg_reg_4 = { kRegister_rcx_Code };
 #endif  // _WIN64
 
 struct XMMRegister {
index 3a9a023..f7ded18 100644
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_X64)
 
 #include "bootstrapper.h"
+#include "builtins-decls.h"
 #include "code-stubs.h"
 #include "regexp-macro-assembler.h"
 #include "stub-cache.h"
@@ -74,6 +75,28 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
 }
 
 
+void LoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rdx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -102,9 +125,10 @@ static void InitializeArrayConstructorDescriptor(
     int constant_stack_parameter_count) {
   // register state
   // rax -- number of arguments
+  // rdi -- function
   // rbx -- type info cell with elements kind
-  static Register registers[] = { rbx };
-  descriptor->register_param_count_ = 1;
+  static Register registers[] = { rdi, rbx };
+  descriptor->register_param_count_ = 2;
   if (constant_stack_parameter_count != 0) {
     // stack param count needs (constructor pointer, and single argument)
     descriptor->stack_parameter_count_ = &rax;
@@ -2391,12 +2415,6 @@ void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
 }
 
 
-void LoadFieldStub::Generate(MacroAssembler* masm) {
-  StubCompiler::DoGenerateFastPropertyLoad(masm, rax, reg_, inobject_, index_);
-  __ ret(0);
-}
-
-
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The key is in rdx and the parameter count is in rax.
 
@@ -3818,6 +3836,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   Handle<Object> terminal_kind_sentinel =
       TypeFeedbackCells::MonomorphicArraySentinel(isolate,
                                                   LAST_FAST_ELEMENTS_KIND);
+  __ JumpIfNotSmi(rcx, &miss);
   __ Cmp(rcx, terminal_kind_sentinel);
   __ j(above, &miss);
   // Make sure the function is the Array() function
@@ -4106,22 +4125,23 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
 
   // Call C function.
 #ifdef _WIN64
-  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
-  // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
-  __ movq(StackSpaceOperand(0), r14);  // argc.
-  __ movq(StackSpaceOperand(1), r15);  // argv.
+  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
+  // Pass argv and argc as two parameters. The arguments object will
+  // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
   if (result_size_ < 2) {
     // Pass a pointer to the Arguments object as the first argument.
     // Return result in single register (rax).
-    __ lea(rcx, StackSpaceOperand(0));
-    __ LoadAddress(rdx, ExternalReference::isolate_address(masm->isolate()));
+    __ movq(rcx, r14);  // argc.
+    __ movq(rdx, r15);  // argv.
+    __ movq(r8, ExternalReference::isolate_address(masm->isolate()));
   } else {
     ASSERT_EQ(2, result_size_);
     // Pass a pointer to the result location as the first argument.
     __ lea(rcx, StackSpaceOperand(2));
     // Pass a pointer to the Arguments object as the second argument.
-    __ lea(rdx, StackSpaceOperand(0));
-    __ LoadAddress(r8, ExternalReference::isolate_address(masm->isolate()));
+    __ movq(rdx, r14);  // argc.
+    __ movq(r8, r15);   // argv.
+    __ movq(r9, ExternalReference::isolate_address(masm->isolate()));
   }
 
 #else  // _WIN64
@@ -4906,8 +4926,34 @@ void StringAddStub::Generate(MacroAssembler* masm) {
   __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
   __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
           Immediate(String::kEmptyHashField));
+
+  Label skip_write_barrier, after_writing;
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(masm->isolate());
+  __ Load(rbx, high_promotion_mode);
+  __ testb(rbx, Immediate(1));
+  __ j(zero, &skip_write_barrier);
+
+  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
+  __ RecordWriteField(rcx,
+                      ConsString::kFirstOffset,
+                      rax,
+                      rbx,
+                      kDontSaveFPRegs);
+  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+  __ RecordWriteField(rcx,
+                      ConsString::kSecondOffset,
+                      rdx,
+                      rbx,
+                      kDontSaveFPRegs);
+  __ jmp(&after_writing);
+
+  __ bind(&skip_write_barrier);
   __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
   __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+
+  __ bind(&after_writing);
+
   __ movq(rax, rcx);
   __ IncrementCounter(counters->string_add_native(), 1);
   __ ret(2 * kPointerSize);
@@ -6346,8 +6392,11 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
   { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
   // StoreArrayLiteralElementStub::Generate
   { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub::Generate
+  // FastNewClosureStub::Generate and
+  // StringAddStub::Generate
   { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
+  // StringAddStub::Generate
+  { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET},
   // Null termination.
   { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
 };
@@ -6900,14 +6949,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     // Get the elements kind and case on that.
     __ Cmp(rbx, undefined_sentinel);
     __ j(equal, &no_info);
-    __ movq(rdx, FieldOperand(rbx, kPointerSize));
-
-    // There is no info if the call site went megamorphic either
-
-    // TODO(mvstanton): Really? I thought if it was the array function that
-    // the cell wouldn't get stamped as megamorphic.
-    __ Cmp(rdx, TypeFeedbackCells::MegamorphicSentinel(masm->isolate()));
-    __ j(equal, &no_info);
+    __ movq(rdx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
+    __ JumpIfNotSmi(rdx, &no_info);
     __ SmiToInteger32(rdx, rdx);
     __ jmp(&switch_ready);
     __ bind(&no_info);
index 1b29e58..750d929 100644
@@ -233,6 +233,15 @@ void Debug::GenerateKeyedStoreICDebugBreak(MacroAssembler* masm) {
 }
 
 
+void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
+  // Register state for CompareNil IC
+  // ----------- S t a t e -------------
+  //  -- rax    : value
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, rax.bit(), 0, false);
+}
+
+
 void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
   // Register state for IC call call (from ic-x64.cc)
   // ----------- S t a t e -------------
index a20d468..19fa0aa 100644
@@ -1554,7 +1554,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
   int properties_count = constant_properties->length() / 2;
-  if (expr->depth() > 1) {
+  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
+      expr->depth() > 1) {
     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
@@ -1924,11 +1925,12 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       Label resume;
       __ CompareRoot(result_register(), Heap::kTheHoleValueRootIndex);
       __ j(not_equal, &resume);
-      __ pop(result_register());
       if (expr->yield_kind() == Yield::SUSPEND) {
-        // TODO(wingo): Box into { value: VALUE, done: false }.
+        EmitReturnIteratorResult(false);
+      } else {
+        __ pop(result_register());
+        EmitReturnSequence();
       }
-      EmitReturnSequence();
 
       __ bind(&resume);
       context()->Plug(result_register());
@@ -1940,18 +1942,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ Move(FieldOperand(result_register(),
                            JSGeneratorObject::kContinuationOffset),
               Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
-      __ pop(result_register());
-      // TODO(wingo): Box into { value: VALUE, done: true }.
-
-      // Exit all nested statements.
-      NestedStatement* current = nesting_stack_;
-      int stack_depth = 0;
-      int context_length = 0;
-      while (current != NULL) {
-        current = current->Exit(&stack_depth, &context_length);
-      }
-      __ Drop(stack_depth);
-      EmitReturnSequence();
+      EmitReturnIteratorResult(true);
       break;
     }
 
@@ -2058,6 +2049,56 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
 }
 
 
+void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
+  Label gc_required;
+  Label allocated;
+
+  Handle<Map> map(isolate()->native_context()->generator_result_map());
+
+  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
+
+  __ bind(&allocated);
+  __ Move(rbx, map);
+  __ pop(rcx);
+  __ Move(rdx, isolate()->factory()->ToBoolean(done));
+  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
+  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rbx);
+  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
+          isolate()->factory()->empty_fixed_array());
+  __ Move(FieldOperand(rax, JSObject::kElementsOffset),
+          isolate()->factory()->empty_fixed_array());
+  __ movq(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
+          rcx);
+  __ movq(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
+          rdx);
+
+  // Only the value field needs a write barrier, as the other values are in the
+  // root set.
+  __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
+                      rcx, rdx, kDontSaveFPRegs);
+
+  if (done) {
+    // Exit all nested statements.
+    NestedStatement* current = nesting_stack_;
+    int stack_depth = 0;
+    int context_length = 0;
+    while (current != NULL) {
+      current = current->Exit(&stack_depth, &context_length);
+    }
+    __ Drop(stack_depth);
+  }
+
+  EmitReturnSequence();
+
+  __ bind(&gc_required);
+  __ Push(Smi::FromInt(map->instance_size()));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  __ movq(context_register(),
+          Operand(rbp, StandardFrameConstants::kContextOffset));
+  __ jmp(&allocated);
+}
+
+
 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Literal* key = prop->key()->AsLiteral();
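
EmitReturnIteratorResult above allocates the iterator result object inline, so a suspended or finished generator now hands back a boxed { value, done } pair rather than the raw yielded value. At the JavaScript level the intended shape is (sketch, assuming generators are enabled):

    function* counter() { yield 1; yield 2; }
    var it = counter();
    it.next();   // { value: 1, done: false }
    it.next();   // { value: 2, done: false }
    it.next();   // { value: undefined, done: true }
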
index 0a9ceaa..761e05a 100644
@@ -1528,6 +1528,26 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
 }
 
 
+void StoreIC::GenerateSlow(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rax     : value
+  //  -- rcx     : key
+  //  -- rdx     : receiver
+  //  -- rsp[0]  : return address
+  // -----------------------------------
+
+  __ pop(rbx);
+  __ push(rdx);  // receiver
+  __ push(rcx);  // key
+  __ push(rax);  // value
+  __ push(rbx);  // return address
+
+  // Do tail-call to runtime routine.
+  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
+  __ TailCallExternalReference(ref, 3, 1);
+}
+
+
 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax     : value
index fbb7c28..f908ea1 100644
@@ -96,6 +96,10 @@ void LCodeGen::FinishCode(Handle<Code> code) {
     prototype_maps_.at(i)->AddDependentCode(
         DependentCode::kPrototypeCheckGroup, code);
   }
+  for (int i = 0 ; i < transition_maps_.length(); i++) {
+    transition_maps_.at(i)->AddDependentCode(
+        DependentCode::kTransitionGroup, code);
+  }
 }
 
 
@@ -1033,7 +1037,7 @@ void LCodeGen::DoModI(LModI* instr) {
     __ andl(dividend, Immediate(divisor - 1));
     __ bind(&done);
   } else {
-    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
+    Label done, remainder_eq_dividend, slow, both_positive;
     Register left_reg = ToRegister(instr->left());
     Register right_reg = ToRegister(instr->right());
     Register result_reg = ToRegister(instr->result());
@@ -1069,23 +1073,10 @@ void LCodeGen::DoModI(LModI* instr) {
     __ movl(scratch, right_reg);
     __ subl(scratch, Immediate(1));
     __ testl(scratch, right_reg);
-    __ j(not_zero, &do_subtraction, Label::kNear);
+    __ j(not_zero, &slow, Label::kNear);
     __ andl(left_reg, scratch);
     __ jmp(&remainder_eq_dividend, Label::kNear);
 
-    __ bind(&do_subtraction);
-    const int kUnfolds = 3;
-    // Try a few subtractions of the dividend.
-    __ movl(scratch, left_reg);
-    for (int i = 0; i < kUnfolds; i++) {
-      // Reduce the dividend by the divisor.
-      __ subl(left_reg, right_reg);
-      // Check if the dividend is less than the divisor.
-      __ cmpl(left_reg, right_reg);
-      __ j(less, &remainder_eq_dividend, Label::kNear);
-    }
-    __ movl(left_reg, scratch);
-
     // Slow case, using idiv instruction.
     __ bind(&slow);
 
@@ -1683,19 +1674,27 @@ void LCodeGen::DoThrow(LThrow* instr) {
 void LCodeGen::DoAddI(LAddI* instr) {
   LOperand* left = instr->left();
   LOperand* right = instr->right();
-  ASSERT(left->Equals(instr->result()));
 
-  if (right->IsConstantOperand()) {
-    __ addl(ToRegister(left),
-            Immediate(ToInteger32(LConstantOperand::cast(right))));
-  } else if (right->IsRegister()) {
-    __ addl(ToRegister(left), ToRegister(right));
+  if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
+    if (right->IsConstantOperand()) {
+      int32_t offset = ToInteger32(LConstantOperand::cast(right));
+      __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset));
+    } else {
+      Operand address(ToRegister(left), ToRegister(right), times_1, 0);
+      __ lea(ToRegister(instr->result()), address);
+    }
   } else {
-    __ addl(ToRegister(left), ToOperand(right));
-  }
-
-  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
-    DeoptimizeIf(overflow, instr->environment());
+    if (right->IsConstantOperand()) {
+      __ addl(ToRegister(left),
+              Immediate(ToInteger32(LConstantOperand::cast(right))));
+    } else if (right->IsRegister()) {
+      __ addl(ToRegister(left), ToRegister(right));
+    } else {
+      __ addl(ToRegister(left), ToOperand(right));
+    }
+    if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+      DeoptimizeIf(overflow, instr->environment());
+    }
   }
 }
 
@@ -2677,13 +2676,21 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
 
 
 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
+  int offset = instr->hydrogen()->offset();
   Register object = ToRegister(instr->object());
+  if (FLAG_track_double_fields &&
+      instr->hydrogen()->representation().IsDouble()) {
+    XMMRegister result = ToDoubleRegister(instr->result());
+    __ movsd(result, FieldOperand(object, offset));
+    return;
+  }
+
   Register result = ToRegister(instr->result());
   if (instr->hydrogen()->is_in_object()) {
-    __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
+    __ movq(result, FieldOperand(object, offset));
   } else {
     __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
-    __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
+    __ movq(result, FieldOperand(result, offset));
   }
 }
 
@@ -2847,41 +2854,6 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
 }
 
 
-void LCodeGen::DoLoadElements(LLoadElements* instr) {
-  Register result = ToRegister(instr->result());
-  Register input = ToRegister(instr->object());
-  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
-  if (FLAG_debug_code) {
-    Label done, ok, fail;
-    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
-                   Heap::kFixedArrayMapRootIndex);
-    __ j(equal, &done, Label::kNear);
-    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
-                   Heap::kFixedCOWArrayMapRootIndex);
-    __ j(equal, &done, Label::kNear);
-    Register temp((result.is(rax)) ? rbx : rax);
-    __ push(temp);
-    __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
-    __ movzxbq(temp, FieldOperand(temp, Map::kBitField2Offset));
-    __ and_(temp, Immediate(Map::kElementsKindMask));
-    __ shr(temp, Immediate(Map::kElementsKindShift));
-    __ cmpl(temp, Immediate(GetInitialFastElementsKind()));
-    __ j(less, &fail, Label::kNear);
-    __ cmpl(temp, Immediate(TERMINAL_FAST_ELEMENTS_KIND));
-    __ j(less_equal, &ok, Label::kNear);
-    __ cmpl(temp, Immediate(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ j(less, &fail, Label::kNear);
-    __ cmpl(temp, Immediate(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
-    __ j(less_equal, &ok, Label::kNear);
-    __ bind(&fail);
-    __ Abort("Check for fast or external elements failed");
-    __ bind(&ok);
-    __ pop(temp);
-    __ bind(&done);
-  }
-}
-
-
 void LCodeGen::DoLoadExternalArrayPointer(
     LLoadExternalArrayPointer* instr) {
   Register result = ToRegister(instr->result());
@@ -3904,8 +3876,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
 
   __ Set(rax, instr->arity());
   __ Move(rbx, instr->hydrogen()->property_cell());
-  Object* cell_value = instr->hydrogen()->property_cell()->value();
-  ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+  ElementsKind kind = instr->hydrogen()->elements_kind();
   if (instr->arity() == 0) {
     ArrayNoArgumentConstructorStub stub(kind);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
@@ -3932,16 +3903,42 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
+  Representation representation = instr->representation();
+
   Register object = ToRegister(instr->object());
+
   int offset = instr->offset();
 
-  if (!instr->transition().is_null()) {
+  Handle<Map> transition = instr->transition();
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    if (instr->value()->IsConstantOperand()) {
+      LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
+      if (!IsInteger32Constant(operand_value)) {
+        DeoptimizeIf(no_condition, instr->environment());
+      }
+    } else {
+      Register value = ToRegister(instr->value());
+      __ Integer32ToSmi(value, value);
+    }
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    ASSERT(transition.is_null());
+    ASSERT(instr->is_in_object());
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    XMMRegister value = ToDoubleRegister(instr->value());
+    __ movsd(FieldOperand(object, offset), value);
+    return;
+  }
+
+  if (!transition.is_null()) {
+    if (transition->CanBeDeprecated()) {
+      transition_maps_.Add(transition, info()->zone());
+    }
     if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
-      __ Move(FieldOperand(object, HeapObject::kMapOffset),
-              instr->transition());
+      __ Move(FieldOperand(object, HeapObject::kMapOffset), transition);
     } else {
       Register temp = ToRegister(instr->temp());
-      __ Move(kScratchRegister, instr->transition());
+      __ Move(kScratchRegister, transition);
       __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
       // Update the write barrier for the map field.
       __ RecordWriteField(object,
@@ -3977,6 +3974,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
               ToRegister(operand_value));
     } else {
       Handle<Object> handle_value = ToHandle(operand_value);
+      ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
       __ Move(FieldOperand(write_register, offset), handle_value);
     }
   } else {
@@ -4384,6 +4382,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
 
   __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode));
   __ j(above, deferred->entry());
+  __ movsxlq(char_code, char_code);
   __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
   __ movq(result, FieldOperand(result,
                                char_code, times_pointer_size,
@@ -4768,6 +4767,8 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
       } else {
         mode = NUMBER_CANDIDATE_IS_SMI;
       }
+    } else {
+      mode = NUMBER_CANDIDATE_IS_SMI;
     }
   }
 
@@ -5122,17 +5123,24 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
 
 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
-  Register size = ToRegister(instr->size());
   Register result = ToRegister(instr->result());
 
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
-  __ Set(result, 0);
+  __ Move(result, Smi::FromInt(0));
 
   PushSafepointRegistersScope scope(this);
-  __ Integer32ToSmi(size, size);
-  __ push(size);
+  if (instr->size()->IsRegister()) {
+    Register size = ToRegister(instr->size());
+    ASSERT(!size.is(result));
+    __ Integer32ToSmi(size, size);
+    __ push(size);
+  } else {
+    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+    __ Push(Smi::FromInt(size));
+  }
+
   if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldPointerSpace, 1, instr);
@@ -5219,7 +5227,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
   // Set up the parameters to the stub/runtime call and pick the right
   // runtime function or stub to call.
   int properties_count = instr->hydrogen()->constant_properties_length() / 2;
-  if (instr->hydrogen()->depth() > 1) {
+  if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) ||
+      instr->hydrogen()->depth() > 1) {
     __ PushHeapObject(literals);
     __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
     __ Push(constant_properties);
index 887c788..aa0ab9c 100644
@@ -58,6 +58,7 @@ class LCodeGen BASE_EMBEDDED {
         jump_table_(4, info->zone()),
         deoptimization_literals_(8, info->zone()),
         prototype_maps_(0, info->zone()),
+        transition_maps_(0, info->zone()),
         inlined_function_count_(0),
         scope_(info->scope()),
         status_(UNUSED),
@@ -371,6 +372,7 @@ class LCodeGen BASE_EMBEDDED {
   ZoneList<JumpTableEntry> jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   ZoneList<Handle<Map> > prototype_maps_;
+  ZoneList<Handle<Map> > transition_maps_;
   int inlined_function_count_;
   Scope* const scope_;
   Status status_;
index 6707455..f49f7d6 100644
@@ -557,6 +557,11 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
 }
 
 
+LOperand* LChunkBuilder::UseConstant(HValue* value) {
+  return chunk_->DefineConstantOperand(HConstant::cast(value));
+}
+
+
 LOperand* LChunkBuilder::UseAny(HValue* value) {
   return value->IsConstant()
       ? chunk_->DefineConstantOperand(HConstant::cast(value))
@@ -680,7 +685,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
   int vreg = allocator_->GetVirtualRegister();
   if (!allocator_->AllocationOk()) {
     Abort("Out of virtual registers while trying to allocate temp register.");
-    return NULL;
+    vreg = 0;
   }
   operand->set_virtual_register(vreg);
   return operand;
@@ -777,8 +782,8 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
   ASSERT(instr->left()->representation().IsDouble());
   ASSERT(instr->right()->representation().IsDouble());
   ASSERT(op != Token::MOD);
-  LOperand* left = UseRegisterAtStart(instr->left());
-  LOperand* right = UseRegisterAtStart(instr->right());
+  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
   LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
   return DefineSameAsFirst(result);
 }
@@ -1304,8 +1309,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
 
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
     return DefineSameAsFirst(new(zone()) LBitI(left, right));
   } else {
     ASSERT(instr->representation().IsTagged());
@@ -1468,8 +1473,8 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstant(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    LOperand* right = UseOrConstant(instr->BetterRightOperand());
     LMulI* mul = new(zone()) LMulI(left, right);
     if (instr->CheckFlag(HValue::kCanOverflow) ||
         instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1508,13 +1513,24 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
 
 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
   if (instr->representation().IsInteger32()) {
+    // Check to see if it would be advantageous to use an lea instruction rather
+    // than an add. This is the case when no overflow check is needed and there
+    // are multiple uses of the add's inputs, so using a 3-register add will
+    // preserve all input values for later uses.
+    bool use_lea = LAddI::UseLea(instr);
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
-    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
+    HValue* right_candidate = instr->BetterRightOperand();
+    LOperand* right = use_lea
+        ? UseRegisterOrConstantAtStart(right_candidate)
+        : UseOrConstantAtStart(right_candidate);
     LAddI* add = new(zone()) LAddI(left, right);
-    LInstruction* result = DefineSameAsFirst(add);
-    if (instr->CheckFlag(HValue::kCanOverflow)) {
+    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
+    LInstruction* result = use_lea
+        ? DefineAsRegister(add)
+        : DefineSameAsFirst(add);
+    if (can_overflow) {
       result = AssignEnvironment(result);
     }
     return result;
@@ -1534,8 +1550,8 @@ LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
   if (instr->representation().IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    left = UseRegisterAtStart(instr->LeastConstantOperand());
-    right = UseOrConstantAtStart(instr->MostConstantOperand());
+    left = UseRegisterAtStart(instr->BetterLeftOperand());
+    right = UseOrConstantAtStart(instr->BetterRightOperand());
   } else {
     ASSERT(instr->representation().IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
@@ -2022,7 +2038,6 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
 
 
 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
-  ASSERT(instr->representation().IsTagged());
   LOperand* obj = UseRegisterAtStart(instr->object());
   return DefineAsRegister(new(zone()) LLoadNamedField(obj));
 }
@@ -2059,12 +2074,6 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
 }
 
 
-LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
-  LOperand* input = UseRegisterAtStart(instr->value());
-  return DefineAsRegister(new(zone()) LLoadElements(input));
-}
-
-
 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
     HLoadExternalArrayPointer* instr) {
   LOperand* input = UseRegisterAtStart(instr->value());
@@ -2115,19 +2124,6 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
 }
 
 
-// DoStoreKeyed and DoStoreNamedField have special considerations for allowing
-// use of a constant instead of a register.
-static bool StoreConstantValueAllowed(HValue* value) {
-  if (value->IsConstant()) {
-    HConstant* constant_value = HConstant::cast(value);
-    return constant_value->HasSmiValue()
-        || constant_value->HasDoubleValue()
-        || constant_value->ImmortalImmovable();
-  }
-  return false;
-}
-
-
 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
   ElementsKind elements_kind = instr->elements_kind();
   bool clobbers_key = instr->key()->representation().IsTagged();
@@ -2151,18 +2147,12 @@ LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
         val = UseTempRegister(instr->value());
         key = UseTempRegister(instr->key());
       } else {
-        if (StoreConstantValueAllowed(instr->value())) {
-          val = UseRegisterOrConstantAtStart(instr->value());
-        } else {
-          val = UseRegisterAtStart(instr->value());
-        }
+        val = UseRegisterOrConstantAtStart(instr->value());
 
         if (clobbers_key) {
           key = UseTempRegister(instr->key());
-        } else if (StoreConstantValueAllowed(instr->key())) {
-          key = UseRegisterOrConstantAtStart(instr->key());
         } else {
-          key = UseRegisterAtStart(instr->key());
+          key = UseRegisterOrConstantAtStart(instr->key());
         }
       }
     }
@@ -2258,11 +2248,20 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
         : UseRegisterAtStart(instr->object());
   }
 
+  bool can_be_constant = instr->value()->IsConstant() &&
+      HConstant::cast(instr->value())->NotInNewSpace() &&
+      !(FLAG_track_double_fields && instr->field_representation().IsDouble());
+
   LOperand* val;
   if (needs_write_barrier) {
     val = UseTempRegister(instr->value());
-  } else if (StoreConstantValueAllowed(instr->value())) {
+  } else if (can_be_constant) {
     val = UseRegisterOrConstant(instr->value());
+  } else if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    val = UseTempRegister(instr->value());
+  } else if (FLAG_track_double_fields &&
+             instr->field_representation().IsDouble()) {
+    val = UseRegisterAtStart(instr->value());
   } else {
     val = UseRegister(instr->value());
   }
@@ -2272,7 +2271,11 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
       needs_write_barrier_for_map) ? TempRegister() : NULL;
 
-  return new(zone()) LStoreNamedField(obj, val, temp);
+  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
+  if (FLAG_track_fields && instr->field_representation().IsSmi()) {
+    return AssignEnvironment(result);
+  }
+  return result;
 }
 
 
@@ -2323,7 +2326,9 @@ LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
 
 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
   info()->MarkAsDeferredCalling();
-  LOperand* size = UseTempRegister(instr->size());
+  LOperand* size = instr->size()->IsConstant()
+      ? UseConstant(instr->size())
+      : UseTempRegister(instr->size());
   LOperand* temp = TempRegister();
   LAllocate* result = new(zone()) LAllocate(size, temp);
   return AssignPointerMap(DefineAsRegister(result));
@@ -2384,7 +2389,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
 
 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
-  if (spill_index > LUnallocated::kMaxFixedIndex) {
+  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
     Abort("Too many spill slots needed for OSR");
     spill_index = 0;
   }
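The x64 Lithium changes above make the value operand of LStoreNamedField depend on the tracked field representation, and a Smi-representation store additionally gets AssignEnvironment so it can deoptimize when the value is not a Smi. A standalone model of that selection ladder follows; the enum and helper names are illustrative, not V8 internals, and only the branch order mirrors the hunk.

    #include <iostream>

    enum class Rep { Tagged, Smi, Double };
    enum class Policy { TempRegister, RegisterOrConstant, RegisterAtStart, Register };

    // Mirrors the order of the checks in DoStoreNamedField above.
    Policy PickValueOperand(bool needs_write_barrier, bool can_be_constant,
                            bool track_fields, bool track_double_fields, Rep rep) {
      if (needs_write_barrier) return Policy::TempRegister;
      if (can_be_constant) return Policy::RegisterOrConstant;
      if (track_fields && rep == Rep::Smi) return Policy::TempRegister;
      if (track_double_fields && rep == Rep::Double) return Policy::RegisterAtStart;
      return Policy::Register;
    }

    int main() {
      // A Smi-representation store with tracking enabled needs a temp register.
      std::cout << (PickValueOperand(false, false, true, true, Rep::Smi) ==
                    Policy::TempRegister) << "\n";  // prints 1
    }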
index 9154b04..d1f7e76 100644 (file)
@@ -123,7 +123,6 @@ class LCodeGen;
   V(Label)                                      \
   V(LazyBailout)                                \
   V(LoadContextSlot)                            \
-  V(LoadElements)                               \
   V(LoadExternalArrayPointer)                   \
   V(LoadFunctionPrototype)                      \
   V(LoadGlobalCell)                             \
@@ -1346,6 +1345,11 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
   LOperand* left() { return inputs_[0]; }
   LOperand* right() { return inputs_[1]; }
 
+  static bool UseLea(HAdd* add) {
+    return !add->CheckFlag(HValue::kCanOverflow) &&
+        add->BetterLeftOperand()->UseCount() > 1;
+  }
+
   DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
   DECLARE_HYDROGEN_ACCESSOR(Add)
 };
@@ -1512,18 +1516,6 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
 };
 
 
-class LLoadElements: public LTemplateInstruction<1, 1, 0> {
- public:
-  explicit LLoadElements(LOperand* object) {
-    inputs_[0] = object;
-  }
-
-  LOperand* object() { return inputs_[0]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
-};
-
-
 class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LLoadExternalArrayPointer(LOperand* object) {
@@ -2063,6 +2055,9 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
+  Representation representation() const {
+    return hydrogen()->field_representation();
+  }
 };
 
 
@@ -2674,6 +2669,9 @@ class LChunkBuilder BASE_EMBEDDED {
   MUST_USE_RESULT LOperand* UseRegisterOrConstant(HValue* value);
   MUST_USE_RESULT LOperand* UseRegisterOrConstantAtStart(HValue* value);
 
+  // An input operand in a constant operand.
+  MUST_USE_RESULT LOperand* UseConstant(HValue* value);
+
   // An input operand in register, stack slot or a constant operand.
   // Will not be moved to a register even if one is freely available.
   MUST_USE_RESULT LOperand* UseAny(HValue* value);
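The header changes above add two helpers: LAddI::UseLea, which decides when an integer add can be emitted as LEA, and LChunkBuilder::UseConstant for operands that are always constants. The LEA condition restated as standalone code (the struct here is illustrative, not V8's HAdd):

    // LEA computes dst = left + right without clobbering its inputs and without
    // setting flags, so it is only usable when overflow need not be detected,
    // and it only pays off when the left operand is still needed elsewhere.
    struct AddInfo { bool can_overflow; int left_use_count; };

    bool UseLea(const AddInfo& add) {
      return !add.can_overflow && add.left_use_count > 1;
    }

    int main() {
      AddInfo add = { /*can_overflow=*/false, /*left_use_count=*/2 };
      return UseLea(add) ? 0 : 1;  // exits 0: LEA applies
    }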
index 76491a3..691894c 100644 (file)
@@ -4066,9 +4066,33 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  // Allocate heap number in new space.
-  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
-           TAG_OBJECT);
+  Label allocate_new_space, install_map;
+  AllocationFlags flags = TAG_OBJECT;
+
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(isolate());
+
+  Load(scratch1, high_promotion_mode);
+  testb(scratch1, Immediate(1));
+  j(zero, &allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+  jmp(&install_map);
+
+  bind(&allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           flags);
+
+  bind(&install_map);
 
   // Set the map. The other fields are left uninitialized.
   LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
@@ -4414,6 +4438,19 @@ void MacroAssembler::CheckPageFlag(
 }
 
 
+void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
+                                        Register scratch,
+                                        Label* if_deprecated) {
+  if (map->CanBeDeprecated()) {
+    Move(scratch, map);
+    movq(scratch, FieldOperand(scratch, Map::kBitField3Offset));
+    SmiToInteger32(scratch, scratch);
+    and_(scratch, Immediate(Map::Deprecated::kMask));
+    j(not_zero, if_deprecated);
+  }
+}
+
+
 void MacroAssembler::JumpIfBlack(Register object,
                                  Register bitmap_scratch,
                                  Register mask_scratch,
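Two additions in the x64 macro assembler above: AllocateAsciiConsString now consults the new-space high promotion mode and, when it is active, pretenures the cons string directly into old pointer space; CheckMapDeprecated reads the map's bit_field3 and jumps out if the deprecation bit is set. The allocation-space choice as a standalone sketch (enum names are illustrative, not V8 code):

    enum class Space { New, OldPointer };

    // When high promotion mode is active, new cons strings are allocated in
    // old pointer space (PRETENURE_OLD_POINTER_SPACE) instead of new space.
    Space ChooseConsStringSpace(bool high_promotion_mode_active) {
      return high_promotion_mode_active ? Space::OldPointer : Space::New;
    }

    int main() {
      return ChooseConsStringSpace(true) == Space::OldPointer ? 0 : 1;  // exits 0
    }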
index f640beb..76941ff 100644 (file)
@@ -168,6 +168,10 @@ class MacroAssembler: public Assembler {
                      Label* condition_met,
                      Label::Distance condition_met_distance = Label::kFar);
 
+  void CheckMapDeprecated(Handle<Map> map,
+                          Register scratch,
+                          Label* if_deprecated);
+
   // Check if object is in new space.  Jumps if the object is not in new space.
   // The register scratch can be object itself, but scratch will be clobbered.
   void JumpIfNotInNewSpace(Register object,
index 54d2a11..a7faf9b 100644 (file)
@@ -343,11 +343,13 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
 }
 
 
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -745,8 +747,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register value_reg,
                                            Register scratch1,
                                            Register scratch2,
+                                           Register unused,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // Check that the map of the object hasn't changed.
   __ CheckMap(receiver_reg, Handle<Map>(object->map()),
               miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP);
@@ -756,6 +760,15 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
   }
 
+  int descriptor = transition->LastAdded();
+  DescriptorArray* descriptors = transition->instance_descriptors();
+  PropertyDetails details = descriptors->GetDetails(descriptor);
+  Representation representation = details.representation();
+  ASSERT(!representation.IsNone());
+
+  // Ensure no transitions to deprecated maps are followed.
+  __ CheckMapDeprecated(transition, scratch1, miss_label);
+
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -771,7 +784,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
     Register holder_reg = CheckPrototypes(
         object, receiver_reg, Handle<JSObject>(holder), name_reg,
-        scratch1, scratch2, name, miss_restore_name);
+        scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -790,6 +803,28 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
 
+  Register storage_reg = name_reg;
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    Label do_store, heap_number;
+    __ AllocateHeapNumber(storage_reg, scratch1, slow);
+
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiToInteger32(scratch1, value_reg);
+    __ cvtlsi2sd(xmm0, scratch1);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
+                miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
@@ -815,12 +850,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ Move(scratch1, transition);
   __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
 
-  // Update the write barrier for the map field and pass the now unused
-  // name_reg as scratch register.
+  // Update the write barrier for the map field.
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
                       OMIT_SMI_CHECK);
@@ -837,25 +871,45 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ movq(FieldOperand(receiver_reg, offset), value_reg);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ movq(FieldOperand(receiver_reg, offset), storage_reg);
+    } else {
+      __ movq(FieldOperand(receiver_reg, offset), value_reg);
+    }
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ movq(name_reg, value_reg);
-    __ RecordWriteField(
-        receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ movq(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(
+          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array (optimistically).
     __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ movq(FieldOperand(scratch1, offset), value_reg);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ movq(FieldOperand(scratch1, offset), storage_reg);
+    } else {
+      __ movq(FieldOperand(scratch1, offset), value_reg);
+    }
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ movq(name_reg, value_reg);
-    __ RecordWriteField(
-        scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ movq(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
+      __ RecordWriteField(
+          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register rax).
@@ -895,17 +949,55 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();
 
+  Representation representation = lookup->representation();
+  ASSERT(!representation.IsNone());
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_label);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ movq(scratch1, FieldOperand(receiver_reg, offset));
+    } else {
+      __ movq(scratch1,
+              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ movq(scratch1, FieldOperand(scratch1, offset));
+    }
+
+    // Store the value into the storage.
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiToInteger32(scratch2, value_reg);
+    __ cvtlsi2sd(xmm0, scratch2);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
+                miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
+    // Return the value (register rax).
+    ASSERT(value_reg.is(rax));
+    __ ret(0);
+    return;
+  }
+
   // TODO(verwaest): Share this code as a code stub.
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     __ movq(FieldOperand(receiver_reg, offset), value_reg);
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ movq(name_reg, value_reg);
-    __ RecordWriteField(
-        receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      __ movq(name_reg, value_reg);
+      __ RecordWriteField(
+          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
+    }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
@@ -913,11 +1005,13 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
     __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
     __ movq(FieldOperand(scratch1, offset), value_reg);
 
-    // Update the write barrier for the array address.
-    // Pass the value being stored in the now unused name_reg.
-    __ movq(name_reg, value_reg);
-    __ RecordWriteField(
-        scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+    if (!FLAG_track_fields || !representation.IsSmi()) {
+      // Update the write barrier for the array address.
+      // Pass the value being stored in the now unused name_reg.
+      __ movq(name_reg, value_reg);
+      __ RecordWriteField(
+          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+    }
   }
 
   // Return the value (register rax).
@@ -1157,10 +1251,20 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex index) {
-  // Get the value from the properties.
-  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
-  __ ret(0);
+                                             PropertyIndex field,
+                                             Representation representation) {
+  if (!reg.is(receiver())) __ movq(receiver(), reg);
+  if (kind() == Code::LOAD_IC) {
+    LoadFieldStub stub(field.is_inobject(holder),
+                       field.translate(holder),
+                       representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  } else {
+    KeyedLoadFieldStub stub(field.is_inobject(holder),
+                            field.translate(holder),
+                            representation);
+    GenerateTailCall(masm(), stub.GetCode(isolate()));
+  }
 }
 
 
@@ -1415,7 +1519,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                  name, &miss);
 
-  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
+  GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder),
+                           index.translate(holder), Representation::Tagged());
 
   // Check that the function really is a function.
   __ JumpIfSmi(rdi, &miss);
@@ -2807,18 +2912,24 @@ Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC(
   Register map_reg = scratch1();
   __ movq(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
   int receiver_count = receiver_maps->length();
+  int number_of_handled_maps = 0;
   for (int current = 0; current < receiver_count; ++current) {
-    // Check map and tail call if there's a match
-    __ Cmp(map_reg, receiver_maps->at(current));
-    __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
+    Handle<Map> map = receiver_maps->at(current);
+    if (!map->is_deprecated()) {
+      number_of_handled_maps++;
+      // Check map and tail call if there's a match
+      __ Cmp(map_reg, receiver_maps->at(current));
+      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
+    }
   }
+  ASSERT(number_of_handled_maps > 0);
 
   __  bind(&miss);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
   // Return the generated code.
   InlineCacheState state =
-      receiver_maps->length() > 1 ? POLYMORPHIC : MONOMORPHIC;
+      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
   return GetICCode(kind(), type, name, state);
 }
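The stub-cache changes above make the x64 store stubs representation-aware: Smi fields are guarded with JumpIfNotSmi and skip the write barrier, double fields are converted into a heap-number storage cell (storage_reg) before being written, and CompilePolymorphicIC no longer emits handlers for deprecated maps. The write-barrier decision as a standalone model (not V8 code; names are illustrative):

    enum class Rep { Tagged, Smi, Double };

    // A Smi is an immediate, never a heap pointer, so storing one cannot
    // create a reference the remembered set or incremental marker must see.
    bool NeedsWriteBarrier(bool track_fields, Rep rep) {
      return !(track_fields && rep == Rep::Smi);
    }

    int main() {
      return NeedsWriteBarrier(/*track_fields=*/true, Rep::Smi) ? 1 : 0;  // exits 0
    }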
 
index a0091ff..1cdaca4 100644 (file)
@@ -25,6 +25,9 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include <v8.h>
 #include "cctest.h"
 #include "debug.h"
index 854d89e..c249b5e 100644 (file)
@@ -83,17 +83,22 @@ class CcTest {
   const char* name() { return name_; }
   const char* dependency() { return dependency_; }
   bool enabled() { return enabled_; }
-  static void set_default_isolate(v8::Isolate* default_isolate) {
-    default_isolate_ = default_isolate;
-  }
   static v8::Isolate* default_isolate() { return default_isolate_; }
-  static v8::Isolate* isolate() { return context_->GetIsolate(); }
-  static v8::Handle<v8::Context> env() { return context_; }
+
+  static v8::Handle<v8::Context> env() {
+    return v8::Local<v8::Context>::New(default_isolate_, context_);
+  }
+
+  static v8::Isolate* isolate() { return default_isolate_; }
 
   // Helper function to initialize the VM.
   static void InitializeVM(CcTestExtensionFlags extensions = NO_EXTENSIONS);
 
  private:
+  friend int main(int argc, char** argv);
+  static void set_default_isolate(v8::Isolate* default_isolate) {
+    default_isolate_ = default_isolate;
+  }
   TestFunction* callback_;
   const char* file_;
   const char* name_;
@@ -195,15 +200,21 @@ class RegisterThreadedTest {
   const char* name_;
 };
 
-
+namespace v8 {
 // A LocalContext holds a reference to a v8::Context.
 class LocalContext {
  public:
   LocalContext(v8::ExtensionConfiguration* extensions = 0,
                v8::Handle<v8::ObjectTemplate> global_template =
                    v8::Handle<v8::ObjectTemplate>(),
-               v8::Handle<v8::Value> global_object = v8::Handle<v8::Value>())
-    : context_(v8::Context::New(extensions, global_template, global_object)) {
+               v8::Handle<v8::Value> global_object = v8::Handle<v8::Value>()) {
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    HandleScope scope(isolate);
+    context_.Reset(isolate,
+                   Context::New(isolate,
+                                extensions,
+                                global_template,
+                                global_object));
     context_->Enter();
     // We can't do this later, perhaps because of a fatal error.
     isolate_ = context_->GetIsolate();
@@ -219,14 +230,15 @@ class LocalContext {
   bool IsReady() { return !context_.IsEmpty(); }
 
   v8::Local<v8::Context> local() {
-    return v8::Local<v8::Context>::New(context_);
+    return v8::Local<v8::Context>::New(isolate_, context_);
   }
 
  private:
   v8::Persistent<v8::Context> context_;
   v8::Isolate* isolate_;
 };
-
+}
+typedef v8::LocalContext LocalContext;
 
 static inline v8::Local<v8::Value> v8_num(double x) {
   return v8::Number::New(x);
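The cctest harness above moves LocalContext to the isolate-aware handle API: the context is created with Context::New(isolate, ...), kept alive by calling Reset on a Persistent, and turned back into a Local with Local<Context>::New(isolate, persistent). A minimal sketch of that pattern, using only calls that appear in this diff (V8 initialization and cctest boilerplate omitted):

    #include <v8.h>

    // Assumes an initialized V8 with a current isolate, as in the cctest runner.
    void HoldAndReuseContext() {
      v8::Isolate* isolate = v8::Isolate::GetCurrent();
      v8::HandleScope scope(isolate);

      // Create the context as a Local and keep it alive in a Persistent.
      v8::Persistent<v8::Context> persistent;
      persistent.Reset(isolate, v8::Context::New(isolate));

      // Materialize a Local again (possibly from a later HandleScope).
      v8::Local<v8::Context> local =
          v8::Local<v8::Context>::New(isolate, persistent);
      local->Enter();
      local->Exit();
    }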
index 616f712..e5523b2 100644 (file)
@@ -39,6 +39,9 @@ test-api/ApplyInterruption: PASS || TIMEOUT
 # when snapshot is on, so I am marking it PASS || FAIL
 test-heap-profiler/HeapSnapshotsDiff: PASS || FAIL
 
+# BUG(2628): This test is flaky and sometimes fails, but should not crash.
+test-cpu-profiler/CollectCpuProfile: PASS || FAIL
+
 # These tests always fail.  They are here to test test.py.  If
 # they don't fail then test.py has failed.
 test-serialize/TestThatAlwaysFails: FAIL
@@ -103,3 +106,6 @@ test-log/ProfLazyMode: SKIP
 test-debug/DebuggerAgent: SKIP
 test-debug/DebuggerAgentProtocolOverflowHeader: SKIP
 test-sockets/Socket: SKIP
+
+# Profiling doesn't work on Native Client.
+test-cpu-profiler/*: SKIP
index d71247c..bc469aa 100644 (file)
@@ -95,8 +95,8 @@ static Handle<Object> Test() {
 
 
 TEST(StressHandles) {
-  v8::Persistent<v8::Context> env = v8::Context::New();
-  v8::HandleScope scope(env->GetIsolate());
+  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Handle<v8::Context> env = v8::Context::New(v8::Isolate::GetCurrent());
   env->Enter();
   Handle<Object> o = Test();
   CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
@@ -117,8 +117,8 @@ const AccessorDescriptor kDescriptor = {
 
 
 TEST(StressJS) {
-  v8::Persistent<v8::Context> env = v8::Context::New();
-  v8::HandleScope scope(env->GetIsolate());
+  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Handle<v8::Context> env = v8::Context::New(v8::Isolate::GetCurrent());
   env->Enter();
   Handle<JSFunction> function =
       FACTORY->NewFunction(FACTORY->function_string(), FACTORY->null_value());
@@ -142,8 +142,7 @@ TEST(StressJS) {
 
   CallbacksDescriptor d(*name,
                         *foreign,
-                        static_cast<PropertyAttributes>(0),
-                        v8::internal::PropertyDetails::kInitialIndex);
+                        static_cast<PropertyAttributes>(0));
   map->AppendDescriptor(&d, witness);
 
   // Add the Foo constructor to the global object.
index 8fca65c..c9685f8 100644 (file)
 #include <unistd.h>  // getpid
 #endif  // WIN32
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "api.h"
@@ -162,8 +167,8 @@ THREADED_TEST(Handles) {
 
 
 THREADED_TEST(IsolateOfContext) {
-  v8::Persistent<Context> env = Context::New();
-  v8::HandleScope scope(env->GetIsolate());
+  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Handle<Context> env = Context::New(v8::Isolate::GetCurrent());
 
   CHECK(!env->InContext());
   CHECK(env->GetIsolate() == v8::Isolate::GetCurrent());
@@ -173,8 +178,6 @@ THREADED_TEST(IsolateOfContext) {
   env->Exit();
   CHECK(!env->InContext());
   CHECK(env->GetIsolate() == v8::Isolate::GetCurrent());
-
-  env.Dispose(env->GetIsolate());
 }
 
 
@@ -289,8 +292,8 @@ THREADED_TEST(HulIgennem) {
   v8::HandleScope scope(env->GetIsolate());
   v8::Handle<v8::Primitive> undef = v8::Undefined();
   Local<String> undef_str = undef->ToString();
-  char* value = i::NewArray<char>(undef_str->Length() + 1);
-  undef_str->WriteAscii(value);
+  char* value = i::NewArray<char>(undef_str->Utf8Length() + 1);
+  undef_str->WriteUtf8(value);
   CHECK_EQ(0, strcmp(value, "undefined"));
   i::DeleteArray(value);
 }
@@ -2254,6 +2257,7 @@ THREADED_TEST(SymbolProperties) {
 
 
 THREADED_TEST(ArrayBuffer) {
+  i::FLAG_harmony_array_buffer = true;
   i::FLAG_harmony_typed_arrays = true;
 
   LocalContext env;
@@ -2271,7 +2275,7 @@ THREADED_TEST(ArrayBuffer) {
   v8::Handle<v8::Value> result = CompileRun("ab.byteLength");
   CHECK_EQ(1024, result->Int32Value());
 
-  result = CompileRun("var u8 = new __Uint8Array(ab);"
+  result = CompileRun("var u8 = new Uint8Array(ab);"
                       "u8[0] = 0xFF;"
                       "u8[1] = 0xAA;"
                       "u8.length");
@@ -2283,8 +2287,8 @@ THREADED_TEST(ArrayBuffer) {
   result = CompileRun("u8[0] + u8[1]");
   CHECK_EQ(0xDD, result->Int32Value());
 
-  result = CompileRun("var ab1 = new __ArrayBuffer(2);"
-                      "var u8_a = new __Uint8Array(ab1);"
+  result = CompileRun("var ab1 = new ArrayBuffer(2);"
+                      "var u8_a = new Uint8Array(ab1);"
                       "u8_a[0] = 0xAA;"
                       "u8_a[1] = 0xFF; u8_a.buffer");
   Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::Cast(*result);
@@ -2303,7 +2307,7 @@ THREADED_TEST(ArrayBuffer) {
   CHECK_EQ(100, static_cast<int>(ab3->ByteLength()));
   CHECK_EQ(my_data, ab3->Data());
   env->Global()->Set(v8_str("ab3"), ab3);
-  result = CompileRun("var u8_b = new __Uint8Array(ab3);"
+  result = CompileRun("var u8_b = new Uint8Array(ab3);"
                       "u8_b[0] = 0xBB;"
                       "u8_b[1] = 0xCC;"
                       "u8_b.length");
@@ -2315,11 +2319,13 @@ THREADED_TEST(ArrayBuffer) {
   result = CompileRun("u8_b[0] + u8_b[1]");
   CHECK_EQ(0xDD, result->Int32Value());
 
-
   delete[] my_data;
 }
 
 
+
+
+
 THREADED_TEST(HiddenProperties) {
   LocalContext env;
   v8::HandleScope scope(env->GetIsolate());
@@ -2481,6 +2487,58 @@ THREADED_TEST(GlobalHandle) {
 }
 
 
+THREADED_TEST(ResettingGlobalHandle) {
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::internal::GlobalHandles* global_handles = NULL;
+  int initial_handle_count = 0;
+  v8::Persistent<String> global;
+  {
+    v8::HandleScope scope(isolate);
+    Local<String> str = v8_str("str");
+    global_handles =
+        reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles();
+    initial_handle_count = global_handles->NumberOfGlobalHandles();
+    global = v8::Persistent<String>::New(isolate, str);
+  }
+  CHECK_EQ(global->Length(), 3);
+  CHECK_EQ(global_handles->NumberOfGlobalHandles(), initial_handle_count + 1);
+  {
+    v8::HandleScope scope(isolate);
+    Local<String> str = v8_str("longer");
+    global.Reset(isolate, str);
+  }
+  CHECK_EQ(global->Length(), 6);
+  CHECK_EQ(global_handles->NumberOfGlobalHandles(), initial_handle_count + 1);
+  global.Dispose(isolate);
+  CHECK_EQ(global_handles->NumberOfGlobalHandles(), initial_handle_count);
+}
+
+
+THREADED_TEST(ResettingGlobalHandleToEmpty) {
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::internal::GlobalHandles* global_handles = NULL;
+  int initial_handle_count = 0;
+  v8::Persistent<String> global;
+  {
+    v8::HandleScope scope(isolate);
+    Local<String> str = v8_str("str");
+    global_handles =
+        reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles();
+    initial_handle_count = global_handles->NumberOfGlobalHandles();
+    global = v8::Persistent<String>::New(isolate, str);
+  }
+  CHECK_EQ(global->Length(), 3);
+  CHECK_EQ(global_handles->NumberOfGlobalHandles(), initial_handle_count + 1);
+  {
+    v8::HandleScope scope(isolate);
+    Local<String> empty;
+    global.Reset(isolate, empty);
+  }
+  CHECK(global.IsEmpty());
+  CHECK_EQ(global_handles->NumberOfGlobalHandles(), initial_handle_count);
+}
+
+
 THREADED_TEST(LocalHandle) {
   v8::HandleScope scope(v8::Isolate::GetCurrent());
   v8::Local<String> local = v8::Local<String>::New(v8_str("str"));
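The two new ResettingGlobalHandle tests above pin down the Persistent<T>::Reset contract: resetting to a non-empty Local retargets the handle without changing the global-handle count, while resetting to an empty Local releases the slot and leaves the handle empty. Condensed into one helper built from the same calls the tests use (v8_str replaced by String::New; otherwise as shown above):

    #include <v8.h>

    // Retarget a persistent string handle, then clear it.
    void RetargetAndClear(v8::Isolate* isolate,
                          v8::Persistent<v8::String>& global) {
      v8::HandleScope scope(isolate);
      global.Reset(isolate, v8::String::New("longer"));  // retarget; handle count unchanged
      global.Reset(isolate, v8::Local<v8::String>());    // clear; global.IsEmpty() becomes true
    }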
@@ -2504,12 +2562,11 @@ class WeakCallCounter {
 
 
 static void WeakPointerCallback(v8::Isolate* isolate,
-                                Persistent<Value> handle,
-                                void* id) {
-  WeakCallCounter* counter = reinterpret_cast<WeakCallCounter*>(id);
+                                Persistent<Object>* handle,
+                                WeakCallCounter* counter) {
   CHECK_EQ(1234, counter->id());
   counter->increment();
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
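From here on the weak-handle callbacks in this file switch from the old untyped signature (a Persistent<Value> passed by value plus a void* parameter) to the typed form shown just above: the callback receives a Persistent<Object>* and its parameter at the real type, so MakeWeak is called without reinterpret_cast. Minimal sketch of the new shape, using the same calls as the hunk above:

    #include <v8.h>

    struct Counter { int calls; };

    // Typed weak callback: the handle arrives as a pointer and the parameter
    // keeps its static type.
    static void OnWeak(v8::Isolate* isolate,
                       v8::Persistent<v8::Object>* handle,
                       Counter* counter) {
      counter->calls++;
      handle->Dispose(isolate);
    }

    // Usage, inside a HandleScope on an initialized isolate:
    //   Counter counter = { 0 };
    //   v8::Persistent<v8::Object> obj =
    //       v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
    //   obj.MakeWeak(isolate, &counter, &OnWeak);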
 
 
@@ -2532,23 +2589,23 @@ THREADED_TEST(OldApiObjectGroups) {
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
     g1c1 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1c1.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
     g2c1 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2c1.MakeWeak(iso, &counter, &WeakPointerCallback);
   }
 
   Persistent<Object> root = Persistent<Object>::New(iso, g1s1);  // make a root.
 
   // Connect group 1 and 2, make a cycle.
-  CHECK(g1s2->Set(0, g2s2));
-  CHECK(g2s1->Set(0, g1s1));
+  CHECK(g1s2->Set(0, Handle<Object>(*g2s2)));
+  CHECK(g2s1->Set(0, Handle<Object>(*g1s1)));
 
   {
     Persistent<Value> g1_objects[] = { g1s1, g1s2 };
@@ -2567,7 +2624,7 @@ THREADED_TEST(OldApiObjectGroups) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
   // But make children strong roots---all the objects (except for children)
   // should be collectable now.
   g1c1.ClearWeak(iso);
@@ -2591,8 +2648,8 @@ THREADED_TEST(OldApiObjectGroups) {
   CHECK_EQ(5, counter.NumberOfWeakCalls());
 
   // And now make children weak again and collect them.
-  g1c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-  g2c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  g1c1.MakeWeak(iso, &counter, &WeakPointerCallback);
+  g2c1.MakeWeak(iso, &counter, &WeakPointerCallback);
 
   HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
   CHECK_EQ(7, counter.NumberOfWeakCalls());
@@ -2618,23 +2675,23 @@ THREADED_TEST(ApiObjectGroups) {
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
     g1c1 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1c1.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
     g2c1 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2c1.MakeWeak(iso, &counter, &WeakPointerCallback);
   }
 
   Persistent<Object> root = Persistent<Object>::New(iso, g1s1);  // make a root.
 
   // Connect group 1 and 2, make a cycle.
-  CHECK(g1s2->Set(0, g2s2));
-  CHECK(g2s1->Set(0, g1s1));
+  CHECK(g1s2->Set(0, Local<Value>(*g2s2)));
+  CHECK(g2s1->Set(0, Local<Value>(*g1s1)));
 
   {
     UniqueId id1(reinterpret_cast<intptr_t>(*g1s1));
@@ -2655,7 +2712,7 @@ THREADED_TEST(ApiObjectGroups) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
   // But make children strong roots---all the objects (except for children)
   // should be collectable now.
   g1c1.ClearWeak(iso);
@@ -2679,8 +2736,8 @@ THREADED_TEST(ApiObjectGroups) {
   CHECK_EQ(5, counter.NumberOfWeakCalls());
 
   // And now make children weak again and collect them.
-  g1c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-  g2c1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  g1c1.MakeWeak(iso, &counter, &WeakPointerCallback);
+  g2c1.MakeWeak(iso, &counter, &WeakPointerCallback);
 
   heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
   CHECK_EQ(7, counter.NumberOfWeakCalls());
@@ -2707,29 +2764,29 @@ THREADED_TEST(OldApiObjectGroupsCycle) {
     HandleScope scope(iso);
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g1s1.IsWeak(iso));
     CHECK(g1s2.IsWeak(iso));
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g2s1.IsWeak(iso));
     CHECK(g2s2.IsWeak(iso));
 
     g3s1 = Persistent<Object>::New(iso, Object::New());
     g3s2 = Persistent<Object>::New(iso, Object::New());
-    g3s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g3s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g3s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g3s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g3s1.IsWeak(iso));
     CHECK(g3s2.IsWeak(iso));
 
     g4s1 = Persistent<Object>::New(iso, Object::New());
     g4s2 = Persistent<Object>::New(iso, Object::New());
-    g4s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g4s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g4s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g4s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g4s1.IsWeak(iso));
     CHECK(g4s2.IsWeak(iso));
   }
@@ -2764,7 +2821,7 @@ THREADED_TEST(OldApiObjectGroupsCycle) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
 
   // Groups are deleted, rebuild groups.
   {
@@ -2813,29 +2870,29 @@ THREADED_TEST(ApiObjectGroupsCycle) {
     HandleScope scope(iso);
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g1s1.IsWeak(iso));
     CHECK(g1s2.IsWeak(iso));
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g2s1.IsWeak(iso));
     CHECK(g2s2.IsWeak(iso));
 
     g3s1 = Persistent<Object>::New(iso, Object::New());
     g3s2 = Persistent<Object>::New(iso, Object::New());
-    g3s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g3s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g3s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g3s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g3s1.IsWeak(iso));
     CHECK(g3s2.IsWeak(iso));
 
     g4s1 = Persistent<Object>::New(iso, Object::New());
     g4s2 = Persistent<Object>::New(iso, Object::New());
-    g4s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g4s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g4s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g4s2.MakeWeak(iso, &counter, &WeakPointerCallback);
     CHECK(g4s1.IsWeak(iso));
     CHECK(g4s2.IsWeak(iso));
   }
@@ -2872,7 +2929,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
 
   // Groups are deleted, rebuild groups.
   {
@@ -2923,18 +2980,18 @@ TEST(OldApiObjectGroupsCycleForScavenger) {
     HandleScope scope(iso);
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g3s1 = Persistent<Object>::New(iso, Object::New());
     g3s2 = Persistent<Object>::New(iso, Object::New());
-    g3s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g3s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g3s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g3s2.MakeWeak(iso, &counter, &WeakPointerCallback);
   }
 
   // Make a root.
@@ -2955,11 +3012,11 @@ TEST(OldApiObjectGroupsCycleForScavenger) {
     Persistent<Value> g2_objects[] = { g2s1, g2s2 };
     Persistent<Value> g3_objects[] = { g3s1, g3s2 };
     V8::AddObjectGroup(g1_objects, 2);
-    g1s1->Set(v8_str("x"), g2s1);
+    g1s1->Set(v8_str("x"), Handle<Object>(*g2s1));
     V8::AddObjectGroup(g2_objects, 2);
-    g2s1->Set(v8_str("x"), g3s1);
+    g2s1->Set(v8_str("x"), Handle<Object>(*g3s1));
     V8::AddObjectGroup(g3_objects, 2);
-    g3s1->Set(v8_str("x"), g1s1);
+    g3s1->Set(v8_str("x"), Handle<Object>(*g1s1));
   }
 
   HEAP->CollectGarbage(i::NEW_SPACE);
@@ -2968,7 +3025,7 @@ TEST(OldApiObjectGroupsCycleForScavenger) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
   root.MarkPartiallyDependent(iso);
 
   v8::Isolate* isolate = v8::Isolate::GetCurrent();
@@ -2984,11 +3041,11 @@ TEST(OldApiObjectGroupsCycleForScavenger) {
     Persistent<Value> g2_objects[] = { g2s1, g2s2 };
     Persistent<Value> g3_objects[] = { g3s1, g3s2 };
     V8::AddObjectGroup(g1_objects, 2);
-    g1s1->Set(v8_str("x"), g2s1);
+    g1s1->Set(v8_str("x"), Handle<Object>(*g2s1));
     V8::AddObjectGroup(g2_objects, 2);
-    g2s1->Set(v8_str("x"), g3s1);
+    g2s1->Set(v8_str("x"), Handle<Object>(*g3s1));
     V8::AddObjectGroup(g3_objects, 2);
-    g3s1->Set(v8_str("x"), g1s1);
+    g3s1->Set(v8_str("x"), Handle<Object>(*g1s1));
   }
 
   HEAP->CollectGarbage(i::NEW_SPACE);
@@ -3020,18 +3077,18 @@ TEST(ApiObjectGroupsCycleForScavenger) {
     HandleScope scope(iso);
     g1s1 = Persistent<Object>::New(iso, Object::New());
     g1s2 = Persistent<Object>::New(iso, Object::New());
-    g1s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g1s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g1s2.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g2s1 = Persistent<Object>::New(iso, Object::New());
     g2s2 = Persistent<Object>::New(iso, Object::New());
-    g2s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g2s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g2s2.MakeWeak(iso, &counter, &WeakPointerCallback);
 
     g3s1 = Persistent<Object>::New(iso, Object::New());
     g3s2 = Persistent<Object>::New(iso, Object::New());
-    g3s1.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
-    g3s2.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g3s1.MakeWeak(iso, &counter, &WeakPointerCallback);
+    g3s2.MakeWeak(iso, &counter, &WeakPointerCallback);
   }
 
   // Make a root.
@@ -3050,13 +3107,13 @@ TEST(ApiObjectGroupsCycleForScavenger) {
     g3s2.MarkPartiallyDependent(iso);
     iso->SetObjectGroupId(g1s1, UniqueId(1));
     iso->SetObjectGroupId(g1s2, UniqueId(1));
-    g1s1->Set(v8_str("x"), g2s1);
+    g1s1->Set(v8_str("x"), Local<Value>(*g2s1));
     iso->SetObjectGroupId(g2s1, UniqueId(2));
     iso->SetObjectGroupId(g2s2, UniqueId(2));
-    g2s1->Set(v8_str("x"), g3s1);
+    g2s1->Set(v8_str("x"), Local<Value>(*g3s1));
     iso->SetObjectGroupId(g3s1, UniqueId(3));
     iso->SetObjectGroupId(g3s2, UniqueId(3));
-    g3s1->Set(v8_str("x"), g1s1);
+    g3s1->Set(v8_str("x"), Local<Value>(*g1s1));
   }
 
   v8::internal::Heap* heap = reinterpret_cast<v8::internal::Isolate*>(
@@ -3067,7 +3124,7 @@ TEST(ApiObjectGroupsCycleForScavenger) {
   CHECK_EQ(0, counter.NumberOfWeakCalls());
 
   // Weaken the root.
-  root.MakeWeak(iso, reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MakeWeak(iso, &counter, &WeakPointerCallback);
   root.MarkPartiallyDependent(iso);
 
   v8::Isolate* isolate = v8::Isolate::GetCurrent();
@@ -3081,13 +3138,13 @@ TEST(ApiObjectGroupsCycleForScavenger) {
     g3s2.MarkPartiallyDependent(isolate);
     iso->SetObjectGroupId(g1s1, UniqueId(1));
     iso->SetObjectGroupId(g1s2, UniqueId(1));
-    g1s1->Set(v8_str("x"), g2s1);
+    g1s1->Set(v8_str("x"), Local<Value>(*g2s1));
     iso->SetObjectGroupId(g2s1, UniqueId(2));
     iso->SetObjectGroupId(g2s2, UniqueId(2));
-    g2s1->Set(v8_str("x"), g3s1);
+    g2s1->Set(v8_str("x"), Local<Value>(*g3s1));
     iso->SetObjectGroupId(g3s1, UniqueId(3));
     iso->SetObjectGroupId(g3s2, UniqueId(3));
-    g3s1->Set(v8_str("x"), g1s1);
+    g3s1->Set(v8_str("x"), Local<Value>(*g1s1));
   }
 
   heap->CollectGarbage(i::NEW_SPACE);
@@ -4541,7 +4598,7 @@ THREADED_TEST(SimplePropertyWrite) {
   for (int i = 0; i < 10; i++) {
     CHECK(xValue.IsEmpty());
     script->Run();
-    CHECK_EQ(v8_num(4), xValue);
+    CHECK_EQ(v8_num(4), Handle<Value>(*xValue));
     xValue.Dispose(context->GetIsolate());
     xValue = v8::Persistent<Value>();
   }
@@ -4558,7 +4615,7 @@ THREADED_TEST(SetterOnly) {
   for (int i = 0; i < 10; i++) {
     CHECK(xValue.IsEmpty());
     script->Run();
-    CHECK_EQ(v8_num(4), xValue);
+    CHECK_EQ(v8_num(4), Handle<Value>(*xValue));
     xValue.Dispose(context->GetIsolate());
     xValue = v8::Persistent<Value>();
   }
@@ -4631,9 +4688,9 @@ THREADED_TEST(NamedInterceptorDictionaryIC) {
 
 
 THREADED_TEST(NamedInterceptorDictionaryICMultipleContext) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
-
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Local<Context> context1 = Context::New(isolate);
 
   context1->Enter();
   Local<ObjectTemplate> templ = ObjectTemplate::New();
@@ -4668,8 +4725,6 @@ THREADED_TEST(NamedInterceptorDictionaryICMultipleContext) {
   context1->Enter();
   CompileRun("var obj = { x : 0 }; delete obj.x;");
   context1->Exit();
-
-  context1.Dispose(context1->GetIsolate());
 }
 
 
@@ -5444,14 +5499,14 @@ static v8::Handle<Value> HandleLogDelegator(const v8::Arguments& args) {
 
 
 THREADED_TEST(GlobalObjectTemplate) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
   Local<ObjectTemplate> global_template = ObjectTemplate::New();
   global_template->Set(v8_str("JSNI_Log"),
                        v8::FunctionTemplate::New(HandleLogDelegator));
-  v8::Persistent<Context> context = Context::New(0, global_template);
+  v8::Local<Context> context = Context::New(isolate, 0, global_template);
   Context::Scope context_scope(context);
   Script::Compile(v8_str("JSNI_Log('LOG')"))->Run();
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -5466,7 +5521,8 @@ THREADED_TEST(SimpleExtensions) {
   v8::RegisterExtension(new Extension("simpletest", kSimpleExtensionSource));
   const char* extension_names[] = { "simpletest" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("Foo()"))->Run();
   CHECK_EQ(result, v8::Integer::New(4));
@@ -5478,7 +5534,8 @@ THREADED_TEST(NullExtensions) {
   v8::RegisterExtension(new Extension("nulltest", NULL));
   const char* extension_names[] = { "nulltest" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("1+3"))->Run();
   CHECK_EQ(result, v8::Integer::New(4));
@@ -5497,7 +5554,8 @@ THREADED_TEST(ExtensionMissingSourceLength) {
                                       kEmbeddedExtensionSource));
   const char* extension_names[] = { "srclentest_fail" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   CHECK_EQ(0, *context);
 }
 
@@ -5513,7 +5571,8 @@ THREADED_TEST(ExtensionWithSourceLength) {
                                         source_len));
     const char* extension_names[1] = { extension_name.start() };
     v8::ExtensionConfiguration extensions(1, extension_names);
-    v8::Handle<Context> context = Context::New(&extensions);
+    v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
     if (source_len == kEmbeddedExtensionSourceValidLen) {
       Context::Scope lock(context);
       v8::Handle<Value> result = Script::Compile(v8_str("Ret54321()"))->Run();
@@ -5549,7 +5608,8 @@ THREADED_TEST(UseEvalFromExtension) {
   v8::RegisterExtension(new Extension("evaltest2", kEvalExtensionSource2));
   const char* extension_names[] = { "evaltest1", "evaltest2" };
   v8::ExtensionConfiguration extensions(2, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("UseEval1()"))->Run();
   CHECK_EQ(result, v8::Integer::New(42));
@@ -5582,7 +5642,8 @@ THREADED_TEST(UseWithFromExtension) {
   v8::RegisterExtension(new Extension("withtest2", kWithExtensionSource2));
   const char* extension_names[] = { "withtest1", "withtest2" };
   v8::ExtensionConfiguration extensions(2, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("UseWith1()"))->Run();
   CHECK_EQ(result, v8::Integer::New(87));
@@ -5596,7 +5657,8 @@ THREADED_TEST(AutoExtensions) {
   Extension* extension = new Extension("autotest", kSimpleExtensionSource);
   extension->set_auto_enable(true);
   v8::RegisterExtension(extension);
-  v8::Handle<Context> context = Context::New();
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent());
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("Foo()"))->Run();
   CHECK_EQ(result, v8::Integer::New(4));
@@ -5615,7 +5677,8 @@ THREADED_TEST(SyntaxErrorExtensions) {
                                       kSyntaxErrorInExtensionSource));
   const char* extension_names[] = { "syntaxerror" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   CHECK(context.IsEmpty());
 }
 
@@ -5632,7 +5695,8 @@ THREADED_TEST(ExceptionExtensions) {
                                       kExceptionInExtensionSource));
   const char* extension_names[] = { "exception" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   CHECK(context.IsEmpty());
 }
 
@@ -5653,7 +5717,8 @@ THREADED_TEST(NativeCallInExtensions) {
                                       kNativeCallInExtensionSource));
   const char* extension_names[] = { "nativecall" };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str(kNativeCallTest))->Run();
   CHECK_EQ(result, v8::Integer::New(3));
@@ -5689,7 +5754,8 @@ THREADED_TEST(NativeFunctionDeclaration) {
                                                     "native function foo();"));
   const char* extension_names[] = { name };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope lock(context);
   v8::Handle<Value> result = Script::Compile(v8_str("foo(42);"))->Run();
   CHECK_EQ(result, v8::Integer::New(42));
@@ -5704,7 +5770,8 @@ THREADED_TEST(NativeFunctionDeclarationError) {
                                                     "native\nfunction foo();"));
   const char* extension_names[] = { name };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context(Context::New(&extensions));
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   CHECK(context.IsEmpty());
 }
 
@@ -5719,7 +5786,8 @@ THREADED_TEST(NativeFunctionDeclarationErrorEscape) {
       "nativ\\u0065 function foo();"));
   const char* extension_names[] = { name };
   v8::ExtensionConfiguration extensions(1, extension_names);
-  v8::Handle<Context> context(Context::New(&extensions));
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   CHECK(context.IsEmpty());
 }
 
@@ -5859,7 +5927,8 @@ TEST(ErrorReporting) {
   v8::RegisterExtension(new Extension("B", "", 1, bDeps));
   last_location = NULL;
   v8::ExtensionConfiguration config(1, bDeps);
-  v8::Handle<Context> context = Context::New(&config);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &config);
   CHECK(context.IsEmpty());
   CHECK_NE(last_location, NULL);
 }
@@ -5939,11 +6008,10 @@ class Whammy {
 };
 
 static void HandleWeakReference(v8::Isolate* isolate,
-                                v8::Persistent<v8::Value> obj,
-                                void* data) {
-  Snorkel* snorkel = reinterpret_cast<Snorkel*>(data);
+                                v8::Persistent<v8::Value>* obj,
+                                Snorkel* snorkel) {
   delete snorkel;
-  obj.ClearWeak(isolate);
+  obj->ClearWeak(isolate);
 }
 
 v8::Handle<Value> WhammyPropertyGetter(Local<String> name,
@@ -5958,7 +6026,9 @@ v8::Handle<Value> WhammyPropertyGetter(Local<String> name,
       v8::Persistent<v8::Object>::New(info.GetIsolate(), obj);
   if (!prev.IsEmpty()) {
     prev->Set(v8_str("next"), obj);
-    prev.MakeWeak(info.GetIsolate(), new Snorkel(), &HandleWeakReference);
+    prev.MakeWeak<Value, Snorkel>(info.GetIsolate(),
+                                  new Snorkel(),
+                                  &HandleWeakReference);
     whammy->objects_[whammy->cursor_].Clear();
   }
   whammy->objects_[whammy->cursor_] = global;
@@ -5975,7 +6045,8 @@ THREADED_TEST(WeakReference) {
                                  v8::External::New(whammy));
   const char* extension_list[] = { "v8/gc" };
   v8::ExtensionConfiguration extensions(1, extension_list);
-  v8::Persistent<Context> context = Context::New(&extensions);
+  v8::Handle<Context> context =
+      Context::New(v8::Isolate::GetCurrent(), &extensions);
   Context::Scope context_scope(context);
 
   v8::Handle<v8::Object> interceptor = templ->NewInstance();
@@ -5992,22 +6063,21 @@ THREADED_TEST(WeakReference) {
   v8::Handle<Value> result = CompileRun(code);
   CHECK_EQ(4.0, result->NumberValue());
   delete whammy;
-  context.Dispose(context->GetIsolate());
 }
 
 
 static void DisposeAndSetFlag(v8::Isolate* isolate,
-                              v8::Persistent<v8::Value> obj,
-                              void* data) {
-  obj.Dispose(isolate);
-  obj.Clear();
-  *(reinterpret_cast<bool*>(data)) = true;
+                              v8::Persistent<v8::Object>* obj,
+                              bool* data) {
+  obj->Dispose(isolate);
+  *(data) = true;
 }
 
 
 THREADED_TEST(IndependentWeakHandle) {
-  v8::Persistent<Context> context = Context::New();
-  v8::Isolate* iso = context->GetIsolate();
+  v8::Isolate* iso = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(iso);
+  v8::Handle<Context> context = Context::New(iso);
   Context::Scope context_scope(context);
 
   v8::Persistent<v8::Object> object_a, object_b;
@@ -6043,32 +6113,32 @@ static void InvokeMarkSweep() {
 
 
 static void ForceScavenge(v8::Isolate* isolate,
-                          v8::Persistent<v8::Value> obj,
-                          void* data) {
-  obj.Dispose(isolate);
-  obj.Clear();
-  *(reinterpret_cast<bool*>(data)) = true;
+                          v8::Persistent<v8::Object>* obj,
+                          bool* data) {
+  obj->Dispose(isolate);
+  *(data) = true;
   InvokeScavenge();
 }
 
 
 static void ForceMarkSweep(v8::Isolate* isolate,
-                           v8::Persistent<v8::Value> obj,
-                           void* data) {
-  obj.Dispose(isolate);
-  obj.Clear();
-  *(reinterpret_cast<bool*>(data)) = true;
+                           v8::Persistent<v8::Object>* obj,
+                           bool* data) {
+  obj->Dispose(isolate);
+  *(data) = true;
   InvokeMarkSweep();
 }
 
 
 THREADED_TEST(GCFromWeakCallbacks) {
-  v8::Persistent<Context> context = Context::New();
-  v8::Isolate* isolate = context->GetIsolate();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Handle<Context> context = Context::New(isolate);
   Context::Scope context_scope(context);
 
   static const int kNumberOfGCTypes = 2;
-  v8::NearDeathCallback gc_forcing_callback[kNumberOfGCTypes] =
+  typedef v8::WeakReferenceCallbacks<v8::Object, bool>::Revivable Callback;
+  Callback gc_forcing_callback[kNumberOfGCTypes] =
       {&ForceScavenge, &ForceMarkSweep};
 
   typedef void (*GCInvoker)();
@@ -6092,17 +6162,18 @@ THREADED_TEST(GCFromWeakCallbacks) {
 
 
 static void RevivingCallback(v8::Isolate* isolate,
-                             v8::Persistent<v8::Value> obj,
-                             void* data) {
-  obj.ClearWeak(isolate);
-  *(reinterpret_cast<bool*>(data)) = true;
+                             v8::Persistent<v8::Object>* obj,
+                             bool* data) {
+  obj->ClearWeak(isolate);
+  *(data) = true;
 }
 
 
 THREADED_TEST(IndependentHandleRevival) {
-  v8::Persistent<Context> context = Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Handle<Context> context = Context::New(isolate);
   Context::Scope context_scope(context);
-  v8::Isolate* isolate = context->GetIsolate();
 
   v8::Persistent<v8::Object> object;
   {
@@ -6543,7 +6614,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf));
   CHECK_EQ(5, len);
   len = str->Write(wbuf);
   CHECK_EQ(5, len);
@@ -6553,7 +6624,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 0, 4);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 0, 4);
   CHECK_EQ(4, len);
   len = str->Write(wbuf, 0, 4);
   CHECK_EQ(4, len);
@@ -6563,7 +6634,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 0, 5);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 0, 5);
   CHECK_EQ(5, len);
   len = str->Write(wbuf, 0, 5);
   CHECK_EQ(5, len);
@@ -6573,7 +6644,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 0, 6);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 0, 6);
   CHECK_EQ(5, len);
   len = str->Write(wbuf, 0, 6);
   CHECK_EQ(5, len);
@@ -6583,7 +6654,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 4, -1);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 4, -1);
   CHECK_EQ(1, len);
   len = str->Write(wbuf, 4, -1);
   CHECK_EQ(1, len);
@@ -6593,7 +6664,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 4, 6);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 4, 6);
   CHECK_EQ(1, len);
   len = str->Write(wbuf, 4, 6);
   CHECK_EQ(1, len);
@@ -6602,7 +6673,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 4, 1);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 4, 1);
   CHECK_EQ(1, len);
   len = str->Write(wbuf, 4, 1);
   CHECK_EQ(1, len);
@@ -6612,7 +6683,7 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
-  len = str->WriteAscii(buf, 3, 1);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf), 3, 1);
   CHECK_EQ(1, len);
   len = str->Write(wbuf, 3, 1);
   CHECK_EQ(1, len);
@@ -6634,7 +6705,10 @@ THREADED_TEST(StringWrite) {
 
   memset(buf, 0x1, sizeof(buf));
   buf[5] = 'X';
-  len = str->WriteAscii(buf, 0, 6, String::NO_NULL_TERMINATION);
+  len = str->WriteOneByte(reinterpret_cast<uint8_t*>(buf),
+                          0,
+                          6,
+                          String::NO_NULL_TERMINATION);
   CHECK_EQ(5, len);
   CHECK_EQ('X', buf[5]);
   CHECK_EQ(0, strncmp("abcde", buf, 5));
@@ -6665,18 +6739,13 @@ THREADED_TEST(StringWrite) {
   CHECK_EQ(0, strcmp(utf8buf, "abcde"));
 
   memset(buf, 0x1, sizeof(buf));
-  len = str3->WriteAscii(buf);
-  CHECK_EQ(7, len);
-  CHECK_EQ(0, strcmp("abc def", buf));
-
-  memset(buf, 0x1, sizeof(buf));
-  len = str3->WriteAscii(buf, 0, -1, String::PRESERVE_ASCII_NULL);
+  len = str3->WriteOneByte(reinterpret_cast<uint8_t*>(buf));
   CHECK_EQ(7, len);
   CHECK_EQ(0, strcmp("abc", buf));
   CHECK_EQ(0, buf[3]);
   CHECK_EQ(0, strcmp("def", buf + 4));
 
-  CHECK_EQ(0, str->WriteAscii(NULL, 0, 0, String::NO_NULL_TERMINATION));
+  CHECK_EQ(0, str->WriteOneByte(NULL, 0, 0, String::NO_NULL_TERMINATION));
   CHECK_EQ(0, str->WriteUtf8(NULL, 0, 0, String::NO_NULL_TERMINATION));
   CHECK_EQ(0, str->Write(NULL, 0, 0, String::NO_NULL_TERMINATION));
 }
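
The StringWrite changes above replace the removed String::WriteAscii(char*, start, length, options) with String::WriteOneByte, which takes a uint8_t* buffer but keeps the same start/length/options parameters and return value; unlike the removed PRESERVE_ASCII_NULL handling, embedded NUL characters are now copied through unchanged (see the str3 checks). A hedged sketch of the call-site translation, not part of the patch:

#include "v8.h"

// Old: str->WriteAscii(buf, 0, 6, v8::String::NO_NULL_TERMINATION);
// New: same positions and options, but the buffer is passed as uint8_t*.
int CopyFirstSixChars(v8::Handle<v8::String> str, char* buf) {
  return str->WriteOneByte(reinterpret_cast<uint8_t*>(buf),
                           0,                                // start index
                           6,                                // character budget
                           v8::String::NO_NULL_TERMINATION); // same option enum
}
// The return value is the number of characters written, as with WriteAscii.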
@@ -7157,8 +7226,8 @@ TEST(SecurityHandler) {
   global_template->SetAccessCheckCallbacks(NamedSecurityTestCallback,
                                            IndexedSecurityTestCallback);
   // Create an environment
-  v8::Persistent<Context> context0 =
-    Context::New(NULL, global_template);
+  v8::Handle<Context> context0 =
+    Context::New(v8::Isolate::GetCurrent(), NULL, global_template);
   context0->Enter();
 
   v8::Handle<v8::Object> global0 = context0->Global();
@@ -7173,8 +7242,8 @@ TEST(SecurityHandler) {
   // Create another environment, should fail security checks.
   v8::HandleScope scope1(v8::Isolate::GetCurrent());
 
-  v8::Persistent<Context> context1 =
-    Context::New(NULL, global_template);
+  v8::Handle<Context> context1 =
+    Context::New(v8::Isolate::GetCurrent(), NULL, global_template);
   context1->Enter();
 
   v8::Handle<v8::Object> global1 = context1->Global();
@@ -7206,17 +7275,14 @@ TEST(SecurityHandler) {
   }
 
   context1->Exit();
-  context1.Dispose(context1->GetIsolate());
-
   context0->Exit();
-  context0.Dispose(context0->GetIsolate());
 }
 
 
 THREADED_TEST(SecurityChecks) {
   LocalContext env1;
   v8::HandleScope handle_scope(env1->GetIsolate());
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<Value> foo = v8_str("foo");
   Local<Value> bar = v8_str("bar");
@@ -7252,8 +7318,6 @@ THREADED_TEST(SecurityChecks) {
     Function::Cast(*spy2)->Call(env2->Global(), 0, NULL);
     CHECK(try_catch.HasCaught());
   }
-
-  env2.Dispose(env2->GetIsolate());
 }
 
 
@@ -7261,7 +7325,7 @@ THREADED_TEST(SecurityChecks) {
 THREADED_TEST(SecurityChecksForPrototypeChain) {
   LocalContext current;
   v8::HandleScope scope(current->GetIsolate());
-  v8::Persistent<Context> other = Context::New();
+  v8::Handle<Context> other = Context::New(current->GetIsolate());
 
   // Change context to be able to get to the Object function in the
   // other context without hitting the security checks.
@@ -7322,14 +7386,13 @@ THREADED_TEST(SecurityChecksForPrototypeChain) {
     CHECK(!access_f3->Run()->Equals(v8_num(101)));
     CHECK(access_f3->Run()->IsUndefined());
   }
-  other.Dispose(other->GetIsolate());
 }
 
 
 THREADED_TEST(CrossDomainDelete) {
   LocalContext env1;
   v8::HandleScope handle_scope(env1->GetIsolate());
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<Value> foo = v8_str("foo");
   Local<Value> bar = v8_str("bar");
@@ -7354,15 +7417,13 @@ THREADED_TEST(CrossDomainDelete) {
   Local<Value> v = env1->Global()->Get(v8_str("prop"));
   CHECK(v->IsNumber());
   CHECK_EQ(3, v->Int32Value());
-
-  env2.Dispose(env2->GetIsolate());
 }
 
 
 THREADED_TEST(CrossDomainIsPropertyEnumerable) {
   LocalContext env1;
   v8::HandleScope handle_scope(env1->GetIsolate());
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<Value> foo = v8_str("foo");
   Local<Value> bar = v8_str("bar");
@@ -7389,15 +7450,13 @@ THREADED_TEST(CrossDomainIsPropertyEnumerable) {
     Local<Value> result = Script::Compile(test)->Run();
     CHECK(result->IsFalse());
   }
-
-  env2.Dispose(env2->GetIsolate());
 }
 
 
 THREADED_TEST(CrossDomainForIn) {
   LocalContext env1;
   v8::HandleScope handle_scope(env1->GetIsolate());
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<Value> foo = v8_str("foo");
   Local<Value> bar = v8_str("bar");
@@ -7423,14 +7482,13 @@ THREADED_TEST(CrossDomainForIn) {
                    "return true;})()");
     CHECK(result->IsTrue());
   }
-  env2.Dispose(env2->GetIsolate());
 }
 
 
 TEST(ContextDetachGlobal) {
   LocalContext env1;
   v8::HandleScope handle_scope(env1->GetIsolate());
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<v8::Object> global1 = env1->Global();
 
@@ -7457,8 +7515,10 @@ TEST(ContextDetachGlobal) {
   // env2 has a new global object.
   CHECK(!env2->Global()->Equals(global2));
 
-  v8::Persistent<Context> env3 =
-      Context::New(0, v8::Handle<v8::ObjectTemplate>(), global2);
+  v8::Handle<Context> env3 = Context::New(env1->GetIsolate(),
+                                          0,
+                                          v8::Handle<v8::ObjectTemplate>(),
+                                          global2);
   env3->SetSecurityToken(v8_str("bar"));
   env3->Enter();
 
@@ -7485,9 +7545,6 @@ TEST(ContextDetachGlobal) {
     Local<Value> r = global3->Get(v8_str("prop2"));
     CHECK(r->IsUndefined());
   }
-
-  env2.Dispose(env2->GetIsolate());
-  env3.Dispose(env3->GetIsolate());
 }
 
 
@@ -7496,7 +7553,7 @@ TEST(DetachAndReattachGlobal) {
   v8::HandleScope scope(env1->GetIsolate());
 
   // Create second environment.
-  v8::Persistent<Context> env2 = Context::New();
+  v8::Handle<Context> env2 = Context::New(env1->GetIsolate());
 
   Local<Value> foo = v8_str("foo");
 
@@ -7528,8 +7585,10 @@ TEST(DetachAndReattachGlobal) {
   CHECK(result->IsUndefined());
 
   // Reuse global2 for env3.
-  v8::Persistent<Context> env3 =
-      Context::New(0, v8::Handle<v8::ObjectTemplate>(), global2);
+  v8::Handle<Context> env3 = Context::New(env1->GetIsolate(),
+                                          0,
+                                          v8::Handle<v8::ObjectTemplate>(),
+                                          global2);
   CHECK_EQ(global2, env3->Global());
 
   // Start by using the same security token for env3 as for env1 and env2.
@@ -7564,9 +7623,6 @@ TEST(DetachAndReattachGlobal) {
   result = CompileRun("other.p");
   CHECK(result->IsInt32());
   CHECK_EQ(42, result->Int32Value());
-
-  env2.Dispose(env2->GetIsolate());
-  env3.Dispose(env3->GetIsolate());
 }
 
 
@@ -7618,7 +7674,8 @@ static void UnreachableSetter(Local<String>, Local<Value>,
 
 
 TEST(AccessControl) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
   v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
 
   global_template->SetAccessCheckCallbacks(NamedAccessBlocker,
@@ -7638,7 +7695,7 @@ TEST(AccessControl) {
                                v8::DEFAULT);
 
   // Create an environment
-  v8::Persistent<Context> context0 = Context::New(NULL, global_template);
+  v8::Local<Context> context0 = Context::New(isolate, NULL, global_template);
   context0->Enter();
 
   v8::Handle<v8::Object> global0 = context0->Global();
@@ -7664,9 +7721,9 @@ TEST(AccessControl) {
   Local<Value> el_getter = global0->Get(v8_str("el_getter"));
   Local<Value> el_setter = global0->Get(v8_str("el_setter"));
 
-  v8::HandleScope scope1(v8::Isolate::GetCurrent());
+  v8::HandleScope scope1(isolate);
 
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   v8::Handle<v8::Object> global1 = context1->Global();
@@ -7856,13 +7913,12 @@ TEST(AccessControl) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
 TEST(AccessControlES5) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
   v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
 
   global_template->SetAccessCheckCallbacks(NamedAccessBlocker,
@@ -7883,12 +7939,12 @@ TEST(AccessControlES5) {
                                v8::DEFAULT);
 
   // Create an environment
-  v8::Persistent<Context> context0 = Context::New(NULL, global_template);
+  v8::Local<Context> context0 = Context::New(isolate, NULL, global_template);
   context0->Enter();
 
   v8::Handle<v8::Object> global0 = context0->Global();
 
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
   v8::Handle<v8::Object> global1 = context1->Global();
   global1->Set(v8_str("other"), global0);
@@ -7948,7 +8004,8 @@ static bool GetOwnPropertyNamesIndexedBlocker(Local<v8::Object> global,
 
 
 THREADED_TEST(AccessControlGetOwnPropertyNames) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
   v8::Handle<v8::ObjectTemplate> obj_template = v8::ObjectTemplate::New();
 
   obj_template->Set(v8_str("x"), v8::Integer::New(42));
@@ -7956,14 +8013,14 @@ THREADED_TEST(AccessControlGetOwnPropertyNames) {
                                         GetOwnPropertyNamesIndexedBlocker);
 
   // Create an environment
-  v8::Persistent<Context> context0 = Context::New(NULL, obj_template);
+  v8::Local<Context> context0 = Context::New(isolate, NULL, obj_template);
   context0->Enter();
 
   v8::Handle<v8::Object> global0 = context0->Global();
 
   v8::HandleScope scope1(v8::Isolate::GetCurrent());
 
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   v8::Handle<v8::Object> global1 = context1->Global();
@@ -7985,8 +8042,6 @@ THREADED_TEST(AccessControlGetOwnPropertyNames) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -8042,7 +8097,8 @@ static v8::Handle<Value> ConstTenGetter(Local<String> name,
 
 
 THREADED_TEST(CrossDomainAccessors) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   v8::Handle<v8::FunctionTemplate> func_template = v8::FunctionTemplate::New();
 
@@ -8064,7 +8120,7 @@ THREADED_TEST(CrossDomainAccessors) {
                                v8::Handle<Value>(),
                                v8::DEFAULT);
 
-  v8::Persistent<Context> context0 = Context::New(NULL, global_template);
+  v8::Local<Context> context0 = Context::New(isolate, NULL, global_template);
   context0->Enter();
 
   Local<v8::Object> global = context0->Global();
@@ -8073,7 +8129,7 @@ THREADED_TEST(CrossDomainAccessors) {
 
   // Enter a new context.
   v8::HandleScope scope1(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   v8::Handle<v8::Object> global1 = context1->Global();
@@ -8089,8 +8145,6 @@ THREADED_TEST(CrossDomainAccessors) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -8120,10 +8174,11 @@ TEST(AccessControlIC) {
   named_access_count = 0;
   indexed_access_count = 0;
 
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   // Create an environment.
-  v8::Persistent<Context> context0 = Context::New();
+  v8::Local<Context> context0 = Context::New(isolate);
   context0->Enter();
 
   // Create an object that requires access-check functions to be
@@ -8133,10 +8188,10 @@ TEST(AccessControlIC) {
                                            IndexedAccessCounter);
   Local<v8::Object> object = object_template->NewInstance();
 
-  v8::HandleScope scope1(v8::Isolate::GetCurrent());
+  v8::HandleScope scope1(isolate);
 
   // Create another environment.
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   // Make easy access to the object from the other environment.
@@ -8224,8 +8279,6 @@ TEST(AccessControlIC) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -8239,7 +8292,7 @@ static bool NamedAccessFlatten(Local<v8::Object> global,
   CHECK(name->IsString());
 
   memset(buf, 0x1, sizeof(buf));
-  len = name.As<String>()->WriteAscii(buf);
+  len = name.As<String>()->WriteOneByte(reinterpret_cast<uint8_t*>(buf));
   CHECK_EQ(4, len);
 
   uint16_t buf2[100];
@@ -8269,10 +8322,11 @@ THREADED_TEST(AccessControlFlatten) {
   named_access_count = 0;
   indexed_access_count = 0;
 
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   // Create an environment.
-  v8::Persistent<Context> context0 = Context::New();
+  v8::Local<Context> context0 = Context::New(isolate);
   context0->Enter();
 
   // Create an object that requires access-check functions to be
@@ -8282,10 +8336,10 @@ THREADED_TEST(AccessControlFlatten) {
                                            IndexedAccessFlatten);
   Local<v8::Object> object = object_template->NewInstance();
 
-  v8::HandleScope scope1(v8::Isolate::GetCurrent());
+  v8::HandleScope scope1(isolate);
 
   // Create another environment.
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   // Make easy access to the object from the other environment.
@@ -8299,8 +8353,6 @@ THREADED_TEST(AccessControlFlatten) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -8333,10 +8385,11 @@ THREADED_TEST(AccessControlInterceptorIC) {
   named_access_count = 0;
   indexed_access_count = 0;
 
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   // Create an environment.
-  v8::Persistent<Context> context0 = Context::New();
+  v8::Local<Context> context0 = Context::New(isolate);
   context0->Enter();
 
   // Create an object that requires access-check functions to be
@@ -8351,10 +8404,10 @@ THREADED_TEST(AccessControlInterceptorIC) {
                                              AccessControlIndexedSetter);
   Local<v8::Object> object = object_template->NewInstance();
 
-  v8::HandleScope scope1(v8::Isolate::GetCurrent());
+  v8::HandleScope scope1(isolate);
 
   // Create another environment.
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context1 = Context::New(isolate);
   context1->Enter();
 
   // Make easy access to the object from the other environment.
@@ -8391,8 +8444,6 @@ THREADED_TEST(AccessControlInterceptorIC) {
 
   context1->Exit();
   context0->Exit();
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -9253,10 +9304,11 @@ THREADED_TEST(CrossEval) {
 // its global throws an exception.  This behavior is consistent with
 // other JavaScript implementations.
 THREADED_TEST(EvalInDetachedGlobal) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
 
-  v8::Persistent<Context> context0 = Context::New();
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Local<Context> context0 = Context::New(isolate);
+  v8::Local<Context> context1 = Context::New(isolate);
 
   // Set up function in context0 that uses eval from context0.
   context0->Enter();
@@ -9281,9 +9333,6 @@ THREADED_TEST(EvalInDetachedGlobal) {
   CHECK(x_value.IsEmpty());
   CHECK(catcher.HasCaught());
   context1->Exit();
-
-  context1.Dispose(context1->GetIsolate());
-  context0.Dispose(context0->GetIsolate());
 }
 
 
@@ -9635,7 +9684,7 @@ static v8::Handle<Value> InterceptorLoadXICGetter(Local<String> name,
                                                  const AccessorInfo& info) {
   ApiTestFuzzer::Fuzz();
   return v8_str("x")->Equals(name)
-      ? v8::Integer::New(42) : v8::Handle<v8::Value>();
+      ? v8::Handle<v8::Value>(v8::Integer::New(42)) : v8::Handle<v8::Value>();
 }
 
 
@@ -11804,11 +11853,11 @@ v8::Persistent<v8::Object> some_object;
 v8::Persistent<v8::Object> bad_handle;
 
 void NewPersistentHandleCallback(v8::Isolate* isolate,
-                                 v8::Persistent<v8::Value> handle,
+                                 v8::Persistent<v8::Value>* handle,
                                  void*) {
   v8::HandleScope scope(isolate);
   bad_handle = v8::Persistent<v8::Object>::New(isolate, some_object);
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -11827,7 +11876,9 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
   // global handle nodes are processed by PostGarbageCollectionProcessing
   // in reverse allocation order, so if second allocated handle is deleted,
   // weak callback of the first handle would be able to 'reallocate' it.
-  handle1.MakeWeak(isolate, NULL, NewPersistentHandleCallback);
+  handle1.MakeWeak<v8::Value, void>(isolate,
+                                    NULL,
+                                    NewPersistentHandleCallback);
   handle2.Dispose(isolate);
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
 }
@@ -11836,11 +11887,11 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
 v8::Persistent<v8::Object> to_be_disposed;
 
 void DisposeAndForceGcCallback(v8::Isolate* isolate,
-                               v8::Persistent<v8::Value> handle,
+                               v8::Persistent<v8::Value>* handle,
                                void*) {
   to_be_disposed.Dispose(isolate);
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -11854,23 +11905,23 @@ THREADED_TEST(DoNotUseDeletedNodesInSecondLevelGc) {
     handle1 = v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
     handle2 = v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
   }
-  handle1.MakeWeak(isolate, NULL, DisposeAndForceGcCallback);
+  handle1.MakeWeak<v8::Value, void>(isolate, NULL, DisposeAndForceGcCallback);
   to_be_disposed = handle2;
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
 }
 
 void DisposingCallback(v8::Isolate* isolate,
-                       v8::Persistent<v8::Value> handle,
+                       v8::Persistent<v8::Value>* handle,
                        void*) {
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 void HandleCreatingCallback(v8::Isolate* isolate,
-                            v8::Persistent<v8::Value> handle,
+                            v8::Persistent<v8::Value>* handle,
                             void*) {
   v8::HandleScope scope(isolate);
   v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -11885,8 +11936,8 @@ THREADED_TEST(NoGlobalHandlesOrphaningDueToWeakCallback) {
     handle2 = v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
     handle1 = v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
   }
-  handle2.MakeWeak(isolate, NULL, DisposingCallback);
-  handle3.MakeWeak(isolate, NULL, HandleCreatingCallback);
+  handle2.MakeWeak<v8::Value, void>(isolate, NULL, DisposingCallback);
+  handle3.MakeWeak<v8::Value, void>(isolate, NULL, HandleCreatingCallback);
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
 }
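
The weak-handle hunks above and below track the API change in v8.h: a weak callback now receives a pointer to the Persistent handle plus a typed parameter, and MakeWeak is a template over the handle and parameter types (the GCFromWeakCallbacks hunk spells the callback type as v8::WeakReferenceCallbacks<T, P>::Revivable). A minimal sketch of the new shape, based on the signatures used in these tests; the Payload struct is illustrative:

#include "v8.h"

struct Payload { int id; };  // illustrative parameter type

// New shape: the handle arrives as Persistent<T>*, the parameter is typed.
static void OnWeak(v8::Isolate* isolate,
                   v8::Persistent<v8::Object>* handle,
                   Payload* payload) {
  delete payload;
  handle->Dispose(isolate);  // previously handle.Dispose(isolate)
}

static void MakeItWeak(v8::Isolate* isolate,
                       v8::Persistent<v8::Object>& handle) {
  // MakeWeak is now a template over <handle type, parameter type>.
  handle.MakeWeak<v8::Object, Payload>(isolate, new Payload(), &OnWeak);
}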
 
@@ -11914,7 +11965,7 @@ THREADED_TEST(CheckForCrossContextObjectLiterals) {
 }
 
 
-static v8::Handle<Value> NestedScope(v8::Persistent<Context> env) {
+static v8::Handle<Value> NestedScope(v8::Local<Context> env) {
   v8::HandleScope inner(env->GetIsolate());
   env->Enter();
   v8::Handle<Value> three = v8_num(3);
@@ -11925,14 +11976,14 @@ static v8::Handle<Value> NestedScope(v8::Persistent<Context> env) {
 
 
 THREADED_TEST(NestedHandleScopeAndContexts) {
-  v8::HandleScope outer(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> env = Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer(isolate);
+  v8::Local<Context> env = Context::New(isolate);
   env->Enter();
   v8::Handle<Value> value = NestedScope(env);
   v8::Handle<String> str(value->ToString());
   CHECK(!str.IsEmpty());
   env->Exit();
-  env.Dispose(env->GetIsolate());
 }
 
 
@@ -11964,8 +12015,9 @@ static void RunLoopInNewEnv() {
   bar_ptr = NULL;
   foo_ptr = NULL;
 
-  v8::HandleScope outer(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> env = Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer(isolate);
+  v8::Local<Context> env = Context::New(isolate);
   env->Enter();
 
   const char* script =
@@ -12188,6 +12240,7 @@ static bool MatchPointers(void* key1, void* key2) {
 
 TEST(SetJitCodeEventHandler) {
   i::FLAG_stress_compaction = true;
+  i::FLAG_incremental_marking = false;
   const char* script =
     "function bar() {"
     "  var sum = 0;"
@@ -12290,22 +12343,29 @@ static int64_t cast(intptr_t x) { return static_cast<int64_t>(x); }
 
 
 THREADED_TEST(ExternalAllocatedMemory) {
-  v8::HandleScope outer(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> env(Context::New());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer(isolate);
+  v8::Local<Context> env(Context::New(isolate));
   CHECK(!env.IsEmpty());
   const intptr_t kSize = 1024*1024;
-  v8::Isolate* isolate = env->GetIsolate();
-  CHECK_EQ(cast(isolate->AdjustAmountOfExternalAllocatedMemory(kSize)),
-           cast(kSize));
-  CHECK_EQ(cast(isolate->AdjustAmountOfExternalAllocatedMemory(-kSize)),
-           cast(0));
+  int64_t baseline = cast(isolate->AdjustAmountOfExternalAllocatedMemory(0));
+  CHECK_EQ(baseline + cast(kSize),
+           cast(isolate->AdjustAmountOfExternalAllocatedMemory(kSize)));
+  CHECK_EQ(baseline,
+           cast(isolate->AdjustAmountOfExternalAllocatedMemory(-kSize)));
 }
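
The ExternalAllocatedMemory hunk above also stops assuming the counter starts at zero: AdjustAmountOfExternalAllocatedMemory(0) reads the current total without changing it, and the +kSize/-kSize adjustments are then checked as deltas from that baseline. A small sketch of the same idea, illustrative only:

#include "v8.h"

void CheckExternalMemoryAccounting(v8::Isolate* isolate) {
  const intptr_t kSize = 1024 * 1024;
  // Adjusting by 0 returns the current total, giving a baseline that is
  // robust against memory already reported by earlier tests.
  int64_t baseline = isolate->AdjustAmountOfExternalAllocatedMemory(0);
  int64_t grown    = isolate->AdjustAmountOfExternalAllocatedMemory(kSize);
  int64_t shrunk   = isolate->AdjustAmountOfExternalAllocatedMemory(-kSize);
  // Expected: grown == baseline + kSize and shrunk == baseline.
  (void)grown;
  (void)shrunk;
}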
 
 
 THREADED_TEST(DisposeEnteredContext) {
   LocalContext outer;
-  v8::HandleScope scope(outer->GetIsolate());
-  { v8::Persistent<v8::Context> inner = v8::Context::New();
+  v8::Isolate* isolate = outer->GetIsolate();
+  v8::Persistent<v8::Context> inner;
+  {
+    v8::HandleScope scope(isolate);
+    inner.Reset(isolate, v8::Context::New(isolate));
+  }
+  v8::HandleScope scope(isolate);
+  {
     inner->Enter();
     inner.Dispose(inner->GetIsolate());
     inner.Clear();
@@ -12625,7 +12685,8 @@ THREADED_TEST(AccessChecksReenabledCorrectly) {
 // This tests that access check information remains on the global
 // object template when creating contexts.
 THREADED_TEST(AccessControlRepeatedContextCreation) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
   v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
   global_template->SetAccessCheckCallbacks(NamedSetAccessBlocker,
                                            IndexedSetAccessBlocker);
@@ -12635,14 +12696,15 @@ THREADED_TEST(AccessControlRepeatedContextCreation) {
   i::Handle<i::FunctionTemplateInfo> constructor(
       i::FunctionTemplateInfo::cast(internal_template->constructor()));
   CHECK(!constructor->access_check_info()->IsUndefined());
-  v8::Persistent<Context> context0(Context::New(NULL, global_template));
+  v8::Local<Context> context0(Context::New(isolate, NULL, global_template));
   CHECK(!context0.IsEmpty());
   CHECK(!constructor->access_check_info()->IsUndefined());
 }
 
 
 THREADED_TEST(TurnOnAccessCheck) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   // Create an environment with access check to the global object disabled by
   // default.
@@ -12651,7 +12713,7 @@ THREADED_TEST(TurnOnAccessCheck) {
                                            IndexedGetAccessBlocker,
                                            v8::Handle<v8::Value>(),
                                            false);
-  v8::Persistent<Context> context = Context::New(NULL, global_template);
+  v8::Local<Context> context = Context::New(isolate, NULL, global_template);
   Context::Scope context_scope(context);
 
   // Set up a property and a number of functions.
@@ -12722,7 +12784,8 @@ static bool NamedGetAccessBlockAandH(Local<v8::Object> obj,
 
 
 THREADED_TEST(TurnOnAccessCheckAndRecompile) {
-  v8::HandleScope handle_scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
 
   // Create an environment with access check to the global object disabled by
   // default. When the registered access checker will block access to properties
@@ -12732,7 +12795,7 @@ THREADED_TEST(TurnOnAccessCheckAndRecompile) {
                                            IndexedGetAccessBlocker,
                                            v8::Handle<v8::Value>(),
                                            false);
-  v8::Persistent<Context> context = Context::New(NULL, global_template);
+  v8::Local<Context> context = Context::New(isolate, NULL, global_template);
   Context::Scope context_scope(context);
 
   // Set up a property and a number of functions.
@@ -12991,9 +13054,10 @@ THREADED_TEST(DictionaryICLoadedFunction) {
 // Test that cross-context new calls use the context of the callee to
 // create the new JavaScript object.
 THREADED_TEST(CrossContextNew) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> context0 = Context::New();
-  v8::Persistent<Context> context1 = Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Local<Context> context0 = Context::New(isolate);
+  v8::Local<Context> context1 = Context::New(isolate);
 
   // Allow cross-domain access.
   Local<String> token = v8_str("<security token>");
@@ -13014,10 +13078,6 @@ THREADED_TEST(CrossContextNew) {
   CHECK(value->IsInt32());
   CHECK_EQ(42, value->Int32Value());
   context1->Exit();
-
-  // Dispose the contexts to allow them to be garbage collected.
-  context0.Dispose(context0->GetIsolate());
-  context1.Dispose(context1->GetIsolate());
 }
 
 
@@ -13798,9 +13858,10 @@ THREADED_TEST(ForceDeleteIC) {
 
 TEST(InlinedFunctionAcrossContexts) {
   i::FLAG_allow_natives_syntax = true;
-  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
-  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer_scope(isolate);
+  v8::Local<v8::Context> ctx1 = v8::Context::New(isolate);
+  v8::Local<v8::Context> ctx2 = v8::Context::New(isolate);
   ctx1->Enter();
 
   {
@@ -13830,15 +13891,13 @@ TEST(InlinedFunctionAcrossContexts) {
         "ReferenceError: G is not defined");
     ctx2->Exit();
     ctx1->Exit();
-    ctx1.Dispose(ctx1->GetIsolate());
   }
-  ctx2.Dispose(ctx2->GetIsolate());
 }
 
 
-v8::Persistent<Context> calling_context0;
-v8::Persistent<Context> calling_context1;
-v8::Persistent<Context> calling_context2;
+static v8::Local<Context> calling_context0;
+static v8::Local<Context> calling_context1;
+static v8::Local<Context> calling_context2;
 
 
 // Check that the call to the callback is initiated in
@@ -13855,11 +13914,15 @@ static v8::Handle<Value> GetCallingContextCallback(const v8::Arguments& args) {
 
 
 THREADED_TEST(GetCallingContext) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
 
-  calling_context0 = Context::New();
-  calling_context1 = Context::New();
-  calling_context2 = Context::New();
+  Local<Context> calling_context0(Context::New(isolate));
+  Local<Context> calling_context1(Context::New(isolate));
+  Local<Context> calling_context2(Context::New(isolate));
+  ::calling_context0 = calling_context0;
+  ::calling_context1 = calling_context1;
+  ::calling_context2 = calling_context2;
 
   // Allow cross-domain access.
   Local<String> token = v8_str("<security token>");
@@ -13890,14 +13953,9 @@ THREADED_TEST(GetCallingContext) {
                                   calling_context1->Global());
   CompileRun("context1.f()");
   calling_context2->Exit();
-
-  // Dispose the contexts to allow them to be garbage collected.
-  calling_context0.Dispose(calling_context0->GetIsolate());
-  calling_context1.Dispose(calling_context1->GetIsolate());
-  calling_context2.Dispose(calling_context2->GetIsolate());
-  calling_context0.Clear();
-  calling_context1.Clear();
-  calling_context2.Clear();
+  ::calling_context0.Clear();
+  ::calling_context1.Clear();
+  ::calling_context2.Clear();
 }
 
 
@@ -14435,39 +14493,13 @@ static int ExternalArrayElementSize(v8::ExternalArrayType array_type) {
 
 
 template <class ExternalArrayClass, class ElementType>
-static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
-                                    int64_t low,
-                                    int64_t high) {
-  LocalContext context;
-  v8::HandleScope scope(context->GetIsolate());
-  const int kElementCount = 40;
-  int element_size = ExternalArrayElementSize(array_type);
-  ElementType* array_data =
-      static_cast<ElementType*>(malloc(kElementCount * element_size));
-  i::Handle<ExternalArrayClass> array =
-      i::Handle<ExternalArrayClass>::cast(
-          FACTORY->NewExternalArray(kElementCount, array_type, array_data));
-  // Force GC to trigger verification.
-  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
-  for (int i = 0; i < kElementCount; i++) {
-    array->set(i, static_cast<ElementType>(i));
-  }
-  // Force GC to trigger verification.
-  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
-  for (int i = 0; i < kElementCount; i++) {
-    CHECK_EQ(static_cast<int64_t>(i),
-             static_cast<int64_t>(array->get_scalar(i)));
-    CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
-  }
-
-  v8::Handle<v8::Object> obj = v8::Object::New();
+static void ObjectWithExternalArrayTestHelper(
+    Handle<Context> context,
+    v8::Handle<Object> obj,
+    int element_count,
+    v8::ExternalArrayType array_type,
+    int64_t low, int64_t high) {
   i::Handle<i::JSObject> jsobj = v8::Utils::OpenHandle(*obj);
-  // Set the elements to be the external array.
-  obj->SetIndexedPropertiesToExternalArrayData(array_data,
-                                               array_type,
-                                               kElementCount);
-  CHECK_EQ(
-      1, static_cast<int>(jsobj->GetElement(1)->ToObjectChecked()->Number()));
   obj->Set(v8_str("field"), v8::Int32::New(1503));
   context->Global()->Set(v8_str("ext_array"), obj);
   v8::Handle<v8::Value> result = CompileRun("ext_array.field");
@@ -14584,7 +14616,7 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
                   "  caught_exception = true;"
                   "}"
                   "caught_exception;",
-                  kElementCount);
+                  element_count);
   result = CompileRun(test_buf.start());
   CHECK_EQ(false, result->BooleanValue());
 
@@ -14597,7 +14629,7 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
                   "  caught_exception = true;"
                   "}"
                   "caught_exception;",
-                  kElementCount);
+                  element_count);
   result = CompileRun(test_buf.start());
   CHECK_EQ(false, result->BooleanValue());
 
@@ -14698,9 +14730,12 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
     CHECK_EQ(true, result->BooleanValue());
   }
 
-  for (int i = 0; i < kElementCount; i++) {
+  i::Handle<ExternalArrayClass> array(
+      ExternalArrayClass::cast(jsobj->elements()));
+  for (int i = 0; i < element_count; i++) {
     array->set(i, static_cast<ElementType>(i));
   }
+
   // Test complex assignments
   result = CompileRun("function ee_op_test_complex_func(sum) {"
                       " for (var i = 0; i < 40; ++i) {"
@@ -14757,6 +14792,48 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
 
   result = CompileRun("ext_array[1] = 23;");
   CHECK_EQ(23, result->Int32Value());
+}
+
+
+template <class ExternalArrayClass, class ElementType>
+static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
+                                    int64_t low,
+                                    int64_t high) {
+  LocalContext context;
+  v8::HandleScope scope(context->GetIsolate());
+  const int kElementCount = 40;
+  int element_size = ExternalArrayElementSize(array_type);
+  ElementType* array_data =
+      static_cast<ElementType*>(malloc(kElementCount * element_size));
+  i::Handle<ExternalArrayClass> array =
+      i::Handle<ExternalArrayClass>::cast(
+          FACTORY->NewExternalArray(kElementCount, array_type, array_data));
+  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+  for (int i = 0; i < kElementCount; i++) {
+    array->set(i, static_cast<ElementType>(i));
+  }
+  // Force GC to trigger verification.
+  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+  for (int i = 0; i < kElementCount; i++) {
+    CHECK_EQ(static_cast<int64_t>(i),
+             static_cast<int64_t>(array->get_scalar(i)));
+    CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
+  }
+
+  v8::Handle<v8::Object> obj = v8::Object::New();
+  i::Handle<i::JSObject> jsobj = v8::Utils::OpenHandle(*obj);
+  // Set the elements to be the external array.
+  obj->SetIndexedPropertiesToExternalArrayData(array_data,
+                                               array_type,
+                                               kElementCount);
+  CHECK_EQ(
+      1, static_cast<int>(jsobj->GetElement(1)->ToObjectChecked()->Number()));
+
+  ObjectWithExternalArrayTestHelper<ExternalArrayClass, ElementType>(
+      context.local(), obj, kElementCount, array_type, low, high);
+
+  v8::Handle<v8::Value> result;
 
   // Test more complex manipulations which cause eax to contain values
   // that won't be completely overwritten by loads from the arrays.
@@ -15074,6 +15151,122 @@ TEST(ExternalArrayLimits) {
 }
 
 
+template <typename ElementType, typename TypedArray,
+          class ExternalArrayClass>
+void TypedArrayTestHelper(v8::ExternalArrayType array_type,
+                          int64_t low, int64_t high) {
+  const int kElementCount = 50;
+  i::FLAG_harmony_array_buffer = true;
+  i::FLAG_harmony_typed_arrays = true;
+
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  Local<v8::ArrayBuffer> ab =
+      v8::ArrayBuffer::New((kElementCount+2)*sizeof(ElementType));
+  Local<TypedArray> ta =
+      TypedArray::New(ab, 2*sizeof(ElementType), kElementCount);
+  CHECK_EQ(kElementCount, static_cast<int>(ta->Length()));
+  CHECK_EQ(2*sizeof(ElementType), static_cast<int>(ta->ByteOffset()));
+  CHECK_EQ(kElementCount*sizeof(ElementType),
+           static_cast<int>(ta->ByteLength()));
+  CHECK_EQ(ab, ta->Buffer());
+
+  ElementType* data = static_cast<ElementType*>(ab->Data()) + 2;
+  for (int i = 0; i < kElementCount; i++) {
+    data[i] = static_cast<ElementType>(i);
+  }
+
+  ObjectWithExternalArrayTestHelper<ExternalArrayClass, ElementType>(
+      env.local(), ta, kElementCount, array_type, low, high);
+}
+
+
+THREADED_TEST(Uint8Array) {
+  TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::ExternalUnsignedByteArray>(
+      v8::kExternalUnsignedByteArray, 0, 0xFF);
+}
+
+
+THREADED_TEST(Int8Array) {
+  TypedArrayTestHelper<int8_t, v8::Int8Array, i::ExternalByteArray>(
+      v8::kExternalByteArray, -0x80, 0x7F);
+}
+
+
+THREADED_TEST(Uint16Array) {
+  TypedArrayTestHelper<uint16_t,
+                       v8::Uint16Array,
+                       i::ExternalUnsignedShortArray>(
+      v8::kExternalUnsignedShortArray, 0, 0xFFFF);
+}
+
+
+THREADED_TEST(Int16Array) {
+  TypedArrayTestHelper<int16_t, v8::Int16Array, i::ExternalShortArray>(
+      v8::kExternalShortArray, -0x8000, 0x7FFF);
+}
+
+
+THREADED_TEST(Uint32Array) {
+  TypedArrayTestHelper<uint32_t, v8::Uint32Array, i::ExternalUnsignedIntArray>(
+      v8::kExternalUnsignedIntArray, 0, UINT_MAX);
+}
+
+
+THREADED_TEST(Int32Array) {
+  TypedArrayTestHelper<int32_t, v8::Int32Array, i::ExternalIntArray>(
+      v8::kExternalIntArray, INT_MIN, INT_MAX);
+}
+
+
+THREADED_TEST(Float32Array) {
+  TypedArrayTestHelper<float, v8::Float32Array, i::ExternalFloatArray>(
+      v8::kExternalFloatArray, -500, 500);
+}
+
+
+THREADED_TEST(Float64Array) {
+  TypedArrayTestHelper<double, v8::Float64Array, i::ExternalDoubleArray>(
+      v8::kExternalDoubleArray, -500, 500);
+}
+
+
+THREADED_TEST(Uint8ClampedArray) {
+  TypedArrayTestHelper<uint8_t, v8::Uint8ClampedArray, i::ExternalPixelArray>(
+      v8::kExternalPixelArray, 0, 0xFF);
+}
+
+
+#define IS_TYPED_ARRAY_TEST(TypedArray) \
+  THREADED_TEST(Is##TypedArray) {                                             \
+    i::FLAG_harmony_array_buffer = true;                                      \
+    i::FLAG_harmony_typed_arrays = true;                                      \
+    LocalContext env;                                                         \
+    v8::Isolate* isolate = env->GetIsolate();                                 \
+    v8::HandleScope handle_scope(isolate);                                    \
+                                                                              \
+    Handle<Value> result = CompileRun(                                        \
+        "var ab = new ArrayBuffer(128);"                                      \
+        "new " #TypedArray "(ab)");                                           \
+    CHECK(result->Is##TypedArray());                                          \
+  }
+
+IS_TYPED_ARRAY_TEST(Uint8Array)
+IS_TYPED_ARRAY_TEST(Int8Array)
+IS_TYPED_ARRAY_TEST(Uint16Array)
+IS_TYPED_ARRAY_TEST(Int16Array)
+IS_TYPED_ARRAY_TEST(Uint32Array)
+IS_TYPED_ARRAY_TEST(Int32Array)
+IS_TYPED_ARRAY_TEST(Float32Array)
+IS_TYPED_ARRAY_TEST(Float64Array)
+IS_TYPED_ARRAY_TEST(Uint8ClampedArray)
+
+#undef IS_TYPED_ARRAY_TEST
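
The block added above is the first cctest coverage of the Harmony ArrayBuffer/typed-array API: an ArrayBuffer is allocated by byte length, a view is created over it with a byte offset and element count, and Length/ByteOffset/ByteLength/Buffer/Data expose the underlying geometry. A condensed sketch of the API as exercised here, with Uint32Array standing in for the templated TypedArray parameter (the tests enable i::FLAG_harmony_array_buffer and i::FLAG_harmony_typed_arrays first):

#include "v8.h"

void TypedArraySketch(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);

  const int kElementCount = 50;
  v8::Local<v8::ArrayBuffer> ab =
      v8::ArrayBuffer::New((kElementCount + 2) * sizeof(uint32_t));
  v8::Local<v8::Uint32Array> ta =
      v8::Uint32Array::New(ab, 2 * sizeof(uint32_t), kElementCount);

  // Geometry accessors mirror the CHECKs in TypedArrayTestHelper above:
  //   ta->Length()     == kElementCount
  //   ta->ByteOffset() == 2 * sizeof(uint32_t)
  //   ta->ByteLength() == kElementCount * sizeof(uint32_t)
  //   ta->Buffer()     == ab
  uint32_t* data = static_cast<uint32_t*>(ab->Data()) + 2;
  for (int i = 0; i < kElementCount; i++) data[i] = i;  // backing store access
}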
+
+
+
 THREADED_TEST(ScriptContextDependence) {
   LocalContext c1;
   v8::HandleScope scope(c1->GetIsolate());
@@ -15607,17 +15800,20 @@ TEST(Regress2107) {
   const int kShortIdlePauseInMs = 100;
   const int kLongIdlePauseInMs = 1000;
   LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(env->GetIsolate());
   intptr_t initial_size = HEAP->SizeOfObjects();
   // Send idle notification to start a round of incremental GCs.
   v8::V8::IdleNotification(kShortIdlePauseInMs);
   // Emulate 7 page reloads.
   for (int i = 0; i < 7; i++) {
-    v8::Persistent<v8::Context> ctx = v8::Context::New();
-    ctx->Enter();
-    CreateGarbageInOldSpace();
-    ctx->Exit();
-    ctx.Dispose(ctx->GetIsolate());
+    {
+      v8::HandleScope inner_scope(env->GetIsolate());
+      v8::Local<v8::Context> ctx = v8::Context::New(isolate);
+      ctx->Enter();
+      CreateGarbageInOldSpace();
+      ctx->Exit();
+    }
     v8::V8::ContextDisposedNotification();
     v8::V8::IdleNotification(kLongIdlePauseInMs);
   }
@@ -15946,22 +16142,21 @@ THREADED_TEST(SpaghettiStackReThrow) {
 
 TEST(Regress528) {
   v8::V8::Initialize();
-
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
-  v8::Persistent<Context> context;
-  v8::Persistent<Context> other_context;
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Local<Context> other_context;
   int gc_count;
 
   // Create a context used to keep the code from aging in the compilation
   // cache.
-  other_context = Context::New();
+  other_context = Context::New(isolate);
 
   // Context-dependent context data creates reference from the compilation
   // cache to the global object.
   const char* source_simple = "1";
-  context = Context::New();
   {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::HandleScope scope(isolate);
+    v8::Local<Context> context = Context::New(isolate);
 
     context->Enter();
     Local<v8::String> obj = v8::String::New("");
@@ -15969,7 +16164,6 @@ TEST(Regress528) {
     CompileRun(source_simple);
     context->Exit();
   }
-  context.Dispose(context->GetIsolate());
   v8::V8::ContextDisposedNotification();
   for (gc_count = 1; gc_count < 10; gc_count++) {
     other_context->Enter();
@@ -15984,15 +16178,14 @@ TEST(Regress528) {
   // Eval in a function creates reference from the compilation cache to the
   // global object.
   const char* source_eval = "function f(){eval('1')}; f()";
-  context = Context::New();
   {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::HandleScope scope(isolate);
+    v8::Local<Context> context = Context::New(isolate);
 
     context->Enter();
     CompileRun(source_eval);
     context->Exit();
   }
-  context.Dispose(context->GetIsolate());
   v8::V8::ContextDisposedNotification();
   for (gc_count = 1; gc_count < 10; gc_count++) {
     other_context->Enter();
@@ -16007,9 +16200,9 @@ TEST(Regress528) {
   // Looking up the line number for an exception creates reference from the
   // compilation cache to the global object.
   const char* source_exception = "function f(){throw 1;} f()";
-  context = Context::New();
   {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::HandleScope scope(isolate);
+    v8::Local<Context> context = Context::New(isolate);
 
     context->Enter();
     v8::TryCatch try_catch;
@@ -16020,7 +16213,6 @@ TEST(Regress528) {
     CHECK_EQ(1, message->GetLineNumber());
     context->Exit();
   }
-  context.Dispose(context->GetIsolate());
   v8::V8::ContextDisposedNotification();
   for (gc_count = 1; gc_count < 10; gc_count++) {
     other_context->Enter();
@@ -16032,7 +16224,6 @@ TEST(Regress528) {
   CHECK_GE(2, gc_count);
   CHECK_EQ(1, GetGlobalObjectsCount());
 
-  other_context.Dispose(other_context->GetIsolate());
   v8::V8::ContextDisposedNotification();
 }
 
@@ -16242,7 +16433,7 @@ THREADED_TEST(InterceptorOnConstructorPrototype) {
 }
 
 
-TEST(Bug618) {
+TEST(Regress618) {
   const char* source = "function C1() {"
                        "  this.x = 23;"
                        "};"
@@ -16749,11 +16940,15 @@ TEST(RunTwoIsolatesOnSingleThread) {
   // Run isolate 1.
   v8::Isolate* isolate1 = v8::Isolate::New();
   isolate1->Enter();
-  v8::Persistent<v8::Context> context1 = v8::Context::New();
+  v8::Persistent<v8::Context> context1;
+  {
+    v8::HandleScope scope(isolate1);
+    context1.Reset(isolate1, Context::New(isolate1));
+  }
 
   {
-    v8::Context::Scope cscope(context1);
     v8::HandleScope scope(isolate1);
+    v8::Context::Scope cscope(isolate1, context1);
     // Run something in new isolate.
     CompileRun("var foo = 'isolate 1';");
     ExpectString("function f() { return foo; }; f()", "isolate 1");
@@ -16765,9 +16960,9 @@ TEST(RunTwoIsolatesOnSingleThread) {
 
   {
     v8::Isolate::Scope iscope(isolate2);
-    context2 = v8::Context::New();
-    v8::Context::Scope cscope(context2);
     v8::HandleScope scope(isolate2);
+    context2.Reset(isolate2, Context::New(isolate2));
+    v8::Context::Scope cscope(isolate2, context2);
 
     // Run something in new isolate.
     CompileRun("var foo = 'isolate 2';");
@@ -16775,8 +16970,8 @@ TEST(RunTwoIsolatesOnSingleThread) {
   }
 
   {
-    v8::Context::Scope cscope(context1);
     v8::HandleScope scope(isolate1);
+    v8::Context::Scope cscope(isolate1, context1);
     // Now again in isolate 1
     ExpectString("function f() { return foo; }; f()", "isolate 1");
   }
@@ -16784,11 +16979,17 @@ TEST(RunTwoIsolatesOnSingleThread) {
   isolate1->Exit();
 
   // Run some stuff in default isolate.
-  v8::Persistent<v8::Context> context_default = v8::Context::New();
+  v8::Persistent<v8::Context> context_default;
+  {
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::Isolate::Scope iscope(isolate);
+    v8::HandleScope scope(isolate);
+    context_default.Reset(isolate, Context::New(isolate));
+  }
 
   {
-    v8::Context::Scope cscope(context_default);
     v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Context::Scope cscope(v8::Isolate::GetCurrent(), context_default);
     // Variables in other isolates should be not available, verify there
     // is an exception.
     ExpectTrue("function f() {"
@@ -16807,14 +17008,14 @@ TEST(RunTwoIsolatesOnSingleThread) {
 
   {
     v8::Isolate::Scope iscope(isolate2);
-    v8::Context::Scope cscope(context2);
     v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Context::Scope cscope(isolate2, context2);
     ExpectString("function f() { return foo; }; f()", "isolate 2");
   }
 
   {
-    v8::Context::Scope cscope(context1);
     v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Context::Scope cscope(v8::Isolate::GetCurrent(), context1);
     ExpectString("function f() { return foo; }; f()", "isolate 1");
   }
 
@@ -16839,8 +17040,8 @@ TEST(RunTwoIsolatesOnSingleThread) {
 
   // Check that default isolate still runs.
   {
-    v8::Context::Scope cscope(context_default);
     v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Context::Scope cscope(v8::Isolate::GetCurrent(), context_default);
     ExpectTrue("function f() { return isDefaultIsolate; }; f()");
   }
 }
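
RunTwoIsolatesOnSingleThread above shows the pattern for keeping a context alive across HandleScopes now that Context::New returns a scope-bound handle: the Local is copied into a Persistent with Reset(isolate, local), and the two-argument Context::Scope(isolate, persistent) enters it later. A minimal sketch of that lifecycle, illustrative and not part of the patch:

#include "v8.h"

v8::Persistent<v8::Context> stashed_context;  // illustrative storage

void CreateAndStash(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  // Context::New returns a scope-bound Local; Reset copies it into the
  // Persistent so it outlives the HandleScope.
  stashed_context.Reset(isolate, v8::Context::New(isolate));
}

void UseStashed(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  // The (isolate, Persistent) overload of Context::Scope enters it directly.
  v8::Context::Scope context_scope(isolate, stashed_context);
  // ... CompileRun("..."), ExpectString(...), etc. ...
}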
@@ -16910,11 +17111,11 @@ TEST(MultipleIsolatesOnIndividualThreads) {
 
 TEST(IsolateDifferentContexts) {
   v8::Isolate* isolate = v8::Isolate::New();
-  Persistent<v8::Context> context;
+  Local<v8::Context> context;
   {
     v8::Isolate::Scope isolate_scope(isolate);
     v8::HandleScope handle_scope(isolate);
-    context = v8::Context::New();
+    context = v8::Context::New(isolate);
     v8::Context::Scope context_scope(context);
     Local<Value> v = CompileRun("2");
     CHECK(v->IsNumber());
@@ -16923,13 +17124,12 @@ TEST(IsolateDifferentContexts) {
   {
     v8::Isolate::Scope isolate_scope(isolate);
     v8::HandleScope handle_scope(isolate);
-    context = v8::Context::New();
+    context = v8::Context::New(isolate);
     v8::Context::Scope context_scope(context);
     Local<Value> v = CompileRun("22");
     CHECK(v->IsNumber());
     CHECK_EQ(22, static_cast<int>(v->NumberValue()));
   }
-  isolate->Dispose();
 }
 
 class InitDefaultIsolateThread : public v8::internal::Thread {
@@ -17199,7 +17399,7 @@ class Visitor42 : public v8::PersistentHandleVisitor {
       v8::Persistent<v8::Object> visited =
           v8::Persistent<v8::Object>::Cast(value);
       CHECK_EQ(42, visited.WrapperClassId(v8::Isolate::GetCurrent()));
-      CHECK_EQ(object_, visited);
+      CHECK_EQ(Handle<Value>(*object_), Handle<Value>(*visited));
       ++counter_;
     }
   }
@@ -17414,11 +17614,11 @@ static void CheckContextId(v8::Handle<Object> object, int expected) {
 
 THREADED_TEST(CreationContext) {
   HandleScope handle_scope(v8::Isolate::GetCurrent());
-  Persistent<Context> context1 = Context::New();
+  Handle<Context> context1 = Context::New(v8::Isolate::GetCurrent());
   InstallContextId(context1, 1);
-  Persistent<Context> context2 = Context::New();
+  Handle<Context> context2 = Context::New(v8::Isolate::GetCurrent());
   InstallContextId(context2, 2);
-  Persistent<Context> context3 = Context::New();
+  Handle<Context> context3 = Context::New(v8::Isolate::GetCurrent());
   InstallContextId(context3, 3);
 
   Local<v8::FunctionTemplate> tmpl = v8::FunctionTemplate::New();
@@ -17492,16 +17692,12 @@ THREADED_TEST(CreationContext) {
     CHECK(instance2->CreationContext() == context2);
     CheckContextId(instance2, 2);
   }
-
-  context1.Dispose(context1->GetIsolate());
-  context2.Dispose(context2->GetIsolate());
-  context3.Dispose(context3->GetIsolate());
 }
 
 
 THREADED_TEST(CreationContextOfJsFunction) {
   HandleScope handle_scope(v8::Isolate::GetCurrent());
-  Persistent<Context> context = Context::New();
+  Handle<Context> context = Context::New(v8::Isolate::GetCurrent());
   InstallContextId(context, 1);
 
   Local<Object> function;
@@ -17512,8 +17708,6 @@ THREADED_TEST(CreationContextOfJsFunction) {
 
   CHECK(function->CreationContext() == context);
   CheckContextId(function, 1);
-
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -17824,7 +18018,8 @@ static bool BlockProtoNamedSecurityTestCallback(Local<v8::Object> global,
 
 
 THREADED_TEST(Regress93759) {
-  HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  HandleScope scope(isolate);
 
   // Template for object with security check.
   Local<ObjectTemplate> no_proto_template = v8::ObjectTemplate::New();
@@ -17845,7 +18040,7 @@ THREADED_TEST(Regress93759) {
   protected_hidden_proto_template->SetHiddenPrototype(true);
 
   // Context for "foreign" objects used in test.
-  Persistent<Context> context = v8::Context::New();
+  Local<Context> context = v8::Context::New(isolate);
   context->Enter();
 
   // Plain object, no security check.
@@ -17909,8 +18104,6 @@ THREADED_TEST(Regress93759) {
 
   Local<Value> result6 = CompileRun("Object.getPrototypeOf(phidden)");
   CHECK(result6->Equals(Undefined()));
-
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -17949,14 +18142,15 @@ static void TestReceiver(Local<Value> expected_result,
 
 
 THREADED_TEST(ForeignFunctionReceiver) {
-  HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  HandleScope scope(isolate);
 
   // Create two contexts with different "id" properties ('i' and 'o').
  // Call a function both from its own context and from the foreign
   // context, and see what "this" is bound to (returning both "this"
   // and "this.id" for comparison).
 
-  Persistent<Context> foreign_context = v8::Context::New();
+  Local<Context> foreign_context = v8::Context::New(isolate);
   foreign_context->Enter();
   Local<Value> foreign_function =
     CompileRun("function func() { return { 0: this.id, "
@@ -18037,8 +18231,6 @@ THREADED_TEST(ForeignFunctionReceiver) {
   TestReceiver(o, context->Global(), "func()");
   // Calling with no base.
   TestReceiver(o, context->Global(), "(1,func)()");
-
-  foreign_context.Dispose(foreign_context->GetIsolate());
 }
 
 
index 58ce5ec..8cce084 100644
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "disassembler.h"
index 8d39ee7..7c8e70c 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "disassembler.h"
index 669475a..a989fbb 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "macro-assembler.h"
index f200435..cff0f82 100644 (file)
 #include <stdlib.h>
 #include <wchar.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "compiler.h"
index f5f5d65..d73be18 100644 (file)
 //
 // Tests of profiles generator and utilities.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 #include "cpu-profiler-inl.h"
 #include "cctest.h"
index d66d97f..1afe890 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "api.h"
@@ -132,12 +136,16 @@ class DebugLocalContext {
       v8::Handle<v8::ObjectTemplate> global_template =
           v8::Handle<v8::ObjectTemplate>(),
       v8::Handle<v8::Value> global_object = v8::Handle<v8::Value>())
-      : context_(v8::Context::New(extensions, global_template, global_object)) {
+      : scope_(v8::Isolate::GetCurrent()),
+        context_(
+          v8::Context::New(v8::Isolate::GetCurrent(),
+                           extensions,
+                           global_template,
+                           global_object)) {
     context_->Enter();
   }
   inline ~DebugLocalContext() {
     context_->Exit();
-    context_.Dispose(context_->GetIsolate());
   }
   inline v8::Context* operator->() { return *context_; }
   inline v8::Context* operator*() { return *context_; }
@@ -162,7 +170,8 @@ class DebugLocalContext {
   }
 
  private:
-  v8::Persistent<v8::Context> context_;
+  v8::HandleScope scope_;
+  v8::Local<v8::Context> context_;
 };
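
DebugLocalContext now pairs a HandleScope member with a Local<Context> instead of owning a Persistent that its destructor had to Dispose; declaring the scope first ensures it outlives the handle. A stripped-down sketch of the same RAII shape (the class name here is illustrative, not part of the patch):

    class ScopedTestContext {
     public:
      ScopedTestContext()
          : scope_(v8::Isolate::GetCurrent()),
            context_(v8::Context::New(v8::Isolate::GetCurrent())) {
        context_->Enter();
      }
      ~ScopedTestContext() {
        context_->Exit();  // No Dispose(): the handle dies with scope_.
      }
     private:
      v8::HandleScope scope_;           // Declared first, destroyed last.
      v8::Local<v8::Context> context_;
    };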
 
 
@@ -675,7 +684,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
       } else {
         CHECK(result->IsString());
         v8::Handle<v8::String> function_name(result->ToString());
-        function_name->WriteAscii(last_function_hit);
+        function_name->WriteUtf8(last_function_hit);
       }
     }
 
@@ -710,7 +719,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
       } else {
         CHECK(result->IsString());
         v8::Handle<v8::String> script_name(result->ToString());
-        script_name->WriteAscii(last_script_name_hit);
+        script_name->WriteUtf8(last_script_name_hit);
       }
     }
 
@@ -726,7 +735,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
         result = result->ToString();
         CHECK(result->IsString());
         v8::Handle<v8::String> script_data(result->ToString());
-        script_data->WriteAscii(last_script_data_hit);
+        script_data->WriteUtf8(last_script_data_hit);
       }
     }
 
@@ -746,7 +755,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
       result = result->ToString();
       CHECK(result->IsString());
       v8::Handle<v8::String> script_data(result->ToString());
-      script_data->WriteAscii(last_script_data_hit);
+      script_data->WriteUtf8(last_script_data_hit);
     }
   }
 }
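
String::WriteAscii() is no longer available, so these debug event handlers now copy the debugger response out with WriteUtf8() into the same fixed buffers. When a buffer is sized dynamically, Utf8Length() gives the required byte count, as the log test later in this patch does; a sketch, where result stands for the debugger response value used above:

    v8::Handle<v8::String> name(result->ToString());
    // UTF-8 may need more than one byte per character, so size by Utf8Length().
    i::ScopedVector<char> buffer(name->Utf8Length() + 1);
    name->WriteUtf8(buffer.start());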
@@ -1090,6 +1099,15 @@ TEST(DebugStub) {
           Builtins::kKeyedLoadIC_DebugBreak));
 #endif
 
+  CheckDebugBreakFunction(
+      &env,
+      "function f6(a){return a==null;}",
+      "f6",
+      0,
+      v8::internal::RelocInfo::CODE_TARGET,
+      Isolate::Current()->builtins()->builtin(
+          Builtins::kCompareNilIC_DebugBreak));
+
   // Check the debug break code stubs for call ICs with different number of
   // parameters.
   Handle<Code> debug_break_0 = v8::internal::ComputeCallDebugBreak(0);
@@ -4221,7 +4239,8 @@ static const char* kSimpleExtensionSource =
 // http://crbug.com/28933
 // Test that debug break is disabled when bootstrapper is active.
 TEST(NoBreakWhenBootstrapping) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
 
   // Register a debug event listener which sets the break flag and counts.
   v8::Debug::SetDebugEventListener(DebugEventCounter);
@@ -4236,8 +4255,8 @@ TEST(NoBreakWhenBootstrapping) {
                                             kSimpleExtensionSource));
     const char* extension_names[] = { "simpletest" };
     v8::ExtensionConfiguration extensions(1, extension_names);
-    v8::Persistent<v8::Context> context = v8::Context::New(&extensions);
-    context.Dispose(context->GetIsolate());
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::New(isolate, &extensions);
   }
   // Check that no DebugBreak events occurred during the context creation.
   CHECK_EQ(0, break_point_hit_count);
@@ -5144,7 +5163,9 @@ void V8Thread::Run() {
   v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
   global_template->Set(v8::String::New("ThreadedAtBarrier1"),
                        v8::FunctionTemplate::New(ThreadedAtBarrier1));
-  v8::Handle<v8::Context> context = v8::Context::New(NULL, global_template);
+  v8::Handle<v8::Context> context = v8::Context::New(v8::Isolate::GetCurrent(),
+                                                     NULL,
+                                                     global_template);
   v8::Context::Scope context_scope(context);
 
   CompileRun(source);
@@ -5520,7 +5541,9 @@ TEST(CallFunctionInDebugger) {
                        v8::FunctionTemplate::New(CheckDataParameter));
   global_template->Set(v8::String::New("CheckClosure"),
                        v8::FunctionTemplate::New(CheckClosure));
-  v8::Handle<v8::Context> context = v8::Context::New(NULL, global_template);
+  v8::Handle<v8::Context> context = v8::Context::New(v8::Isolate::GetCurrent(),
+                                                     NULL,
+                                                     global_template);
   v8::Context::Scope context_scope(context);
 
   // Compile a function for checking the number of JavaScript frames.
@@ -6218,7 +6241,7 @@ TEST(ScriptNameAndData) {
 }
 
 
-static v8::Persistent<v8::Context> expected_context;
+static v8::Handle<v8::Context> expected_context;
 static v8::Handle<v8::Value> expected_context_data;
 
 
@@ -6244,18 +6267,19 @@ static void ContextCheckMessageHandler(const v8::Debug::Message& message) {
 // Checks that this data is set correctly and that when the debug message
 // handler is called, the expected context is the active one.
 TEST(ContextData) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
 
   v8::Debug::SetMessageHandler2(ContextCheckMessageHandler);
 
   // Create two contexts.
-  v8::Persistent<v8::Context> context_1;
-  v8::Persistent<v8::Context> context_2;
+  v8::Handle<v8::Context> context_1;
+  v8::Handle<v8::Context> context_2;
   v8::Handle<v8::ObjectTemplate> global_template =
       v8::Handle<v8::ObjectTemplate>();
   v8::Handle<v8::Value> global_object = v8::Handle<v8::Value>();
-  context_1 = v8::Context::New(NULL, global_template, global_object);
-  context_2 = v8::Context::New(NULL, global_template, global_object);
+  context_1 = v8::Context::New(isolate, NULL, global_template, global_object);
+  context_2 = v8::Context::New(isolate, NULL, global_template, global_object);
 
   // Default data value is undefined.
   CHECK(context_1->GetEmbedderData(0)->IsUndefined());
@@ -6367,7 +6391,7 @@ static void DebugEventDebugBreak(
       } else {
         CHECK(result->IsString());
         v8::Handle<v8::String> function_name(result->ToString());
-        function_name->WriteAscii(last_function_hit);
+        function_name->WriteUtf8(last_function_hit);
       }
     }
 
@@ -6416,10 +6440,11 @@ TEST(RegExpDebugBreak) {
 // Common part of EvalContextData and NestedBreakEventContextData tests.
 static void ExecuteScriptForContextCheck() {
   // Create a context.
-  v8::Persistent<v8::Context> context_1;
+  v8::Handle<v8::Context> context_1;
   v8::Handle<v8::ObjectTemplate> global_template =
       v8::Handle<v8::ObjectTemplate>();
-  context_1 = v8::Context::New(NULL, global_template);
+  context_1 =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global_template);
 
   // Default data value is undefined.
   CHECK(context_1->GetEmbedderData(0)->IsUndefined());
@@ -6435,7 +6460,7 @@ static void ExecuteScriptForContextCheck() {
   // Enter and run function in the context.
   {
     v8::Context::Scope context_scope(context_1);
-    expected_context = context_1;
+    expected_context = v8::Local<v8::Context>(*context_1);
     expected_context_data = data_1;
     v8::Local<v8::Function> f = CompileFunction(source, "f");
     f->Call(context_1->Global(), 0, NULL);
@@ -6596,22 +6621,26 @@ TEST(ScriptCollectedEventContext) {
   script_collected_message_count = 0;
   v8::HandleScope scope(isolate);
 
-  { // Scope for the DebugLocalContext.
-    DebugLocalContext env;
+  v8::Persistent<v8::Context> context;
+  {
+    v8::HandleScope scope(isolate);
+    context.Reset(isolate, v8::Context::New(isolate));
+  }
+  context->Enter();
 
-    // Request the loaded scripts to initialize the debugger script cache.
-    debug->GetLoadedScripts();
+  // Request the loaded scripts to initialize the debugger script cache.
+  debug->GetLoadedScripts();
 
-    // Do garbage collection to ensure that only the script in this test will be
-    // collected afterwards.
-    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  // Do garbage collection to ensure that only the script in this test will be
+  // collected afterwards.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
 
-    v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler);
-    {
-      v8::Script::Compile(v8::String::New("eval('a=1')"))->Run();
-      v8::Script::Compile(v8::String::New("eval('a=2')"))->Run();
-    }
-  }
+  v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler);
+  v8::Script::Compile(v8::String::New("eval('a=1')"))->Run();
+  v8::Script::Compile(v8::String::New("eval('a=2')"))->Run();
+
+  context->Exit();
+  context.Dispose(isolate);
 
   // Do garbage collection to collect the script above which is no longer
   // referenced.
@@ -7070,14 +7099,14 @@ static void DebugEventContextChecker(const v8::Debug::EventDetails& details) {
 
 // Check that event details contain the context where the debug event occurred.
 TEST(DebugEventContext) {
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
   expected_callback_data = v8::Int32::New(2010);
   v8::Debug::SetDebugEventListener2(DebugEventContextChecker,
                                     expected_callback_data);
-  expected_context = v8::Context::New();
+  expected_context = v8::Context::New(isolate);
   v8::Context::Scope context_scope(expected_context);
   v8::Script::Compile(v8::String::New("(function(){debugger;})();"))->Run();
-  expected_context.Dispose(expected_context->GetIsolate());
   expected_context.Clear();
   v8::Debug::SetDebugEventListener(NULL);
   expected_context_data = v8::Handle<v8::Value>();
@@ -7167,7 +7196,7 @@ static void DebugEventBreakDeoptimize(v8::DebugEvent event,
         char fn[80];
         CHECK(result->IsString());
         v8::Handle<v8::String> function_name(result->ToString());
-        function_name->WriteAscii(fn);
+        function_name->WriteUtf8(fn);
         if (strcmp(fn, "bar") == 0) {
           i::Deoptimizer::DeoptimizeAll(v8::internal::Isolate::Current());
           debug_event_break_deoptimize_done = true;
index 569848c..b09a29d 100644 (file)
@@ -27,6 +27,9 @@
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "cctest.h"
index ed8da5c..6be5303 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "heap.h"
@@ -113,7 +118,8 @@ DeclarationContext::DeclarationContext()
 
 void DeclarationContext::InitializeIfNeeded() {
   if (is_initialized_) return;
-  HandleScope scope(Isolate::GetCurrent());
+  Isolate* isolate = Isolate::GetCurrent();
+  HandleScope scope(isolate);
   Local<FunctionTemplate> function = FunctionTemplate::New();
   Local<Value> data = External::New(this);
   GetHolder(function)->SetNamedPropertyHandler(&HandleGet,
@@ -121,10 +127,14 @@ void DeclarationContext::InitializeIfNeeded() {
                                                &HandleQuery,
                                                0, 0,
                                                data);
-  context_ = Context::New(0, function->InstanceTemplate(), Local<Value>());
+  context_.Reset(isolate,
+                 Context::New(isolate,
+                              0,
+                              function->InstanceTemplate(),
+                              Local<Value>()));
   context_->Enter();
   is_initialized_ = true;
-  PostInitializeContext(context_);
+  PostInitializeContext(Local<Context>::New(isolate, context_));
 }
 
 
@@ -694,14 +704,14 @@ TEST(ExistsInHiddenPrototype) {
 
 class SimpleContext {
  public:
-  SimpleContext() {
-    context_ = Context::New();
+  SimpleContext()
+      : handle_scope_(Isolate::GetCurrent()),
+        context_(Context::New(Isolate::GetCurrent())) {
     context_->Enter();
   }
 
-  virtual ~SimpleContext() {
+  ~SimpleContext() {
     context_->Exit();
-    context_.Dispose(context_->GetIsolate());
   }
 
   void Check(const char* source,
@@ -732,7 +742,8 @@ class SimpleContext {
   }
 
  private:
-  Persistent<Context> context_;
+  HandleScope handle_scope_;
+  Local<Context> context_;
 };
 
 
index 9f12232..8cba75b 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "debug.h"
index ca81a5a..f81b173 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "debug.h"
index 5b8293f..5ebc679 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "api.h"
index b124b27..e8ffb8b 100644 (file)
@@ -36,7 +36,7 @@ using namespace v8;
 TEST(StrictUndeclaredGlobalVariable) {
   HandleScope scope(Isolate::GetCurrent());
   v8::Local<v8::String> var_name = v8_str("x");
-  LocalContext context;
+  v8::LocalContext context;
   v8::TryCatch try_catch;
   v8::Local<v8::Script> script = v8_compile("\"use strict\"; x = 42;");
   v8::Handle<v8::Object> proto = v8::Object::New();
index 605b59b..1547613 100644 (file)
@@ -27,6 +27,9 @@
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "factory.h"
@@ -44,8 +47,6 @@ using namespace v8::internal;
 
 typedef uint32_t (*HASH_FUNCTION)();
 
-static v8::Persistent<v8::Context> env;
-
 #define __ masm->
 
 
@@ -232,7 +233,10 @@ static uint32_t PseudoRandom(uint32_t i, uint32_t j) {
 
 
 TEST(StringHash) {
-  if (env.IsEmpty()) env = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(v8::Context::New(isolate));
+
   for (uint8_t a = 0; a < String::kMaxOneByteCharCode; a++) {
     // Numbers are hashed differently.
     if (a >= '0' && a <= '9') continue;
@@ -250,7 +254,9 @@ TEST(StringHash) {
 
 
 TEST(NumberHash) {
-  if (env.IsEmpty()) env = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(v8::Context::New(isolate));
 
   // Some specific numbers
   for (uint32_t key = 0; key < 42; key += 7) {
index a4680e4..b2c9b72 100644 (file)
@@ -29,6 +29,9 @@
 
 #include <ctype.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "cctest.h"
@@ -1196,7 +1199,8 @@ class GraphWithImplicitRefs {
       objects_[i] =
           v8::Persistent<v8::Object>::New(isolate_, v8::Object::New());
     }
-    (*env)->Global()->Set(v8_str("root_object"), objects_[0]);
+    (*env)->Global()->Set(v8_str("root_object"),
+                          v8::Local<v8::Value>::New(isolate_, objects_[0]));
   }
   ~GraphWithImplicitRefs() {
     instance_ = NULL;
@@ -1410,7 +1414,7 @@ TEST(GetHeapValue) {
       GetProperty(obj, v8::HeapGraphEdge::kProperty, "n_prop");
   v8::Local<v8::Number> js_n_prop =
       js_obj->Get(v8_str("n_prop")).As<v8::Number>();
-  CHECK(js_n_prop == n_prop->GetHeapValue());
+  CHECK(js_n_prop->NumberValue() == n_prop->GetHeapValue()->NumberValue());
 }
 
 
@@ -1582,9 +1586,9 @@ bool HasWeakGlobalHandle() {
 
 
 static void PersistentHandleCallback(v8::Isolate* isolate,
-                                     v8::Persistent<v8::Value> handle,
+                                     v8::Persistent<v8::Value>* handle,
                                      void*) {
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -1596,7 +1600,9 @@ TEST(WeakGlobalHandle) {
 
   v8::Persistent<v8::Object> handle =
       v8::Persistent<v8::Object>::New(env->GetIsolate(), v8::Object::New());
-  handle.MakeWeak(env->GetIsolate(), NULL, PersistentHandleCallback);
+  handle.MakeWeak<v8::Value, void>(env->GetIsolate(),
+                                   NULL,
+                                   PersistentHandleCallback);
 
   CHECK(HasWeakGlobalHandle());
 }
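
The weak-reference callback now receives a pointer to the Persistent handle rather than a copy, and MakeWeak is invoked through an explicitly typed overload. A minimal sketch of registering and releasing a weak handle under the new signature (the function names are illustrative):

    static void OnWeak(v8::Isolate* isolate,
                       v8::Persistent<v8::Value>* handle,
                       void* parameter) {
      handle->Dispose(isolate);  // The callback releases the handle itself.
    }

    static void MakeObjectWeak(v8::Isolate* isolate) {
      v8::Persistent<v8::Object> handle =
          v8::Persistent<v8::Object>::New(isolate, v8::Object::New());
      // Explicit <value type, parameter type>, matching the test above.
      handle.MakeWeak<v8::Value, void>(isolate, NULL, OnWeak);
    }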
index c0bd08a..0711454 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "compilation-cache.h"
@@ -392,10 +396,10 @@ TEST(GlobalHandles) {
 static bool WeakPointerCleared = false;
 
 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
-                                         v8::Persistent<v8::Value> handle,
+                                         v8::Persistent<v8::Value>* handle,
                                          void* id) {
   if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -423,8 +427,8 @@ TEST(WeakGlobalHandlesScavenge) {
 
   global_handles->MakeWeak(h2.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &TestWeakGlobalHandleCallback);
+                           &TestWeakGlobalHandleCallback,
+                           NULL);
 
   // Scavenge treats weak pointers as normal roots.
   heap->PerformScavenge();
@@ -470,8 +474,8 @@ TEST(WeakGlobalHandlesMark) {
 
   global_handles->MakeWeak(h2.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &TestWeakGlobalHandleCallback);
+                           &TestWeakGlobalHandleCallback,
+                           NULL);
   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
 
@@ -507,8 +511,8 @@ TEST(DeleteWeakGlobalHandle) {
 
   global_handles->MakeWeak(h.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &TestWeakGlobalHandleCallback);
+                           &TestWeakGlobalHandleCallback,
+                           NULL);
 
   // Scavenge does not recognize the weak reference.
   heap->PerformScavenge();
@@ -1293,13 +1297,13 @@ TEST(TestInternalWeakLists) {
   Isolate* isolate = Isolate::Current();
   Heap* heap = isolate->heap();
   HandleScope scope(isolate);
-  v8::Persistent<v8::Context> ctx[kNumTestContexts];
+  v8::Handle<v8::Context> ctx[kNumTestContexts];
 
   CHECK_EQ(0, CountNativeContexts());
 
   // Create a number of global contexts which get linked together.
   for (int i = 0; i < kNumTestContexts; i++) {
-    ctx[i] = v8::Context::New();
+    ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
 
     bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
 
@@ -1366,7 +1370,9 @@ TEST(TestInternalWeakLists) {
 
   // Dispose the native contexts one by one.
   for (int i = 0; i < kNumTestContexts; i++) {
-    ctx[i].Dispose(ctx[i]->GetIsolate());
+    // TODO(dcarney): is there a better way to do this?
+    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
+    *unsafe = HEAP->undefined_value();
     ctx[i].Clear();
 
     // Scavenge treats these references as strong.
@@ -1430,14 +1436,14 @@ TEST(TestInternalWeakListsTraverseWithGC) {
   static const int kNumTestContexts = 10;
 
   HandleScope scope(isolate);
-  v8::Persistent<v8::Context> ctx[kNumTestContexts];
+  v8::Handle<v8::Context> ctx[kNumTestContexts];
 
   CHECK_EQ(0, CountNativeContexts());
 
   // Create a number of contexts and check the length of the weak list both
   // with and without GCs while iterating the list.
   for (int i = 0; i < kNumTestContexts; i++) {
-    ctx[i] = v8::Context::New();
+    ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
     CHECK_EQ(i + 1, CountNativeContexts());
     CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
   }
@@ -1655,9 +1661,15 @@ static int NumberOfGlobalObjects() {
 // optimized code.
 TEST(LeakNativeContextViaMap) {
   i::FLAG_allow_natives_syntax = true;
-  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
-  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer_scope(isolate);
+  v8::Persistent<v8::Context> ctx1;
+  v8::Persistent<v8::Context> ctx2;
+  {
+    v8::HandleScope scope(isolate);
+    ctx1.Reset(isolate, v8::Context::New(isolate));
+    ctx2.Reset(isolate, v8::Context::New(isolate));
+  }
   ctx1->Enter();
 
   HEAP->CollectAllAvailableGarbage();
@@ -1693,9 +1705,15 @@ TEST(LeakNativeContextViaMap) {
 // optimized code.
 TEST(LeakNativeContextViaFunction) {
   i::FLAG_allow_natives_syntax = true;
-  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
-  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer_scope(isolate);
+  v8::Persistent<v8::Context> ctx1;
+  v8::Persistent<v8::Context> ctx2;
+  {
+    v8::HandleScope scope(isolate);
+    ctx1.Reset(isolate, v8::Context::New(isolate));
+    ctx2.Reset(isolate, v8::Context::New(isolate));
+  }
   ctx1->Enter();
 
   HEAP->CollectAllAvailableGarbage();
@@ -1729,9 +1747,15 @@ TEST(LeakNativeContextViaFunction) {
 
 TEST(LeakNativeContextViaMapKeyed) {
   i::FLAG_allow_natives_syntax = true;
-  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
-  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer_scope(isolate);
+  v8::Persistent<v8::Context> ctx1;
+  v8::Persistent<v8::Context> ctx2;
+  {
+    v8::HandleScope scope(isolate);
+    ctx1.Reset(isolate, v8::Context::New(isolate));
+    ctx2.Reset(isolate, v8::Context::New(isolate));
+  }
   ctx1->Enter();
 
   HEAP->CollectAllAvailableGarbage();
@@ -1765,9 +1789,15 @@ TEST(LeakNativeContextViaMapKeyed) {
 
 TEST(LeakNativeContextViaMapProto) {
   i::FLAG_allow_natives_syntax = true;
-  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
-  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope outer_scope(isolate);
+  v8::Persistent<v8::Context> ctx1;
+  v8::Persistent<v8::Context> ctx2;
+  {
+    v8::HandleScope scope(isolate);
+    ctx1.Reset(isolate, v8::Context::New(isolate));
+    ctx2.Reset(isolate, v8::Context::New(isolate));
+  }
   ctx1->Enter();
 
   HEAP->CollectAllAvailableGarbage();
@@ -2373,7 +2403,7 @@ TEST(Regress2211) {
     // Check size.
     DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
     ObjectHashTable* hashtable = ObjectHashTable::cast(
-        internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
+        internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
     // HashTable header (5) and 4 initial entries (8).
     CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
   }
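
Where a context must outlive an inner HandleScope, the LeakNativeContext tests above now keep an empty Persistent<Context>, fill it with Reset(isolate, local) inside a nested scope, and enter and dispose it explicitly afterwards; the arrow access on the Persistent still relies on the temporary V8_ALLOW_ACCESS_TO_PERSISTENT_* defines added at the top of each file. A condensed sketch of that pattern:

    v8::Isolate* isolate = v8::Isolate::GetCurrent();
    v8::HandleScope outer_scope(isolate);
    v8::Persistent<v8::Context> ctx;
    {
      v8::HandleScope scope(isolate);
      ctx.Reset(isolate, v8::Context::New(isolate));
    }
    ctx->Enter();
    // ... test body ...
    ctx->Exit();
    ctx.Dispose(isolate);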
index a619c85..ca0f073 100644 (file)
@@ -27,6 +27,9 @@
 
 #include <limits.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "api.h"
@@ -70,7 +73,7 @@ class KangarooThread : public v8::internal::Thread {
       v8::Isolate::Scope isolate_scope(isolate_);
       CHECK_EQ(isolate_, v8::internal::Isolate::Current());
       v8::HandleScope scope(isolate_);
-      v8::Context::Scope context_scope(context_);
+      v8::Context::Scope context_scope(isolate_, context_);
       Local<Value> v = CompileRun("getValue()");
       CHECK(v->IsNumber());
       CHECK_EQ(30, static_cast<int>(v->NumberValue()));
@@ -78,8 +81,8 @@ class KangarooThread : public v8::internal::Thread {
     {
       v8::Locker locker(isolate_);
       v8::Isolate::Scope isolate_scope(isolate_);
-      v8::Context::Scope context_scope(context_);
       v8::HandleScope scope(isolate_);
+      v8::Context::Scope context_scope(isolate_, context_);
       Local<Value> v = CompileRun("getValue()");
       CHECK(v->IsNumber());
       CHECK_EQ(30, static_cast<int>(v->NumberValue()));
@@ -228,7 +231,7 @@ class IsolateNonlockingThread : public JoinableThread {
     {
       v8::Isolate::Scope isolate_scope(isolate);
       v8::HandleScope handle_scope(isolate);
-      v8::Handle<v8::Context> context = v8::Context::New();
+      v8::Handle<v8::Context> context = v8::Context::New(isolate);
       v8::Context::Scope context_scope(context);
       CHECK_EQ(isolate, v8::internal::Isolate::Current());
       CalcFibAndCheck();
@@ -349,7 +352,7 @@ class LockIsolateAndCalculateFibSharedContextThread : public JoinableThread {
     v8::Locker lock(isolate_);
     v8::Isolate::Scope isolate_scope(isolate_);
     HandleScope handle_scope(isolate_);
-    v8::Context::Scope context_scope(context_);
+    v8::Context::Scope context_scope(isolate_, context_);
     CalcFibAndCheck();
   }
  private:
@@ -470,19 +473,20 @@ class LockAndUnlockDifferentIsolatesThread : public JoinableThread {
   }
 
   virtual void Run() {
-    Persistent<v8::Context> context1;
-    Persistent<v8::Context> context2;
+    i::SmartPointer<LockIsolateAndCalculateFibSharedContextThread> thread;
     v8::Locker lock1(isolate1_);
     CHECK(v8::Locker::IsLocked(isolate1_));
     CHECK(!v8::Locker::IsLocked(isolate2_));
     {
       v8::Isolate::Scope isolate_scope(isolate1_);
       v8::HandleScope handle_scope(isolate1_);
-      context1 = v8::Context::New();
+      v8::Handle<v8::Context> context1 = v8::Context::New(isolate1_);
       {
         v8::Context::Scope context_scope(context1);
         CalcFibAndCheck();
       }
+      thread.Reset(new LockIsolateAndCalculateFibSharedContextThread(
+          isolate1_, context1));
     }
     v8::Locker lock2(isolate2_);
     CHECK(v8::Locker::IsLocked(isolate1_));
@@ -490,26 +494,14 @@ class LockAndUnlockDifferentIsolatesThread : public JoinableThread {
     {
       v8::Isolate::Scope isolate_scope(isolate2_);
       v8::HandleScope handle_scope(isolate2_);
-      context2 = v8::Context::New();
+      v8::Handle<v8::Context> context2 = v8::Context::New(isolate2_);
       {
         v8::Context::Scope context_scope(context2);
         CalcFibAndCheck();
       }
-    }
-    {
-      i::SmartPointer<LockIsolateAndCalculateFibSharedContextThread> thread;
-      {
-        CHECK(v8::Locker::IsLocked(isolate1_));
-        v8::Isolate::Scope isolate_scope(isolate1_);
-        v8::HandleScope handle_scope(isolate1_);
-        thread.Reset(new LockIsolateAndCalculateFibSharedContextThread(
-            isolate1_, v8::Local<v8::Context>::New(isolate1_, context1)));
-      }
       v8::Unlocker unlock1(isolate1_);
       CHECK(!v8::Locker::IsLocked(isolate1_));
       CHECK(v8::Locker::IsLocked(isolate2_));
-      v8::Isolate::Scope isolate_scope(isolate2_);
-      v8::HandleScope handle_scope(isolate2_);
       v8::Context::Scope context_scope(context2);
       thread->Start();
       CalcFibAndCheck();
@@ -535,7 +527,7 @@ TEST(LockAndUnlockDifferentIsolates) {
 
 class LockUnlockLockThread : public JoinableThread {
  public:
-  LockUnlockLockThread(v8::Isolate* isolate, v8::Local<v8::Context> context)
+  LockUnlockLockThread(v8::Isolate* isolate, v8::Handle<v8::Context> context)
     : JoinableThread("LockUnlockLockThread"),
       isolate_(isolate),
       context_(isolate, context) {
@@ -548,7 +540,7 @@ class LockUnlockLockThread : public JoinableThread {
     {
       v8::Isolate::Scope isolate_scope(isolate_);
       v8::HandleScope handle_scope(isolate_);
-      v8::Context::Scope context_scope(context_);
+      v8::Context::Scope context_scope(isolate_, context_);
       CalcFibAndCheck();
     }
     {
@@ -561,7 +553,7 @@ class LockUnlockLockThread : public JoinableThread {
         v8::HandleScope handle_scope(isolate_);
         CHECK(v8::Locker::IsLocked(isolate_));
         CHECK(!v8::Locker::IsLocked(CcTest::default_isolate()));
-        v8::Context::Scope context_scope(context_);
+        v8::Context::Scope context_scope(isolate_, context_);
         CalcFibAndCheck();
       }
     }
@@ -580,16 +572,15 @@ TEST(LockUnlockLockMultithreaded) {
   const int kNThreads = 100;
 #endif
   v8::Isolate* isolate = v8::Isolate::New();
-  Persistent<v8::Context> context;
   i::List<JoinableThread*> threads(kNThreads);
   {
     v8::Locker locker_(isolate);
     v8::Isolate::Scope isolate_scope(isolate);
     v8::HandleScope handle_scope(isolate);
-    context = v8::Context::New();
+    v8::Handle<v8::Context> context = v8::Context::New(isolate);
     for (int i = 0; i < kNThreads; i++) {
       threads.Add(new LockUnlockLockThread(
-          isolate, v8::Local<v8::Context>::New(isolate, context)));
+          isolate, context));
     }
   }
   StartJoinAndDeleteThreads(threads);
@@ -598,7 +589,7 @@ TEST(LockUnlockLockMultithreaded) {
 
 class LockUnlockLockDefaultIsolateThread : public JoinableThread {
  public:
-  explicit LockUnlockLockDefaultIsolateThread(v8::Local<v8::Context> context)
+  explicit LockUnlockLockDefaultIsolateThread(v8::Handle<v8::Context> context)
       : JoinableThread("LockUnlockLockDefaultIsolateThread"),
         context_(CcTest::default_isolate(), context) {}
 
@@ -606,7 +597,7 @@ class LockUnlockLockDefaultIsolateThread : public JoinableThread {
     v8::Locker lock1(CcTest::default_isolate());
     {
       v8::HandleScope handle_scope(CcTest::default_isolate());
-      v8::Context::Scope context_scope(context_);
+      v8::Context::Scope context_scope(CcTest::default_isolate(), context_);
       CalcFibAndCheck();
     }
     {
@@ -614,7 +605,7 @@ class LockUnlockLockDefaultIsolateThread : public JoinableThread {
       {
         v8::Locker lock2(CcTest::default_isolate());
         v8::HandleScope handle_scope(CcTest::default_isolate());
-        v8::Context::Scope context_scope(context_);
+        v8::Context::Scope context_scope(CcTest::default_isolate(), context_);
         CalcFibAndCheck();
       }
     }
@@ -631,15 +622,14 @@ TEST(LockUnlockLockDefaultIsolateMultithreaded) {
 #else
   const int kNThreads = 100;
 #endif
-  Persistent<v8::Context> context;
+  Local<v8::Context> context;
   i::List<JoinableThread*> threads(kNThreads);
   {
     v8::Locker locker_(CcTest::default_isolate());
     v8::HandleScope handle_scope(CcTest::default_isolate());
-    context = v8::Context::New();
+    context = v8::Context::New(CcTest::default_isolate());
     for (int i = 0; i < kNThreads; i++) {
-      threads.Add(new LockUnlockLockDefaultIsolateThread(
-          v8::Local<v8::Context>::New(CcTest::default_isolate(), context)));
+      threads.Add(new LockUnlockLockDefaultIsolateThread(context));
     }
   }
   StartJoinAndDeleteThreads(threads);
@@ -653,13 +643,12 @@ TEST(Regress1433) {
       v8::Locker lock(isolate);
       v8::Isolate::Scope isolate_scope(isolate);
       v8::HandleScope handle_scope(isolate);
-      v8::Persistent<Context> context = v8::Context::New();
+      v8::Handle<Context> context = v8::Context::New(isolate);
       v8::Context::Scope context_scope(context);
       v8::Handle<String> source = v8::String::New("1+1");
       v8::Handle<Script> script = v8::Script::Compile(source);
       v8::Handle<Value> result = script->Run();
       v8::String::AsciiValue ascii(result);
-      context.Dispose(isolate);
     }
     isolate->Dispose();
   }
@@ -685,9 +674,9 @@ class IsolateGenesisThread : public JoinableThread {
       v8::Isolate::Scope isolate_scope(isolate);
       CHECK(!i::Isolate::Current()->has_installed_extensions());
       v8::ExtensionConfiguration extensions(count_, extension_names_);
-      v8::Persistent<v8::Context> context = v8::Context::New(&extensions);
+      v8::HandleScope handle_scope(isolate);
+      v8::Context::New(isolate, &extensions);
       CHECK(i::Isolate::Current()->has_installed_extensions());
-      context.Dispose(isolate);
     }
     isolate->Dispose();
   }
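
The locker tests above keep a context usable from another thread by constructing a Persistent from an (isolate, local) pair in the member initializer and entering it later through the two-argument Context::Scope. A condensed sketch of that shape (thread plumbing omitted; the class name is illustrative):

    class ContextUser {
     public:
      ContextUser(v8::Isolate* isolate, v8::Handle<v8::Context> context)
          : isolate_(isolate), context_(isolate, context) {}

      void Use() {
        v8::Locker lock(isolate_);
        v8::Isolate::Scope isolate_scope(isolate_);
        v8::HandleScope handle_scope(isolate_);
        // The (isolate, Persistent) overload opens the stored context.
        v8::Context::Scope context_scope(isolate_, context_);
        CompileRun("1 + 1");
      }

     private:
      v8::Isolate* isolate_;
      v8::Persistent<v8::Context> context_;
    };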
index ca6c7ae..3c34ede 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "api.h"
index 89b11c9..3288fc8 100644 (file)
 #include <cmath>
 #endif  // __linux__
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 #include "log.h"
 #include "cpu-profiler.h"
@@ -62,7 +67,7 @@ class ScopedLoggerInitializer {
         // Need to run this prior to creating the scope.
         trick_to_run_init_flags_(init_flags_(prof_lazy)),
         scope_(v8::Isolate::GetCurrent()),
-        env_(v8::Context::New()),
+        env_(v8::Context::New(v8::Isolate::GetCurrent())),
         logger_(i::Isolate::Current()->logger()) {
     env_->Enter();
   }
@@ -371,7 +376,7 @@ class SimpleExternalString : public v8::String::ExternalStringResource {
 
 TEST(Issue23768) {
   v8::HandleScope scope(v8::Isolate::GetCurrent());
-  v8::Handle<v8::Context> env = v8::Context::New();
+  v8::Handle<v8::Context> env = v8::Context::New(v8::Isolate::GetCurrent());
   env->Enter();
 
   SimpleExternalString source_ext_str("(function ext() {})();");
@@ -405,7 +410,8 @@ TEST(LogCallbacks) {
                                                 v8::FunctionTemplate::New());
   obj->SetClassName(v8_str("Obj"));
   v8::Handle<v8::ObjectTemplate> proto = obj->PrototypeTemplate();
-  v8::Local<v8::Signature> signature = v8::Signature::New(obj);
+  v8::Local<v8::Signature> signature =
+      v8::Signature::New(v8::Handle<v8::FunctionTemplate>(*obj));
   proto->Set(v8_str("method1"),
              v8::FunctionTemplate::New(ObjMethod1,
                                        v8::Handle<v8::Value>(),
@@ -563,9 +569,9 @@ TEST(EquivalenceOfLoggingAndTraversal) {
   // The result is either a "true" literal or a problem description.
   if (!result->IsTrue()) {
     v8::Local<v8::String> s = result->ToString();
-    i::ScopedVector<char> data(s->Length() + 1);
+    i::ScopedVector<char> data(s->Utf8Length() + 1);
     CHECK_NE(NULL, data.start());
-    s->WriteAscii(data.start());
+    s->WriteUtf8(data.start());
     printf("%s\n", data.start());
     // Make sure that our output is written prior to a crash due to CHECK failure.
     fflush(stdout);
index 0cc68eb..2cb4646 100644 (file)
 #include <errno.h>
 #endif
 
+
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "global-handles.h"
@@ -299,11 +304,11 @@ TEST(GCCallback) {
 
 static int NumberOfWeakCalls = 0;
 static void WeakPointerCallback(v8::Isolate* isolate,
-                                v8::Persistent<v8::Value> handle,
+                                v8::Persistent<v8::Value>* handle,
                                 void* id) {
   ASSERT(id == reinterpret_cast<void*>(1234));
   NumberOfWeakCalls++;
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 TEST(ObjectGroups) {
@@ -322,16 +327,16 @@ TEST(ObjectGroups) {
       global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   global_handles->MakeWeak(g1s1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   global_handles->MakeWeak(g1s2.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   global_handles->MakeWeak(g1c1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
 
   Handle<Object> g2s1 =
       global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
@@ -341,16 +346,16 @@ TEST(ObjectGroups) {
     global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
   global_handles->MakeWeak(g2s1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   global_handles->MakeWeak(g2s2.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   global_handles->MakeWeak(g2c1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
 
   Handle<Object> root = global_handles->Create(*g1s1);  // make a root.
 
@@ -379,8 +384,8 @@ TEST(ObjectGroups) {
   // Weaken the root.
   global_handles->MakeWeak(root.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   // But make children strong roots---all the objects (except for children)
   // should be collectable now.
   global_handles->ClearWeakness(g1c1.location());
@@ -408,12 +413,12 @@ TEST(ObjectGroups) {
   // And now make children weak again and collect them.
   global_handles->MakeWeak(g1c1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
   global_handles->MakeWeak(g2c1.location(),
                            reinterpret_cast<void*>(1234),
-                           NULL,
-                           &WeakPointerCallback);
+                           &WeakPointerCallback,
+                           NULL);
 
   HEAP->CollectGarbage(OLD_POINTER_SPACE);
   CHECK_EQ(7, NumberOfWeakCalls);
index 49007ba..9eb6f3c 100644 (file)
@@ -57,7 +57,7 @@ class HarmonyIsolate {
 TEST(PerIsolateState) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context1;
+  v8::LocalContext context1;
   CompileRun(
       "var count = 0;"
       "var calls = 0;"
@@ -70,20 +70,20 @@ TEST(PerIsolateState) {
       "(function() { obj.foo = 'bar'; })");
   Handle<Value> notify_fun2;
   {
-    LocalContext context2;
+    v8::LocalContext context2;
     context2->Global()->Set(String::New("obj"), obj);
     notify_fun2 = CompileRun(
         "(function() { obj.foo = 'baz'; })");
   }
   Handle<Value> notify_fun3;
   {
-    LocalContext context3;
+    v8::LocalContext context3;
     context3->Global()->Set(String::New("obj"), obj);
     notify_fun3 = CompileRun(
         "(function() { obj.foo = 'bat'; })");
   }
   {
-    LocalContext context4;
+    v8::LocalContext context4;
     context4->Global()->Set(String::New("observer"), observer);
     context4->Global()->Set(String::New("fun1"), notify_fun1);
     context4->Global()->Set(String::New("fun2"), notify_fun2);
@@ -97,7 +97,7 @@ TEST(PerIsolateState) {
 TEST(EndOfMicrotaskDelivery) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   CompileRun(
       "var obj = {};"
       "var count = 0;"
@@ -110,7 +110,7 @@ TEST(EndOfMicrotaskDelivery) {
 TEST(DeliveryOrdering) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   CompileRun(
       "var obj1 = {};"
       "var obj2 = {};"
@@ -141,7 +141,7 @@ TEST(DeliveryOrdering) {
 TEST(DeliveryOrderingReentrant) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   CompileRun(
       "var obj = {};"
       "var reentered = false;"
@@ -172,7 +172,7 @@ TEST(DeliveryOrderingReentrant) {
 TEST(DeliveryOrderingDeliverChangeRecords) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   CompileRun(
       "var obj = {};"
       "var ordering = [];"
@@ -197,14 +197,14 @@ TEST(ObjectHashTableGrowth) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
   // Initializing this context sets up initial hash tables.
-  LocalContext context;
+  v8::LocalContext context;
   Handle<Value> obj = CompileRun("obj = {};");
   Handle<Value> observer = CompileRun(
       "var ran = false;"
       "(function() { ran = true })");
   {
     // As does initializing this context.
-    LocalContext context2;
+    v8::LocalContext context2;
     context2->Global()->Set(String::New("obj"), obj);
     context2->Global()->Set(String::New("observer"), observer);
     CompileRun(
@@ -224,7 +224,7 @@ TEST(ObjectHashTableGrowth) {
 
 TEST(GlobalObjectObservation) {
   HarmonyIsolate isolate;
-  LocalContext context;
+  v8::LocalContext context;
   HandleScope scope(isolate.GetIsolate());
   Handle<Object> global_proxy = context->Global();
   Handle<Object> inner_global = global_proxy->GetPrototype().As<Object>();
@@ -256,7 +256,7 @@ TEST(GlobalObjectObservation) {
   // to the old context.
   context->DetachGlobal();
   {
-    LocalContext context2;
+    v8::LocalContext context2;
     context2->DetachGlobal();
     context2->ReattachGlobal(global_proxy);
     CompileRun(
@@ -271,7 +271,7 @@ TEST(GlobalObjectObservation) {
   // Attaching by passing to Context::New
   {
     // Delegates to Context::New
-    LocalContext context3(NULL, Handle<ObjectTemplate>(), global_proxy);
+    v8::LocalContext context3(NULL, Handle<ObjectTemplate>(), global_proxy);
     CompileRun(
         "var records3 = [];"
         "Object.observe(this, function(r) { [].push.apply(records3, r) });"
@@ -320,7 +320,7 @@ static void ExpectRecords(Handle<Value> records,
 TEST(APITestBasicMutation) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   Handle<Object> obj = Handle<Object>::Cast(CompileRun(
       "var records = [];"
       "var obj = {};"
@@ -363,7 +363,7 @@ TEST(APITestBasicMutation) {
 TEST(HiddenPrototypeObservation) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   Handle<FunctionTemplate> tmpl = FunctionTemplate::New();
   tmpl->SetHiddenPrototype(true);
   tmpl->InstanceTemplate()->Set(String::New("foo"), Number::New(75));
@@ -412,7 +412,7 @@ static int NumberOfElements(i::Handle<i::JSWeakMap> map) {
 TEST(ObservationWeakMap) {
   HarmonyIsolate isolate;
   HandleScope scope(isolate.GetIsolate());
-  LocalContext context;
+  v8::LocalContext context;
   CompileRun(
       "var obj = {};"
       "Object.observe(obj, function(){});"
index 3ea1fb1..05fea0b 100644 (file)
@@ -29,6 +29,9 @@
 #include <stdio.h>
 #include <string.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 
 #include "cctest.h"
@@ -173,8 +176,9 @@ class ScriptResource : public v8::String::ExternalAsciiStringResource {
 
 
 TEST(Preparsing) {
-  v8::HandleScope handles(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> context = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handles(isolate);
+  v8::Local<v8::Context> context = v8::Context::New(isolate);
   v8::Context::Scope context_scope(context);
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
@@ -539,8 +543,9 @@ void TestCharacterStream(const char* ascii_source,
 
 
 TEST(CharacterStreams) {
-  v8::HandleScope handles(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> context = v8::Context::New();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handles(isolate);
+  v8::Local<v8::Context> context = v8::Context::New(isolate);
   v8::Context::Scope context_scope(context);
 
   TestCharacterStream("abc\0\n\r\x7f", 7);
@@ -984,7 +989,7 @@ TEST(ScopePositions) {
   };
 
   v8::HandleScope handles(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> context = v8::Context::New();
+  v8::Handle<v8::Context> context = v8::Context::New(v8::Isolate::GetCurrent());
   v8::Context::Scope context_scope(context);
 
   int marker;
@@ -1243,7 +1248,7 @@ TEST(ParserSync) {
   if (i::FLAG_stress_compaction) return;
 
   v8::HandleScope handles(v8::Isolate::GetCurrent());
-  v8::Persistent<v8::Context> context = v8::Context::New();
+  v8::Handle<v8::Context> context = v8::Context::New(v8::Isolate::GetCurrent());
   v8::Context::Scope context_scope(context);
 
   int marker;
@@ -1284,7 +1289,8 @@ TEST(PreparserStrictOctal) {
   v8::internal::FLAG_min_preparse_length = 1;  // Force preparsing.
   v8::V8::Initialize();
   v8::HandleScope scope(v8::Isolate::GetCurrent());
-  v8::Context::Scope context_scope(v8::Context::New());
+  v8::Context::Scope context_scope(
+      v8::Context::New(v8::Isolate::GetCurrent()));
   v8::TryCatch try_catch;
   const char* script =
       "\"use strict\";       \n"
index 85adeca..70b34e3 100644 (file)
@@ -27,6 +27,9 @@
 //
 // Tests of profiles generator and utilities.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+
 #include "v8.h"
 #include "profile-generator-inl.h"
 #include "cctest.h"
@@ -852,7 +855,6 @@ v8::Handle<v8::Value> ProfilerExtension::StopProfiling(
 
 static ProfilerExtension kProfilerExtension;
 v8::DeclareExtension kProfilerExtensionDeclaration(&kProfilerExtension);
-static v8::Persistent<v8::Context> env;
 
 static const ProfileNode* PickChild(const ProfileNode* parent,
                                     const char* name) {
@@ -869,14 +871,12 @@ TEST(RecordStackTraceAtStartProfiling) {
   // don't appear in the stack trace.
   i::FLAG_use_inlining = false;
 
-  if (env.IsEmpty()) {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
-    const char* extensions[] = { "v8/profiler" };
-    v8::ExtensionConfiguration config(1, extensions);
-    env = v8::Context::New(&config);
-  }
-  v8::HandleScope scope(v8::Isolate::GetCurrent());
-  env->Enter();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  const char* extensions[] = { "v8/profiler" };
+  v8::ExtensionConfiguration config(1, extensions);
+  v8::Local<v8::Context> context = v8::Context::New(isolate);
+  context->Enter();
 
   CpuProfiler* profiler = i::Isolate::Current()->cpu_profiler();
   CHECK_EQ(0, profiler->GetProfilesCount());
index 9394fc9..0837ab3 100644 (file)
@@ -25,6 +25,9 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
 
 #include "v8.h"
 
@@ -36,8 +39,6 @@
 
 using namespace v8::internal;
 
-static v8::Persistent<v8::Context> env;
-
 
 void SetSeeds(Handle<ByteArray> seeds, uint32_t state0, uint32_t state1) {
   for (int i = 0; i < 4; i++) {
@@ -70,11 +71,12 @@ void TestSeeds(Handle<JSFunction> fun,
 
 
 TEST(CrankshaftRandom) {
-  if (env.IsEmpty()) env = v8::Context::New();
+  v8::V8::Initialize();
   // Skip test if crankshaft is disabled.
   if (!V8::UseCrankshaft()) return;
-  v8::HandleScope scope(env->GetIsolate());
-  env->Enter();
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  v8::Context::Scope context_scope(v8::Context::New(isolate));
 
   Handle<Context> context(Isolate::Current()->context());
   Handle<JSObject> global(context->global_object());
index c4175cb..f311dcc 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "ast.h"
@@ -707,19 +711,17 @@ typedef RegExpMacroAssemblerMIPS ArchRegExpMacroAssembler;
 class ContextInitializer {
  public:
   ContextInitializer()
-      : env_(),
-        scope_(v8::Isolate::GetCurrent()),
+      : scope_(v8::Isolate::GetCurrent()),
+        env_(v8::Context::New(v8::Isolate::GetCurrent())),
         zone_(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT) {
-    env_ = v8::Context::New();
     env_->Enter();
   }
   ~ContextInitializer() {
     env_->Exit();
-    env_.Dispose(env_->GetIsolate());
   }
  private:
-  v8::Persistent<v8::Context> env_;
   v8::HandleScope scope_;
+  v8::Handle<v8::Context> env_;
   v8::internal::ZoneScope zone_;
 };
 
index 777c769..0cf8044 100644 (file)
 #include <signal.h>
 
 #include "sys/stat.h"
+
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "debug.h"
@@ -246,8 +251,11 @@ static void Serialize() {
   // can be loaded from v8natives.js and their addresses can be processed.  This
   // will clear the pending fixups array, which would otherwise contain GC roots
   // that would confuse the serialization/deserialization process.
-  v8::Persistent<v8::Context> env = v8::Context::New();
-  env.Dispose(env->GetIsolate());
+  {
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::HandleScope scope(isolate);
+    v8::Context::New(isolate);
+  }
   WriteToFile(FLAG_testing_serialization_file);
 }
 
@@ -299,10 +307,11 @@ DEPENDENT_TEST(Deserialize, Serialize) {
   // serialization.  That doesn't matter.  We don't need to be able to
   // serialize a snapshot in a VM that is booted from a snapshot.
   if (!Snapshot::HaveASnapshotToStartFrom()) {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::HandleScope scope(isolate);
     Deserialize();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Local<v8::Context> env = v8::Context::New(isolate);
     env->Enter();
 
     SanityCheck();
@@ -312,10 +321,11 @@ DEPENDENT_TEST(Deserialize, Serialize) {
 
 DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
   if (!Snapshot::HaveASnapshotToStartFrom()) {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::HandleScope scope(isolate);
     Deserialize();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Local<v8::Context> env = v8::Context::New(isolate);
     env->Enter();
 
     SanityCheck();
@@ -325,10 +335,11 @@ DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
 
 DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
   if (!Snapshot::HaveASnapshotToStartFrom()) {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::HandleScope scope(isolate);
     Deserialize();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Local<v8::Context> env = v8::Context::New(isolate);
     env->Enter();
 
     const char* c_source = "\"1234\".length";
@@ -342,10 +353,11 @@ DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
 DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2,
                SerializeTwice) {
   if (!Snapshot::HaveASnapshotToStartFrom()) {
-    v8::HandleScope scope(v8::Isolate::GetCurrent());
+    v8::Isolate* isolate = v8::Isolate::GetCurrent();
+    v8::HandleScope scope(isolate);
     Deserialize();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Local<v8::Context> env = v8::Context::New(isolate);
     env->Enter();
 
     const char* c_source = "\"1234\".length";
@@ -363,7 +375,12 @@ TEST(PartialSerialization) {
     Isolate* isolate = Isolate::Current();
     Heap* heap = isolate->heap();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Persistent<v8::Context> env;
+    {
+      HandleScope scope(isolate);
+      env.Reset(v8::Isolate::GetCurrent(),
+                v8::Context::New(v8::Isolate::GetCurrent()));
+    }
     ASSERT(!env.IsEmpty());
     env->Enter();
     // Make sure all builtin scripts are cached.
@@ -497,7 +514,12 @@ TEST(ContextSerialization) {
     Isolate* isolate = Isolate::Current();
     Heap* heap = isolate->heap();
 
-    v8::Persistent<v8::Context> env = v8::Context::New();
+    v8::Persistent<v8::Context> env;
+    {
+      HandleScope scope(isolate);
+      env.Reset(v8::Isolate::GetCurrent(),
+                v8::Context::New(v8::Isolate::GetCurrent()));
+    }
     ASSERT(!env.IsEmpty());
     env->Enter();
     // Make sure all builtin scripts are cached.
index 2889172..77e8e1b 100644 (file)
 
 #include <stdlib.h>
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "api.h"
index 6a8323b..e1b3ea7 100644 (file)
@@ -5,6 +5,10 @@
 // of ConsStrings.  These operations may not be very fast, but they
 // should be possible without getting errors due to too deep recursion.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "cctest.h"
index b22c150..4008663 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 #include "platform.h"
 #include "cctest.h"
@@ -124,7 +128,8 @@ TEST(TerminateOnlyV8ThreadFromThreadItself) {
   v8::HandleScope scope(v8::Isolate::GetCurrent());
   v8::Handle<v8::ObjectTemplate> global =
       CreateGlobalTemplate(TerminateCurrentThread, DoLoop);
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   // Run a loop that will be infinite if thread termination does not work.
@@ -134,7 +139,6 @@ TEST(TerminateOnlyV8ThreadFromThreadItself) {
   // Test that we can run the code again after thread termination.
   CHECK(!v8::V8::IsExecutionTerminating());
   v8::Script::Compile(source)->Run();
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -144,7 +148,8 @@ TEST(TerminateOnlyV8ThreadFromThreadItselfNoLoop) {
   v8::HandleScope scope(v8::Isolate::GetCurrent());
   v8::Handle<v8::ObjectTemplate> global =
       CreateGlobalTemplate(TerminateCurrentThread, DoLoopNoCall);
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   // Run a loop that will be infinite if thread termination does not work.
@@ -154,7 +159,6 @@ TEST(TerminateOnlyV8ThreadFromThreadItselfNoLoop) {
   CHECK(!v8::V8::IsExecutionTerminating());
   // Test that we can run the code again after thread termination.
   v8::Script::Compile(source)->Run();
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -183,7 +187,8 @@ TEST(TerminateOnlyV8ThreadFromOtherThread) {
 
   v8::HandleScope scope(v8::Isolate::GetCurrent());
   v8::Handle<v8::ObjectTemplate> global = CreateGlobalTemplate(Signal, DoLoop);
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   // Run a loop that will be infinite if thread termination does not work.
@@ -194,7 +199,6 @@ TEST(TerminateOnlyV8ThreadFromOtherThread) {
   thread.Join();
   delete semaphore;
   semaphore = NULL;
-  context.Dispose(context->GetIsolate());
 }
 
 
@@ -207,14 +211,14 @@ class LoopingThread : public v8::internal::Thread {
     v8_thread_id_ = v8::V8::GetCurrentThreadId();
     v8::Handle<v8::ObjectTemplate> global =
         CreateGlobalTemplate(Signal, DoLoop);
-    v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+    v8::Handle<v8::Context> context =
+        v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
     v8::Context::Scope context_scope(context);
     CHECK(!v8::V8::IsExecutionTerminating());
     // Run a loop that will be infinite if thread termination does not work.
     v8::Handle<v8::String> source =
         v8::String::New("try { loop(); fail(); } catch(e) { fail(); }");
     v8::Script::Compile(source)->Run();
-    context.Dispose(context->GetIsolate());
   }
 
   int GetV8ThreadId() { return v8_thread_id_; }
@@ -314,7 +318,8 @@ TEST(TerminateLoadICException) {
   global->Set(v8::String::New("loop"),
               v8::FunctionTemplate::New(LoopGetProperty));
 
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   // Run a loop that will be infinite if thread termination does not work.
@@ -326,7 +331,6 @@ TEST(TerminateLoadICException) {
   CHECK(!v8::V8::IsExecutionTerminating());
   call_count = 0;
   v8::Script::Compile(source)->Run();
-  context.Dispose(context->GetIsolate());
 }
 
 v8::Handle<v8::Value> ReenterAfterTermination(const v8::Arguments& args) {
@@ -360,7 +364,8 @@ TEST(TerminateAndReenterFromThreadItself) {
   v8::HandleScope scope(v8::Isolate::GetCurrent());
   v8::Handle<v8::ObjectTemplate> global =
       CreateGlobalTemplate(TerminateCurrentThread, ReenterAfterTermination);
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   v8::Handle<v8::String> source =
@@ -370,7 +375,6 @@ TEST(TerminateAndReenterFromThreadItself) {
   // Check we can run JS again after termination.
   CHECK(v8::Script::Compile(v8::String::New("function f() { return true; }"
                                             "f()"))->Run()->IsTrue());
-  context.Dispose(context->GetIsolate());
 }
 
 v8::Handle<v8::Value> DoLoopCancelTerminate(const v8::Arguments& args) {
@@ -399,13 +403,13 @@ TEST(TerminateCancelTerminateFromThreadItself) {
   v8::HandleScope scope;
   v8::Handle<v8::ObjectTemplate> global =
       CreateGlobalTemplate(TerminateCurrentThread, DoLoopCancelTerminate);
-  v8::Persistent<v8::Context> context = v8::Context::New(NULL, global);
+  v8::Handle<v8::Context> context =
+      v8::Context::New(v8::Isolate::GetCurrent(), NULL, global);
   v8::Context::Scope context_scope(context);
   CHECK(!v8::V8::IsExecutionTerminating());
   v8::Handle<v8::String> source =
       v8::String::New("try { doloop(); } catch(e) { fail(); } 'completed';");
   // Check that execution completed with correct return value.
   CHECK(v8::Script::Compile(source)->Run()->Equals(v8_str("completed")));
-  context.Dispose(context->GetIsolate());
 }
 
index 5a010a8..edec8bf 100644 (file)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// TODO(dcarney): remove
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_IMPLICIT
+#define V8_ALLOW_ACCESS_TO_PERSISTENT_ARROW
+
 #include "v8.h"
 
 #include "platform.h"
 
 
 TEST(Preemption) {
-  v8::Locker locker(CcTest::default_isolate());
+  v8::Isolate* isolate = CcTest::default_isolate();
+  v8::Locker locker(isolate);
   v8::V8::Initialize();
-  v8::HandleScope scope(CcTest::default_isolate());
-  v8::Context::Scope context_scope(v8::Context::New());
+  v8::HandleScope scope(isolate);
+  v8::Handle<v8::Context> context = v8::Context::New(isolate);
+  v8::Context::Scope context_scope(context);
 
   v8::Locker::StartPreemption(100);
 
@@ -67,9 +73,11 @@ class ThreadA : public v8::internal::Thread {
  public:
   ThreadA() : Thread("ThreadA") { }
   void Run() {
-    v8::Locker locker(CcTest::default_isolate());
-    v8::HandleScope scope(CcTest::default_isolate());
-    v8::Context::Scope context_scope(v8::Context::New());
+    v8::Isolate* isolate = CcTest::default_isolate();
+    v8::Locker locker(isolate);
+    v8::HandleScope scope(isolate);
+    v8::Handle<v8::Context> context = v8::Context::New(isolate);
+    v8::Context::Scope context_scope(context);
 
     CHECK_EQ(FILL_CACHE, turn);
 
@@ -105,10 +113,12 @@ class ThreadB : public v8::internal::Thread {
   void Run() {
     do {
       {
-        v8::Locker locker(CcTest::default_isolate());
+        v8::Isolate* isolate = CcTest::default_isolate();
+        v8::Locker locker(isolate);
         if (turn == CLEAN_CACHE) {
-          v8::HandleScope scope(CcTest::default_isolate());
-          v8::Context::Scope context_scope(v8::Context::New());
+          v8::HandleScope scope(isolate);
+          v8::Handle<v8::Context> context = v8::Context::New(isolate);
+          v8::Context::Scope context_scope(context);
 
           // Clear the caches by forcing major GC.
           HEAP->CollectAllGarbage(v8::internal::Heap::kNoGCFlags);
index 1dd43a8..499286c 100644 (file)
@@ -65,11 +65,11 @@ static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
 
 static int NumberOfWeakCalls = 0;
 static void WeakPointerCallback(v8::Isolate* isolate,
-                                v8::Persistent<v8::Value> handle,
+                                v8::Persistent<v8::Value>* handle,
                                 void* id) {
   ASSERT(id == reinterpret_cast<void*>(1234));
   NumberOfWeakCalls++;
-  handle.Dispose(isolate);
+  handle->Dispose(isolate);
 }
 
 
@@ -114,8 +114,8 @@ TEST(Weakness) {
     HandleScope scope(isolate);
     global_handles->MakeWeak(key.location(),
                              reinterpret_cast<void*>(1234),
-                             NULL,
-                             &WeakPointerCallback);
+                             &WeakPointerCallback,
+                             NULL);
   }
   CHECK(global_handles->IsWeak(key.location()));
 
diff --git a/deps/v8/test/message/isvar.js b/deps/v8/test/message/isvar.js
new file mode 100644 (file)
index 0000000..67793a0
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+var x;
+%IS_VAR(x);
+%IS_VAR(x+x);
\ No newline at end of file
diff --git a/deps/v8/test/message/isvar.out b/deps/v8/test/message/isvar.out
new file mode 100644 (file)
index 0000000..6d5cca3
--- /dev/null
@@ -0,0 +1,4 @@
+*%(basename)s:31: SyntaxError: builtin %%IS_VAR: not a variable
+%%IS_VAR(x+x);
+           ^
+SyntaxError: builtin %%IS_VAR: not a variable
diff --git a/deps/v8/test/message/paren_in_arg_string.js b/deps/v8/test/message/paren_in_arg_string.js
new file mode 100644 (file)
index 0000000..bab762a
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var paren_in_arg_string_good = new Function('x', 'return;');
+var paren_in_arg_string_bad = new Function(')', 'return;');
\ No newline at end of file
diff --git a/deps/v8/test/message/paren_in_arg_string.out b/deps/v8/test/message/paren_in_arg_string.out
new file mode 100644 (file)
index 0000000..3bc978b
--- /dev/null
@@ -0,0 +1,6 @@
+*%(basename)s:29: SyntaxError: Function arg string contains parenthesis
+var paren_in_arg_string_bad = new Function(')', 'return;');
+                              ^
+SyntaxError: Function arg string contains parenthesis
+    at Function (<anonymous>)
+    at *%(basename)s:29:31
\ No newline at end of file
diff --git a/deps/v8/test/message/single-function-literal.js b/deps/v8/test/message/single-function-literal.js
new file mode 100644 (file)
index 0000000..3720637
--- /dev/null
@@ -0,0 +1,32 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+var single_function_good = "(function() { return 5; })";
+%CompileString(single_function_good, true);
+var single_function_bad = "(function() { return 5; })();";
+%CompileString(single_function_bad, true);
\ No newline at end of file
diff --git a/deps/v8/test/message/single-function-literal.out b/deps/v8/test/message/single-function-literal.out
new file mode 100644 (file)
index 0000000..a6a54b6
--- /dev/null
@@ -0,0 +1,5 @@
+undefined:1: SyntaxError: Single function literal required
+(function() { return 5; })();
+                            ^
+SyntaxError: Single function literal required
+    at *%(basename)s:32:16
\ No newline at end of file
index d85ce3e..5c7ae13 100644 (file)
@@ -284,5 +284,25 @@ if (support_smi_only_arrays) {
     assertKind(elements_kind.fast, obj);
     obj = newarraycase_list_smiobj(2);
     assertKind(elements_kind.fast, obj);
+
+    // Verify that cross context calls work
+    var realmA = Realm.current();
+    var realmB = Realm.create();
+    assertEquals(0, realmA);
+    assertEquals(1, realmB);
+
+    function instanceof_check(type) {
+      assertTrue(new type() instanceof type);
+      assertTrue(new type(5) instanceof type);
+      assertTrue(new type(1,2,3) instanceof type);
+    }
+
+    var realmBArray = Realm.eval(realmB, "Array");
+    instanceof_check(Array);
+    instanceof_check(realmBArray);
+    %OptimizeFunctionOnNextCall(instanceof_check);
+    instanceof_check(Array);
+    instanceof_check(realmBArray);
+    assertTrue(2 != %GetOptimizationStatus(instanceof_check));
   }
 }
diff --git a/deps/v8/test/mjsunit/compare-nil.js b/deps/v8/test/mjsunit/compare-nil.js
new file mode 100644 (file)
index 0000000..0895a31
--- /dev/null
@@ -0,0 +1,36 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function test(v) {
+  return (v == null);
+}
+assertFalse(test(true));
+assertFalse(test(true));
+assertTrue(test(null));
+assertTrue(test(null));
index 966b386..e5f4f66 100644 (file)
@@ -71,7 +71,7 @@ if (support_smi_only_arrays) {
   // deopt in simple cases.
   function testExactMapHoisting(a) {
     var object = new Object();
-    a.foo = 0;
+    a.foo = {};
     a[0] = 0;
     a[1] = 1;
     var count = 3;
index 0b843d8..b3d91a5 100644 (file)
@@ -49,12 +49,6 @@ f(a);
 assertEquals(0, a[0]);
 assertEquals(0, a[1]);
 
-// No-parameter constructor should fail right now.
-function abfunc1() {
-  return new ArrayBuffer();
-}
-assertThrows(abfunc1);
-
 // Test derivation from an ArrayBuffer
 var ab = new ArrayBuffer(12);
 assertInstance(ab, ArrayBuffer);
@@ -161,12 +155,10 @@ assertSame(a.buffer, (new Float32Array(a.buffer,4)).buffer);
 assertSame(a.buffer, (new Int8Array(a.buffer,3,51)).buffer);
 assertInstance(a.buffer, ArrayBuffer);
 
-// Test the correct behavior of the |BYTES_PER_ELEMENT| property (which is
-// "constant", but not read-only).
+// Test the correct behavior of the |BYTES_PER_ELEMENT| property.
 a = new Int32Array(2);
 assertEquals(4, a.BYTES_PER_ELEMENT);
 a.BYTES_PER_ELEMENT = 42;
-assertEquals(42, a.BYTES_PER_ELEMENT);
 a = new Uint8Array(2);
 assertEquals(1, a.BYTES_PER_ELEMENT);
 a = new Int16Array(2);
@@ -202,15 +194,6 @@ assertEquals(3.5, get(array, 1));
 // Test non-number parameters.
 var array_with_length_from_non_number = new Int32Array("2");
 assertEquals(2, array_with_length_from_non_number.length);
-array_with_length_from_non_number = new Int32Array(undefined);
-assertEquals(0, array_with_length_from_non_number.length);
-var foo = { valueOf: function() { return 3; } };
-array_with_length_from_non_number = new Int32Array(foo);
-assertEquals(3, array_with_length_from_non_number.length);
-foo = { toString: function() { return "4"; } };
-array_with_length_from_non_number = new Int32Array(foo);
-assertEquals(4, array_with_length_from_non_number.length);
-
 
 // Test loads and stores.
 types = [Array, Int8Array, Uint8Array, Int16Array, Uint16Array, Int32Array,
@@ -359,8 +342,6 @@ for (var t = 0; t < types.length; t++) {
     a.length = 2;
     assertEquals(kElementCount, a.length);
     assertTrue(delete a.length);
-    a.length = 2;
-    assertEquals(2, a.length);
 
     // Make sure bounds checks are handled correctly for external arrays.
     run_bounds_test(a);
@@ -539,8 +520,6 @@ assertSame(a.buffer, aa.buffer);
 
 assertThrows(function(){ a.subarray.call({}, 0) });
 assertThrows(function(){ a.subarray.call([], 0) });
-assertThrows(function(){ a.subarray.call(a) });
-
 
 // Call constructors directly as functions, and through .call and .apply
 
index 85a8cc5..e61ff45 100644 (file)
@@ -49,12 +49,6 @@ f(a);
 assertEquals(0, a[0]);
 assertEquals(0, a[1]);
 
-// No-parameter constructor should fail right now.
-function abfunc1() {
-  return new ArrayBuffer();
-}
-assertThrows(abfunc1);
-
 // Test derivation from an ArrayBuffer
 var ab = new ArrayBuffer(12);
 assertInstance(ab, ArrayBuffer);
@@ -161,12 +155,10 @@ assertSame(a.buffer, (new Float32Array(a.buffer,4)).buffer);
 assertSame(a.buffer, (new Int8Array(a.buffer,3,51)).buffer);
 assertInstance(a.buffer, ArrayBuffer);
 
-// Test the correct behavior of the |BYTES_PER_ELEMENT| property (which is
-// "constant", but not read-only).
+// Test the correct behavior of the |BYTES_PER_ELEMENT| property.
 a = new Int32Array(2);
 assertEquals(4, a.BYTES_PER_ELEMENT);
 a.BYTES_PER_ELEMENT = 42;
-assertEquals(42, a.BYTES_PER_ELEMENT);
 a = new Uint8Array(2);
 assertEquals(1, a.BYTES_PER_ELEMENT);
 a = new Int16Array(2);
@@ -202,15 +194,6 @@ assertEquals(3.5, get(array, 1));
 // Test non-number parameters.
 var array_with_length_from_non_number = new Int32Array("2");
 assertEquals(2, array_with_length_from_non_number.length);
-array_with_length_from_non_number = new Int32Array(undefined);
-assertEquals(0, array_with_length_from_non_number.length);
-var foo = { valueOf: function() { return 3; } };
-array_with_length_from_non_number = new Int32Array(foo);
-assertEquals(3, array_with_length_from_non_number.length);
-foo = { toString: function() { return "4"; } };
-array_with_length_from_non_number = new Int32Array(foo);
-assertEquals(4, array_with_length_from_non_number.length);
-
 
 // Test loads and stores.
 types = [Array, Int8Array, Uint8Array, Int16Array, Uint16Array, Int32Array,
@@ -359,8 +342,6 @@ for (var t = 0; t < types.length; t++) {
     a.length = 2;
     assertEquals(kElementCount, a.length);
     assertTrue(delete a.length);
-    a.length = 2;
-    assertEquals(2, a.length);
 
     // Make sure bounds checks are handled correctly for external arrays.
     run_bounds_test(a);
@@ -452,7 +433,6 @@ assertEquals(0, a.length);
 a[0] = 1;
 assertEquals(undefined, a[0]);
 
-
 // Check construction from arrays.
 a = new Uint32Array([]);
 assertInstance(a, Uint32Array);
@@ -539,8 +519,6 @@ assertSame(a.buffer, aa.buffer);
 
 assertThrows(function(){ a.subarray.call({}, 0) });
 assertThrows(function(){ a.subarray.call([], 0) });
-assertThrows(function(){ a.subarray.call(a) });
-
 
 // Call constructors directly as functions, and through .call and .apply
 
index bc0bde0..d120ac7 100644 (file)
 
 var GeneratorFunction = (function*(){yield 1;}).__proto__.constructor;
 
+function assertIteratorResult(value, done, result) {
+  assertEquals({ value: value, done: done}, result);
+}
+
+function TestGeneratorResultPrototype() {
+  function* g() { yield 1; }
+  var iter = g();
+  var result = iter.next();
+
+  assertSame(Object.prototype, Object.getPrototypeOf(result));
+  property_names = Object.getOwnPropertyNames(result);
+  property_names.sort();
+  assertEquals(["done", "value"], property_names);
+  assertIteratorResult(1, false, result);
+}
+TestGeneratorResultPrototype()
+
 function TestGenerator(g, expected_values_for_next,
                        send_val, expected_values_for_send) {
   function testNext(thunk) {
     var iter = thunk();
     for (var i = 0; i < expected_values_for_next.length; i++) {
-      assertEquals(expected_values_for_next[i], iter.next());
+      assertIteratorResult(expected_values_for_next[i],
+                           i == expected_values_for_next.length - 1,
+                           iter.next());
     }
     assertThrows(function() { iter.next(); }, Error);
   }
   function testSend(thunk) {
     var iter = thunk();
     for (var i = 0; i < expected_values_for_send.length; i++) {
-      assertEquals(expected_values_for_send[i], iter.send(send_val));
+      assertIteratorResult(expected_values_for_send[i],
+                           i == expected_values_for_send.length - 1,
+                           iter.send(send_val));
     }
     assertThrows(function() { iter.send(send_val); }, Error);
   }
@@ -51,7 +72,9 @@ function TestGenerator(g, expected_values_for_next,
     for (var i = 0; i < expected_values_for_next.length; i++) {
       var iter = thunk();
       for (var j = 0; j < i; j++) {
-        assertEquals(expected_values_for_next[j], iter.next());
+        assertIteratorResult(expected_values_for_next[j],
+                             j == expected_values_for_next.length - 1,
+                             iter.next());
       }
       function Sentinel() {}
       assertThrows(function () { iter.throw(new Sentinel); }, Sentinel);
@@ -234,7 +257,7 @@ TestGenerator(
     [1, 2, undefined]);
 
 TestGenerator(
-    function* g() {
+    function* g19() {
       var x = 1;
       yield x;
       with({x:2}) { yield x; }
@@ -244,6 +267,251 @@ TestGenerator(
     "foo",
     [1, 2, 1, undefined]);
 
+TestGenerator(
+    function* g20() { yield (1 + (yield 2) + 3); },
+    [2, NaN, undefined],
+    "foo",
+    [2, "1foo3", undefined]);
+
+TestGenerator(
+    function* g21() { return (1 + (yield 2) + 3); },
+    [2, NaN],
+    "foo",
+    [2, "1foo3"]);
+
+TestGenerator(
+    function* g22() { yield (1 + (yield 2) + 3); yield (4 + (yield 5) + 6); },
+    [2, NaN, 5, NaN, undefined],
+    "foo",
+    [2, "1foo3", 5, "4foo6", undefined]);
+
+TestGenerator(
+    function* g23() {
+      return (yield (1 + (yield 2) + 3)) + (yield (4 + (yield 5) + 6));
+    },
+    [2, NaN, 5, NaN, NaN],
+    "foo",
+    [2, "1foo3", 5, "4foo6", "foofoo"]);
+
+// Rewind a try context with and without operands on the stack.
+TestGenerator(
+    function* g24() {
+      try {
+        return (yield (1 + (yield 2) + 3)) + (yield (4 + (yield 5) + 6));
+      } catch (e) {
+        throw e;
+      }
+    },
+    [2, NaN, 5, NaN, NaN],
+    "foo",
+    [2, "1foo3", 5, "4foo6", "foofoo"]);
+
+// Yielding in a catch context, with and without operands on the stack.
+TestGenerator(
+    function* g25() {
+      try {
+        throw (yield (1 + (yield 2) + 3))
+      } catch (e) {
+        if (typeof e == 'object') throw e;
+        return e + (yield (4 + (yield 5) + 6));
+      }
+    },
+    [2, NaN, 5, NaN, NaN],
+    "foo",
+    [2, "1foo3", 5, "4foo6", "foofoo"]);
+
+function TestTryCatch() {
+  function* g() { yield 1; try { yield 2; } catch (e) { yield e; } yield 3; }
+  function Sentinel() {}
+  var iter;
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertIteratorResult(3, false, iter.next());
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+}
+TestTryCatch();
+
+function TestTryFinally() {
+  function* g() { yield 1; try { yield 2; } finally { yield 3; } yield 4; }
+  function Sentinel() {}
+  function Sentinel2() {}
+  var iter;
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.throw(new Sentinel));
+  assertThrows(function() { iter.next(); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.throw(new Sentinel));
+  assertThrows(function() { iter.throw(new Sentinel2); }, Sentinel2);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.next());
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+}
+TestTryFinally();
+
+function TestNestedTry() {
+  function* g() {
+    try {
+      yield 1;
+      try { yield 2; } catch (e) { yield e; }
+      yield 3;
+    } finally {
+      yield 4;
+    }
+    yield 5;
+  }
+  function Sentinel() {}
+  function Sentinel2() {}
+  var iter;
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.next());
+  assertIteratorResult(5, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertThrows(function() { iter.throw(new Sentinel); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(4, false, iter.throw(new Sentinel));
+  assertThrows(function() { iter.next(); }, Sentinel);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(4, false, iter.throw(new Sentinel));
+  assertThrows(function() { iter.throw(new Sentinel2); }, Sentinel2);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.next());
+  assertIteratorResult(5, false, iter.next());
+  assertIteratorResult(undefined, true, iter.next());
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertIteratorResult(4, false, iter.throw(new Sentinel2));
+  assertThrows(function() { iter.next(); }, Sentinel2);
+  assertThrows(function() { iter.next(); }, Error);
+
+  iter = g();
+  assertIteratorResult(1, false, iter.next());
+  assertIteratorResult(2, false, iter.next());
+  var exn = new Sentinel;
+  assertIteratorResult(exn, false, iter.throw(exn));
+  assertIteratorResult(3, false, iter.next());
+  assertIteratorResult(4, false, iter.throw(new Sentinel2));
+  assertThrows(function() { iter.next(); }, Sentinel2);
+  assertThrows(function() { iter.next(); }, Error);
+
+  // That's probably enough.
+}
+TestNestedTry();
+
 function TestRecursion() {
   function TestNextRecursion() {
     function* g() { yield iter.next(); }
index 0182fc3..b4e8f95 100644 (file)
@@ -105,6 +105,8 @@ function TestGeneratorFunction() {
   assertSame(Function, Object.getPrototypeOf(GeneratorFunction));
   assertTrue(g instanceof Function);
 
+  assertEquals("function* g() { yield 1; }", g.toString());
+
   // Not all functions are generators.
   assertTrue(f instanceof Function);  // Sanity check.
   assertTrue(!(f instanceof GeneratorFunction));
index 7f9c763..c5c19e1 100644 (file)
@@ -30,7 +30,7 @@
 // ArrayBuffer
 
 function TestByteLength(param, expectedByteLength) {
-  var ab = new __ArrayBuffer(param);
+  var ab = new ArrayBuffer(param);
   assertSame(expectedByteLength, ab.byteLength);
 }
 
@@ -47,18 +47,18 @@ function TestArrayBufferCreation() {
 
 /* TODO[dslomov]: Reenable the test
   assertThrows(function() {
-    var ab1 = new __ArrayBuffer(0xFFFFFFFFFFFF)
+    var ab1 = new ArrayBuffer(0xFFFFFFFFFFFF)
   }, RangeError);
 */
 
-  var ab = new __ArrayBuffer();
+  var ab = new ArrayBuffer();
   assertSame(0, ab.byteLength);
 }
 
 TestArrayBufferCreation();
 
 function TestByteLengthNotWritable() {
-  var ab = new __ArrayBuffer(1024);
+  var ab = new ArrayBuffer(1024);
   assertSame(1024, ab.byteLength);
 
   assertThrows(function() { "use strict"; ab.byteLength = 42; }, TypeError);
@@ -67,13 +67,21 @@ function TestByteLengthNotWritable() {
 TestByteLengthNotWritable();
 
 function TestSlice(expectedResultLen, initialLen, start, end) {
-  var ab = new __ArrayBuffer(initialLen);
+  var ab = new ArrayBuffer(initialLen);
+  var a1 = new Uint8Array(ab);
+  for (var i = 0; i < a1.length; i++) {
+    a1[i] = 0xCA;
+  }
   var slice = ab.slice(start, end);
   assertSame(expectedResultLen, slice.byteLength);
+  var a2 = new Uint8Array(slice);
+  for (var i = 0; i < a2.length; i++) {
+    assertSame(0xCA, a2[i]);
+  }
 }
 
 function TestArrayBufferSlice() {
-  var ab = new __ArrayBuffer(1024);
+  var ab = new ArrayBuffer(1024);
   var ab1 = ab.slice(512, 1024);
   assertSame(512, ab1.byteLength);
 
@@ -111,7 +119,28 @@ TestArrayBufferSlice();
 // Typed arrays
 
 function TestTypedArray(proto, elementSize, typicalElement) {
-  var ab = new __ArrayBuffer(256*elementSize);
+  var ab = new ArrayBuffer(256*elementSize);
+
+  var a0 = new proto(30);
+  assertSame(elementSize, a0.BYTES_PER_ELEMENT);
+  assertSame(30, a0.length);
+  assertSame(30*elementSize, a0.byteLength);
+  assertSame(0, a0.byteOffset);
+  assertSame(30*elementSize, a0.buffer.byteLength);
+
+  var aLen0 = new proto(0);
+  assertSame(elementSize, aLen0.BYTES_PER_ELEMENT);
+  assertSame(0, aLen0.length);
+  assertSame(0, aLen0.byteLength);
+  assertSame(0, aLen0.byteOffset);
+  assertSame(0, aLen0.buffer.byteLength);
+
+  var aOverBufferLen0 = new proto(ab, 128*elementSize, 0);
+  assertSame(ab, aOverBufferLen0.buffer);
+  assertSame(elementSize, aOverBufferLen0.BYTES_PER_ELEMENT);
+  assertSame(0, aOverBufferLen0.length);
+  assertSame(0, aOverBufferLen0.byteLength);
+  assertSame(128*elementSize, aOverBufferLen0.byteOffset);
 
   var a1 = new proto(ab, 128*elementSize, 128);
   assertSame(ab, a1.buffer);
@@ -170,31 +199,263 @@ function TestTypedArray(proto, elementSize, typicalElement) {
     assertSame(typicalElement, a4[i]);
   }
 
-  assertThrows(function () { new proto(ab, 256*elementSize); }, RangeError);
+  var aAtTheEnd = new proto(ab, 256*elementSize);
+  assertSame(elementSize, aAtTheEnd.BYTES_PER_ELEMENT);
+  assertSame(0, aAtTheEnd.length);
+  assertSame(0, aAtTheEnd.byteLength);
+  assertSame(256*elementSize, aAtTheEnd.byteOffset);
+
+  assertThrows(function () { new proto(ab, 257*elementSize); }, RangeError);
+  assertThrows(
+      function () { new proto(ab, 128*elementSize, 192); },
+      RangeError);
 
   if (elementSize !== 1) {
     assertThrows(function() { new proto(ab, 128*elementSize - 1, 10); },
                  RangeError);
-    var unalignedArrayBuffer = new __ArrayBuffer(10*elementSize + 1);
+    var unalignedArrayBuffer = new ArrayBuffer(10*elementSize + 1);
     var goodArray = new proto(unalignedArrayBuffer, 0, 10);
     assertSame(10, goodArray.length);
     assertSame(10*elementSize, goodArray.byteLength);
     assertThrows(function() { new proto(unalignedArrayBuffer)}, RangeError);
     assertThrows(function() { new proto(unalignedArrayBuffer, 5*elementSize)},
                  RangeError);
+    assertThrows(function() { new proto() }, TypeError);
+  }
+
+  var aFromString = new proto("30");
+  assertSame(elementSize, aFromString.BYTES_PER_ELEMENT);
+  assertSame(30, aFromString.length);
+  assertSame(30*elementSize, aFromString.byteLength);
+  assertSame(0, aFromString.byteOffset);
+  assertSame(30*elementSize, aFromString.buffer.byteLength);
+
+  var jsArray = [];
+  for (i = 0; i < 30; i++) {
+    jsArray.push(typicalElement);
+  }
+  var aFromArray = new proto(jsArray);
+  assertSame(elementSize, aFromArray.BYTES_PER_ELEMENT);
+  assertSame(30, aFromArray.length);
+  assertSame(30*elementSize, aFromArray.byteLength);
+  assertSame(0, aFromArray.byteOffset);
+  assertSame(30*elementSize, aFromArray.buffer.byteLength);
+  for (i = 0; i < 30; i++) {
+    assertSame(typicalElement, aFromArray[i]);
   }
 
+  var abLen0 = new ArrayBuffer(0);
+  var aOverAbLen0 = new proto(abLen0);
+  assertSame(abLen0, aOverAbLen0.buffer);
+  assertSame(elementSize, aOverAbLen0.BYTES_PER_ELEMENT);
+  assertSame(0, aOverAbLen0.length);
+  assertSame(0, aOverAbLen0.byteLength);
+  assertSame(0, aOverAbLen0.byteOffset);
+}
+
+TestTypedArray(Uint8Array, 1, 0xFF);
+TestTypedArray(Int8Array, 1, -0x7F);
+TestTypedArray(Uint16Array, 2, 0xFFFF);
+TestTypedArray(Int16Array, 2, -0x7FFF);
+TestTypedArray(Uint32Array, 4, 0xFFFFFFFF);
+TestTypedArray(Int32Array, 4, -0x7FFFFFFF);
+TestTypedArray(Float32Array, 4, 0.5);
+TestTypedArray(Float64Array, 8, 0.5);
+TestTypedArray(Uint8ClampedArray, 1, 0xFF);
+
+function SubarrayTestCase(constructor, item, expectedResultLen, expectedStartIndex,
+                          initialLen, start, end) {
+  var a = new constructor(initialLen);
+  var s = a.subarray(start, end);
+  assertSame(constructor, s.constructor);
+  assertSame(expectedResultLen, s.length);
+  if (s.length > 0) {
+    s[0] = item;
+    assertSame(item, a[expectedStartIndex]);
+  }
 }
 
-TestTypedArray(__Uint8Array, 1, 0xFF);
-TestTypedArray(__Int8Array, 1, -0x7F);
-TestTypedArray(__Uint16Array, 2, 0xFFFF);
-TestTypedArray(__Int16Array, 2, -0x7FFF);
-TestTypedArray(__Uint32Array, 4, 0xFFFFFFFF);
-TestTypedArray(__Int32Array, 4, -0x7FFFFFFF);
-TestTypedArray(__Float32Array, 4, 0.5);
-TestTypedArray(__Float64Array, 8, 0.5);
+function TestSubArray(constructor, item) {
+  SubarrayTestCase(constructor, item, 512, 512, 1024, 512, 1024);
+  SubarrayTestCase(constructor, item, 512, 512, 1024, 512);
 
+  SubarrayTestCase(constructor, item, 0, undefined, 0, 1, 20);
+  SubarrayTestCase(constructor, item, 100, 0,       100, 0, 100);
+  SubarrayTestCase(constructor, item, 100, 0,       100,  0, 1000);
+  SubarrayTestCase(constructor, item, 0, undefined, 100, 5, 1);
+
+  SubarrayTestCase(constructor, item, 1, 89,        100, -11, -10);
+  SubarrayTestCase(constructor, item, 9, 90,        100, -10, 99);
+  SubarrayTestCase(constructor, item, 0, undefined, 100, -10, 80);
+  SubarrayTestCase(constructor, item, 10,80,        100, 80, -10);
+
+  SubarrayTestCase(constructor, item, 10,90,        100, 90, "100");
+  SubarrayTestCase(constructor, item, 10,90,        100, "90", "100");
+
+  SubarrayTestCase(constructor, item, 0, undefined, 100, 90, "abc");
+  SubarrayTestCase(constructor, item, 10,0,         100, "abc", 10);
+
+  SubarrayTestCase(constructor, item, 10,0,         100, 0.96, 10.96);
+  SubarrayTestCase(constructor, item, 10,0,         100, 0.96, 10.01);
+  SubarrayTestCase(constructor, item, 10,0,         100, 0.01, 10.01);
+  SubarrayTestCase(constructor, item, 10,0,         100, 0.01, 10.96);
+
+
+  SubarrayTestCase(constructor, item, 10,90,        100, 90);
+  SubarrayTestCase(constructor, item, 10,90,        100, -10);
+}
+
+TestSubArray(Uint8Array, 0xFF);
+TestSubArray(Int8Array, -0x7F);
+TestSubArray(Uint16Array, 0xFFFF);
+TestSubArray(Int16Array, -0x7FFF);
+TestSubArray(Uint32Array, 0xFFFFFFFF);
+TestSubArray(Int32Array, -0x7FFFFFFF);
+TestSubArray(Float32Array, 0.5);
+TestSubArray(Float64Array, 0.5);
+TestSubArray(Uint8ClampedArray, 0xFF);
+
+function TestTypedArrayOutOfRange(constructor, value, result) {
+  var a = new constructor(1);
+  a[0] = value;
+  assertSame(result, a[0]);
+}
+
+TestTypedArrayOutOfRange(Uint8Array, 0x1FA, 0xFA);
+TestTypedArrayOutOfRange(Uint8Array, -1, 0xFF);
+
+TestTypedArrayOutOfRange(Int8Array, 0x1FA, 0x7A - 0x80);
+
+TestTypedArrayOutOfRange(Uint16Array, 0x1FFFA, 0xFFFA);
+TestTypedArrayOutOfRange(Uint16Array, -1, 0xFFFF);
+TestTypedArrayOutOfRange(Int16Array, 0x1FFFA, 0x7FFA - 0x8000);
+
+TestTypedArrayOutOfRange(Uint32Array, 0x1FFFFFFFA, 0xFFFFFFFA);
+TestTypedArrayOutOfRange(Uint32Array, -1, 0xFFFFFFFF);
+TestTypedArrayOutOfRange(Int32Array, 0x1FFFFFFFA, 0x7FFFFFFA - 0x80000000);
+
+TestTypedArrayOutOfRange(Uint8ClampedArray, 0x1FA, 0xFF);
+TestTypedArrayOutOfRange(Uint8ClampedArray, -1, 0);
+
+var typedArrayConstructors = [
+  Uint8Array,
+  Int8Array,
+  Uint16Array,
+  Int16Array,
+  Uint32Array,
+  Int32Array,
+  Uint8ClampedArray,
+  Float32Array,
+  Float64Array];
+
+function TestPropertyTypeChecks(constructor) {
+  var a = new constructor();
+  function CheckProperty(name) {
+    var d = Object.getOwnPropertyDescriptor(constructor.prototype, name);
+    var o = {}
+    assertThrows(function() {d.get.call(o);}, TypeError);
+    d.get.call(a); // shouldn't throw
+    for (var i = 0 ; i < typedArrayConstructors.length; i++) {
+      d.get.call(new typedArrayConstructors[i](10));
+    }
+  }
+
+  CheckProperty("buffer");
+  CheckProperty("byteOffset");
+  CheckProperty("byteLength");
+  CheckProperty("length");
+}
+
+for(i = 0; i < typedArrayConstructors.length; i++) {
+  TestPropertyTypeChecks(typedArrayConstructors[i]);
+}
+
+
+function TestTypedArraySet() {
+  // Test array.set in different combinations.
+
+  function assertArrayPrefix(expected, array) {
+    for (var i = 0; i < expected.length; ++i) {
+      assertEquals(expected[i], array[i]);
+    }
+  }
+
+  var a11 = new Int16Array([1, 2, 3, 4, 0, -1])
+  var a12 = new Uint16Array(15)
+  a12.set(a11, 3)
+  assertArrayPrefix([0, 0, 0, 1, 2, 3, 4, 0, 0xffff, 0, 0], a12)
+  assertThrows(function(){ a11.set(a12) })
+
+  var a21 = [1, undefined, 10, NaN, 0, -1, {valueOf: function() {return 3}}]
+  var a22 = new Int32Array(12)
+  a22.set(a21, 2)
+  assertArrayPrefix([0, 0, 1, 0, 10, 0, 0, -1, 3, 0], a22)
+
+  var a31 = new Float32Array([2, 4, 6, 8, 11, NaN, 1/0, -3])
+  var a32 = a31.subarray(2, 6)
+  a31.set(a32, 4)
+  assertArrayPrefix([2, 4, 6, 8, 6, 8, 11, NaN], a31)
+  assertArrayPrefix([6, 8, 6, 8], a32)
+
+  var a4 = new Uint8ClampedArray([3,2,5,6])
+  a4.set(a4)
+  assertArrayPrefix([3, 2, 5, 6], a4)
+
+  // Cases with overlapping backing store but different element sizes.
+  var b = new ArrayBuffer(4)
+  var a5 = new Int16Array(b)
+  var a50 = new Int8Array(b)
+  var a51 = new Int8Array(b, 0, 2)
+  var a52 = new Int8Array(b, 1, 2)
+  var a53 = new Int8Array(b, 2, 2)
+
+  a5.set([0x5050, 0x0a0a])
+  assertArrayPrefix([0x50, 0x50, 0x0a, 0x0a], a50)
+  assertArrayPrefix([0x50, 0x50], a51)
+  assertArrayPrefix([0x50, 0x0a], a52)
+  assertArrayPrefix([0x0a, 0x0a], a53)
+
+  a50.set([0x50, 0x50, 0x0a, 0x0a])
+  a51.set(a5)
+  assertArrayPrefix([0x50, 0x0a, 0x0a, 0x0a], a50)
+
+  a50.set([0x50, 0x50, 0x0a, 0x0a])
+  a52.set(a5)
+  assertArrayPrefix([0x50, 0x50, 0x0a, 0x0a], a50)
+
+  a50.set([0x50, 0x50, 0x0a, 0x0a])
+  a53.set(a5)
+  assertArrayPrefix([0x50, 0x50, 0x50, 0x0a], a50)
+
+  a50.set([0x50, 0x51, 0x0a, 0x0b])
+  a5.set(a51)
+  assertArrayPrefix([0x0050, 0x0051], a5)
+
+  a50.set([0x50, 0x51, 0x0a, 0x0b])
+  a5.set(a52)
+  assertArrayPrefix([0x0051, 0x000a], a5)
+
+  a50.set([0x50, 0x51, 0x0a, 0x0b])
+  a5.set(a53)
+  assertArrayPrefix([0x000a, 0x000b], a5)
+
+  // Mixed types of same size.
+  var a61 = new Float32Array([1.2, 12.3])
+  var a62 = new Int32Array(2)
+  a62.set(a61)
+  assertArrayPrefix([1, 12], a62)
+  a61.set(a62)
+  assertArrayPrefix([1, 12], a61)
+
+  // Invalid source
+  var a = new Uint16Array(50);
+  assertThrows(function() { a.set(0) }, TypeError);
+  assertThrows(function() { a.set({}) }, TypeError);
+  assertThrows(function() { a.set.call({}) }, TypeError);
+  assertThrows(function() { a.set.call([]) }, TypeError);
+}
+
+TestTypedArraySet();
 
 // General tests for properties
 
@@ -210,9 +471,10 @@ function TestEnumerable(func, obj) {
   if (obj)
     assertArrayEquals([], props(obj));
 }
-TestEnumerable(__ArrayBuffer, new __ArrayBuffer());
-TestEnumerable(__Uint8Array);
-
+TestEnumerable(ArrayBuffer, new ArrayBuffer());
+for(i = 0; i < typedArrayConstructors.length; i++) {
+  TestEnumerable(typedArrayConstructors[i]);
+}
 
 // Test arbitrary properties on ArrayBuffer
 function TestArbitrary(m) {
@@ -225,7 +487,12 @@ function TestArbitrary(m) {
     TestProperty(m, 'foo' + i, 'bar' + i);
   }
 }
-TestArbitrary(new __ArrayBuffer(256));
+TestArbitrary(new ArrayBuffer(256));
+for(i = 0; i < typedArrayConstructors.length; i++) {
+  TestArbitrary(new typedArrayConstructors[i](10));
+}
+
+
 
 // Test direct constructor call
-assertTrue(__ArrayBuffer() instanceof __ArrayBuffer);
+assertTrue(ArrayBuffer() instanceof ArrayBuffer);
diff --git a/deps/v8/test/mjsunit/lea-add.js b/deps/v8/test/mjsunit/lea-add.js
new file mode 100644 (file)
index 0000000..28a1494
--- /dev/null
@@ -0,0 +1,84 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function a() {
+  var sum = 0;
+  for (var i = 0; i < 500; ++i) {
+    sum = (i + sum) | 0;
+  }
+  return sum;
+}
+
+function b() {
+  var sum = 0;
+  for (var i = -500; i < 0; ++i) {
+    sum = (i + sum) | 0;
+  }
+  return sum;
+}
+
+function c() {
+  var sum = 0;
+  for (var i = 0; i < 500; ++i) {
+    sum += (i + -0x7fffffff) | 0;
+  }
+  return sum;
+}
+
+function d() {
+  var sum = 0;
+  for (var i = -501; i < 0; ++i) {
+    sum += (i + 501) | 0;
+  }
+  return sum;
+}
+
+a();
+a();
+%OptimizeFunctionOnNextCall(a);
+assertEquals(124750, a());
+assertEquals(124750, a());
+
+b();
+b();
+%OptimizeFunctionOnNextCall(b);
+assertEquals(-125250, b());
+assertEquals(-125250, b());
+
+c();
+c();
+%OptimizeFunctionOnNextCall(c);
+assertEquals(-1073741698750, c());
+assertEquals(-1073741698750, c());
+
+d();
+d();
+%OptimizeFunctionOnNextCall(d);
+assertEquals(125250, d());
+assertEquals(125250, d());
diff --git a/deps/v8/test/mjsunit/regress/regress-235311.js b/deps/v8/test/mjsunit/regress/regress-235311.js
new file mode 100644 (file)
index 0000000..3007147
--- /dev/null
@@ -0,0 +1,32 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var new_space_string = "";
+for (var i = 0; i < 12800; ++i) {
+  new_space_string +=
+      String.fromCharCode(Math.random() * 26 + (4294967295) | 0);
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-2671-1.js b/deps/v8/test/mjsunit/regress/regress-2671-1.js
new file mode 100644 (file)
index 0000000..042a501
--- /dev/null
@@ -0,0 +1,45 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var y;
+function f() {
+  var a = [];
+  a[20] = 0;
+  y = 3;
+  var i = 7 * (y + -0);
+  a[i]++;
+  assertTrue(isNaN(a[i]));
+}
+
+f();
+f();
+f();
+%OptimizeFunctionOnNextCall(f);
+f();
+
diff --git a/deps/v8/test/mjsunit/regress/regress-2671.js b/deps/v8/test/mjsunit/regress/regress-2671.js
new file mode 100644 (file)
index 0000000..8da1b8f
--- /dev/null
@@ -0,0 +1,45 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var y;
+function f() {
+  var a = [];
+  a[20] = 0;
+  y = 3;
+  var i = 7 * (y + -0);
+  a[i] = 1/y;
+  assertFalse(isNaN(a[i]));
+}
+
+f();
+f();
+f();
+%OptimizeFunctionOnNextCall(f);
+f();
+
diff --git a/deps/v8/test/mjsunit/track-fields.js b/deps/v8/test/mjsunit/track-fields.js
new file mode 100644 (file)
index 0000000..bcf37ae
--- /dev/null
@@ -0,0 +1,263 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --track-fields --track-double-fields --allow-natives-syntax
+
+// Test transitions caused by changes to field representations.
+
+function create_smi_object() {
+  var o = {};
+  o.x = 1;
+  o.y = 2;
+  o.z = 3;
+  return o;
+}
+
+var o1 = create_smi_object();
+var o2 = create_smi_object();
+
+// o1, o2 are smi, smi, smi
+assertTrue(%HaveSameMap(o1, o2));
+o1.y = 1.3;
+// o1 is smi, double, smi
+assertFalse(%HaveSameMap(o1, o2));
+o2.y = 1.5;
+// o2 is smi, double, smi
+assertTrue(%HaveSameMap(o1, o2));
+
+// o3 is initialized as smi, double, smi
+var o3 = create_smi_object();
+assertTrue(%HaveSameMap(o1, o3));
+
+function set_large(o, v) {
+  o.x01 = v; o.x02 = v; o.x03 = v; o.x04 = v; o.x05 = v; o.x06 = v; o.x07 = v;
+  o.x08 = v; o.x09 = v; o.x10 = v; o.x11 = v; o.x12 = v; o.x13 = v; o.x14 = v;
+  o.x15 = v; o.x16 = v; o.x17 = v; o.x18 = v; o.x19 = v; o.x20 = v; o.x21 = v;
+  o.x22 = v; o.x23 = v; o.x24 = v; o.x25 = v; o.x26 = v; o.x27 = v; o.x28 = v;
+  o.x29 = v; o.x30 = v; o.x31 = v; o.x32 = v; o.x33 = v; o.x34 = v; o.x35 = v;
+  o.x36 = v; o.x37 = v; o.x38 = v; o.x39 = v; o.x40 = v; o.x41 = v; o.x42 = v;
+  o.y01 = v; o.y02 = v; o.y03 = v; o.y04 = v; o.y05 = v; o.y06 = v; o.y07 = v;
+  o.y08 = v; o.y09 = v; o.y10 = v; o.y11 = v; o.y12 = v; o.y13 = v; o.y14 = v;
+  o.y15 = v; o.y16 = v; o.y17 = v; o.y18 = v; o.y19 = v; o.y20 = v; o.y21 = v;
+}
+
+// Check that large object migrations work.
+var o4 = {};
+// All smi.
+set_large(o4, 0);
+assertTrue(%HasFastProperties(o4));
+// All double.
+set_large(o4, 1.5);
+// o5 follows the generalized transitions and gets double fields immediately.
+var o5 = {};
+set_large(o5, 0);
+assertTrue(%HaveSameMap(o4, o5));
+
+function create_smi_object2() {
+  var o = {};
+  o.a = 1;
+  o.b = 2;
+  o.c = 3;
+  return o;
+}
+
+// All smi.
+var o6 = create_smi_object2();
+var o7 = create_smi_object2();
+
+assertTrue(%HaveSameMap(o6, o7));
+// Smi, double, smi.
+o6.b = 1.5;
+assertFalse(%HaveSameMap(o6, o7));
+// Smi, double, object.
+o7.c = {};
+assertFalse(%HaveSameMap(o6, o7));
+// Smi, double, object.
+o6.c = {};
+assertTrue(%HaveSameMap(o6, o7));
+
+function poly_load(o, b) {
+  var v = o.field;
+  if (b) {
+    return v + 10;
+  }
+  return o;
+}
+
+var of1 = {a:0};
+of1.field = {};
+var of2 = {b:0};
+of2.field = 10;
+
+poly_load(of1, false);
+poly_load(of1, false);
+poly_load(of2, true);
+%OptimizeFunctionOnNextCall(poly_load);
+assertEquals("[object Object]10", poly_load(of1, true));
+
+// Ensure small object literals with doubles do not share double storage.
+function object_literal() { return {"a":1.5}; }
+var o8 = object_literal();
+var o9 = object_literal();
+o8.a = 4.6;
+assertEquals(1.5, o9.a);
+
+// Ensure double storage is not leaked in the case of polymorphic loads.
+function load_poly(o) {
+  return o.a;
+}
+
+var o10 = { "a": 1.6 };
+var o11 = { "b": 1, "a": 1.7 };
+load_poly(o10);
+load_poly(o10);
+load_poly(o11);
+%OptimizeFunctionOnNextCall(load_poly);
+var val = load_poly(o10);
+o10.a = 19.5;
+assertFalse(o10.a == val);
+
+// Ensure polymorphic loads only go monomorphic when the representations are
+// compatible.
+
+// Check polymorphic load from double + object fields.
+function load_mono(o) {
+  return o.a1;
+}
+
+var object = {"x": 1};
+var o10 = { "a1": 1.6 };
+var o11 = { "a1": object, "b": 1 };
+load_mono(o10);
+load_mono(o10);
+load_mono(o11);
+%OptimizeFunctionOnNextCall(load_mono);
+assertEquals(object, load_mono(o11));
+
+// Check polymorphic load from smi + object fields.
+function load_mono2(o) {
+  return o.a2;
+}
+
+var o12 = { "a2": 5 };
+var o13 = { "a2": object, "b": 1 };
+load_mono2(o12);
+load_mono2(o12);
+load_mono2(o13);
+%OptimizeFunctionOnNextCall(load_mono2);
+assertEquals(object, load_mono2(o13));
+
+// Check polymorphic load from double + double fields.
+function load_mono3(o) {
+  return o.a3;
+}
+
+var o14 = { "a3": 1.6 };
+var o15 = { "a3": 1.8, "b": 1 };
+load_mono3(o14);
+load_mono3(o14);
+load_mono3(o15);
+%OptimizeFunctionOnNextCall(load_mono3);
+assertEquals(1.6, load_mono3(o14));
+assertEquals(1.8, load_mono3(o15));
+
+// Check that JSON parsing respects existing representations.
+var o16 = JSON.parse('{"a":1.5}');
+var o17 = JSON.parse('{"a":100}');
+assertTrue(%HaveSameMap(o16, o17));
+var o17_a = o17.a;
+assertEquals(100, o17_a);
+o17.a = 200;
+assertEquals(100, o17_a);
+assertEquals(200, o17.a);
+
+// Ensure that normalized (dictionary mode) objects ignore field representations.
+var o18 = {};
+o18.field1 = 100;
+o18.field2 = 1;
+o18.to_delete = 100;
+
+var o19 = {};
+o19.field1 = 100;
+o19.field2 = 1.6;
+o19.to_delete = 100;
+
+assertFalse(%HaveSameMap(o18, o19));
+
+delete o18.to_delete;
+delete o19.to_delete;
+
+assertTrue(%HaveSameMap(o18, o19));
+assertEquals(1, o18.field2);
+assertEquals(1.6, o19.field2);
+
+// Test megamorphic keyed stub behaviour in combination with representations.
+var some_object20 = {"a":1};
+var o20 = {};
+o20.smi = 1;
+o20.dbl = 1.5;
+o20.obj = some_object20;
+
+function keyed_load(o, k) {
+  return o[k];
+}
+
+function keyed_store(o, k, v) {
+  return o[k] = v;
+}
+
+var smi20 = keyed_load(o20, "smi");
+var dbl20 = keyed_load(o20, "dbl");
+var obj20 = keyed_load(o20, "obj");
+keyed_load(o20, "smi");
+keyed_load(o20, "dbl");
+keyed_load(o20, "obj");
+keyed_load(o20, "smi");
+keyed_load(o20, "dbl");
+keyed_load(o20, "obj");
+
+assertEquals(1, smi20);
+assertEquals(1.5, dbl20);
+assertEquals(some_object20, obj20);
+
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+
+assertEquals(1, smi20);
+assertEquals(1.5, dbl20);
+assertEquals(some_object20, obj20);
+
+assertEquals(100, o20.smi);
+assertEquals(100, o20.dbl);
+assertEquals(100, o20.obj);
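Taken together, the track-fields.js checks rely on two observable properties: field representations generalize (smi widens to double, and both widen to tagged) without ever changing the values a program sees, and loading a double field yields an independent copy rather than an alias of the field's storage. A minimal plain-JavaScript sketch of both properties, with illustrative names and throw checks standing in for the mjsunit asserts and the --allow-natives-syntax map inspection:

    // 1) Representation transitions are invisible at the language level.
    var o = {};
    o.a = 1;               // starts as a small-integer (smi) field
    o.a = 1.5;             // generalizes to a double field
    o.a = {tag: "obj"};    // generalizes to a tagged field
    if (o.a.tag !== "obj") throw new Error("value lost across transitions");

    // 2) Loads of double fields return copies, not aliases.
    var p = {d: 1.5};
    var snapshot = p.d;
    p.d = 100;
    if (snapshot !== 1.5 || p.d !== 100) throw new Error("double field aliased");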
index f3ae8a2..1be5cb8 100755 (executable)
@@ -2036,17 +2036,24 @@ class InspectionShell(cmd.Cmd):
 
   def do_u(self, args):
     """
-     u 0x<address> 0x<size>
-     Unassemble memory in the region [address, address + size)
+     Unassemble memory in the region [address, address + size). If the
+     size is not specified, a default value of 32 bytes is used.
+     Synopsis: u 0x<address> [0x<size>]
     """
     args = args.split(' ')
     start = int(args[0], 16)
-    size = int(args[1], 16)
+    size = int(args[1], 16) if len(args) > 1 else 0x20
+    if not self.reader.IsValidAddress(start):
+      print "Address is not contained within the minidump!"
+      return
     lines = self.reader.GetDisasmLines(start, size)
     for line in lines:
       print FormatDisasmLine(start, self.heap, line)
     print
 
+  def do_EOF(self, none):
+    raise KeyboardInterrupt
+
 EIP_PROXIMITY = 64
 
 CONTEXT_FOR_ARCH = {
@@ -2131,7 +2138,10 @@ def AnalyzeMinidump(options, minidump_name):
     FullDump(reader, heap)
 
   if options.shell:
-    InspectionShell(reader, heap).cmdloop("type help to get help")
+    try:
+      InspectionShell(reader, heap).cmdloop("type help to get help")
+    except KeyboardInterrupt:
+      print "Kthxbye."
   else:
     if reader.exception is not None:
       print "Annotated stack (from exception.esp to bottom):"
index 7070d97..fc6296a 100644 (file)
               '../../src/proxy.js',
               '../../src/collection.js',
               '../../src/object-observe.js',
+              '../../src/arraybuffer.js',
               '../../src/typedarray.js',
               '../../src/generator.js'
             ],
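The build change above registers the new src/arraybuffer.js natives file alongside typedarray.js and the other JavaScript library sources built into V8. At the language level this file backs the ArrayBuffer constructor; a brief usage sketch of the API it is expected to provide (standard ECMAScript behaviour, not taken from this diff):

    // Basic ArrayBuffer usage backed by the natives file registered above.
    var buf = new ArrayBuffer(16);    // 16 bytes of zero-initialized storage
    var head = buf.slice(0, 8);       // copy of the first half
    if (buf.byteLength !== 16 || head.byteLength !== 8) {
      throw new Error("unexpected ArrayBuffer behaviour");
    }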