Allocation Info Tracking, continued.
author mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 1 Mar 2013 16:06:34 +0000 (16:06 +0000)
committer mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 1 Mar 2013 16:06:34 +0000 (16:06 +0000)
Addresses missing cases for array literals.
Adds support for "new Array()" call sites. This isn't complete yet; I still have to run with --noinline_new.
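
For context, a minimal JavaScript sketch (not part of this patch; variable names are illustrative) of the three Array constructor call shapes the new stubs dispatch on, and the kind of ElementsKind feedback the per-call-site type info cell is meant to capture:

    // Illustrative only; assumes --optimize_constructed_arrays is enabled.
    var empty  = new Array();         // zero arguments
    var sized  = new Array(100);      // one argument, taken as the length
    var filled = new Array(1, 2, 3);  // N arguments, taken as the elements

    // The type info cell at each call site is expected to record the observed
    // ElementsKind, so a later transition (e.g. storing a double) can be
    // anticipated the next time the site allocates an array.
    filled[0] = 0.5;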

BUG=

Review URL: https://codereview.chromium.org/11818021

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13790 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

46 files changed:
src/arm/builtins-arm.cc
src/arm/code-stubs-arm.cc
src/arm/lithium-arm.cc
src/arm/lithium-arm.h
src/arm/lithium-codegen-arm.cc
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/ast.cc
src/ast.h
src/bootstrapper.cc
src/builtins.cc
src/builtins.h
src/code-stubs-hydrogen.cc
src/code-stubs.h
src/elements.cc
src/elements.h
src/flag-definitions.h
src/heap-inl.h
src/heap.cc
src/heap.h
src/hydrogen-instructions.h
src/hydrogen.cc
src/ia32/builtins-ia32.cc
src/ia32/code-stubs-ia32.cc
src/ia32/lithium-codegen-ia32.cc
src/ia32/lithium-ia32.cc
src/ia32/lithium-ia32.h
src/ia32/macro-assembler-ia32.cc
src/ia32/macro-assembler-ia32.h
src/objects-inl.h
src/objects-printer.cc
src/objects.cc
src/objects.h
src/runtime.cc
src/type-info.cc
src/type-info.h
src/x64/builtins-x64.cc
src/x64/code-stubs-x64.cc
src/x64/lithium-codegen-x64.cc
src/x64/lithium-x64.cc
src/x64/lithium-x64.h
src/x64/macro-assembler-x64.cc
src/x64/macro-assembler-x64.h
test/mjsunit/allocation-site-info.js
test/mjsunit/array-natives-elements.js
test/mjsunit/elements-transition.js

index 6a5f61fd1684ea3affda9d951eff675bd157c2d5..ec27bd6665c27db8ed62f480829408fc02f84147 100644 (file)
@@ -542,31 +542,63 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0     : number of arguments
   //  -- r1     : constructor function
+  //  -- r2     : type info cell
   //  -- lr     : return address
   //  -- sp[...]: constructor arguments
   // -----------------------------------
-  Label generic_constructor;
 
   if (FLAG_debug_code) {
     // The array construct code is only set for the builtin and internal
     // Array functions which always have a map.
     // Initial map for the builtin Array function should be a map.
-    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
-    __ tst(r2, Operand(kSmiTagMask));
+    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+    __ tst(r3, Operand(kSmiTagMask));
     __ Assert(ne, "Unexpected initial map for Array function");
-    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+    __ CompareObjectType(r3, r3, r4, MAP_TYPE);
     __ Assert(eq, "Unexpected initial map for Array function");
-  }
 
-  // Run the native code for the Array function called as a constructor.
-  ArrayNativeCode(masm, &generic_constructor);
+    // We should either have undefined in r2 or a valid JSGlobalPropertyCell.
+    Label okay_here;
+    Handle<Object> undefined_sentinel(
+        masm->isolate()->heap()->undefined_value(), masm->isolate());
+    Handle<Map> global_property_cell_map(
+        masm->isolate()->heap()->global_property_cell_map());
+    __ cmp(r2, Operand(undefined_sentinel));
+    __ b(eq, &okay_here);
+    __ ldr(r3, FieldMemOperand(r2, 0));
+    __ cmp(r3, Operand(global_property_cell_map));
+    __ Assert(eq, "Expected property cell in register ebx");
+    __ bind(&okay_here);
+  }
 
-  // Jump to the generic construct code in case the specialized code cannot
-  // handle the construction.
-  __ bind(&generic_constructor);
-  Handle<Code> generic_construct_stub =
-      masm->isolate()->builtins()->JSConstructStubGeneric();
-  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+  if (FLAG_optimize_constructed_arrays) {
+    Label not_zero_case, not_one_case;
+    __ tst(r0, r0);
+    __ b(ne, &not_zero_case);
+    ArrayNoArgumentConstructorStub no_argument_stub;
+    __ TailCallStub(&no_argument_stub);
+
+    __ bind(&not_zero_case);
+    __ cmp(r0, Operand(1));
+    __ b(gt, &not_one_case);
+    ArraySingleArgumentConstructorStub single_argument_stub;
+    __ TailCallStub(&single_argument_stub);
+
+    __ bind(&not_one_case);
+    ArrayNArgumentsConstructorStub n_argument_stub;
+    __ TailCallStub(&n_argument_stub);
+  } else {
+    Label generic_constructor;
+    // Run the native code for the Array function called as a constructor.
+    ArrayNativeCode(masm, &generic_constructor);
+
+    // Jump to the generic construct code in case the specialized code cannot
+    // handle the construction.
+    __ bind(&generic_constructor);
+    Handle<Code> generic_construct_stub =
+        masm->isolate()->builtins()->JSConstructStubGeneric();
+    __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+  }
 }
 
 
@@ -1145,6 +1177,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Invoke the code and pass argc as r0.
     __ mov(r0, Operand(r3));
     if (is_construct) {
+      // No type feedback cell is available
+      Handle<Object> undefined_sentinel(
+          masm->isolate()->heap()->undefined_value(), masm->isolate());
+      __ mov(r2, Operand(undefined_sentinel));
       CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
     } else {
index 1ee669234896eee209036fbba23169a4db9a5371..e016a8c92e34f88fd9e1614486e0f46f57ea3d67 100644 (file)
@@ -73,6 +73,44 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor(
 }
 
 
+static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  // register state
+  // r1 -- constructor function
+  // r2 -- type info cell with elements kind
+  // r0 -- number of arguments to the constructor function
+  static Register registers[] = { r1, r2 };
+  descriptor->register_param_count_ = 2;
+  // stack param count needs (constructor pointer, and single argument)
+  descriptor->stack_parameter_count_ = &r0;
+  descriptor->register_params_ = registers;
+  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(ArrayConstructor_StubFailure);
+}
+
+
+void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
 #define __ ACCESS_MASM(masm)
 
 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
@@ -5546,12 +5584,13 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
 }
 
 
-static void GenerateRecordCallTarget(MacroAssembler* masm) {
+static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) {
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // r1 : the function to call
   // r2 : cache cell for call target
+  ASSERT(!FLAG_optimize_constructed_arrays);
   Label done;
 
   ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
@@ -5585,6 +5624,82 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+  // Cache the called function in a global property cell.  Cache states
+  // are uninitialized, monomorphic (indicated by a JSFunction), and
+  // megamorphic.
+  // r1 : the function to call
+  // r2 : cache cell for call target
+  ASSERT(FLAG_optimize_constructed_arrays);
+  Label initialize, done, miss, megamorphic, not_array_function;
+
+  ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+            masm->isolate()->heap()->undefined_value());
+  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
+            masm->isolate()->heap()->the_hole_value());
+
+  // Load the cache state into r3.
+  __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+
+  // A monomorphic cache hit or an already megamorphic state: invoke the
+  // function without changing the state.
+  __ cmp(r3, r1);
+  __ b(eq, &done);
+  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
+  __ b(eq, &done);
+
+  // Special handling of the Array() function, which caches not only the
+  // monomorphic Array function but the initial ElementsKind with special
+  // sentinels
+  Handle<Object> terminal_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
+                                                  LAST_FAST_ELEMENTS_KIND);
+  __ cmp(r3, Operand(terminal_kind_sentinel));
+  __ b(ne, &miss);
+  // Make sure the function is the Array() function
+  __ LoadArrayFunction(r3);
+  __ cmp(r1, r3);
+  __ b(ne, &megamorphic);
+  __ jmp(&done);
+
+  __ bind(&miss);
+
+  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
+  // megamorphic.
+  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
+  __ b(eq, &initialize);
+  // MegamorphicSentinel is an immortal immovable object (undefined) so no
+  // write-barrier is needed.
+  __ bind(&megamorphic);
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  __ str(ip, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+
+  // An uninitialized cache is patched with the function or sentinel to
+  // indicate the ElementsKind if function is the Array constructor.
+  __ bind(&initialize);
+  // Make sure the function is the Array() function
+  __ LoadArrayFunction(r3);
+  __ cmp(r1, r3);
+  __ b(ne, &not_array_function);
+
+  // The target function is the Array constructor, install a sentinel value in
+  // the constructor's type info cell that will track the initial ElementsKind
+  // that should be used for the array when it's constructed.
+  Handle<Object> initial_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
+          GetInitialFastElementsKind());
+  __ mov(r3, Operand(initial_kind_sentinel));
+  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+  __ b(&done);
+
+  __ bind(&not_array_function);
+  __ str(r1, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+  // No need for a write barrier here - cells are rescanned.
+
+  __ bind(&done);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // r1 : the function to call
   // r2 : cache cell for call target
@@ -5617,7 +5732,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ b(ne, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Fast-case: Invoke the function now.
@@ -5692,13 +5811,19 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ b(ne, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Jump to the function-specific construct stub.
-  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
-  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
+  Register jmp_reg = FLAG_optimize_constructed_arrays ? r3 : r2;
+  __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(jmp_reg, FieldMemOperand(jmp_reg,
+                                  SharedFunctionInfo::kConstructStubOffset));
+  __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
 
   // r0: number of arguments
   // r1: called object
index bab398cae313d797e27ccb580d16eb1fd4c82128..d9575946822025f803f904bf9adaecbf099f3105 100644 (file)
@@ -355,6 +355,17 @@ void LCallNew::PrintDataTo(StringStream* stream) {
 }
 
 
+void LCallNewArray::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  constructor()->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+  ASSERT(hydrogen()->property_cell()->value()->IsSmi());
+  ElementsKind kind = static_cast<ElementsKind>(
+      Smi::cast(hydrogen()->property_cell()->value())->value());
+  stream->Add(" (%s) ", ElementsKindToString(kind));
+}
+
+
 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
   arguments()->PrintTo(stream);
   stream->Add(" length ");
@@ -1139,6 +1150,14 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
 }
 
 
+LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
+  LOperand* constructor = UseFixed(instr->constructor(), r1);
+  argument_count_ -= instr->argument_count();
+  LCallNewArray* result = new(zone()) LCallNewArray(constructor);
+  return MarkAsCall(DefineFixed(result, r0), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
   LOperand* function = UseFixed(instr->function(), r1);
   argument_count_ -= instr->argument_count();
index e81734ea855d973fb967a2884b95eda0f88bcd1b..ecc77a85563f3ba15f24571b60b54e43c677c269 100644 (file)
@@ -68,6 +68,7 @@ class LCodeGen;
   V(CallKnownGlobal)                            \
   V(CallNamed)                                  \
   V(CallNew)                                    \
+  V(CallNewArray)                               \
   V(CallRuntime)                                \
   V(CallStub)                                   \
   V(CheckFunction)                              \
@@ -1817,6 +1818,23 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
 };
 
 
+class LCallNewArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LCallNewArray(LOperand* constructor) {
+    inputs_[0] = constructor;
+  }
+
+  LOperand* constructor() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(CallNewArray, "call-new-array")
+  DECLARE_HYDROGEN_ACCESSOR(CallNewArray)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
  public:
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
index 943886c7dc3eec5fbc46cd9c55f6bf84efb7014e..8887391935452741fa25efd4600a731d7c6e2400 100644 (file)
@@ -4295,12 +4295,32 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->constructor()).is(r1));
   ASSERT(ToRegister(instr->result()).is(r0));
 
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ mov(r0, Operand(instr->arity()));
+  if (FLAG_optimize_constructed_arrays) {
+    // No cell in r2 for construct type feedback in optimized code
+    Handle<Object> undefined_value(isolate()->heap()->undefined_value(),
+                                   isolate());
+    __ mov(r2, Operand(undefined_value));
+  }
+  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
+void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
+  ASSERT(ToRegister(instr->constructor()).is(r1));
+  ASSERT(ToRegister(instr->result()).is(r0));
+  ASSERT(FLAG_optimize_constructed_arrays);
+
+  __ mov(r0, Operand(instr->arity()));
+  __ mov(r2, Operand(instr->hydrogen()->property_cell()));
+  Handle<Code> array_construct_code =
+      isolate()->builtins()->ArrayConstructCode();
+
+  CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+}
+
+
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
   CallRuntime(instr->function(), instr->arity(), instr);
 }
index bc3c7864209a46cd2444301a14eff384d9ae962d..326f555a0f4da95374889ac5ea8fb754ad3f8bd3 100644 (file)
@@ -3076,6 +3076,19 @@ void MacroAssembler::LoadGlobalFunction(int index, Register function) {
 }
 
 
+void MacroAssembler::LoadArrayFunction(Register function) {
+  // Load the global or builtins object from the current context.
+  ldr(function,
+      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  // Load the global context from the global or builtins object.
+  ldr(function,
+      FieldMemOperand(function, GlobalObject::kGlobalContextOffset));
+  // Load the array function from the native context.
+  ldr(function,
+      MemOperand(function, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+}
+
+
 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                   Register map,
                                                   Register scratch) {
index d26eda7f8578cd7068b02498a815f3d7efeafa4a..0826b7d7f05362ae2fd51ff8402b5f538a8897d6 100644 (file)
@@ -514,6 +514,7 @@ class MacroAssembler: public Assembler {
                            bool can_have_holes);
 
   void LoadGlobalFunction(int index, Register function);
+  void LoadArrayFunction(Register function);
 
   // Load the initial map from the global function. The registers
   // function and map can be the same, function is then overwritten.
index d220f5560caea2f3d34f73552f14b42cbb475110..02c815cd77c7b6290e3e24d695edb221468dfa94 100644 (file)
@@ -605,6 +605,7 @@ void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
   is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
   if (is_monomorphic_) {
     target_ = oracle->GetCallNewTarget(this);
+    elements_kind_ = oracle->GetCallNewElementsKind(this);
   }
 }
 
index 740aa442936160709f7e23586ee2a910b6827aaa..66b4dd46c46fa23d577edb1ef2197ceafcf7d225 100644 (file)
--- a/src/ast.h
+++ b/src/ast.h
@@ -1595,6 +1595,7 @@ class CallNew: public Expression {
   Handle<JSFunction> target() { return target_; }
 
   BailoutId ReturnId() const { return return_id_; }
+  ElementsKind elements_kind() const { return elements_kind_; }
 
  protected:
   CallNew(Isolate* isolate,
@@ -1606,7 +1607,8 @@ class CallNew: public Expression {
         arguments_(arguments),
         pos_(pos),
         is_monomorphic_(false),
-        return_id_(GetNextId(isolate)) { }
+        return_id_(GetNextId(isolate)),
+        elements_kind_(GetInitialFastElementsKind()) { }
 
  private:
   Expression* expression_;
@@ -1617,6 +1619,7 @@ class CallNew: public Expression {
   Handle<JSFunction> target_;
 
   const BailoutId return_id_;
+  ElementsKind elements_kind_;
 };
 
 
index 5417d9926b03c6dc86b614e434a2453c40409ccc..fcb1de9f0aaaa99b9e87a2d3b4ddabf764143f85 100644 (file)
@@ -1487,8 +1487,14 @@ Handle<JSFunction> Genesis::InstallInternalArray(
       factory()->NewJSObject(isolate()->object_function(), TENURED);
   SetPrototype(array_function, prototype);
 
+  // TODO(mvstanton): For performance reasons, this code would have to
+  // be changed to successfully run with FLAG_optimize_constructed_arrays.
+  // The next checkin to enable FLAG_optimize_constructed_arrays by
+  // default will address this.
+  CHECK(!FLAG_optimize_constructed_arrays);
   array_function->shared()->set_construct_stub(
       isolate()->builtins()->builtin(Builtins::kArrayConstructCode));
+
   array_function->shared()->DontAdaptArguments();
 
   MaybeObject* maybe_map = array_function->initial_map()->Copy();
@@ -1744,7 +1750,6 @@ bool Genesis::InstallNatives() {
     native_context()->set_opaque_reference_function(*opaque_reference_fun);
   }
 
-
   // InternalArrays should not use Smi-Only array optimizations. There are too
   // many places in the C++ runtime code (e.g. RegEx) that assume that
   // elements in InternalArrays can be set to non-Smi values without going
index 279594e0fe84fa7d13d4d6399382eb24115dcbec..aa6920375b1a015d5a15e9ebea7f62719da599cb 100644 (file)
@@ -186,9 +186,67 @@ BUILTIN(EmptyFunction) {
 }
 
 
+#define CONVERT_ARG_STUB_CALLER_ARGS(name)                           \
+  Arguments* name = reinterpret_cast<Arguments*>(args[0]);
+
+
+RUNTIME_FUNCTION(MaybeObject*, ArrayConstructor_StubFailure) {
+  CONVERT_ARG_STUB_CALLER_ARGS(caller_args);
+  // ASSERT(args.length() == 3);
+  Handle<JSFunction> function = args.at<JSFunction>(1);
+  Handle<Object> type_info = args.at<Object>(2);
+
+  JSArray* array = NULL;
+  bool holey = false;
+  if (caller_args->length() == 1 && (*caller_args)[0]->IsSmi()) {
+    int value = Smi::cast((*caller_args)[0])->value();
+    holey = (value > 0 && value < JSObject::kInitialMaxFastElementArray);
+  }
+
+  ASSERT(function->has_initial_map());
+  ElementsKind kind = function->initial_map()->elements_kind();
+  if (holey) {
+    kind = GetHoleyElementsKind(kind);
+  }
+
+  MaybeObject* maybe_array;
+  if (*type_info != isolate->heap()->undefined_value()) {
+    JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(*type_info);
+    if (cell->value()->IsSmi()) {
+      Smi* smi = Smi::cast(cell->value());
+      ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+      if (holey && !IsFastHoleyElementsKind(to_kind)) {
+        to_kind = GetHoleyElementsKind(to_kind);
+        // Update the allocation site info to reflect the advice alteration.
+        cell->set_value(Smi::FromInt(to_kind));
+      }
+
+      AllocationSiteMode mode = AllocationSiteInfo::GetMode(to_kind);
+      if (mode == TRACK_ALLOCATION_SITE) {
+        maybe_array = isolate->heap()->AllocateEmptyJSArrayWithAllocationSite(
+            kind, type_info);
+      } else {
+        maybe_array = isolate->heap()->AllocateEmptyJSArray(kind);
+      }
+      if (!maybe_array->To(&array)) return maybe_array;
+    }
+  }
+
+  if (array == NULL) {
+    maybe_array = isolate->heap()->AllocateEmptyJSArray(kind);
+    if (!maybe_array->To(&array)) return maybe_array;
+  }
+
+  maybe_array = ArrayConstructInitializeElements(array, caller_args);
+  if (maybe_array->IsFailure()) return maybe_array;
+  return array;
+}
+
+
 static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
                                            Isolate* isolate,
                                            JSFunction* constructor) {
+  ASSERT(args->length() >= 1);
   Heap* heap = isolate->heap();
   isolate->counters()->array_function_runtime()->Increment();
 
@@ -197,8 +255,29 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
     array = JSArray::cast((*args)[0]);
     // Initialize elements and length in case later allocations fail so that the
     // array object is initialized in a valid state.
-    array->set_length(Smi::FromInt(0));
-    array->set_elements(heap->empty_fixed_array());
+    MaybeObject* maybe_array = array->Initialize(0);
+    if (maybe_array->IsFailure()) return maybe_array;
+
+    if (FLAG_optimize_constructed_arrays) {
+      AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(array);
+      ElementsKind to_kind = array->GetElementsKind();
+      if (info != NULL && info->GetElementsKindPayload(&to_kind)) {
+        if (IsMoreGeneralElementsKindTransition(array->GetElementsKind(),
+                                                to_kind)) {
+          // We have advice that we should change the elements kind
+          if (FLAG_trace_track_allocation_sites) {
+            PrintF("AllocationSiteInfo: pre-transitioning array %p(%s->%s)\n",
+                   reinterpret_cast<void*>(array),
+                   ElementsKindToString(array->GetElementsKind()),
+                   ElementsKindToString(to_kind));
+          }
+
+          maybe_array = array->TransitionElementsKind(to_kind);
+          if (maybe_array->IsFailure()) return maybe_array;
+        }
+      }
+    }
+
     if (!FLAG_smi_only_arrays) {
       Context* native_context = isolate->context()->native_context();
       if (array->GetElementsKind() == GetInitialFastElementsKind() &&
@@ -215,97 +294,10 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
     if (!maybe_obj->To(&array)) return maybe_obj;
   }
 
-  // Optimize the case where there is one argument and the argument is a
-  // small smi.
-  if (args->length() == 2) {
-    Object* obj = (*args)[1];
-    if (obj->IsSmi()) {
-      int len = Smi::cast(obj)->value();
-      if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
-        Object* fixed_array;
-        { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
-          if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
-        }
-        ElementsKind elements_kind = array->GetElementsKind();
-        if (!IsFastHoleyElementsKind(elements_kind)) {
-          elements_kind = GetHoleyElementsKind(elements_kind);
-          MaybeObject* maybe_array =
-              array->TransitionElementsKind(elements_kind);
-          if (maybe_array->IsFailure()) return maybe_array;
-        }
-        // We do not use SetContent to skip the unnecessary elements type check.
-        array->set_elements(FixedArray::cast(fixed_array));
-        array->set_length(Smi::cast(obj));
-        return array;
-      }
-    }
-    // Take the argument as the length.
-    { MaybeObject* maybe_obj = array->Initialize(0);
-      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-    }
-    return array->SetElementsLength((*args)[1]);
-  }
-
-  // Optimize the case where there are no parameters passed.
-  if (args->length() == 1) {
-    return array->Initialize(JSArray::kPreallocatedArrayElements);
-  }
-
-  // Set length and elements on the array.
-  int number_of_elements = args->length() - 1;
-  MaybeObject* maybe_object =
-      array->EnsureCanContainElements(args, 1, number_of_elements,
-                                      ALLOW_CONVERTED_DOUBLE_ELEMENTS);
-  if (maybe_object->IsFailure()) return maybe_object;
-
-  // Allocate an appropriately typed elements array.
-  MaybeObject* maybe_elms;
-  ElementsKind elements_kind = array->GetElementsKind();
-  if (IsFastDoubleElementsKind(elements_kind)) {
-    maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
-        number_of_elements);
-  } else {
-    maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
-  }
-  FixedArrayBase* elms;
-  if (!maybe_elms->To(&elms)) return maybe_elms;
-
-  // Fill in the content
-  switch (array->GetElementsKind()) {
-    case FAST_HOLEY_SMI_ELEMENTS:
-    case FAST_SMI_ELEMENTS: {
-      FixedArray* smi_elms = FixedArray::cast(elms);
-      for (int index = 0; index < number_of_elements; index++) {
-        smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
-      }
-      break;
-    }
-    case FAST_HOLEY_ELEMENTS:
-    case FAST_ELEMENTS: {
-      AssertNoAllocation no_gc;
-      WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
-      FixedArray* object_elms = FixedArray::cast(elms);
-      for (int index = 0; index < number_of_elements; index++) {
-        object_elms->set(index, (*args)[index+1], mode);
-      }
-      break;
-    }
-    case FAST_HOLEY_DOUBLE_ELEMENTS:
-    case FAST_DOUBLE_ELEMENTS: {
-      FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
-      for (int index = 0; index < number_of_elements; index++) {
-        double_elms->set(index, (*args)[index+1]->Number());
-      }
-      break;
-    }
-    default:
-      UNREACHABLE();
-      break;
-  }
-
-  array->set_elements(elms);
-  array->set_length(Smi::FromInt(number_of_elements));
-  return array;
+  Arguments adjusted_arguments(args->length() - 1, args->arguments() - 1);
+  ASSERT(adjusted_arguments.length() < 1 ||
+         adjusted_arguments[0] == (*args)[1]);
+  return ArrayConstructInitializeElements(array, &adjusted_arguments);
 }
 
 
index cb0c9c516cef5e7d6c226505fa8ce1c75c482a3a..83b134c6d2e251cf6456a535989b625dcd67b30a 100644 (file)
@@ -272,6 +272,7 @@ enum BuiltinExtraArguments {
   V(APPLY_PREPARE, 1)                    \
   V(APPLY_OVERFLOW, 1)
 
+MaybeObject* ArrayConstructor_StubFailure(Arguments args, Isolate* isolate);
 
 class BuiltinFunctionTable;
 class ObjectVisitor;
index 20abec4fa51f27a9c7b95fdc8db0a71c999bc5f7..491e25550a2ec9cf02678fd6b3f01d0133147c09 100644 (file)
@@ -306,6 +306,33 @@ void CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
 }
 
 
+template <>
+void CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
+  HInstruction* deopt = new(zone()) HSoftDeoptimize();
+  AddInstruction(deopt);
+  current_block()->MarkAsDeoptimizing();
+  HReturn* ret = new(zone()) HReturn(GetParameter(0), context());
+  current_block()->Finish(ret);
+}
+
+
+Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
+  CodeStubGraphBuilder<ArrayNoArgumentConstructorStub> builder(this);
+  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
+  return chunk->Codegen(Code::COMPILED_STUB);
+}
+
+
+template <>
+void CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::BuildCodeStub() {
+  HInstruction* deopt = new(zone()) HSoftDeoptimize();
+  AddInstruction(deopt);
+  current_block()->MarkAsDeoptimizing();
+  HReturn* ret = new(zone()) HReturn(GetParameter(0), context());
+  current_block()->Finish(ret);
+}
+
+
 Handle<Code> TransitionElementsKindStub::GenerateCode() {
   CodeStubGraphBuilder<TransitionElementsKindStub> builder(this);
   LChunk* chunk = OptimizeGraph(builder.CreateGraph());
@@ -313,4 +340,27 @@ Handle<Code> TransitionElementsKindStub::GenerateCode() {
 }
 
 
+Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
+  CodeStubGraphBuilder<ArraySingleArgumentConstructorStub> builder(this);
+  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
+  return chunk->Codegen(Code::COMPILED_STUB);
+}
+
+
+template <>
+void CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
+  HInstruction* deopt = new(zone()) HSoftDeoptimize();
+  AddInstruction(deopt);
+  current_block()->MarkAsDeoptimizing();
+  HReturn* ret = new(zone()) HReturn(GetParameter(0), context());
+  current_block()->Finish(ret);
+}
+
+
+Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
+  CodeStubGraphBuilder<ArrayNArgumentsConstructorStub> builder(this);
+  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
+  return chunk->Codegen(Code::COMPILED_STUB);
+}
+
 } }  // namespace v8::internal
index 9d4e21d15e8f2663695445e7743d9de8586b6cd1..0cb3d423f3740ce66cf767c0d50a4a0bb68a8f9c 100644 (file)
@@ -73,6 +73,9 @@ namespace internal {
   V(CEntry)                              \
   V(JSEntry)                             \
   V(KeyedLoadElement)                    \
+  V(ArrayNoArgumentConstructor)          \
+  V(ArraySingleArgumentConstructor)      \
+  V(ArrayNArgumentsConstructor)          \
   V(KeyedStoreElement)                   \
   V(DebuggerStatement)                   \
   V(StringDictionaryLookup)              \
@@ -1289,6 +1292,63 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
 };
 
 
+class ArrayNoArgumentConstructorStub : public HydrogenCodeStub {
+ public:
+  ArrayNoArgumentConstructorStub() {
+  }
+
+  Major MajorKey() { return ArrayNoArgumentConstructor; }
+  int MinorKey() { return 0; }
+
+  virtual Handle<Code> GenerateCode();
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(ArrayNoArgumentConstructorStub);
+};
+
+
+class ArraySingleArgumentConstructorStub : public HydrogenCodeStub {
+ public:
+  ArraySingleArgumentConstructorStub() {
+  }
+
+  Major MajorKey() { return ArraySingleArgumentConstructor; }
+  int MinorKey() { return 0; }
+
+  virtual Handle<Code> GenerateCode();
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(ArraySingleArgumentConstructorStub);
+};
+
+
+class ArrayNArgumentsConstructorStub : public HydrogenCodeStub {
+ public:
+  ArrayNArgumentsConstructorStub() {
+  }
+
+  Major MajorKey() { return ArrayNArgumentsConstructor; }
+  int MinorKey() { return 0; }
+
+  virtual Handle<Code> GenerateCode();
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(ArrayNArgumentsConstructorStub);
+};
+
+
 class KeyedStoreElementStub : public PlatformCodeStub {
  public:
   KeyedStoreElementStub(bool is_js_array,
index e37ae58c45a3017138ab2f0adf538a9231803b82..9deef60619312c58e03cca5d3f8e9c63aff54dfa 100644 (file)
@@ -27,6 +27,7 @@
 
 #include "v8.h"
 
+#include "arguments.h"
 #include "objects.h"
 #include "elements.h"
 #include "utils.h"
@@ -1973,4 +1974,100 @@ MUST_USE_RESULT MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass,
 }
 
 
+MUST_USE_RESULT MaybeObject* ArrayConstructInitializeElements(
+    JSArray* array, Arguments* args) {
+  Heap* heap = array->GetIsolate()->heap();
+
+  // Optimize the case where there is one argument and the argument is a
+  // small smi.
+  if (args->length() == 1) {
+    Object* obj = (*args)[0];
+    if (obj->IsSmi()) {
+      int len = Smi::cast(obj)->value();
+      if (len > 0 && len < JSObject::kInitialMaxFastElementArray) {
+        ElementsKind elements_kind = array->GetElementsKind();
+        MaybeObject* maybe_array = array->Initialize(len, len);
+        if (maybe_array->IsFailure()) return maybe_array;
+
+        if (!IsFastHoleyElementsKind(elements_kind)) {
+          elements_kind = GetHoleyElementsKind(elements_kind);
+          maybe_array = array->TransitionElementsKind(elements_kind);
+          if (maybe_array->IsFailure()) return maybe_array;
+        }
+
+        return array;
+      } else if (len == 0) {
+        return array->Initialize(JSArray::kPreallocatedArrayElements);
+      }
+    }
+
+    // Take the argument as the length.
+    MaybeObject* maybe_obj = array->Initialize(0);
+    if (!maybe_obj->To(&obj)) return maybe_obj;
+
+    return array->SetElementsLength((*args)[0]);
+  }
+
+  // Optimize the case where there are no parameters passed.
+  if (args->length() == 0) {
+    return array->Initialize(JSArray::kPreallocatedArrayElements);
+  }
+
+  // Set length and elements on the array.
+  int number_of_elements = args->length();
+  MaybeObject* maybe_object =
+      array->EnsureCanContainElements(args, 0, number_of_elements,
+                                      ALLOW_CONVERTED_DOUBLE_ELEMENTS);
+  if (maybe_object->IsFailure()) return maybe_object;
+
+  // Allocate an appropriately typed elements array.
+  MaybeObject* maybe_elms;
+  ElementsKind elements_kind = array->GetElementsKind();
+  if (IsFastDoubleElementsKind(elements_kind)) {
+    maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
+        number_of_elements);
+  } else {
+    maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
+  }
+  FixedArrayBase* elms;
+  if (!maybe_elms->To(&elms)) return maybe_elms;
+
+  // Fill in the content
+  switch (array->GetElementsKind()) {
+    case FAST_HOLEY_SMI_ELEMENTS:
+    case FAST_SMI_ELEMENTS: {
+      FixedArray* smi_elms = FixedArray::cast(elms);
+      for (int index = 0; index < number_of_elements; index++) {
+        smi_elms->set(index, (*args)[index], SKIP_WRITE_BARRIER);
+      }
+      break;
+    }
+    case FAST_HOLEY_ELEMENTS:
+    case FAST_ELEMENTS: {
+      AssertNoAllocation no_gc;
+      WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
+      FixedArray* object_elms = FixedArray::cast(elms);
+      for (int index = 0; index < number_of_elements; index++) {
+        object_elms->set(index, (*args)[index], mode);
+      }
+      break;
+    }
+    case FAST_HOLEY_DOUBLE_ELEMENTS:
+    case FAST_DOUBLE_ELEMENTS: {
+      FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
+      for (int index = 0; index < number_of_elements; index++) {
+        double_elms->set(index, (*args)[index]->Number());
+      }
+      break;
+    }
+    default:
+      UNREACHABLE();
+      break;
+  }
+
+  array->set_elements(elms);
+  array->set_length(Smi::FromInt(number_of_elements));
+  return array;
+}
+
 } }  // namespace v8::internal
index e31ceb44b4fb29a68145bafab3f117ef966a7826..6353aaecf5cce58cc3129eb7f229302ac3227f05 100644 (file)
@@ -200,6 +200,9 @@ class ElementsAccessor {
 void CheckArrayAbuse(JSObject* obj, const char* op, uint32_t key,
                      bool allow_appending = false);
 
+MUST_USE_RESULT MaybeObject* ArrayConstructInitializeElements(
+    JSArray* array, Arguments* args);
+
 } }  // namespace v8::internal
 
 #endif  // V8_ELEMENTS_H_
index cbc38b07106cd993e57a2be4c71f8f1535ab568d..dcc6c2cd1483d7625dce2397985cda442af553bb 100644 (file)
@@ -222,6 +222,8 @@ DEFINE_bool(unreachable_code_elimination, false,
             "eliminate unreachable code (hidden behind soft deopts)")
 DEFINE_bool(track_allocation_sites, true,
             "Use allocation site info to reduce transitions")
+DEFINE_bool(optimize_constructed_arrays, false,
+            "Use allocation site info on constructed arrays")
 DEFINE_bool(trace_osr, false, "trace on-stack replacement")
 DEFINE_int(stress_runs, 0, "number of stress runs")
 DEFINE_bool(optimize_closures, true, "optimize closures")
index 87d741fb3b26c9c92d808fc63dc50d80a60d9482..916a6a293c695a2c02ce370733231320a2b0139b 100644 (file)
@@ -456,6 +456,15 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
 }
 
 
+MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite(
+      ElementsKind elements_kind,
+      Handle<Object> allocation_site_payload) {
+  return AllocateJSArrayAndStorageWithAllocationSite(elements_kind, 0, 0,
+      allocation_site_payload,
+      DONT_INITIALIZE_ARRAY_ELEMENTS);
+}
+
+
 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
   const char* collector_reason = NULL;
   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
index e1260aed8a589ff1d061ca1897c8e47f23ea7e86..7ce9263125d563d84f21838a0ae9f26b5535abc9 100644 (file)
@@ -3904,6 +3904,28 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
 }
 
 
+MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
+    Handle<Object> allocation_site_info_payload) {
+  ASSERT(gc_state_ == NOT_IN_GC);
+  ASSERT(map->instance_type() != MAP_TYPE);
+  // If allocation failures are disallowed, we may allocate in a different
+  // space when new space is full and the object is not a large object.
+  AllocationSpace retry_space =
+      (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
+  int size = map->instance_size() + AllocationSiteInfo::kSize;
+  Object* result;
+  MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+  if (!maybe_result->ToObject(&result)) return maybe_result;
+  // No need for write barrier since object is white and map is in old space.
+  HeapObject::cast(result)->set_map_no_write_barrier(map);
+  AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
+      reinterpret_cast<Address>(result) + map->instance_size());
+  alloc_info->set_map_no_write_barrier(allocation_site_info_map());
+  alloc_info->set_payload(*allocation_site_info_payload, SKIP_WRITE_BARRIER);
+  return result;
+}
+
+
 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
   ASSERT(gc_state_ == NOT_IN_GC);
   ASSERT(map->instance_type() != MAP_TYPE);
@@ -3911,11 +3933,10 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
   // space when new space is full and the object is not a large object.
   AllocationSpace retry_space =
       (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
+  int size = map->instance_size();
   Object* result;
-  { MaybeObject* maybe_result =
-        AllocateRaw(map->instance_size(), space, retry_space);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+  if (!maybe_result->ToObject(&result)) return maybe_result;
   // No need for write barrier since object is white and map is in old space.
   HeapObject::cast(result)->set_map_no_write_barrier(map);
   return result;
@@ -4183,10 +4204,48 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
   if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
   Object* obj;
-  { MaybeObject* maybe_obj = Allocate(map, space);
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+  MaybeObject* maybe_obj = Allocate(map, space);
+  if (!maybe_obj->To(&obj)) return maybe_obj;
+
+  // Initialize the JSObject.
+  InitializeJSObjectFromMap(JSObject::cast(obj),
+                            FixedArray::cast(properties),
+                            map);
+  ASSERT(JSObject::cast(obj)->HasFastElements());
+  return obj;
+}
+
+
+MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(Map* map,
+    Handle<Object> allocation_site_info_payload) {
+  // JSFunctions should be allocated using AllocateFunction to be
+  // properly initialized.
+  ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
+
+  // Both types of global objects should be allocated using
+  // AllocateGlobalObject to be properly initialized.
+  ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
+  ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
+
+  // Allocate the backing storage for the properties.
+  int prop_size =
+      map->pre_allocated_property_fields() +
+      map->unused_property_fields() -
+      map->inobject_properties();
+  ASSERT(prop_size >= 0);
+  Object* properties;
+  { MaybeObject* maybe_properties = AllocateFixedArray(prop_size);
+    if (!maybe_properties->ToObject(&properties)) return maybe_properties;
   }
 
+  // Allocate the JSObject.
+  AllocationSpace space = NEW_SPACE;
+  if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
+  Object* obj;
+  MaybeObject* maybe_obj = AllocateWithAllocationSite(map, space,
+      allocation_site_info_payload);
+  if (!maybe_obj->To(&obj)) return maybe_obj;
+
   // Initialize the JSObject.
   InitializeJSObjectFromMap(JSObject::cast(obj),
                             FixedArray::cast(properties),
@@ -4219,6 +4278,51 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
 }
 
 
+MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
+    Handle<Object> allocation_site_info_payload) {
+  // Allocate the initial map if absent.
+  if (!constructor->has_initial_map()) {
+    Object* initial_map;
+    { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
+      if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
+    }
+    constructor->set_initial_map(Map::cast(initial_map));
+    Map::cast(initial_map)->set_constructor(constructor);
+  }
+  // Allocate the object based on the constructor's initial map, or the payload
+  // advice.
+  Map* initial_map = constructor->initial_map();
+
+  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(
+      *allocation_site_info_payload);
+  Smi* smi = Smi::cast(cell->value());
+  ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+  AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
+  if (to_kind != initial_map->elements_kind()) {
+    MaybeObject* maybe_new_map = constructor->GetElementsTransitionMap(
+        isolate(), to_kind);
+    if (!maybe_new_map->To(&initial_map)) return maybe_new_map;
+    // Possibly alter the mode, since we found an updated elements kind
+    // in the type info cell.
+    mode = AllocationSiteInfo::GetMode(to_kind);
+  }
+
+  MaybeObject* result;
+  if (mode == TRACK_ALLOCATION_SITE) {
+    result = AllocateJSObjectFromMapWithAllocationSite(initial_map,
+        allocation_site_info_payload);
+  } else {
+    result = AllocateJSObjectFromMap(initial_map, NOT_TENURED);
+  }
+#ifdef DEBUG
+  // Make sure result is NOT a global object if valid.
+  Object* non_failure;
+  ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
+#endif
+  return result;
+}
+
+
 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
   // Allocate a fresh map. Modules do not have a prototype.
   Map* map;
@@ -4240,11 +4344,14 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
     int capacity,
     ArrayStorageAllocationMode mode,
     PretenureFlag pretenure) {
-  ASSERT(capacity >= length);
   MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
   JSArray* array;
   if (!maybe_array->To(&array)) return maybe_array;
 
+  // TODO(mvstanton): this body of code duplicates AllocateJSArrayStorage;
+  // it is kept inline for performance reasons.
+  ASSERT(capacity >= length);
+
   if (capacity == 0) {
     array->set_length(Smi::FromInt(0));
     array->set_elements(empty_fixed_array());
@@ -4277,6 +4384,60 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
 }
 
 
+MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite(
+    ElementsKind elements_kind,
+    int length,
+    int capacity,
+    Handle<Object> allocation_site_payload,
+    ArrayStorageAllocationMode mode) {
+  MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind,
+      allocation_site_payload);
+  JSArray* array;
+  if (!maybe_array->To(&array)) return maybe_array;
+  return AllocateJSArrayStorage(array, length, capacity, mode);
+}
+
+
+MaybeObject* Heap::AllocateJSArrayStorage(
+    JSArray* array,
+    int length,
+    int capacity,
+    ArrayStorageAllocationMode mode) {
+  ASSERT(capacity >= length);
+
+  if (capacity == 0) {
+    array->set_length(Smi::FromInt(0));
+    array->set_elements(empty_fixed_array());
+    return array;
+  }
+
+  FixedArrayBase* elms;
+  MaybeObject* maybe_elms = NULL;
+  ElementsKind elements_kind = array->GetElementsKind();
+  if (IsFastDoubleElementsKind(elements_kind)) {
+    if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
+      maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
+    } else {
+      ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
+      maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
+    }
+  } else {
+    ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
+    if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
+      maybe_elms = AllocateUninitializedFixedArray(capacity);
+    } else {
+      ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
+      maybe_elms = AllocateFixedArrayWithHoles(capacity);
+    }
+  }
+  if (!maybe_elms->To(&elms)) return maybe_elms;
+
+  array->set_elements(elms);
+  array->set_length(Smi::FromInt(length));
+  return array;
+}
+
+
 MaybeObject* Heap::AllocateJSArrayWithElements(
     FixedArrayBase* elements,
     ElementsKind elements_kind,
@@ -4407,8 +4568,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
 }
 
 
-MaybeObject* Heap::CopyJSObject(JSObject* source,
-                                AllocationSiteMode mode) {
+MaybeObject* Heap::CopyJSObject(JSObject* source) {
   // Never used to copy functions.  If functions need to be copied we
   // have to be careful to clear the literals array.
   SLOW_ASSERT(!source->IsJSFunction());
@@ -4418,9 +4578,81 @@ MaybeObject* Heap::CopyJSObject(JSObject* source,
   int object_size = map->instance_size();
   Object* clone;
 
-  bool track_origin = mode == TRACK_ALLOCATION_SITE &&
-      map->CanTrackAllocationSite();
+  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
+
+  // If we're forced to always allocate, we use the general allocation
+  // functions which may leave us with an object in old space.
+  if (always_allocate()) {
+    { MaybeObject* maybe_clone =
+          AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
+      if (!maybe_clone->ToObject(&clone)) return maybe_clone;
+    }
+    Address clone_address = HeapObject::cast(clone)->address();
+    CopyBlock(clone_address,
+              source->address(),
+              object_size);
+    // Update write barrier for all fields that lie beyond the header.
+    RecordWrites(clone_address,
+                 JSObject::kHeaderSize,
+                 (object_size - JSObject::kHeaderSize) / kPointerSize);
+  } else {
+    wb_mode = SKIP_WRITE_BARRIER;
+
+    { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
+      if (!maybe_clone->ToObject(&clone)) return maybe_clone;
+    }
+    SLOW_ASSERT(InNewSpace(clone));
+    // Since we know the clone is allocated in new space, we can copy
+    // the contents without worrying about updating the write barrier.
+    CopyBlock(HeapObject::cast(clone)->address(),
+              source->address(),
+              object_size);
+  }
+
+  SLOW_ASSERT(
+      JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
+  FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
+  FixedArray* properties = FixedArray::cast(source->properties());
+  // Update elements if necessary.
+  if (elements->length() > 0) {
+    Object* elem;
+    { MaybeObject* maybe_elem;
+      if (elements->map() == fixed_cow_array_map()) {
+        maybe_elem = FixedArray::cast(elements);
+      } else if (source->HasFastDoubleElements()) {
+        maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
+      } else {
+        maybe_elem = CopyFixedArray(FixedArray::cast(elements));
+      }
+      if (!maybe_elem->ToObject(&elem)) return maybe_elem;
+    }
+    JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
+  }
+  // Update properties if necessary.
+  if (properties->length() > 0) {
+    Object* prop;
+    { MaybeObject* maybe_prop = CopyFixedArray(properties);
+      if (!maybe_prop->ToObject(&prop)) return maybe_prop;
+    }
+    JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
+  }
+  // Return the new clone.
+  return clone;
+}
+
+
+MaybeObject* Heap::CopyJSObjectWithAllocationSite(JSObject* source) {
+  // Never used to copy functions.  If functions need to be copied we
+  // have to be careful to clear the literals array.
+  SLOW_ASSERT(!source->IsJSFunction());
+
+  // Make the clone.
+  Map* map = source->map();
+  int object_size = map->instance_size();
+  Object* clone;
 
+  ASSERT(map->CanTrackAllocationSite());
+  ASSERT(map->instance_type() == JS_ARRAY_TYPE);
   WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
 
   // If we're forced to always allocate, we use the general allocation
@@ -4428,11 +4660,9 @@ MaybeObject* Heap::CopyJSObject(JSObject* source,
   int adjusted_object_size = object_size;
   if (always_allocate()) {
     // We'll only track origin if we are certain to allocate in new space
-    if (track_origin) {
-      const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
-      if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
-        adjusted_object_size += AllocationSiteInfo::kSize;
-      }
+    const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
+    if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
+      adjusted_object_size += AllocationSiteInfo::kSize;
     }
 
     { MaybeObject* maybe_clone =
@@ -4444,14 +4674,29 @@ MaybeObject* Heap::CopyJSObject(JSObject* source,
               source->address(),
               object_size);
     // Update write barrier for all fields that lie beyond the header.
-    RecordWrites(clone_address,
-                 JSObject::kHeaderSize,
-                 (object_size - JSObject::kHeaderSize) / kPointerSize);
+    int write_barrier_offset = adjusted_object_size > object_size
+        ? JSArray::kSize + AllocationSiteInfo::kSize
+        : JSObject::kHeaderSize;
+    if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
+      RecordWrites(clone_address,
+                   write_barrier_offset,
+                   (object_size - write_barrier_offset) / kPointerSize);
+    }
+
+    // Track allocation site information, if we failed to allocate it inline.
+    if (InNewSpace(clone) &&
+        adjusted_object_size == object_size) {
+      MaybeObject* maybe_alloc_info =
+          AllocateStruct(ALLOCATION_SITE_INFO_TYPE);
+      AllocationSiteInfo* alloc_info;
+      if (maybe_alloc_info->To(&alloc_info)) {
+        alloc_info->set_map_no_write_barrier(allocation_site_info_map());
+        alloc_info->set_payload(source, SKIP_WRITE_BARRIER);
+      }
+    }
   } else {
     wb_mode = SKIP_WRITE_BARRIER;
-    if (track_origin) {
-      adjusted_object_size += AllocationSiteInfo::kSize;
-    }
+    adjusted_object_size += AllocationSiteInfo::kSize;
 
     { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
@@ -4467,8 +4712,8 @@ MaybeObject* Heap::CopyJSObject(JSObject* source,
   if (adjusted_object_size > object_size) {
     AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
         reinterpret_cast<Address>(clone) + object_size);
-    alloc_info->set_map(allocation_site_info_map());
-    alloc_info->set_payload(source);
+    alloc_info->set_map_no_write_barrier(allocation_site_info_map());
+    alloc_info->set_payload(source, SKIP_WRITE_BARRIER);
   }
 
   SLOW_ASSERT(
@@ -4900,6 +5145,25 @@ MaybeObject* Heap::AllocateJSArray(
 }
 
 
+MaybeObject* Heap::AllocateJSArrayWithAllocationSite(
+    ElementsKind elements_kind,
+    Handle<Object> allocation_site_info_payload) {
+  Context* native_context = isolate()->context()->native_context();
+  JSFunction* array_function = native_context->array_function();
+  Map* map = array_function->initial_map();
+  Object* maybe_map_array = native_context->js_array_maps();
+  if (!maybe_map_array->IsUndefined()) {
+    Object* maybe_transitioned_map =
+        FixedArray::cast(maybe_map_array)->get(elements_kind);
+    if (!maybe_transitioned_map->IsUndefined()) {
+      map = Map::cast(maybe_transitioned_map);
+    }
+  }
+  return AllocateJSObjectFromMapWithAllocationSite(map,
+      allocation_site_info_payload);
+}
+
+
 MaybeObject* Heap::AllocateEmptyFixedArray() {
   int size = FixedArray::SizeFor(0);
   Object* result;
index 05f8b634bb488bd79e87095673a1fd53b706c5b3..a5ff86ea0b4e27343b7bdd4fb2e6fb81000d0a0c 100644 (file)
@@ -601,7 +601,12 @@ class Heap {
   // failed.
   // Please note this does not perform a garbage collection.
   MUST_USE_RESULT MaybeObject* AllocateJSObject(
-      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
+      JSFunction* constructor,
+      PretenureFlag pretenure = NOT_TENURED);
+
+  MUST_USE_RESULT MaybeObject* AllocateJSObjectWithAllocationSite(
+      JSFunction* constructor,
+      Handle<Object> allocation_site_info_payload);
 
   MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
                                                 ScopeInfo* scope_info);
@@ -615,6 +620,10 @@ class Heap {
                                      pretenure);
   }
 
+  inline MUST_USE_RESULT MaybeObject* AllocateEmptyJSArrayWithAllocationSite(
+      ElementsKind elements_kind,
+      Handle<Object> allocation_site_payload);
+
   // Allocate a JSArray with a specified length but elements that are left
   // uninitialized.
   MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
@@ -624,6 +633,19 @@ class Heap {
       ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
       PretenureFlag pretenure = NOT_TENURED);
 
+  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorageWithAllocationSite(
+      ElementsKind elements_kind,
+      int length,
+      int capacity,
+      Handle<Object> allocation_site_payload,
+      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);
+
+  MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage(
+      JSArray* array,
+      int length,
+      int capacity,
+      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);
+
   // Allocate a JSArray with no elements
   MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
       FixedArrayBase* array_base,
@@ -640,9 +662,9 @@ class Heap {
   // Returns a deep copy of the JavaScript object.
   // Properties and elements are copied too.
   // Returns failure if allocation failed.
-  MUST_USE_RESULT MaybeObject* CopyJSObject(
-      JSObject* source,
-      AllocationSiteMode mode = DONT_TRACK_ALLOCATION_SITE);
+  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
+
+  MUST_USE_RESULT MaybeObject* CopyJSObjectWithAllocationSite(JSObject* source);
 
   // Allocates the function prototype.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -685,12 +707,18 @@ class Heap {
   MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
       Map* map, PretenureFlag pretenure = NOT_TENURED);
 
+  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMapWithAllocationSite(
+      Map* map, Handle<Object> allocation_site_info_payload);
+
   // Allocates a heap object based on the map.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
   MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
 
+  MUST_USE_RESULT MaybeObject* AllocateWithAllocationSite(Map* map,
+      AllocationSpace space, Handle<Object> allocation_site_info_payload);
+
   // Allocates a JS Map in the heap.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
@@ -2035,6 +2063,10 @@ class Heap {
       ElementsKind elements_kind,
       PretenureFlag pretenure = NOT_TENURED);
 
+  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithAllocationSite(
+      ElementsKind elements_kind,
+      Handle<Object> allocation_site_info_payload);
+
   // Allocate empty fixed array.
   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
 
index ded6ae34e8f05f53dad83f9aa0b7e13eaa14b29b..cef8a541d98f4bd21a9f5680274c672eebd11182 100644 (file)
@@ -83,6 +83,7 @@ class LChunkBuilder;
   V(CallKnownGlobal)                           \
   V(CallNamed)                                 \
   V(CallNew)                                   \
+  V(CallNewArray)                              \
   V(CallRuntime)                               \
   V(CallStub)                                  \
   V(Change)                                    \
@@ -2210,6 +2211,25 @@ class HCallNew: public HBinaryCall {
 };
 
 
+class HCallNewArray: public HCallNew {
+ public:
+  HCallNewArray(HValue* context, HValue* constructor, int argument_count,
+                Handle<JSGlobalPropertyCell> type_cell)
+      : HCallNew(context, constructor, argument_count),
+        type_cell_(type_cell) {
+  }
+
+  Handle<JSGlobalPropertyCell> property_cell() const {
+    return type_cell_;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(CallNewArray)
+
+ private:
+  Handle<JSGlobalPropertyCell> type_cell_;
+};
+
+
 class HCallRuntime: public HCall<1> {
  public:
   HCallRuntime(HValue* context,
index 79a1008abdc480ec97dbb7548f181b6d13564fd7..b34b644d03cffed4226d02e75cf2a4bc4bd065c3 100644 (file)
@@ -8490,8 +8490,21 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
     CHECK_ALIVE(VisitArgument(expr->expression()));
     HValue* constructor = HPushArgument::cast(Top())->argument();
     CHECK_ALIVE(VisitArgumentList(expr->arguments()));
-    HInstruction* call =
-        new(zone()) HCallNew(context, constructor, argument_count);
+    HCallNew* call;
+    if (FLAG_optimize_constructed_arrays &&
+        !(expr->target().is_null()) &&
+        *(expr->target()) == isolate()->global_context()->array_function()) {
+      Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
+      ASSERT(feedback->IsSmi());
+      Handle<JSGlobalPropertyCell> cell =
+          isolate()->factory()->NewJSGlobalPropertyCell(feedback);
+      AddInstruction(new(zone()) HCheckFunction(constructor,
+          Handle<JSFunction>(isolate()->global_context()->array_function())));
+      call = new(zone()) HCallNewArray(context, constructor, argument_count,
+                                       cell);
+    } else {
+      call = new(zone()) HCallNew(context, constructor, argument_count);
+    }
     Drop(argument_count);
     call->set_position(expr->position());
     return ast_context()->ReturnInstruction(call, expr->id());
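
For reference, the kind of call site that takes the new HCallNewArray path: the constructor must resolve to the Array function and the CallNew feedback must be the ElementsKind Smi sentinel that GenerateRecordCallTarget writes into the cell. A hedged d8 sketch of such a site (the makeArray helper is illustrative, not from the patch; assumes --allow-natives-syntax and --optimize-constructed-arrays, with %OptimizeFunctionOnNextCall simply forcing the optimizing compile in which VisitCallNew runs):

  function makeArray(x) {
    return new Array(x);          // target is the Array function, so the call
  }                               // site's cell ends up holding an ElementsKind Smi
  makeArray(1);
  makeArray(2);                   // warm up so the oracle sees the sentinel
  %OptimizeFunctionOnNextCall(makeArray);
  makeArray(3);                   // optimized code emits HCheckFunction + HCallNewArray
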
index c8653f49983c09b23cb6458284c22de0c3ab675b..bcd713dce79ff1b56558b5f387c5c7898cb58eba 100644 (file)
@@ -1479,34 +1479,64 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax : argc
+  //  -- ebx : type info cell
   //  -- edi : constructor
   //  -- esp[0] : return address
   //  -- esp[4] : last argument
   // -----------------------------------
-  Label generic_constructor;
-
   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
     // builtin Array functions which always have maps.
 
     // Initial map for the builtin Array function should be a map.
-    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
-    __ test(ebx, Immediate(kSmiTagMask));
+    __ test(ecx, Immediate(kSmiTagMask));
     __ Assert(not_zero, "Unexpected initial map for Array function");
-    __ CmpObjectType(ebx, MAP_TYPE, ecx);
+    __ CmpObjectType(ecx, MAP_TYPE, ecx);
     __ Assert(equal, "Unexpected initial map for Array function");
-  }
 
-  // Run the native code for the Array function called as constructor.
-  ArrayNativeCode(masm, true, &generic_constructor);
+    // We should either have undefined in ebx or a valid JSGlobalPropertyCell.
+    Label okay_here;
+    Handle<Object> undefined_sentinel(
+        masm->isolate()->heap()->undefined_value(), masm->isolate());
+    Handle<Map> global_property_cell_map(
+        masm->isolate()->heap()->global_property_cell_map());
+    __ cmp(ebx, Immediate(undefined_sentinel));
+    __ j(equal, &okay_here);
+    __ cmp(FieldOperand(ebx, 0), Immediate(global_property_cell_map));
+    __ Assert(equal, "Expected property cell in register ebx");
+    __ bind(&okay_here);
+  }
 
-  // Jump to the generic construct code in case the specialized code cannot
-  // handle the construction.
-  __ bind(&generic_constructor);
-  Handle<Code> generic_construct_stub =
-      masm->isolate()->builtins()->JSConstructStubGeneric();
-  __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+  if (FLAG_optimize_constructed_arrays) {
+    Label not_zero_case, not_one_case;
+    __ test(eax, eax);
+    __ j(not_zero, &not_zero_case);
+    ArrayNoArgumentConstructorStub no_argument_stub;
+    __ TailCallStub(&no_argument_stub);
+
+    __ bind(&not_zero_case);
+    __ cmp(eax, 1);
+    __ j(greater, &not_one_case);
+    ArraySingleArgumentConstructorStub single_argument_stub;
+    __ TailCallStub(&single_argument_stub);
+
+    __ bind(&not_one_case);
+    ArrayNArgumentsConstructorStub n_argument_stub;
+    __ TailCallStub(&n_argument_stub);
+  } else {
+    Label generic_constructor;
+    // Run the native code for the Array function called as constructor.
+    ArrayNativeCode(masm, true, &generic_constructor);
+
+    // Jump to the generic construct code in case the specialized code cannot
+    // handle the construction.
+    __ bind(&generic_constructor);
+    Handle<Code> generic_construct_stub =
+        masm->isolate()->builtins()->JSConstructStubGeneric();
+    __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+  }
 }
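
With --optimize-constructed-arrays, the builtin above no longer falls straight through to ArrayNativeCode; it dispatches on the argument count in eax, so each constructor shape reaches its own stub. A short illustration of the three shapes (plain JavaScript, no helpers from the patch):

  var a = new Array();            // argc == 0 -> ArrayNoArgumentConstructorStub
  var b = new Array(10);          // argc == 1 -> ArraySingleArgumentConstructorStub
  var c = new Array(1, 2, 3);     // argc >  1 -> ArrayNArgumentsConstructorStub
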
 
 
index a52685bf6563d598f383f0b4e27f36a09714058f..a2237e7a4f682837bfc6ec6e07d3f4513b3912c3 100644 (file)
@@ -37,6 +37,7 @@
 #include "runtime.h"
 #include "stub-cache.h"
 #include "codegen.h"
+#include "runtime.h"
 
 namespace v8 {
 namespace internal {
@@ -60,6 +61,7 @@ void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
   static Register registers[] = { edx, ecx };
   descriptor->register_param_count_ = 2;
   descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
 }
@@ -76,6 +78,44 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor(
 }
 
 
+static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  // register state
+  // edi -- constructor function
+  // ebx -- type info cell with elements kind
+  // eax -- number of arguments to the constructor function
+  static Register registers[] = { edi, ebx };
+  descriptor->register_param_count_ = 2;
+  // The stack param count needs the constructor pointer and a single argument.
+  descriptor->stack_parameter_count_ = &eax;
+  descriptor->register_params_ = registers;
+  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(ArrayConstructor_StubFailure);
+}
+
+
+void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
 #define __ ACCESS_MASM(masm)
 
 void ToNumberStub::Generate(MacroAssembler* masm) {
@@ -4787,12 +4827,13 @@ void InterruptStub::Generate(MacroAssembler* masm) {
 }
 
 
-static void GenerateRecordCallTarget(MacroAssembler* masm) {
+static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) {
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // ebx : cache cell for call target
   // edi : the function to call
+  ASSERT(!FLAG_optimize_constructed_arrays);
   Isolate* isolate = masm->isolate();
   Label initialize, done;
 
@@ -4825,6 +4866,82 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+  // Cache the called function in a global property cell.  Cache states
+  // are uninitialized, monomorphic (indicated by a JSFunction), and
+  // megamorphic.
+  // ebx : cache cell for call target
+  // edi : the function to call
+  ASSERT(FLAG_optimize_constructed_arrays);
+  Isolate* isolate = masm->isolate();
+  Label initialize, done, miss, megamorphic, not_array_function;
+
+  // Load the cache state into ecx.
+  __ mov(ecx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
+
+  // A monomorphic cache hit or an already megamorphic state: invoke the
+  // function without changing the state.
+  __ cmp(ecx, edi);
+  __ j(equal, &done);
+  __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+  __ j(equal, &done);
+
+  // Special handling of the Array() function, which caches not only the
+  // monomorphic Array function but the initial ElementsKind with special
+  // sentinels.
+  Handle<Object> terminal_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
+                                                  LAST_FAST_ELEMENTS_KIND);
+  __ cmp(ecx, Immediate(terminal_kind_sentinel));
+  __ j(above, &miss);
+  // Load the global or builtins object from the current context
+  __ LoadGlobalContext(ecx);
+  // Make sure the function is the Array() function
+  __ cmp(edi, Operand(ecx,
+                      Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+  __ j(not_equal, &megamorphic);
+  __ jmp(&done);
+
+  __ bind(&miss);
+
+  // A monomorphic miss (i.e., the cache is not uninitialized) goes
+  // megamorphic.
+  __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate)));
+  __ j(equal, &initialize);
+  // MegamorphicSentinel is an immortal immovable object (undefined) so no
+  // write-barrier is needed.
+  __ bind(&megamorphic);
+  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+         Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+  __ jmp(&done, Label::kNear);
+
+  // An uninitialized cache is patched with the function or sentinel to
+  // indicate the ElementsKind if the function is the Array constructor.
+  __ bind(&initialize);
+  __ LoadGlobalContext(ecx);
+  // Make sure the function is the Array() function
+  __ cmp(edi, Operand(ecx,
+                      Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+  __ j(not_equal, &not_array_function);
+
+  // The target function is the Array constructor; install a sentinel value in
+  // the constructor's type info cell that will track the initial ElementsKind
+  // that should be used for the array when it is constructed.
+  Handle<Object> initial_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
+          GetInitialFastElementsKind());
+  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+         Immediate(initial_kind_sentinel));
+  __ jmp(&done);
+
+  __ bind(&not_array_function);
+  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), edi);
+  // No need for a write barrier here - cells are rescanned.
+
+  __ bind(&done);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // ebx : cache cell for call target
   // edi : the function to call
@@ -4856,7 +4973,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Fast-case: Just invoke the function.
@@ -4929,14 +5050,20 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Jump to the function-specific construct stub.
-  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
-  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kConstructStubOffset));
-  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
-  __ jmp(ebx);
+  Register jmp_reg = FLAG_optimize_constructed_arrays ? ecx : ebx;
+  __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+  __ mov(jmp_reg, FieldOperand(jmp_reg,
+                               SharedFunctionInfo::kConstructStubOffset));
+  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
+  __ jmp(jmp_reg);
 
   // edi: called object
   // eax: number of arguments
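
The cache cell that GenerateRecordCallTarget maintains is a small state machine: uninitialized, then either the called JSFunction or (for the Array function) an ElementsKind Smi sentinel, and finally the megamorphic sentinel once a second distinct target shows up at the site. A hedged sketch of call sites that walk through those states (the construct helper is illustrative, not part of the patch):

  function construct(ctor) {
    return new ctor();            // one shared CallConstructStub site and cell
  }

  construct(Array);               // uninitialized -> initial ElementsKind sentinel
  construct(Array);               // stays monomorphic for the Array function
  construct(Object);              // different target -> megamorphic sentinel
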
index 20352cf4418c36d4b1cda57fbb99273d2bddc46b..d4b5d93c129cb325de8addd7ee5d9344c5997889 100644 (file)
@@ -4146,12 +4146,32 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->constructor()).is(edi));
   ASSERT(ToRegister(instr->result()).is(eax));
 
+  if (FLAG_optimize_constructed_arrays) {
+    // No cell in ebx for construct type feedback in optimized code
+    Handle<Object> undefined_value(isolate()->heap()->undefined_value(),
+                                   isolate());
+    __ mov(ebx, Immediate(undefined_value));
+  }
   CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ Set(eax, Immediate(instr->arity()));
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
+void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
+  ASSERT(ToRegister(instr->context()).is(esi));
+  ASSERT(ToRegister(instr->constructor()).is(edi));
+  ASSERT(ToRegister(instr->result()).is(eax));
+  ASSERT(FLAG_optimize_constructed_arrays);
+
+  __ mov(ebx, instr->hydrogen()->property_cell());
+  Handle<Code> array_construct_code =
+      isolate()->builtins()->ArrayConstructCode();
+  __ Set(eax, Immediate(instr->arity()));
+  CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+}
+
+
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
   CallRuntime(instr->function(), instr->arity(), instr);
 }
index d5cf125eb0b8e6abab408489d494cecae76f16d9..74580f3145359b422cd81122caaf145bb328c03e 100644 (file)
@@ -367,6 +367,19 @@ void LCallNew::PrintDataTo(StringStream* stream) {
 }
 
 
+void LCallNewArray::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  context()->PrintTo(stream);
+  stream->Add(" ");
+  constructor()->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+  ASSERT(hydrogen()->property_cell()->value()->IsSmi());
+  ElementsKind kind = static_cast<ElementsKind>(
+      Smi::cast(hydrogen()->property_cell()->value())->value());
+  stream->Add(" (%s) ", ElementsKindToString(kind));
+}
+
+
 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
   arguments()->PrintTo(stream);
 
@@ -1221,6 +1234,16 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
 }
 
 
+LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
+  ASSERT(FLAG_optimize_constructed_arrays);
+  LOperand* context = UseFixed(instr->context(), esi);
+  LOperand* constructor = UseFixed(instr->constructor(), edi);
+  argument_count_ -= instr->argument_count();
+  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
+  return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
   LOperand* context = UseFixed(instr->context(), esi);
   LOperand* function = UseFixed(instr->function(), edi);
index ff4c477bc7844d3919226e9873aef5eb892d687f..cae42f37e301e0d08038b893049a418eec930ab1 100644 (file)
@@ -62,6 +62,7 @@ class LCodeGen;
   V(CallKnownGlobal)                            \
   V(CallNamed)                                  \
   V(CallNew)                                    \
+  V(CallNewArray)                               \
   V(CallRuntime)                                \
   V(CallStub)                                   \
   V(CheckFunction)                              \
@@ -1847,6 +1848,25 @@ class LCallNew: public LTemplateInstruction<1, 2, 0> {
 };
 
 
+class LCallNewArray: public LTemplateInstruction<1, 2, 0> {
+ public:
+  LCallNewArray(LOperand* context, LOperand* constructor) {
+    inputs_[0] = context;
+    inputs_[1] = constructor;
+  }
+
+  LOperand* context() { return inputs_[0]; }
+  LOperand* constructor() { return inputs_[1]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(CallNewArray, "call-new-array")
+  DECLARE_HYDROGEN_ACCESSOR(CallNewArray)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallRuntime: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LCallRuntime(LOperand* context) {
index 5e4f21129159cf5334e1ef9c82dc9f0532abee74..587699f66d7b2c250ca72dae757c7c8ebe0ed4cf 100644 (file)
@@ -2409,12 +2409,23 @@ void MacroAssembler::LoadInitialArrayMap(
 }
 
 
+void MacroAssembler::LoadGlobalContext(Register global_context) {
+  // Load the global or builtins object from the current context.
+  mov(global_context,
+      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  // Load the native context from the global or builtins object.
+  mov(global_context,
+      FieldOperand(global_context, GlobalObject::kNativeContextOffset));
+}
+
+
 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   // Load the global or builtins object from the current context.
   mov(function,
       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   // Load the native context from the global or builtins object.
-  mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
+  mov(function,
+      FieldOperand(function, GlobalObject::kNativeContextOffset));
   // Load the function from the native context.
   mov(function, Operand(function, Context::SlotOffset(index)));
 }
index 8c893dcdaffe5692101d86bf94e20b2450fb4b2c..65899cedb05cfa43b302dedbc2e9874a83c0c165 100644 (file)
@@ -249,6 +249,8 @@ class MacroAssembler: public Assembler {
                            Register map_out,
                            bool can_have_holes);
 
+  void LoadGlobalContext(Register global_context);
+
   // Load the global function with the given index.
   void LoadGlobalFunction(int index, Register function);
 
index 7c7facef4055d0d721d4cfddc0391a5a50f71a55..d1bc520ac0c01a54c1ffb4ac3087bdc22e5aa9fd 100644 (file)
@@ -5747,6 +5747,12 @@ Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
 }
 
 
+Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
+    ElementsKind elements_kind) {
+  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
+}
+
+
 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
   return heap->the_hole_value();
 }
index 9c3a35d2e8fd9997c0d7158b5477b1668eb21bd1..32940c487fa3b018e4310d78db45ba6db78e9ba2 100644 (file)
@@ -1039,7 +1039,18 @@ void TypeSwitchInfo::TypeSwitchInfoPrint(FILE* out) {
 void AllocationSiteInfo::AllocationSiteInfoPrint(FILE* out) {
   HeapObject::PrintHeader(out, "AllocationSiteInfo");
   PrintF(out, " - payload: ");
-  if (payload()->IsJSArray()) {
+  if (payload()->IsJSGlobalPropertyCell()) {
+    JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(payload());
+    Object* cell_contents = cell->value();
+    if (cell_contents->IsSmi()) {
+      ElementsKind kind = static_cast<ElementsKind>(
+          Smi::cast(cell_contents)->value());
+      PrintF(out, "Array allocation with ElementsKind ");
+      PrintElementsKind(out, kind);
+      PrintF(out, "\n");
+      return;
+    }
+  } else if (payload()->IsJSArray()) {
     PrintF(out, "Array literal ");
     payload()->ShortPrint(out);
     PrintF(out, "\n");
index df72d609bad8361430e0473c1b2e7c8692bd4f16..f001d3c6fe35f4c44871a52b7b762f5298c12823 100644 (file)
@@ -1,4 +1,4 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -7530,6 +7530,21 @@ AllocationSiteInfo* AllocationSiteInfo::FindForJSObject(JSObject* object) {
 }
 
 
+bool AllocationSiteInfo::GetElementsKindPayload(ElementsKind* kind) {
+  ASSERT(kind != NULL);
+  if (payload()->IsJSGlobalPropertyCell()) {
+    JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(payload());
+    Object* cell_contents = cell->value();
+    if (cell_contents->IsSmi()) {
+      *kind = static_cast<ElementsKind>(
+          Smi::cast(cell_contents)->value());
+      return true;
+    }
+  }
+  return false;
+}
+
+
 // Heuristic: We only need to create allocation site info if the boilerplate
 // elements kind is the initial elements kind.
 AllocationSiteMode AllocationSiteInfo::GetMode(
@@ -9353,19 +9368,10 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
 }
 
 
-MaybeObject* JSArray::Initialize(int capacity) {
-  Heap* heap = GetHeap();
+MaybeObject* JSArray::Initialize(int capacity, int length) {
   ASSERT(capacity >= 0);
-  set_length(Smi::FromInt(0));
-  FixedArray* new_elements;
-  if (capacity == 0) {
-    new_elements = heap->empty_fixed_array();
-  } else {
-    MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
-    if (!maybe_obj->To(&new_elements)) return maybe_obj;
-  }
-  set_elements(new_elements);
-  return this;
+  return GetHeap()->AllocateJSArrayStorage(this, length, capacity,
+                                           INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
 }
 
 
@@ -10054,8 +10060,8 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
         ? FAST_HOLEY_DOUBLE_ELEMENTS
         : FAST_DOUBLE_ELEMENTS;
 
-    MaybeObject* trans = PossiblyTransitionArrayBoilerplate(to_kind);
-    if (trans->IsFailure()) return trans;
+    MaybeObject* maybe_failure = UpdateAllocationSiteInfo(to_kind);
+    if (maybe_failure->IsFailure()) return maybe_failure;
 
     MaybeObject* maybe =
         SetFastDoubleElementsCapacityAndLength(new_capacity, array_length);
@@ -10071,8 +10077,8 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
         ? FAST_HOLEY_ELEMENTS
         : FAST_ELEMENTS;
 
-    MaybeObject* trans = PossiblyTransitionArrayBoilerplate(kind);
-    if (trans->IsFailure()) return trans;
+    MaybeObject* maybe_failure = UpdateAllocationSiteInfo(kind);
+    if (maybe_failure->IsFailure()) return maybe_failure;
 
     MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
                                                           kind);
@@ -10619,38 +10625,53 @@ Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
 }
 
 
-MaybeObject* JSObject::PossiblyTransitionArrayBoilerplate(
-    ElementsKind to_kind) {
-  MaybeObject* ret = NULL;
+MaybeObject* JSObject::UpdateAllocationSiteInfo(ElementsKind to_kind) {
   if (!FLAG_track_allocation_sites || !IsJSArray()) {
-    return ret;
+    return this;
   }
 
   AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(this);
   if (info == NULL) {
-    return ret;
+    return this;
   }
 
-  ASSERT(info->payload()->IsJSArray());
-  JSArray* payload = JSArray::cast(info->payload());
-  ElementsKind kind = payload->GetElementsKind();
-  if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
-    // If the array is huge, it's not likely to be defined in a local
-    // function, so we shouldn't make new instances of it very often.
-    uint32_t length = 0;
-    CHECK(payload->length()->ToArrayIndex(&length));
-    if (length <= 8 * 1024) {
-      ret = payload->TransitionElementsKind(to_kind);
-      if (FLAG_trace_track_allocation_sites) {
-        PrintF(
-            "AllocationSiteInfo: JSArray %p boilerplate updated %s->%s\n",
-            reinterpret_cast<void*>(this),
-            ElementsKindToString(kind),
-            ElementsKindToString(to_kind));
+  if (info->payload()->IsJSArray()) {
+    JSArray* payload = JSArray::cast(info->payload());
+    ElementsKind kind = payload->GetElementsKind();
+    if (AllocationSiteInfo::GetMode(kind, to_kind) == TRACK_ALLOCATION_SITE) {
+      // If the array is huge, it's not likely to be defined in a local
+      // function, so we shouldn't make new instances of it very often.
+      uint32_t length = 0;
+      CHECK(payload->length()->ToArrayIndex(&length));
+      if (length <= AllocationSiteInfo::kMaximumArrayBytesToPretransition) {
+        if (FLAG_trace_track_allocation_sites) {
+          PrintF(
+              "AllocationSiteInfo: JSArray %p boilerplate updated %s->%s\n",
+              reinterpret_cast<void*>(this),
+              ElementsKindToString(kind),
+              ElementsKindToString(to_kind));
+        }
+        return payload->TransitionElementsKind(to_kind);
+      }
+    }
+  } else if (info->payload()->IsJSGlobalPropertyCell()) {
+    JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(info->payload());
+    Object* cell_contents = cell->value();
+    if (cell_contents->IsSmi()) {
+      ElementsKind kind = static_cast<ElementsKind>(
+          Smi::cast(cell_contents)->value());
+      if (AllocationSiteInfo::GetMode(kind, to_kind) == TRACK_ALLOCATION_SITE) {
+        if (FLAG_trace_track_allocation_sites) {
+          PrintF("AllocationSiteInfo: JSArray %p info updated %s->%s\n",
+                 reinterpret_cast<void*>(this),
+                 ElementsKindToString(kind),
+                 ElementsKindToString(to_kind));
+        }
+        cell->set_value(Smi::FromInt(to_kind));
       }
     }
   }
-  return ret;
+  return this;
 }
 
 
@@ -10664,8 +10685,8 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
 
   if (from_kind == to_kind) return this;
 
-  MaybeObject* trans = PossiblyTransitionArrayBoilerplate(to_kind);
-  if (trans->IsFailure()) return trans;
+  MaybeObject* maybe_failure = UpdateAllocationSiteInfo(to_kind);
+  if (maybe_failure->IsFailure()) return maybe_failure;
 
   Isolate* isolate = GetIsolate();
   if (elements() == isolate->heap()->empty_fixed_array() ||
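
UpdateAllocationSiteInfo now handles both payload shapes: a JSArray boilerplate (array literal sites, transitioned in place while the array stays under kMaximumArrayBytesToPretransition) and a JSGlobalPropertyCell holding an ElementsKind Smi (new Array() sites, where only the Smi is rewritten). The literal half is observable from JavaScript; a minimal sketch in the style of the allocation-site-info.js cases below (the literal_site helper is illustrative; assumes --allow-natives-syntax and --track-allocation-sites):

  function literal_site() {
    return [1, 2, 3];                 // boilerplate tracked by an AllocationSiteInfo
  }

  var a = literal_site();
  a[0] = 1.5;                         // UpdateAllocationSiteInfo: boilerplate smi -> double
  var b = literal_site();             // a fresh literal from the same site
  print(%HasFastDoubleElements(b));   // already double: true
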
index 53f4cfad4c2683a843a05a3840a41119a2d92f9a..79e2a5ee974effcdf534a54f1d380bb724b2f64c 100644 (file)
@@ -2051,7 +2051,7 @@ class JSObject: public JSReceiver {
                                                ElementsKind to_kind);
 
   MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind);
-  MUST_USE_RESULT MaybeObject* PossiblyTransitionArrayBoilerplate(
+  MUST_USE_RESULT MaybeObject* UpdateAllocationSiteInfo(
       ElementsKind to_kind);
 
   // Replaces an existing transition with a transition to a map with a FIELD.
@@ -4203,6 +4203,11 @@ class TypeFeedbackCells: public FixedArray {
   // The object that indicates a megamorphic state.
   static inline Handle<Object> MegamorphicSentinel(Isolate* isolate);
 
+  // The object that indicates a monomorphic state of the Array function with
+  // a given ElementsKind.
+  static inline Handle<Object> MonomorphicArraySentinel(Isolate* isolate,
+      ElementsKind elements_kind);
+
   // A raw version of the uninitialized sentinel that's safe to read during
   // garbage collection (e.g., for patching the cache).
   static inline Object* RawUninitializedSentinel(Heap* heap);
@@ -7083,7 +7088,9 @@ class AllocationSiteInfo: public Struct {
 
   static const int kPayloadOffset = HeapObject::kHeaderSize;
   static const int kSize = kPayloadOffset + kPointerSize;
+  static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
 
+  bool GetElementsKindPayload(ElementsKind* kind);
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSiteInfo);
 };
@@ -8405,7 +8412,7 @@ class JSArray: public JSObject {
   // Initialize the array with the given capacity. The function may
   // fail due to out-of-memory situations, but only if the requested
   // capacity is non-zero.
-  MUST_USE_RESULT MaybeObject* Initialize(int capacity);
+  MUST_USE_RESULT MaybeObject* Initialize(int capacity, int length = 0);
 
   // Initializes the array to a certain length.
   inline bool AllowsSetElementsLength();
index 8aabcb585572115058072c33c2b9e9bca7559d19..2215eadcf3cda1c98d15770281688342ecf6d255 100644 (file)
@@ -80,9 +80,6 @@ namespace internal {
   RUNTIME_ASSERT(args[index]->Is##Type());                           \
   Handle<Type> name = args.at<Type>(index);
 
-#define CONVERT_ARG_STUB_CALLER_ARGS(name)                           \
-  Arguments* name = reinterpret_cast<Arguments*>(args[0]);
-
 // Cast the given object to a boolean and store it in a variable with
 // the given name.  If the object is not a boolean call IllegalOperation
 // and return.
@@ -677,7 +674,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
   JSObject* boilerplate_object = JSObject::cast(*boilerplate);
   AllocationSiteMode mode = AllocationSiteInfo::GetMode(
       boilerplate_object->GetElementsKind());
-  return isolate->heap()->CopyJSObject(boilerplate_object, mode);
+  if (mode == TRACK_ALLOCATION_SITE) {
+    return isolate->heap()->CopyJSObjectWithAllocationSite(boilerplate_object);
+  }
+
+  return isolate->heap()->CopyJSObject(boilerplate_object);
 }
 
 
index ac8572a10ff0aff90c23a4409475aa669c9f4e23..fc88c3c55da100fd4b9e5d08ec12da0f65dfa085 100644 (file)
@@ -163,8 +163,13 @@ bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
 
 
 bool TypeFeedbackOracle::CallNewIsMonomorphic(CallNew* expr) {
-  Handle<Object> value = GetInfo(expr->CallNewFeedbackId());
-  return value->IsJSFunction();
+  Handle<Object> info = GetInfo(expr->CallNewFeedbackId());
+  if (info->IsSmi()) {
+    ASSERT(static_cast<ElementsKind>(Smi::cast(*info)->value()) <=
+           LAST_FAST_ELEMENTS_KIND);
+    return true;  // A Smi in the cell means the Array function; monomorphic.
+  }
+  return info->IsJSFunction();
 }
 
 
@@ -288,10 +293,33 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(Call* expr) {
 
 
 Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(CallNew* expr) {
-  return Handle<JSFunction>::cast(GetInfo(expr->CallNewFeedbackId()));
+  Handle<Object> info = GetInfo(expr->CallNewFeedbackId());
+  if (info->IsSmi()) {
+    ASSERT(static_cast<ElementsKind>(Smi::cast(*info)->value()) <=
+           LAST_FAST_ELEMENTS_KIND);
+    return Handle<JSFunction>(Isolate::Current()->global_context()->
+                              array_function());
+  } else {
+    return Handle<JSFunction>::cast(info);
+  }
 }
 
 
+ElementsKind TypeFeedbackOracle::GetCallNewElementsKind(CallNew* expr) {
+  Handle<Object> info = GetInfo(expr->CallNewFeedbackId());
+  if (info->IsSmi()) {
+    return static_cast<ElementsKind>(Smi::cast(*info)->value());
+  } else {
+    // TODO(mvstanton): we avoid calling GetInitialFastElementsKind() here for
+    // performance reasons. Is there a better fix?
+    if (FLAG_packed_arrays) {
+      return FAST_SMI_ELEMENTS;
+    } else {
+      return FAST_HOLEY_SMI_ELEMENTS;
+    }
+  }
+}
+
 Handle<Map> TypeFeedbackOracle::GetObjectLiteralStoreMap(
     ObjectLiteral::Property* prop) {
   ASSERT(ObjectLiteralStoreIsMonomorphic(prop));
index bcb6ca7d0430765922e2d9375810fdfbe1a4168f..a962803dabf403ac2d7a98a7e416a6a056bf9342 100644 (file)
@@ -277,6 +277,7 @@ class TypeFeedbackOracle: public ZoneObject {
 
   Handle<JSFunction> GetCallTarget(Call* expr);
   Handle<JSFunction> GetCallNewTarget(CallNew* expr);
+  ElementsKind GetCallNewElementsKind(CallNew* expr);
 
   Handle<Map> GetObjectLiteralStoreMap(ObjectLiteral::Property* prop);
 
@@ -323,8 +324,12 @@ class TypeFeedbackOracle: public ZoneObject {
 
   // Returns an element from the backing store. Returns undefined if
   // there is no information.
+ public:
+  // TODO(mvstanton): how to get this information without making the method
+  // public?
   Handle<Object> GetInfo(TypeFeedbackId ast_id);
 
+ private:
   Handle<Context> native_context_;
   Isolate* isolate_;
   Handle<UnseededNumberDictionary> dictionary_;
index f2f31aa8fea1ef09b5f21ae3feec03907f2d5413..c3d757a1ac08cd23dfa0095d8d4a9bc5eb0cd7e1 100644 (file)
@@ -527,6 +527,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
 
     // Invoke the code.
     if (is_construct) {
+      // No type feedback cell is available
+      Handle<Object> undefined_sentinel(
+          masm->isolate()->factory()->undefined_value());
+      __ Move(rbx, undefined_sentinel);
       // Expects rdi to hold function pointer.
       CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
@@ -1507,30 +1511,60 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
   //  -- rsp[0] : return address
   //  -- rsp[8] : last argument
   // -----------------------------------
-  Label generic_constructor;
-
   if (FLAG_debug_code) {
     // The array construct code is only set for the builtin and internal
     // Array functions which always have a map.
+
     // Initial map for the builtin Array function should be a map.
-    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
+    __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     STATIC_ASSERT(kSmiTag == 0);
-    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
+    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
     __ Check(not_smi, "Unexpected initial map for Array function");
-    __ CmpObjectType(rbx, MAP_TYPE, rcx);
+    __ CmpObjectType(rcx, MAP_TYPE, rcx);
     __ Check(equal, "Unexpected initial map for Array function");
-  }
 
-  // Run the native code for the Array function called as constructor.
-  ArrayNativeCode(masm, &generic_constructor);
+    // We should either have undefined in rbx or a valid JSGlobalPropertyCell.
+    Label okay_here;
+    Handle<Object> undefined_sentinel(
+        masm->isolate()->factory()->undefined_value());
+    Handle<Map> global_property_cell_map(
+        masm->isolate()->heap()->global_property_cell_map());
+    __ Cmp(rbx, undefined_sentinel);
+    __ j(equal, &okay_here);
+    __ Cmp(FieldOperand(rbx, 0), global_property_cell_map);
+    __ Assert(equal, "Expected property cell in register rbx");
+    __ bind(&okay_here);
+  }
 
-  // Jump to the generic construct code in case the specialized code cannot
-  // handle the construction.
-  __ bind(&generic_constructor);
-  Handle<Code> generic_construct_stub =
-      masm->isolate()->builtins()->JSConstructStubGeneric();
-  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+  if (FLAG_optimize_constructed_arrays) {
+    Label not_zero_case, not_one_case;
+    __ testq(rax, rax);
+    __ j(not_zero, &not_zero_case);
+    ArrayNoArgumentConstructorStub no_argument_stub;
+    __ TailCallStub(&no_argument_stub);
+
+    __ bind(&not_zero_case);
+    __ cmpq(rax, Immediate(1));
+    __ j(greater, &not_one_case);
+    ArraySingleArgumentConstructorStub single_argument_stub;
+    __ TailCallStub(&single_argument_stub);
+
+    __ bind(&not_one_case);
+    ArrayNArgumentsConstructorStub n_argument_stub;
+    __ TailCallStub(&n_argument_stub);
+  } else {
+    Label generic_constructor;
+    // Run the native code for the Array function called as constructor.
+    ArrayNativeCode(masm, &generic_constructor);
+
+    // Jump to the generic construct code in case the specialized code cannot
+    // handle the construction.
+    __ bind(&generic_constructor);
+    Handle<Code> generic_construct_stub =
+        masm->isolate()->builtins()->JSConstructStubGeneric();
+    __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+  }
 }
 
 
index 638e23dbd028f6392808a83df3b4a25da6436853..49db9fe1638577470244d57bb813bff2412c7eba 100644 (file)
@@ -73,6 +73,44 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor(
 }
 
 
+static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  // register state
+  // rdi -- constructor function
+  // rbx -- type info cell with elements kind
+  // rax -- number of arguments to the constructor function
+  static Register registers[] = { rdi, rbx };
+  descriptor->register_param_count_ = 2;
+  // The stack param count needs the constructor pointer and a single argument.
+  descriptor->stack_parameter_count_ = &rax;
+  descriptor->register_params_ = registers;
+  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(ArrayConstructor_StubFailure);
+}
+
+
+void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
+void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  InitializeArrayConstructorDescriptor(isolate, descriptor);
+}
+
+
 #define __ ACCESS_MASM(masm)
 
 void ToNumberStub::Generate(MacroAssembler* masm) {
@@ -3852,12 +3890,13 @@ void InterruptStub::Generate(MacroAssembler* masm) {
 }
 
 
-static void GenerateRecordCallTarget(MacroAssembler* masm) {
+static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) {
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // rbx : cache cell for call target
   // rdi : the function to call
+  ASSERT(!FLAG_optimize_constructed_arrays);
   Isolate* isolate = masm->isolate();
   Label initialize, done;
 
@@ -3890,6 +3929,79 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+  // Cache the called function in a global property cell.  Cache states
+  // are uninitialized, monomorphic (indicated by a JSFunction), and
+  // megamorphic.
+  // rbx : cache cell for call target
+  // rdi : the function to call
+  ASSERT(FLAG_optimize_constructed_arrays);
+  Isolate* isolate = masm->isolate();
+  Label initialize, done, miss, megamorphic, not_array_function;
+
+  // Load the cache state into rcx.
+  __ movq(rcx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
+
+  // A monomorphic cache hit or an already megamorphic state: invoke the
+  // function without changing the state.
+  __ cmpq(rcx, rdi);
+  __ j(equal, &done);
+  __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate));
+  __ j(equal, &done);
+
+  // Special handling of the Array() function, which caches not only the
+  // monomorphic Array function but the initial ElementsKind with special
+  // sentinels.
+  Handle<Object> terminal_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
+                                                  LAST_FAST_ELEMENTS_KIND);
+  __ Cmp(rcx, terminal_kind_sentinel);
+  __ j(not_equal, &miss);
+  // Make sure the function is the Array() function
+  __ LoadArrayFunction(rcx);
+  __ cmpq(rdi, rcx);
+  __ j(not_equal, &megamorphic);
+  __ jmp(&done);
+
+  __ bind(&miss);
+
+  // A monomorphic miss (i.e., the cache is not uninitialized) goes
+  // megamorphic.
+  __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate));
+  __ j(equal, &initialize);
+  // MegamorphicSentinel is an immortal immovable object (undefined) so no
+  // write-barrier is needed.
+  __ bind(&megamorphic);
+  __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+          TypeFeedbackCells::MegamorphicSentinel(isolate));
+  __ jmp(&done, Label::kNear);
+
+  // An uninitialized cache is patched with the function or sentinel to
+  // indicate the ElementsKind if the function is the Array constructor.
+  __ bind(&initialize);
+  // Make sure the function is the Array() function
+  __ LoadArrayFunction(rcx);
+  __ cmpq(rdi, rcx);
+  __ j(not_equal, &not_array_function);
+
+  // The target function is the Array constructor; install a sentinel value in
+  // the constructor's type info cell that will track the initial ElementsKind
+  // that should be used for the array when it is constructed.
+  Handle<Object> initial_kind_sentinel =
+      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
+          GetInitialFastElementsKind());
+  __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+          initial_kind_sentinel);
+  __ jmp(&done);
+
+  __ bind(&not_array_function);
+  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rdi);
+  // No need for a write barrier here - cells are rescanned.
+
+  __ bind(&done);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // rbx : cache cell for call target
   // rdi : the function to call
@@ -3921,7 +4033,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Fast-case: Just invoke the function.
@@ -3996,14 +4112,20 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ j(not_equal, &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    if (FLAG_optimize_constructed_arrays) {
+      GenerateRecordCallTarget(masm);
+    } else {
+      GenerateRecordCallTargetNoArray(masm);
+    }
   }
 
   // Jump to the function-specific construct stub.
-  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
-  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
-  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
-  __ jmp(rbx);
+  Register jmp_reg = FLAG_optimize_constructed_arrays ? rcx : rbx;
+  __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+  __ movq(jmp_reg, FieldOperand(jmp_reg,
+                                SharedFunctionInfo::kConstructStubOffset));
+  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
+  __ jmp(jmp_reg);
 
   // rdi: called object
   // rax: number of arguments
index fd873b3d8625b4728cfb7f6e376b622aa88cd820..73e278905b88ec0c7c08d897d60ab7c785110bc9 100644 (file)
@@ -3925,12 +3925,30 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   ASSERT(ToRegister(instr->constructor()).is(rdi));
   ASSERT(ToRegister(instr->result()).is(rax));
 
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ Set(rax, instr->arity());
+  if (FLAG_optimize_constructed_arrays) {
+    // No cell in rbx for construct type feedback in optimized code
+    Handle<Object> undefined_value(isolate()->factory()->undefined_value());
+    __ Move(rbx, undefined_value);
+  }
+  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
+void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
+  ASSERT(ToRegister(instr->constructor()).is(rdi));
+  ASSERT(ToRegister(instr->result()).is(rax));
+  ASSERT(FLAG_optimize_constructed_arrays);
+
+  __ Set(rax, instr->arity());
+  __ Move(rbx, instr->hydrogen()->property_cell());
+  Handle<Code> array_construct_code =
+      isolate()->builtins()->ArrayConstructCode();
+  CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+}
+
+
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
   CallRuntime(instr->function(), instr->arity(), instr);
 }
index 7d680ae1d227683076373c9f5d2da33f62305005..1ce7d2e692f3a7b9cf6d8080154ed2e91f3748b5 100644 (file)
@@ -357,6 +357,17 @@ void LCallNew::PrintDataTo(StringStream* stream) {
 }
 
 
+void LCallNewArray::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  constructor()->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+  ASSERT(hydrogen()->property_cell()->value()->IsSmi());
+  ElementsKind kind = static_cast<ElementsKind>(
+      Smi::cast(hydrogen()->property_cell()->value())->value());
+  stream->Add(" (%s) ", ElementsKindToString(kind));
+}
+
+
 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
   arguments()->PrintTo(stream);
 
@@ -1149,6 +1160,15 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
 }
 
 
+LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
+  ASSERT(FLAG_optimize_constructed_arrays);
+  LOperand* constructor = UseFixed(instr->constructor(), rdi);
+  argument_count_ -= instr->argument_count();
+  LCallNewArray* result = new(zone()) LCallNewArray(constructor);
+  return MarkAsCall(DefineFixed(result, rax), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
   LOperand* function = UseFixed(instr->function(), rdi);
   argument_count_ -= instr->argument_count();
index 0744ed5bf2fcc53224d5088498b66b77cd05915b..2ee7668206798454ecb6769214e8d68174dbdc11 100644 (file)
@@ -68,6 +68,7 @@ class LCodeGen;
   V(CallKnownGlobal)                            \
   V(CallNamed)                                  \
   V(CallNew)                                    \
+  V(CallNewArray)                               \
   V(CallRuntime)                                \
   V(CallStub)                                   \
   V(CheckFunction)                              \
@@ -1748,6 +1749,23 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
 };
 
 
+class LCallNewArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LCallNewArray(LOperand* constructor) {
+    inputs_[0] = constructor;
+  }
+
+  LOperand* constructor() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(CallNewArray, "call-new-array")
+  DECLARE_HYDROGEN_ACCESSOR(CallNewArray)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
  public:
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
index 807e90dbec86555ee90921dc3e1349204897e398..5f467e3ceea049bd7e8ef286c0419a9b57badc3b 100644 (file)
@@ -4269,6 +4269,15 @@ void MacroAssembler::LoadGlobalFunction(int index, Register function) {
 }
 
 
+void MacroAssembler::LoadArrayFunction(Register function) {
+  movq(function,
+       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
+  movq(function,
+       Operand(function, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+}
+
+
 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                   Register map) {
   // Load the initial map.  The global functions all have initial maps.
index 35cc3f513f7d37ad0e6b3cbcbec898a02adbde85..6753724234edc5a0c322a9c8521aed0c28cc38f3 100644 (file)
@@ -1154,6 +1154,7 @@ class MacroAssembler: public Assembler {
 
   // Load the global function with the given index.
   void LoadGlobalFunction(int index, Register function);
+  void LoadArrayFunction(Register function);
 
   // Load the initial map from the global function. The registers
   // function and map can be the same.
index 3c8238710814be1beb7e55c3209fcd3840c898a5..d57fd321e43c4b71bcc2a827629ac403ed144bd7 100644 (file)
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 // Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
-// Flags: --track-allocation-sites
+// Flags: --track-allocation-sites --nooptimize-constructed-arrays
+
+// TODO(mvstanton): remove --nooptimize-constructed-arrays and enable
+// the constructed array code below when the feature is turned on
+// by default.
 
 // Test element kind of objects.
 // Since --smi-only-arrays affects builtins, its default setting at compile
@@ -36,6 +40,7 @@
 // enabled, this test takes the appropriate code path to check smi-only arrays.
 
 support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
+optimize_constructed_arrays = false;
 
 if (support_smi_only_arrays) {
   print("Tests include smi-only arrays.");
@@ -43,6 +48,12 @@ if (support_smi_only_arrays) {
   print("Tests do NOT include smi-only arrays.");
 }
 
+if (optimize_constructed_arrays) {
+  print("Tests include constructed array optimizations.");
+} else {
+  print("Tests do NOT include constructed array optimizations.");
+}
+
 var elements_kind = {
   fast_smi_only            :  'fast smi only elements',
   fast                     :  'fast elements',
@@ -66,6 +77,11 @@ function getKind(obj) {
   if (%HasDictionaryElements(obj)) return elements_kind.dictionary;
 }
 
+function isHoley(obj) {
+  if (%HasFastHoleyElements(obj)) return true;
+  return false;
+}
+
 function assertKind(expected, obj, name_opt) {
   if (!support_smi_only_arrays &&
       expected == elements_kind.fast_smi_only) {
@@ -74,9 +90,45 @@ function assertKind(expected, obj, name_opt) {
   assertEquals(expected, getKind(obj), name_opt);
 }
 
+function assertHoley(obj, name_opt) {
+  assertEquals(true, isHoley(obj), name_opt);
+}
+
+function assertNotHoley(obj, name_opt) {
+  assertEquals(false, isHoley(obj), name_opt);
+}
+
 if (support_smi_only_arrays) {
+
+  obj = [];
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = [1, 2, 3];
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = new Array();
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = new Array(0);
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = new Array(2);
+  assertHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = new Array(1,2,3);
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast_smi_only, obj);
+
+  obj = new Array(1, "hi", 2, undefined);
+  assertNotHoley(obj);
+  assertKind(elements_kind.fast, obj);
+
   function fastliteralcase(literal, value) {
-    // var literal = [1, 2, 3];
     literal[0] = value;
     return literal;
   }
@@ -104,7 +156,6 @@ if (support_smi_only_arrays) {
   // Verify that we will not pretransition the double->fast path.
   obj = fastliteralcase(get_standard_literal(), "elliot");
   assertKind(elements_kind.fast, obj);
-
   // This fails until we turn off optimistic transitions to the
   // most general elements kind seen on keyed stores. It's a goal
   // to turn it off, but for now we need it.
@@ -123,4 +174,99 @@ if (support_smi_only_arrays) {
   assertKind(elements_kind.fast, obj);
   obj = fastliteralcase_smifast(2);
   assertKind(elements_kind.fast, obj);
+
+  if (optimize_constructed_arrays) {
+    function newarraycase_smidouble(value) {
+      var a = new Array();
+      a[0] = value;
+      return a;
+    }
+
+    // Case: new Array() as allocation site, smi->double
+    obj = newarraycase_smidouble(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_smidouble(1.5);
+    assertKind(elements_kind.fast_double, obj);
+    obj = newarraycase_smidouble(2);
+    assertKind(elements_kind.fast_double, obj);
+
+    function newarraycase_smiobj(value) {
+      var a = new Array();
+      a[0] = value;
+      return a;
+    }
+
+    // Case: new Array() as allocation site, smi->fast
+    obj = newarraycase_smiobj(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_smiobj("gloria");
+    assertKind(elements_kind.fast, obj);
+    obj = newarraycase_smiobj(2);
+    assertKind(elements_kind.fast, obj);
+
+    function newarraycase_length_smidouble(value) {
+      var a = new Array(3);
+      a[0] = value;
+      return a;
+    }
+
+    // Case: new Array(length) as allocation site
+    obj = newarraycase_length_smidouble(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_length_smidouble(1.5);
+    assertKind(elements_kind.fast_double, obj);
+    obj = newarraycase_length_smidouble(2);
+    assertKind(elements_kind.fast_double, obj);
+
+    // Try to continue the transition to fast object, but
+    // we will not pretransition from double->fast, because
+    // it may hurt performance ("poisoning").
+    obj = newarraycase_length_smidouble("coates");
+    assertKind(elements_kind.fast, obj);
+    obj = newarraycase_length_smidouble(2.5);
+    // However, because of optimistic transitions, we will
+    // transition to the most general elements kind found,
+    // so we can't count on this assert yet.
+    // assertKind(elements_kind.fast_double, obj);
+
+    function newarraycase_length_smiobj(value) {
+      var a = new Array(3);
+      a[0] = value;
+      return a;
+    }
+
+    // Case: new Array(<length>) as allocation site, smi->fast
+    obj = newarraycase_length_smiobj(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_length_smiobj("gloria");
+    assertKind(elements_kind.fast, obj);
+    obj = newarraycase_length_smiobj(2);
+    assertKind(elements_kind.fast, obj);
+
+    function newarraycase_list_smidouble(value) {
+      var a = new Array(1, 2, 3);
+      a[0] = value;
+      return a;
+    }
+
+    obj = newarraycase_list_smidouble(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_list_smidouble(1.5);
+    assertKind(elements_kind.fast_double, obj);
+    obj = newarraycase_list_smidouble(2);
+    assertKind(elements_kind.fast_double, obj);
+
+    function newarraycase_list_smiobj(value) {
+      var a = new Array(4, 5, 6);
+      a[0] = value;
+      return a;
+    }
+
+    obj = newarraycase_list_smiobj(1);
+    assertKind(elements_kind.fast_smi_only, obj);
+    obj = newarraycase_list_smiobj("coates");
+    assertKind(elements_kind.fast, obj);
+    obj = newarraycase_list_smiobj(2);
+    assertKind(elements_kind.fast, obj);
+  }
 }
index f74f8be0d94c38846e5cb77521b46e95a26b7c6c..b3a71410963db083a10ce6cb1e090ed09ab25890 100644 (file)
@@ -27,6 +27,7 @@
 
 // Flags: --allow-natives-syntax --smi-only-arrays
 // Flags: --noparallel-recompilation
+// Flags: --notrack-allocation-sites
 
 // Test element kind of objects.
 // Since --smi-only-arrays affects builtins, its default setting at compile time
index 0dffd3723e888524af54e7de421671351389a44b..e28f3c3d64fe30a5bf406418f65622b537122a14 100644 (file)
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --allow-natives-syntax --smi-only-arrays
+// Flags: --allow-natives-syntax --smi-only-arrays --notrack-allocation-sites
 
 support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));