Inline the Array constructor (zero, one, and N argument cases).
author     mvstanton@chromium.org    Thu, 14 Nov 2013 12:05:09 +0000 (12:05 +0000)
committer  mvstanton@chromium.org    Thu, 14 Nov 2013 12:05:09 +0000 (12:05 +0000)
Includes the patch from issue 54583003 (dependent code).

Zero arguments - the easy case: allocate an empty array with the default preallocated capacity.

1 argument - three special cases (see the sketch after this list):
  a) If the length is a constant in the valid array-length range,
     there is no need to check it at runtime.
  b) Respect DoNotInline feedback on the AllocationSite for cases
     where the argument is not a smi, or is an integer large enough
     that the array must be backed by dictionary elements.
  c) If the kind feedback is non-holey and the length is non-constant,
     we would have to generate a lot of code to be correct. Rather than
     generate it, emit a deopt for the non-empty case; the site then
     records holey feedback and the reoptimized code handles it.
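
A minimal sketch of the one-argument behavior (assumes running in d8 with
--allow-natives-syntax; the function name is illustrative, and the same
scenarios appear in the mjsunit test updated below):

  // Case a needs no runtime length check, e.g. a literal `new Array(10)`.
  function bar(len) { return new Array(len); }
  bar(0); bar(0);                  // AllocationSite records packed smi feedback
  %OptimizeFunctionOnNextCall(bar);
  bar(0);                          // constructor is inlined; bar stays optimized
  bar(1);                          // case c: non-empty array despite packed feedback
                                   //   -> deopt, the site now records holey feedback
  %OptimizeFunctionOnNextCall(bar);
  bar(100);                        // reoptimized with holey feedback; no more deopts
  // Case b: a length like 100000 that needs dictionary elements deopts and
  // marks the AllocationSite DoNotInline, so reoptimized code calls the stub.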

N arguments - one special case (see the sketch after this list):
  a) If a deopt ever occurs because an input argument is not
     compatible with the elements kind, set the DoNotInline flag
     on the AllocationSite.
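
A minimal sketch of the N-argument behavior (same assumptions; mirrors the
mjsunit test updated below):

  function barn(a, b, c) { return new Array(a, b, c); }
  barn(1, 2, 3); barn(1, 2, 3);    // site records smi elements feedback
  %OptimizeFunctionOnNextCall(barn);
  barn(1, 2, 3);                   // the three-element allocation is inlined
  barn(1, "oops", 3);              // non-smi argument -> deopt; the AllocationSite
                                   //   is marked DoNotInline
  %OptimizeFunctionOnNextCall(barn);
  barn(1, "oops", 3);              // stays optimized: the constructor call is no
                                   //   longer inlined, so the deopt cannot recur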

BUG=
R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/55933002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17741 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

18 files changed:
src/arm/code-stubs-arm.cc
src/arm/stub-cache-arm.cc
src/code-stubs-hydrogen.cc
src/elements-kind.cc
src/elements-kind.h
src/heap.cc
src/hydrogen.cc
src/hydrogen.h
src/ia32/code-stubs-ia32.cc
src/ia32/stub-cache-ia32.cc
src/objects-inl.h
src/objects-printer.cc
src/objects.cc
src/objects.h
src/runtime.cc
src/x64/code-stubs-x64.cc
src/x64/stub-cache-x64.cc
test/mjsunit/array-constructor-feedback.js

src/arm/code-stubs-arm.cc
index 94e114f..8db65bc 100644
@@ -5850,11 +5850,13 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
     }
 
-    // Save the resulting elements kind in type info
-    __ SmiTag(r3);
-    __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
-    __ str(r3, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
-    __ SmiUntag(r3);
+    // Save the resulting elements kind in type info. We can't just store r3
+    // in the AllocationSite::transition_info field because elements kind is
+    // restricted to a portion of the field...upper bits need to be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
+    __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
+    __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5996,6 +5998,8 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
 
   __ ldr(r3, FieldMemOperand(r3, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(r3);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
src/arm/stub-cache-arm.cc
index a7ddc8e..0362c1a 100644
@@ -1686,7 +1686,7 @@ Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ mov(r0, Operand(argc));
   __ mov(r2, Operand(site_feedback_cell));
src/code-stubs-hydrogen.cc
index a790179..eb3bac8 100644
@@ -694,27 +694,7 @@ HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
   HInstruction* argument = Add<HAccessArgumentsAt>(
       elements, constant_one, constant_zero);
 
-  HConstant* max_alloc_length =
-      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
-  const int initial_capacity = JSArray::kPreallocatedArrayElements;
-  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
-
-  HInstruction* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
-  IfBuilder if_builder(this);
-  if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
-                                          Token::EQ);
-  if_builder.Then();
-  Push(initial_capacity_node);  // capacity
-  Push(constant_zero);  // length
-  if_builder.Else();
-  Push(checked_arg);  // capacity
-  Push(checked_arg);  // length
-  if_builder.End();
-
-  // Figure out total size
-  HValue* length = Pop();
-  HValue* capacity = Pop();
-  return array_builder->AllocateArray(capacity, length, true);
+  return BuildAllocateArrayFromLength(array_builder, argument);
 }
 
 
@@ -725,11 +705,16 @@ HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
   // the array because they aren't compatible with a smi array.
   // If it's a double array, no problem, and if it's fast then no
   // problem either because doubles are boxed.
+  //
+  // TODO(mvstanton): consider an instruction to memset fill the array
+  // with zero in this case instead.
   HValue* length = GetArgumentsLength();
-  bool fill_with_hole = IsFastSmiElementsKind(kind);
+  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+      ? JSArrayBuilder::FILL_WITH_HOLE
+      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
   HValue* new_object = array_builder->AllocateArray(length,
                                                     length,
-                                                    fill_with_hole);
+                                                    fill_mode);
   HValue* elements = array_builder->GetElementsLocation();
   ASSERT(elements != NULL);
 
src/elements-kind.cc
index 213aa35..16f5bff 100644
@@ -68,6 +68,14 @@ struct InitializeFastElementsKindSequence {
     fast_elements_kind_sequence[3] = FAST_HOLEY_DOUBLE_ELEMENTS;
     fast_elements_kind_sequence[4] = FAST_ELEMENTS;
     fast_elements_kind_sequence[5] = FAST_HOLEY_ELEMENTS;
+
+    // Verify that kFastElementsKindPackedToHoley is correct.
+    STATIC_ASSERT(FAST_SMI_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_SMI_ELEMENTS);
+    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_DOUBLE_ELEMENTS);
+    STATIC_ASSERT(FAST_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_ELEMENTS);
   }
 };
 
src/elements-kind.h
index da15192..f5280d6 100644
@@ -77,6 +77,10 @@ const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
 const int kFastElementsKindCount = LAST_FAST_ELEMENTS_KIND -
     FIRST_FAST_ELEMENTS_KIND + 1;
 
+// The number to add to a packed elements kind to reach a holey elements kind
+const int kFastElementsKindPackedToHoley =
+    FAST_HOLEY_SMI_ELEMENTS - FAST_SMI_ELEMENTS;
+
 const char* ElementsKindToString(ElementsKind kind);
 void PrintElementsKind(FILE* out, ElementsKind kind);
 
src/heap.cc
index b5d03bc..e77d8d5 100644
@@ -4598,8 +4598,7 @@ MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
   // advice
   Map* initial_map = constructor->initial_map();
 
-  Smi* smi = Smi::cast(allocation_site->transition_info());
-  ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+  ElementsKind to_kind = allocation_site->GetElementsKind();
   AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
   if (to_kind != initial_map->elements_kind()) {
     MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind);
src/hydrogen.cc
index 33586f3..b47ec80 100644
@@ -1909,6 +1909,48 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
 }
 
 
+
+HValue* HGraphBuilder::BuildAllocateArrayFromLength(
+    JSArrayBuilder* array_builder,
+    HValue* length_argument) {
+  if (length_argument->IsConstant() &&
+      HConstant::cast(length_argument)->HasSmiValue()) {
+    int array_length = HConstant::cast(length_argument)->Integer32Value();
+    HValue* new_object = array_length == 0
+        ? array_builder->AllocateEmptyArray()
+        : array_builder->AllocateArray(length_argument, length_argument);
+    return new_object;
+  }
+
+  HValue* constant_zero = graph()->GetConstant0();
+  HConstant* max_alloc_length =
+      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
+  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
+                                                   max_alloc_length);
+  IfBuilder if_builder(this);
+  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
+                                          Token::EQ);
+  if_builder.Then();
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
+  Push(initial_capacity_node);  // capacity
+  Push(constant_zero);          // length
+  if_builder.Else();
+  if (!(top_info()->IsStub()) &&
+      IsFastPackedElementsKind(array_builder->kind())) {
+    // We'll come back later with better (holey) feedback.
+    if_builder.Deopt("Holey array despite packed elements_kind feedback");
+  }
+  Push(checked_length);         // capacity
+  Push(checked_length);         // length
+  if_builder.End();
+
+  // Figure out total size
+  HValue* length = Pop();
+  HValue* capacity = Pop();
+  return array_builder->AllocateArray(capacity, length);
+}
+
 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                              HValue* capacity) {
   int elements_size;
@@ -2097,19 +2139,18 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
       : Add<HConstant>(nan_double);
 
   // Special loop unfolding case
-  static const int kLoopUnfoldLimit = 4;
-  bool unfold_loop = false;
-  int initial_capacity = JSArray::kPreallocatedArrayElements;
-  if (from->ActualValue()->IsConstant() && to->ActualValue()->IsConstant() &&
-      initial_capacity <= kLoopUnfoldLimit) {
+  static const int kLoopUnfoldLimit = 8;
+  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
+  int initial_capacity = -1;
+  if (from->ActualValue()->IsConstant() && to->ActualValue()->IsConstant()) {
     HConstant* constant_from = HConstant::cast(from->ActualValue());
     HConstant* constant_to = HConstant::cast(to->ActualValue());
 
     if (constant_from->HasInteger32Value() &&
         constant_from->Integer32Value() == 0 &&
         constant_to->HasInteger32Value() &&
-        constant_to->Integer32Value() == initial_capacity) {
-      unfold_loop = true;
+        constant_to->Integer32Value() <= kLoopUnfoldLimit) {
+      initial_capacity = constant_to->Integer32Value();
     }
   }
 
@@ -2119,7 +2160,7 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
     elements_kind = FAST_HOLEY_ELEMENTS;
   }
 
-  if (unfold_loop) {
+  if (initial_capacity >= 0) {
     for (int i = 0; i < initial_capacity; i++) {
       HInstruction* key = Add<HConstant>(i);
       Add<HStoreKeyed>(elements, key, hole, elements_kind);
@@ -2378,6 +2419,13 @@ HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
 
 
 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
+  if (!builder()->top_info()->IsStub()) {
+    // A constant map is fine.
+    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
+                    builder()->isolate());
+    return builder()->Add<HConstant>(map);
+  }
+
   if (kind_ == GetInitialFastElementsKind()) {
     // No need for a context lookup if the kind_ matches the initial
     // map, because we can just load the map in that case.
@@ -2420,12 +2468,14 @@ HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
 
   HInstruction* elements_size_value =
       builder()->Add<HConstant>(elements_size());
-  HInstruction* mul = builder()->Add<HMul>(length_node, elements_size_value);
-  mul->ClearFlag(HValue::kCanOverflow);
-
+  HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
+                                    length_node, elements_size_value);
+  builder()->AddInstruction(mul);
   HInstruction* base = builder()->Add<HConstant>(base_size);
-  HInstruction* total_size = builder()->Add<HAdd>(base, mul);
+  HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
+                                       base, mul);
   total_size->ClearFlag(HValue::kCanOverflow);
+  builder()->AddInstruction(total_size);
   return total_size;
 }
 
@@ -2449,23 +2499,22 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
   HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
   return AllocateArray(size_in_bytes,
                        capacity,
-                       builder()->graph()->GetConstant0(),
-                       true);
+                       builder()->graph()->GetConstant0());
 }
 
 
 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
                                                      HValue* length_field,
-                                                     bool fill_with_hole) {
+                                                     FillMode fill_mode) {
   HValue* size_in_bytes = EstablishAllocationSize(capacity);
-  return AllocateArray(size_in_bytes, capacity, length_field, fill_with_hole);
+  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
 }
 
 
 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                                      HValue* capacity,
                                                      HValue* length_field,
-                                                     bool fill_with_hole) {
+                                                     FillMode fill_mode) {
   // These HForceRepresentations are because we store these as fields in the
   // objects we construct, and an int32-to-smi HChange could deopt. Accept
   // the deopt possibility now, before allocation occurs.
@@ -2499,7 +2548,7 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
   // Initialize the elements
   builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
 
-  if (fill_with_hole) {
+  if (fill_mode == FILL_WITH_HOLE) {
     builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                          graph()->GetConstant0(), capacity);
   }
@@ -7535,6 +7584,71 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
 }
 
 
+void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
+  NoObservableSideEffectsScope no_effects(this);
+
+  int argument_count = expr->arguments()->length();
+  // We should at least have the constructor on the expression stack.
+  HValue* constructor = environment()->ExpressionStackAt(argument_count);
+
+  ElementsKind kind = expr->elements_kind();
+  Handle<Cell> cell = expr->allocation_info_cell();
+  AllocationSite* site = AllocationSite::cast(cell->value());
+
+  // Register on the site for deoptimization if the cell value changes.
+  site->AddDependentCompilationInfo(AllocationSite::TRANSITIONS, top_info());
+  HInstruction* cell_instruction = Add<HConstant>(cell);
+
+  // In the single constant argument case, we may have to adjust elements kind
+  // to avoid creating a packed non-empty array.
+  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
+    HValue* argument = environment()->Top();
+    if (argument->IsConstant()) {
+      HConstant* constant_argument = HConstant::cast(argument);
+      ASSERT(constant_argument->HasSmiValue());
+      int constant_array_size = constant_argument->Integer32Value();
+      if (constant_array_size != 0) {
+        kind = GetHoleyElementsKind(kind);
+      }
+    }
+  }
+
+  // Build the array.
+  JSArrayBuilder array_builder(this,
+                               kind,
+                               cell_instruction,
+                               constructor,
+                               DISABLE_ALLOCATION_SITES);
+  HValue* new_object;
+  if (argument_count == 0) {
+    new_object = array_builder.AllocateEmptyArray();
+  } else if (argument_count == 1) {
+    HValue* argument = environment()->Top();
+    new_object = BuildAllocateArrayFromLength(&array_builder, argument);
+  } else {
+    HValue* length = Add<HConstant>(argument_count);
+    // Smi arrays need to initialize array elements with the hole because
+    // bailout could occur if the arguments don't fit in a smi.
+    //
+    // TODO(mvstanton): If all the arguments are constants in smi range, then
+    // we could set fill_with_hole to false and save a few instructions.
+    JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+        ? JSArrayBuilder::FILL_WITH_HOLE
+        : JSArrayBuilder::DONT_FILL_WITH_HOLE;
+    new_object = array_builder.AllocateArray(length, length, fill_mode);
+    HValue* elements = array_builder.GetElementsLocation();
+    for (int i = 0; i < argument_count; i++) {
+      HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
+      HValue* constant_i = Add<HConstant>(i);
+      Add<HStoreKeyed>(elements, constant_i, value, kind);
+    }
+  }
+
+  Drop(argument_count + 1);  // drop constructor and args.
+  ast_context()->ReturnValue(new_object);
+}
+
+
 // Checks whether allocation using the given constructor can be inlined.
 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
   return constructor->has_initial_map() &&
@@ -7544,6 +7658,50 @@ static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
 }
 
 
+bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
+  bool inline_ok = false;
+  Handle<JSFunction> caller = current_info()->closure();
+  Handle<JSFunction> target(isolate()->global_context()->array_function(),
+                            isolate());
+  int argument_count = expr->arguments()->length();
+  // We should have the function plus array arguments on the environment stack.
+  ASSERT(environment()->length() >= (argument_count + 1));
+  Handle<Cell> cell = expr->allocation_info_cell();
+  AllocationSite* site = AllocationSite::cast(cell->value());
+  if (site->CanInlineCall()) {
+    // We also want to avoid inlining in certain 1 argument scenarios.
+    if (argument_count == 1) {
+      HValue* argument = Top();
+      if (argument->IsConstant()) {
+        // Do not inline if the constant length argument is not a smi or
+        // outside the valid range for a fast array.
+        HConstant* constant_argument = HConstant::cast(argument);
+        if (constant_argument->HasSmiValue()) {
+          int value = constant_argument->Integer32Value();
+          inline_ok = value >= 0 &&
+              value < JSObject::kInitialMaxFastElementArray;
+          if (!inline_ok) {
+            TraceInline(target, caller,
+                        "Length outside of valid array range");
+          }
+        }
+      } else {
+        inline_ok = true;
+      }
+    } else {
+      inline_ok = true;
+    }
+  } else {
+    TraceInline(target, caller, "AllocationSite requested no inlining.");
+  }
+
+  if (inline_ok) {
+    TraceInline(target, caller, NULL);
+  }
+  return inline_ok;
+}
+
+
 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
@@ -7552,14 +7710,15 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
   int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
   Factory* factory = isolate()->factory();
 
+  // The constructor function is on the stack in the unoptimized code
+  // during evaluation of the arguments.
+  CHECK_ALIVE(VisitForValue(expr->expression()));
+  HValue* function = Top();
+  CHECK_ALIVE(VisitExpressions(expr->arguments()));
+
   if (FLAG_inline_construct &&
       expr->IsMonomorphic() &&
       IsAllocationInlineable(expr->target())) {
-    // The constructor function is on the stack in the unoptimized code
-    // during evaluation of the arguments.
-    CHECK_ALIVE(VisitForValue(expr->expression()));
-    HValue* function = Top();
-    CHECK_ALIVE(VisitExpressions(expr->arguments()));
     Handle<JSFunction> constructor = expr->target();
     HValue* check = Add<HCheckValue>(function, constructor);
 
@@ -7646,19 +7805,24 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
     // argument to the construct call.
     Handle<JSFunction> array_function(
         isolate()->global_context()->array_function(), isolate());
-    CHECK_ALIVE(VisitArgument(expr->expression()));
-    HValue* constructor = HPushArgument::cast(Top())->argument();
-    CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+    bool use_call_new_array = expr->target().is_identical_to(array_function);
+    Handle<Cell> cell = expr->allocation_info_cell();
+    if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
+      // Verify we are still calling the array function for our native context.
+      Add<HCheckValue>(function, array_function);
+      BuildInlinedCallNewArray(expr);
+      return;
+    }
+
     HBinaryCall* call;
-    if (expr->target().is_identical_to(array_function)) {
-      Handle<Cell> cell = expr->allocation_info_cell();
-      Add<HCheckValue>(constructor, array_function);
-      call = New<HCallNewArray>(constructor, argument_count,
-                                cell, expr->elements_kind());
+    if (use_call_new_array) {
+      Add<HCheckValue>(function, array_function);
+      call = New<HCallNewArray>(function, argument_count, cell,
+                                expr->elements_kind());
     } else {
-      call = New<HCallNew>(constructor, argument_count);
+      call = New<HCallNew>(function, argument_count);
     }
-    Drop(argument_count);
+    PreProcessCall(call);
     return ast_context()->ReturnInstruction(call, expr->id());
   }
 }
src/hydrogen.h
index b3cb8ff..7987a97 100644
@@ -1585,9 +1585,16 @@ class HGraphBuilder {
                    ElementsKind kind,
                    HValue* constructor_function);
 
+    enum FillMode {
+      DONT_FILL_WITH_HOLE,
+      FILL_WITH_HOLE
+    };
+
+    ElementsKind kind() { return kind_; }
+
     HValue* AllocateEmptyArray();
     HValue* AllocateArray(HValue* capacity, HValue* length_field,
-                          bool fill_with_hole);
+                          FillMode fill_mode = FILL_WITH_HOLE);
     HValue* GetElementsLocation() { return elements_location_; }
 
    private:
@@ -1607,7 +1614,8 @@ class HGraphBuilder {
     HValue* EstablishEmptyArrayAllocationSize();
     HValue* EstablishAllocationSize(HValue* length_node);
     HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity,
-                          HValue* length_field,  bool fill_with_hole);
+                          HValue* length_field,
+                          FillMode fill_mode = FILL_WITH_HOLE);
 
     HGraphBuilder* builder_;
     ElementsKind kind_;
@@ -1617,6 +1625,9 @@ class HGraphBuilder {
     HInnerAllocatedObject* elements_location_;
   };
 
+  HValue* BuildAllocateArrayFromLength(JSArrayBuilder* array_builder,
+                                       HValue* length_argument);
+
   HValue* BuildAllocateElements(ElementsKind kind,
                                 HValue* capacity);
 
@@ -2101,6 +2112,9 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
                                        SmallMapList* types,
                                        Handle<String> name);
 
+  bool IsCallNewArrayInlineable(CallNew* expr);
+  void BuildInlinedCallNewArray(CallNew* expr);
+
   class PropertyAccessInfo {
    public:
     PropertyAccessInfo(Isolate* isolate, Handle<Map> map, Handle<String> name)
src/ia32/code-stubs-ia32.cc
index adc2d50..a1aa022 100644
@@ -5785,10 +5785,12 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
 
-    // Save the resulting elements kind in type info
-    __ SmiTag(edx);
-    __ mov(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset), edx);
-    __ SmiUntag(edx);
+    // Save the resulting elements kind in type info. We can't just store edx
+    // in the AllocationSite::transition_info field because elements kind is
+    // restricted to a portion of the field...upper bits need to be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ add(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset),
+           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5929,8 +5931,11 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
       masm->isolate()->factory()->allocation_site_map()));
   __ j(not_equal, &no_info);
 
+  // Only look at the lower 15 bits (ElementsKindBits) of the transition info.
   __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(edx);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
src/ia32/stub-cache-ia32.cc
index 23e2398..43576d6 100644
@@ -1705,7 +1705,7 @@ Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ mov(eax, Immediate(argc));
   __ mov(ebx, site_feedback_cell);
src/objects-inl.h
index a89c049..5014c91 100644
@@ -1330,6 +1330,7 @@ bool JSObject::ShouldTrackAllocationInfo() {
 
 
 void AllocationSite::Initialize() {
+  set_transition_info(Smi::FromInt(0));
   SetElementsKind(GetInitialFastElementsKind());
   set_nested_site(Smi::FromInt(0));
   set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
@@ -1367,6 +1368,21 @@ inline bool AllocationSite::CanTrack(InstanceType type) {
 }
 
 
+inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
+    Reason reason) {
+  switch (reason) {
+    case TENURING:
+      return DependentCode::kAllocationSiteTenuringChangedGroup;
+      break;
+    case TRANSITIONS:
+      return DependentCode::kAllocationSiteTransitionChangedGroup;
+      break;
+  }
+  UNREACHABLE();
+  return DependentCode::kAllocationSiteTransitionChangedGroup;
+}
+
+
 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
   object->ValidateElements();
   ElementsKind elements_kind = object->map()->elements_kind();
src/objects-printer.cc
index 6d2811b..5260193 100644
@@ -1126,17 +1126,12 @@ void AllocationSite::AllocationSitePrint(FILE* out) {
   PrintF(out, "\n - nested site: ");
   nested_site()->ShortPrint(out);
   PrintF(out, "\n - transition_info: ");
-  if (transition_info()->IsCell()) {
-    Cell* cell = Cell::cast(transition_info());
-    Object* cell_contents = cell->value();
-    if (cell_contents->IsSmi()) {
-      ElementsKind kind = static_cast<ElementsKind>(
-          Smi::cast(cell_contents)->value());
-      PrintF(out, "Array allocation with ElementsKind ");
-      PrintElementsKind(out, kind);
-      PrintF(out, "\n");
-      return;
-    }
+  if (transition_info()->IsSmi()) {
+    ElementsKind kind = GetElementsKind();
+    PrintF(out, "Array allocation with ElementsKind ");
+    PrintElementsKind(out, kind);
+    PrintF(out, "\n");
+    return;
   } else if (transition_info()->IsJSArray()) {
     PrintF(out, "Array literal ");
     transition_info()->ShortPrint(out);
src/objects.cc
index 935e875..3e1c251 100644
@@ -11633,6 +11633,9 @@ DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
   AllowDeferredHandleDereference dependencies_are_safe;
   if (group == DependentCode::kPropertyCellChangedGroup) {
     return Handle<PropertyCell>::cast(object)->dependent_code();
+  } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
+      group == DependentCode::kAllocationSiteTransitionChangedGroup) {
+    return Handle<AllocationSite>::cast(object)->dependent_code();
   }
   return Handle<Map>::cast(object)->dependent_code();
 }
@@ -12804,21 +12807,11 @@ bool AllocationSite::IsNestedSite() {
 }
 
 
-MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
-  if (!FLAG_track_allocation_sites || !IsJSArray()) {
-    return this;
-  }
-
-  AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
-  if (memento == NULL || !memento->IsValid()) {
-    return this;
-  }
+MaybeObject* AllocationSite::DigestTransitionFeedback(ElementsKind to_kind) {
+  Isolate* isolate = GetIsolate();
 
-  // Walk through to the Allocation Site
-  AllocationSite* site = memento->GetAllocationSite();
-  if (site->SitePointsToLiteral() &&
-      site->transition_info()->IsJSArray()) {
-    JSArray* transition_info = JSArray::cast(site->transition_info());
+  if (SitePointsToLiteral() && transition_info()->IsJSArray()) {
+    JSArray* transition_info = JSArray::cast(this->transition_info());
     ElementsKind kind = transition_info->GetElementsKind();
     // if kind is holey ensure that to_kind is as well.
     if (IsHoleyElementsKind(kind)) {
@@ -12829,9 +12822,9 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
       // function, so we shouldn't make new instances of it very often.
       uint32_t length = 0;
       CHECK(transition_info->length()->ToArrayIndex(&length));
-      if (length <= AllocationSite::kMaximumArrayBytesToPretransition) {
+      if (length <= kMaximumArrayBytesToPretransition) {
         if (FLAG_trace_track_allocation_sites) {
-          bool is_nested = site->IsNestedSite();
+          bool is_nested = IsNestedSite();
           PrintF(
               "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
               reinterpret_cast<void*>(this),
@@ -12839,11 +12832,14 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
         }
-        return transition_info->TransitionElementsKind(to_kind);
+        MaybeObject* result = transition_info->TransitionElementsKind(to_kind);
+        if (result->IsFailure()) return result;
+        dependent_code()->DeoptimizeDependentCodeGroup(
+            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
       }
     }
   } else {
-    ElementsKind kind = site->GetElementsKind();
+    ElementsKind kind = GetElementsKind();
     // if kind is holey ensure that to_kind is as well.
     if (IsHoleyElementsKind(kind)) {
       to_kind = GetHoleyElementsKind(to_kind);
@@ -12855,13 +12851,50 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
                ElementsKindToString(kind),
                ElementsKindToString(to_kind));
       }
-      site->set_transition_info(Smi::FromInt(to_kind));
+      SetElementsKind(to_kind);
+      dependent_code()->DeoptimizeDependentCodeGroup(
+          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
     }
   }
   return this;
 }
 
 
+void AllocationSite::AddDependentCompilationInfo(Reason reason,
+                                                 CompilationInfo* info) {
+  DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+  Handle<DependentCode> dep(dependent_code());
+  Handle<DependentCode> codes =
+      DependentCode::Insert(dep, group, info->object_wrapper());
+  if (*codes != dependent_code()) set_dependent_code(*codes);
+  info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
+}
+
+
+void AllocationSite::AddDependentCode(Reason reason, Handle<Code> code) {
+  DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+  Handle<DependentCode> codes = DependentCode::Insert(
+      Handle<DependentCode>(dependent_code()), group, code);
+  if (*codes != dependent_code()) set_dependent_code(*codes);
+}
+
+
+MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
+  if (!FLAG_track_allocation_sites || !IsJSArray()) {
+    return this;
+  }
+
+  AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
+  if (memento == NULL || !memento->IsValid()) {
+    return this;
+  }
+
+  // Walk through to the Allocation Site
+  AllocationSite* site = memento->GetAllocationSite();
+  return site->DigestTransitionFeedback(to_kind);
+}
+
+
 MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
   ASSERT(!map()->is_observed());
   ElementsKind from_kind = map()->elements_kind();
src/objects.h
index 2fc1d23..c912f7c 100644
@@ -5565,7 +5565,13 @@ class DependentCode: public FixedArray {
     // Group of code that depends on global property values in property cells
     // not being changed.
     kPropertyCellChangedGroup,
-    kGroupCount = kPropertyCellChangedGroup + 1
+    // Group of code that depends on tenuring information in AllocationSites
+    // not being changed.
+    kAllocationSiteTenuringChangedGroup,
+    // Group of code that depends on element transition information in
+    // AllocationSites not being changed.
+    kAllocationSiteTransitionChangedGroup,
+    kGroupCount = kAllocationSiteTransitionChangedGroup + 1
   };
 
   // Array for holding the index of the first code object of each group.
@@ -8087,13 +8093,31 @@ class AllocationSite: public Struct {
   // This method is expensive, it should only be called for reporting.
   bool IsNestedSite();
 
+  class ElementsKindBits:       public BitField<ElementsKind, 0,  15> {};
+  class UnusedBits:             public BitField<int,          15, 14> {};
+  class DoNotInlineBit:         public BitField<bool,         29,  1> {};
+
   ElementsKind GetElementsKind() {
     ASSERT(!SitePointsToLiteral());
-    return static_cast<ElementsKind>(Smi::cast(transition_info())->value());
+    int value = Smi::cast(transition_info())->value();
+    return ElementsKindBits::decode(value);
   }
 
   void SetElementsKind(ElementsKind kind) {
-    set_transition_info(Smi::FromInt(static_cast<int>(kind)));
+    int value = Smi::cast(transition_info())->value();
+    set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
+                        SKIP_WRITE_BARRIER);
+  }
+
+  bool CanInlineCall() {
+    int value = Smi::cast(transition_info())->value();
+    return DoNotInlineBit::decode(value) == 0;
+  }
+
+  void SetDoNotInlineCall() {
+    int value = Smi::cast(transition_info())->value();
+    set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
+                        SKIP_WRITE_BARRIER);
   }
 
   bool SitePointsToLiteral() {
@@ -8103,6 +8127,16 @@ class AllocationSite: public Struct {
     return transition_info()->IsJSArray() || transition_info()->IsJSObject();
   }
 
+  MaybeObject* DigestTransitionFeedback(ElementsKind to_kind);
+
+  enum Reason {
+    TENURING,
+    TRANSITIONS
+  };
+
+  void AddDependentCompilationInfo(Reason reason, CompilationInfo* info);
+  void AddDependentCode(Reason reason, Handle<Code> code);
+
   DECLARE_PRINTER(AllocationSite)
   DECLARE_VERIFIER(AllocationSite)
 
@@ -8123,6 +8157,7 @@ class AllocationSite: public Struct {
                               kSize> BodyDescriptor;
 
  private:
+  inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
   DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
 };
 
src/runtime.cc
index 80b6827..9df3ff8 100644
@@ -14688,7 +14688,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsAccessAllowedForObserver) {
 
 static MaybeObject* ArrayConstructorCommon(Isolate* isolate,
                                            Handle<JSFunction> constructor,
-                                           Handle<Object> type_info,
+                                           Handle<AllocationSite> site,
                                            Arguments* caller_args) {
   bool holey = false;
   bool can_use_type_feedback = true;
@@ -14710,14 +14710,7 @@ static MaybeObject* ArrayConstructorCommon(Isolate* isolate,
 
   JSArray* array;
   MaybeObject* maybe_array;
-  if (!type_info.is_null() &&
-      *type_info != isolate->heap()->undefined_value() &&
-      Cell::cast(*type_info)->value()->IsAllocationSite() &&
-      can_use_type_feedback) {
-    Handle<Cell> cell = Handle<Cell>::cast(type_info);
-    Handle<AllocationSite> site = Handle<AllocationSite>(
-        AllocationSite::cast(cell->value()), isolate);
-    ASSERT(!site->SitePointsToLiteral());
+  if (!site.is_null() && can_use_type_feedback) {
     ElementsKind to_kind = site->GetElementsKind();
     if (holey && !IsFastHoleyElementsKind(to_kind)) {
       to_kind = GetHoleyElementsKind(to_kind);
@@ -14743,8 +14736,17 @@ static MaybeObject* ArrayConstructorCommon(Isolate* isolate,
   maybe_array = isolate->heap()->AllocateJSArrayStorage(array, 0, 0,
       DONT_INITIALIZE_ARRAY_ELEMENTS);
   if (maybe_array->IsFailure()) return maybe_array;
+  ElementsKind old_kind = array->GetElementsKind();
   maybe_array = ArrayConstructInitializeElements(array, caller_args);
   if (maybe_array->IsFailure()) return maybe_array;
+  if (!site.is_null() &&
+      (old_kind != array->GetElementsKind() ||
+       !can_use_type_feedback)) {
+    // The arguments passed in caused a transition. This kind of complexity
+    // can't be dealt with in the inlined Hydrogen array constructor case.
+    // We must mark the AllocationSite as uninlinable.
+    site->SetDoNotInlineCall();
+  }
   return array;
 }
 
@@ -14771,9 +14773,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConstructor) {
     ASSERT(arg_count == caller_args->length());
   }
 #endif
+
+  Handle<AllocationSite> site;
+  if (!type_info.is_null() &&
+      *type_info != isolate->heap()->undefined_value() &&
+      Cell::cast(*type_info)->value()->IsAllocationSite()) {
+    site = Handle<AllocationSite>(
+        AllocationSite::cast(Cell::cast(*type_info)->value()), isolate);
+    ASSERT(!site->SitePointsToLiteral());
+  }
+
   return ArrayConstructorCommon(isolate,
                                 constructor,
-                                type_info,
+                                site,
                                 caller_args);
 }
 
@@ -14796,7 +14808,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalArrayConstructor) {
 #endif
   return ArrayConstructorCommon(isolate,
                                 constructor,
-                                Handle<Object>::null(),
+                                Handle<AllocationSite>::null(),
                                 caller_args);
 }
 
src/x64/code-stubs-x64.cc
index 9d95fc4..4ccda2d 100644
@@ -5593,10 +5593,12 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
 
-    // Save the resulting elements kind in type info
-    __ Integer32ToSmi(rdx, rdx);
-    __ movq(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset), rdx);
-    __ SmiToInteger32(rdx, rdx);
+    // Save the resulting elements kind in type info. We can't just store rdx
+    // in the AllocationSite::transition_info field because elements kind is
+    // restricted to a portion of the field...upper bits need to be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ SmiAddConstant(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset),
+                      Smi::FromInt(kFastElementsKindPackedToHoley));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5738,8 +5740,11 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
          masm->isolate()->factory()->allocation_site_map());
   __ j(not_equal, &no_info);
 
+  // Only look at the lower 15 bits (ElementsKindBits) of the transition info.
   __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
   __ SmiToInteger32(rdx, rdx);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
src/x64/stub-cache-x64.cc
index 499ccdf..cd5291c 100644
@@ -1629,7 +1629,7 @@ Handle<Code> CallStubCompiler::CompileArrayCodeCall(
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ movq(rax, Immediate(argc));
   __ Move(rbx, site_feedback_cell);
test/mjsunit/array-constructor-feedback.js
index 72ff12c..bfe50d2 100644
@@ -138,8 +138,8 @@ if (support_smi_only_arrays) {
   })();
 
 
-  // Test: Ensure that bailouts from the stub don't deopt a crankshafted
-  // method with a call to that stub.
+  // Test: Ensure that inlined array calls in crankshaft learn from deopts
+  // based on the move to a dictionary for the array.
   (function() {
     function bar(len) {
       return new Array(len);
@@ -152,10 +152,16 @@ if (support_smi_only_arrays) {
     a = bar(10);
     assertKind(elements_kind.fast, a);
     assertOptimized(bar);
-    // The stub bails out, but the method call should be fine.
+    // bar should deopt because the length is too large.
+    a = bar(100000);
+    assertUnoptimized(bar);
+    assertKind(elements_kind.dictionary, a);
+    // The allocation site now has feedback that means the array constructor
+    // will not be inlined.
+    %OptimizeFunctionOnNextCall(bar);
     a = bar(100000);
-    assertOptimized(bar);
     assertKind(elements_kind.dictionary, a);
+    assertOptimized(bar);
 
     // If the argument isn't a smi, it bails out as well
     a = bar("oops");
@@ -172,8 +178,12 @@ if (support_smi_only_arrays) {
     barn(1, 2, 3);
     assertOptimized(barn);
     a = barn(1, "oops", 3);
-    // The stub should bail out but the method should remain optimized.
+    // The method should deopt, but learn from the failure to avoid inlining
+    // the array.
     assertKind(elements_kind.fast, a);
+    assertUnoptimized(barn);
+    %OptimizeFunctionOnNextCall(barn);
+    a = barn(1, "oops", 3);
     assertOptimized(barn);
   })();
 
@@ -219,4 +229,29 @@ if (support_smi_only_arrays) {
     assertFalse(Realm.eval(contextB, "bar2();") instanceof Array);
     assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
   })();
+
+  // Test: create array with packed feedback, then optimize/inline
+  // function. Verify that if we ask for a holey array then we deopt.
+  // Reoptimization will proceed with the correct feedback and we
+  // won't deopt anymore.
+  (function() {
+    function bar(len) { return new Array(len); }
+    bar(0);
+    bar(0);
+    %OptimizeFunctionOnNextCall(bar);
+    a = bar(0);
+    assertOptimized(bar);
+    assertFalse(isHoley(a));
+    a = bar(1);  // ouch!
+    assertUnoptimized(bar);
+    assertTrue(isHoley(a));
+    // Try again
+    %OptimizeFunctionOnNextCall(bar);
+    a = bar(100);
+    assertOptimized(bar);
+    assertTrue(isHoley(a));
+    a = bar(0);
+    assertOptimized(bar);
+    assertTrue(isHoley(a));
+  })();
 }