From 198c4769f28915681bac5a8785644a6ba0c88055 Mon Sep 17 00:00:00 2001
From: "danno@chromium.org"
Date: Fri, 25 Apr 2014 11:54:01 +0000
Subject: [PATCH] Unify and simplify the FastCloneShallowArrayStub

- Don't bake the length/capacity into full codegen calls of stubs,
  allowing boilerplates to increase their capacity without regenerating
  code.
- Unify all variants of the clone stub into a single, length-independent
  version.
- Various tweaks to make sure that the clone stub doesn't spill and
  therefore need an eager stack frame.
- Handle all lengths of array literals in the fast case.

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/257563004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20974 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/arm/code-stubs-arm.cc       |  14 ++
 src/arm/full-codegen-arm.cc     |  25 +---
 src/arm64/code-stubs-arm64.cc   |  14 ++
 src/arm64/full-codegen-arm64.cc |  27 +---
 src/code-stubs-hydrogen.cc      |  82 +++++------
 src/code-stubs.cc               |   5 +-
 src/code-stubs.h                |  52 +------
 src/hydrogen.cc                 | 317 ++++++++++++++++++++++++++--------------
 src/hydrogen.h                  |  37 ++++-
 src/ia32/code-stubs-ia32.cc     |  14 ++
 src/ia32/full-codegen-ia32.cc   |  35 +----
 src/mips/full-codegen-mips.cc   |   2 +-
 src/v8-counters.h               |   1 +
 src/x64/code-stubs-x64.cc       |  14 ++
 src/x64/full-codegen-x64.cc     |  34 +----
 15 files changed, 347 insertions(+), 326 deletions(-)

diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 80378bb..7985459 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -81,6 +81,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { r3, r2, r1 };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -224,6 +229,11 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -251,6 +261,10 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index cffab77..b76af24 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1821,33 +1821,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(constant_elements));
-  if (has_fast_elements && constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-    __ IncrementCounter(
-        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
-  } else if (expr->depth() > 1 || Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ mov(r0, Operand(Smi::FromInt(flags)));
     __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    if (has_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
-    FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }
 
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index c037a81..293362a 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -88,6 +88,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { x3, x2, x1 };
   descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -253,6 +258,11 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -299,6 +309,10 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc
index 2099fde..e6b57ea 100644
--- a/src/arm64/full-codegen-arm64.cc
+++ b/src/arm64/full-codegen-arm64.cc
@@ -1824,35 +1824,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
   __ Mov(x2, Smi::FromInt(expr->literal_index()));
   __ Mov(x1, Operand(constant_elements));
-  if (has_fast_elements && constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-    __ IncrementCounter(
-        isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
-  } else if ((expr->depth() > 1) || Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ Mov(x0, Smi::FromInt(flags));
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    if (has_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }
 
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 68c9cd5..342e317 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -150,9 +150,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
   bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
   HInstruction* stack_parameter_count = NULL;
   for (int i = 0; i < param_count; ++i) {
-    Representation r = descriptor_->IsParameterCountRegister(i)
-        ? Representation::Integer32()
-        : Representation::Tagged();
+    Representation r = descriptor_->register_param_representations_ == NULL
+        ? Representation::Tagged()
+        : descriptor_->register_param_representations_[i];
     HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
     start_environment->Bind(i, param);
     parameters_[i] = param;
@@ -353,8 +353,6 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
   Factory* factory = isolate()->factory();
   HValue* undefined = graph()->GetConstantUndefined();
   AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
-  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
-  int length = casted_stub()->length();
 
   HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                   GetParameter(1),
@@ -369,46 +367,40 @@
       AllocationSite::kTransitionInfoOffset);
   HInstruction* boilerplate = Add<HLoadNamedField>(
       allocation_site, static_cast<HValue*>(NULL), access);
-  HValue* push_value;
-  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
-    HValue* elements = AddLoadElements(boilerplate);
-
-    IfBuilder if_fixed_cow(this);
-    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
-    if_fixed_cow.Then();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_ELEMENTS,
-                                        0/*copy-on-write*/);
-    environment()->Push(push_value);
-    if_fixed_cow.Else();
-
-    IfBuilder if_fixed(this);
-    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
-    if_fixed.Then();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_ELEMENTS,
-                                        length);
-    environment()->Push(push_value);
-    if_fixed.Else();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_DOUBLE_ELEMENTS,
-                                        length);
-    environment()->Push(push_value);
-  } else {
-    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        elements_kind,
-                                        length);
-    environment()->Push(push_value);
-  }
+  HValue* elements = AddLoadElements(boilerplate);
+  HValue* capacity = AddLoadFixedArrayLength(elements);
+  IfBuilder zero_capacity(this);
+  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
+                                             Token::EQ);
+  zero_capacity.Then();
+  Push(BuildCloneShallowArrayEmpty(boilerplate,
+                                   allocation_site,
+                                   alloc_site_mode));
+  zero_capacity.Else();
+  IfBuilder if_fixed_cow(this);
+  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
+  if_fixed_cow.Then();
+  Push(BuildCloneShallowArrayCow(boilerplate,
+                                 allocation_site,
+                                 alloc_site_mode,
+                                 FAST_ELEMENTS));
+  if_fixed_cow.Else();
+  IfBuilder if_fixed(this);
+  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
+  if_fixed.Then();
+  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+                                      allocation_site,
+                                      alloc_site_mode,
+                                      FAST_ELEMENTS));
+
+  if_fixed.Else();
+  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+                                      allocation_site,
+                                      alloc_site_mode,
+                                      FAST_DOUBLE_ELEMENTS));
+  if_fixed.End();
+  if_fixed_cow.End();
+  zero_capacity.End();
 
   checker.ElseDeopt("Uninitialized boilerplate literals");
   checker.End();
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 353a228..90d36a4 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -45,6 +45,7 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
       hint_stack_parameter_count_(-1),
       function_mode_(NOT_JS_FUNCTION_STUB_MODE),
       register_params_(NULL),
+      register_param_representations_(NULL),
       deoptimization_handler_(NULL),
       handler_arguments_mode_(DONT_PASS_ARGUMENTS),
       miss_handler_(),
@@ -752,9 +753,7 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
 
 // static
 void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
-  FastCloneShallowArrayStub stub(isolate,
-                                 FastCloneShallowArrayStub::CLONE_ELEMENTS,
-                                 DONT_TRACK_ALLOCATION_SITE, 0);
+  FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
   InstallDescriptor(isolate, &stub);
 }
 
diff --git a/src/code-stubs.h b/src/code-stubs.h
index f337137..23acaef 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -300,6 +300,7 @@ struct CodeStubInterfaceDescriptor {
   int hint_stack_parameter_count_;
   StubFunctionMode function_mode_;
   Register* register_params_;
+  Representation* register_param_representations_;
 
   Address deoptimization_handler_;
   HandlerArgumentsMode handler_arguments_mode_;
@@ -604,50 +605,18 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
 class FastCloneShallowArrayStub : public HydrogenCodeStub {
  public:
   // Maximum length of copied elements array.
-  static const int kMaximumClonedLength = 8;
-  enum Mode {
-    CLONE_ELEMENTS,
-    CLONE_DOUBLE_ELEMENTS,
-    COPY_ON_WRITE_ELEMENTS,
-    CLONE_ANY_ELEMENTS,
-    LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
-  };
-
-  static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
+  static const int kMaximumInlinedCloneLength = 8;
 
   FastCloneShallowArrayStub(Isolate* isolate,
-                            Mode mode,
-                            AllocationSiteMode allocation_site_mode,
-                            int length)
+                            AllocationSiteMode allocation_site_mode)
       : HydrogenCodeStub(isolate),
-        mode_(mode),
-        allocation_site_mode_(allocation_site_mode),
-        length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
-    ASSERT_GE(length_, 0);
-    ASSERT_LE(length_, kMaximumClonedLength);
-  }
+        allocation_site_mode_(allocation_site_mode) {}
 
-  Mode mode() const { return mode_; }
-  int length() const { return length_; }
   AllocationSiteMode allocation_site_mode() const {
     return allocation_site_mode_;
   }
 
-  ElementsKind ComputeElementsKind() const {
-    switch (mode()) {
-      case CLONE_ELEMENTS:
-      case COPY_ON_WRITE_ELEMENTS:
-        return FAST_ELEMENTS;
-      case CLONE_DOUBLE_ELEMENTS:
-        return FAST_DOUBLE_ELEMENTS;
-      case CLONE_ANY_ELEMENTS:
-        /*fall-through*/;
-    }
-    UNREACHABLE();
-    return LAST_ELEMENTS_KIND;
-  }
-
-  virtual Handle<Code> GenerateCode() V8_OVERRIDE;
+  virtual Handle<Code> GenerateCode();
 
   virtual void InitializeInterfaceDescriptor(
       CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -655,22 +624,13 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
   static void InstallDescriptors(Isolate* isolate);
 
  private:
-  Mode mode_;
   AllocationSiteMode allocation_site_mode_;
-  int length_;
 
   class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
-  class ModeBits: public BitField<Mode, 1, 4> {};
-  class LengthBits: public BitField<int, 5, 4> {};
   // Ensure data fits within available bits.
-  STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
-  STATIC_ASSERT(kFastCloneModeCount < 16);
-  STATIC_ASSERT(kMaximumClonedLength < 16);
   Major MajorKey() { return FastCloneShallowArray; }
   int NotMissMinorKey() {
-    return AllocationSiteModeBits::encode(allocation_site_mode_)
-        | ModeBits::encode(mode_)
-        | LengthBits::encode(length_);
+    return AllocationSiteModeBits::encode(allocation_site_mode_);
   }
 };
 
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 4c02c90..bee033d 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -2414,15 +2414,26 @@ HInstruction* HGraphBuilder::AddElementAccess(
 }
 
 
-HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
+                                                HValue* dependency) {
   return Add<HLoadNamedField>(
-      object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
+      object, dependency, HObjectAccess::ForElementsPointer());
 }
 
 
-HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
+    HValue* array,
+    HValue* dependency) {
   return Add<HLoadNamedField>(
-      object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
+      array, dependency, HObjectAccess::ForFixedArrayLength());
+}
+
+
+HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
+                                                   ElementsKind kind,
+                                                   HValue* dependency) {
+  return Add<HLoadNamedField>(
+      array, dependency, HObjectAccess::ForArrayLength(kind));
 }
 
 
@@ -2463,9 +2474,8 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
 
   HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
       new_kind, new_capacity);
-  BuildCopyElements(elements, kind,
-                    new_elements, new_kind,
-                    length, new_capacity);
+  BuildCopyElements(object, elements, kind, new_elements,
+                    new_kind, length, new_capacity);
 
   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                         new_elements);
@@ -2478,8 +2488,8 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to) {
-  // Fast elements kinds need to be initialized in case statements below cause
-  // a garbage collection.
+  // Fast elements kinds need to be initialized in case statements below cause a
+  // garbage collection.
   Factory* factory = isolate()->factory();
   double nan_double = FixedDoubleArray::hole_nan_as_double();
@@ -2523,93 +2533,137 @@
 }
 
 
-void HGraphBuilder::BuildCopyElements(HValue* from_elements,
+void HGraphBuilder::BuildCopyElements(HValue* array,
+                                      HValue* from_elements,
                                       ElementsKind from_elements_kind,
                                       HValue* to_elements,
                                       ElementsKind to_elements_kind,
                                       HValue* length,
                                       HValue* capacity) {
-  bool pre_fill_with_holes =
-      IsFastDoubleElementsKind(from_elements_kind) &&
-      IsFastObjectElementsKind(to_elements_kind);
-
-  if (pre_fill_with_holes) {
-    // If the copy might trigger a GC, make sure that the FixedArray is
-    // pre-initialized with holes to make sure that it's always in a consistent
-    // state.
-    BuildFillElementsWithHole(to_elements, to_elements_kind,
-                              graph()->GetConstant0(), capacity);
+  int constant_capacity = -1;
+  if (capacity->IsConstant() &&
+      HConstant::cast(capacity)->HasInteger32Value()) {
+    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
+    if (constant_candidate <=
+        FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
+      constant_capacity = constant_candidate;
+    }
   }
 
-  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
-
-  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
+  if (constant_capacity != -1) {
+    // Unroll the loop for small elements kinds.
+    for (int i = 0; i < constant_capacity; i++) {
+      HValue* key_constant = Add<HConstant>(i);
+      HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
+                                            static_cast<HValue*>(NULL),
+                                            from_elements_kind);
+      Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
+    }
+  } else {
+    bool pre_fill_with_holes =
+        IsFastDoubleElementsKind(from_elements_kind) &&
+        IsFastObjectElementsKind(to_elements_kind);
 
-  HValue* element = Add<HLoadKeyed>(from_elements, key,
-                                    static_cast<HValue*>(NULL),
-                                    from_elements_kind,
-                                    ALLOW_RETURN_HOLE);
+    if (pre_fill_with_holes) {
+      // If the copy might trigger a GC, make sure that the FixedArray is
+      // pre-initialized with holes to make sure that it's always in a
+      // consistent state.
+      BuildFillElementsWithHole(to_elements, to_elements_kind,
+                                graph()->GetConstant0(), capacity);
+    }
+
+    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
+
+    // Be very careful to copy the elements up to length backwards down to
+    // zero. This eliminates the need to keep length alive through the loop,
+    // since the termination condition compares to a constant. This reduces
+    // register pressure in code stubs that otherwise would spill and create
+    // a stack frame.
+    HValue* decremented_length = AddUncasted<HSub>(length,
+                                                   graph()->GetConstant1());
+    decremented_length->ClearFlag(HValue::kCanOverflow);
+    HValue* key = builder.BeginBody(decremented_length, Add<HConstant>(-1),
+                                    Token::NE);
+    HValue* element = Add<HLoadKeyed>(from_elements, key,
+                                      static_cast<HValue*>(NULL),
+                                      from_elements_kind,
+                                      ALLOW_RETURN_HOLE);
 
-  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
-                       IsFastSmiElementsKind(to_elements_kind))
+    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
+                         IsFastSmiElementsKind(to_elements_kind))
       ? FAST_HOLEY_ELEMENTS : to_elements_kind;
 
-  if (IsHoleyElementsKind(from_elements_kind) &&
-      from_elements_kind != to_elements_kind) {
-    IfBuilder if_hole(this);
-    if_hole.If<HCompareHoleAndBranch>(element);
-    if_hole.Then();
-    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
+    if (IsHoleyElementsKind(from_elements_kind) &&
+        from_elements_kind != to_elements_kind) {
+      IfBuilder if_hole(this);
+      if_hole.If<HCompareHoleAndBranch>(element);
+      if_hole.Then();
+      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
         ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
         : graph()->GetConstantHole();
-    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
-    if_hole.Else();
-    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-    store->SetFlag(HValue::kAllowUndefinedAsNaN);
-    if_hole.End();
-  } else {
-    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-    store->SetFlag(HValue::kAllowUndefinedAsNaN);
-  }
+      Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
+      if_hole.Else();
+      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+      store->SetFlag(HValue::kAllowUndefinedAsNaN);
+      if_hole.End();
+    } else {
+      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+      store->SetFlag(HValue::kAllowUndefinedAsNaN);
+    }
 
-  builder.EndBody();
+    builder.EndBody();
 
-  if (!pre_fill_with_holes && length != capacity) {
-    // Fill unused capacity with the hole.
-    BuildFillElementsWithHole(to_elements, to_elements_kind,
-                              key, capacity);
+    if (!pre_fill_with_holes && !length->Equals(capacity)) {
+      // Force an explicit reload of capacity and length from locations where we
+      // know they are available. This caps their live ranges before entering
+      // the element copy loop above, reducing register pressure enough to
+      // not spill and create stack frames for performance-critical array stubs
+      // on platforms with a small register set.
+      capacity = AddLoadFixedArrayLength(to_elements);
+      capacity->ClearFlag(HValue::kUseGVN);
+      length = AddLoadArrayLength(array, to_elements_kind);
+      length->ClearFlag(HValue::kUseGVN);
+      // Fill unused capacity with the hole.
+      BuildFillElementsWithHole(to_elements, to_elements_kind,
+                                length, capacity);
+    }
   }
-}
 
+  Counters* counters = isolate()->counters();
+  AddIncrementCounter(counters->inlined_copied_elements());
+}
 
-HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
-                                              HValue* allocation_site,
-                                              AllocationSiteMode mode,
-                                              ElementsKind kind,
-                                              int length) {
-  NoObservableSideEffectsScope no_effects(this);
-
+HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
+    HValue* boilerplate,
+    HValue* allocation_site,
+    HValue* extra_size,
+    HValue** return_elements,
+    AllocationSiteMode mode) {
   // All sizes here are multiples of kPointerSize.
-  int size = JSArray::kSize;
+  int array_size = JSArray::kSize;
   if (mode == TRACK_ALLOCATION_SITE) {
-    size += AllocationMemento::kSize;
+    array_size += AllocationMemento::kSize;
+  }
+
+  HValue* size_in_bytes = Add<HConstant>(array_size);
+  if (extra_size != NULL) {
+    size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
+    size_in_bytes->ClearFlag(HValue::kCanOverflow);
   }
 
-  HValue* size_in_bytes = Add<HConstant>(size);
   HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                         NOT_TENURED, JS_OBJECT_TYPE);
 
   // Copy the JS array part.
-  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length == 0)) {
-      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
-      Add<HStoreNamedField>(
-          object, access, Add<HLoadNamedField>(
-              boilerplate, static_cast<HValue*>(NULL), access));
-    }
-  }
+  HValue* map = Add<HLoadNamedField>(boilerplate,
+      static_cast<HValue*>(NULL), HObjectAccess::ForMap());
+  Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
+      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+      INITIALIZING_STORE);
+  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
+      INITIALIZING_STORE);
 
   // Create an allocation site info if requested.
   if (mode == TRACK_ALLOCATION_SITE) {
@@ -2617,54 +2671,97 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
         object, Add<HConstant>(JSArray::kSize), allocation_site);
   }
 
-  if (length > 0) {
-    // We have to initialize the elements pointer if allocation folding is
-    // turned off.
-    if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
-      HConstant* empty_fixed_array = Add<HConstant>(
-          isolate()->factory()->empty_fixed_array());
-      Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
-                            empty_fixed_array, INITIALIZING_STORE);
-    }
-
-    HValue* boilerplate_elements = AddLoadElements(boilerplate);
-    HValue* object_elements;
-    if (IsFastDoubleElementsKind(kind)) {
-      HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
-      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
-                                       NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
-    } else {
-      HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
-      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
-                                       NOT_TENURED, FIXED_ARRAY_TYPE);
-    }
-    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
-                          object_elements);
-
-    // Copy the elements array header.
-    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
-      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
-      Add<HStoreNamedField>(
-          object_elements, access, Add<HLoadNamedField>(
-              boilerplate_elements, static_cast<HValue*>(NULL), access));
-    }
-
-    // Copy the elements array contents.
-    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
-    // copying loops with constant length up to a given boundary and use this
-    // helper here instead.
-    for (int i = 0; i < length; i++) {
-      HValue* key_constant = Add<HConstant>(i);
-      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
-                                            static_cast<HValue*>(NULL), kind);
-      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
-    }
+  if (extra_size != NULL) {
+    HValue* elements = Add<HInnerAllocatedObject>(object,
+        Add<HConstant>(array_size));
+    if (return_elements != NULL) *return_elements = elements;
   }
 
   return object;
 }
 
 
+HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
+                                                 HValue* allocation_site,
+                                                 AllocationSiteMode mode,
+                                                 ElementsKind kind) {
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, NULL, NULL, mode);
+
+  HValue* length = AddLoadArrayLength(boilerplate, kind);
+  HValue* elements = AddLoadElements(boilerplate);
+  HObjectAccess access1 = HObjectAccess::ForArrayLength(kind);
+  HObjectAccess access2 = HObjectAccess::ForElementsPointer();
+  Add<HStoreNamedField>(result, access1, length, INITIALIZING_STORE);
+  Add<HStoreNamedField>(result, access2, elements, INITIALIZING_STORE);
+
+  return result;
+}
+
+
+HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
+                                                   HValue* allocation_site,
+                                                   AllocationSiteMode mode) {
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, NULL, NULL, mode);
+
+  HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
+  Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
+                        INITIALIZING_STORE);
+  access = HObjectAccess::ForElementsPointer();
+  Add<HStoreNamedField>(result, access,
+      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+      INITIALIZING_STORE);
+
+  return result;
+}
+
+
+HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+                                                      HValue* allocation_site,
+                                                      AllocationSiteMode mode,
+                                                      ElementsKind kind) {
+  int elements_kind_size = IsFastDoubleElementsKind(kind)
+      ? kDoubleSize : kPointerSize;
+
+  HValue* boilerplate_elements = AddLoadElements(boilerplate);
+  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
+  HValue* extra = AddUncasted<HMul>(capacity,
+                                    Add<HConstant>(elements_kind_size));
+  extra->ClearFlag(HValue::kCanOverflow);
+  extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
+  extra->ClearFlag(HValue::kCanOverflow);
+  HValue* elements = NULL;
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, extra, &elements, mode);
+
+  // Explicitly reload the boilerplate's elements. This frees up a register
+  // during the allocation which otherwise causes spillage in many common code
+  // sequences on platforms with tight register constraints.
+  boilerplate_elements = AddLoadElements(boilerplate);
+  boilerplate_elements->ClearFlag(HValue::kUseGVN);
+  HValue* length = Add<HLoadNamedField>(boilerplate,
+      static_cast<HValue*>(NULL), HObjectAccess::ForArrayLength(kind));
+  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
+                        elements, INITIALIZING_STORE);
+  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
+                        length, INITIALIZING_STORE);
+
+  // Copy the elements array header.
+  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
+    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
+    Add<HStoreNamedField>(elements, access,
+        Add<HLoadNamedField>(boilerplate_elements,
+                             static_cast<HValue*>(NULL), access));
+  }
+
+  BuildCopyElements(result, boilerplate_elements, kind, elements,
+                    kind, length, capacity);
+
+  return result;
+}
+
+
 void HGraphBuilder::BuildCompareNil(
     HValue* value,
     Type* type,
diff --git a/src/hydrogen.h b/src/hydrogen.h
index c561ab1..2099a80 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -1421,7 +1421,8 @@ class HGraphBuilder {
     store_map->SkipWriteBarrier();
     return store_map;
   }
-  HLoadNamedField* AddLoadElements(HValue* object);
+  HLoadNamedField* AddLoadElements(HValue* object,
+                                   HValue* dependency = NULL);
 
   bool MatchRotateRight(HValue* left,
                         HValue* right,
@@ -1437,7 +1438,12 @@ class HGraphBuilder {
                                   Maybe<int> fixed_right_arg,
                                   HAllocationMode allocation_mode);
 
-  HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
+  HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
+                                           HValue *dependency = NULL);
+
+  HLoadNamedField* AddLoadArrayLength(HValue *object,
+                                      ElementsKind kind,
+                                      HValue *dependency = NULL);
 
   HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
 
@@ -1780,18 +1786,33 @@ class HGraphBuilder {
                                 HValue* from,
                                 HValue* to);
 
-  void BuildCopyElements(HValue* from_elements,
+  void BuildCopyElements(HValue* array,
+                         HValue* from_elements,
                          ElementsKind from_elements_kind,
                          HValue* to_elements,
                          ElementsKind to_elements_kind,
                          HValue* length,
                          HValue* capacity);
 
-  HValue* BuildCloneShallowArray(HValue* boilerplate,
-                                 HValue* allocation_site,
-                                 AllocationSiteMode mode,
-                                 ElementsKind kind,
-                                 int length);
+  HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
+                                       HValue* allocation_site,
+                                       HValue* extra_size,
+                                       HValue** return_elements,
+                                       AllocationSiteMode mode);
+
+  HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
+                                    HValue* allocation_site,
+                                    AllocationSiteMode mode,
+                                    ElementsKind kind);
+
+  HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
+                                      HValue* allocation_site,
+                                      AllocationSiteMode mode);
+
+  HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+                                         HValue* allocation_site,
+                                         AllocationSiteMode mode,
+                                         ElementsKind kind);
 
   HValue* BuildElementIndexHash(HValue* index);
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index fea5cbf..8826f51 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -86,6 +86,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { eax, ebx, ecx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -217,6 +222,11 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -244,6 +254,10 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index bfdc7bb..9854bb6 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1758,24 +1758,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }
 
-  Heap* heap = isolate()->heap();
-  if (has_constant_fast_elements &&
-      constant_elements_values->map() == heap->fixed_cow_array_map()) {
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
-    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
-    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
-    __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-  } else if (expr->depth() > 1 || Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ push(Immediate(Smi::FromInt(expr->literal_index())));
@@ -1783,25 +1766,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ push(Immediate(Smi::FromInt(flags)));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    if (has_constant_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }
 
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 8a1008d..cb6ef7d 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1845,7 +1845,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
                         1, a1, a2);
   } else if (expr->depth() > 1 || Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+             length > FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
     __ li(a0, Operand(Smi::FromInt(flags)));
     __ Push(a3, a2, a1, a0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
diff --git a/src/v8-counters.h b/src/v8-counters.h
index 0bd4955..620957e 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -128,6 +128,7 @@ namespace internal {
   SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs)           \
   SC(call_normal_stubs, V8.CallNormalStubs)                           \
   SC(call_megamorphic_stubs, V8.CallMegamorphicStubs)                 \
+  SC(inlined_copied_elements, V8.InlinedCopiedElements)               \
   SC(arguments_adaptors, V8.ArgumentsAdaptors)                        \
   SC(compilation_cache_hits, V8.CompilationCacheHits)                 \
   SC(compilation_cache_misses, V8.CompilationCacheMisses)             \
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index f985669..26e9322 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -82,6 +82,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { rax, rbx, rcx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -211,6 +216,11 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 3;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
     descriptor->register_params_ = registers_variable_args;
   }
 
@@ -239,6 +249,10 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }
 
   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index ae72ba7..d974033 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1796,24 +1796,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }
 
-  Heap* heap = isolate()->heap();
-  if (has_constant_fast_elements &&
-      constant_elements_values->map() == heap->fixed_cow_array_map()) {
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
-    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-    __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
-    __ Move(rbx, Smi::FromInt(expr->literal_index()));
-    __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-  } else if (expr->depth() > 1 || Serializer::enabled() ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
@@ -1821,24 +1804,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    if (has_constant_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }
 
-- 
2.7.4
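
A few illustrative notes follow; they are editorial sketches, not part of the commit.

The copy strategy that the new BuildCopyElements implements has two regimes: boilerplates whose backing-store capacity is a small compile-time constant get a fully unrolled copy, and everything else gets a loop that walks backwards from length - 1 down to 0, so that the termination test compares the induction variable against the constant -1 instead of keeping length alive across the loop body. The following stand-alone C++ sketch shows the idea outside of Hydrogen; the threshold mirrors kMaximumInlinedCloneLength from the patch, but nothing here is V8 API.

#include <cstddef>
#include <cstdio>

static const int kMaximumInlinedCloneLength = 8;  // same threshold as the stub

template <typename T>
static void CopyElements(const T* from, T* to, size_t length, size_t capacity) {
  if (capacity <= kMaximumInlinedCloneLength) {
    // Small constant capacities: the stub emits one load/store pair per
    // element, i.e. this loop is fully unrolled at stub-generation time.
    for (size_t i = 0; i < capacity; i++) to[i] = from[i];
    return;
  }
  // General case: iterate backwards so the loop exit compares the key
  // against a constant (-1). length does not stay live across the body,
  // which is what lets the stub avoid spilling and building a stack frame.
  for (ptrdiff_t i = static_cast<ptrdiff_t>(length) - 1; i != -1; i--) {
    to[i] = from[i];
  }
}

int main() {
  int boilerplate[10] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
  int clone[10] = {0};
  CopyElements(boilerplate, clone, 10, 10);  // takes the backwards path
  std::printf("%d %d\n", clone[0], clone[9]);
  return 0;
}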
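BuildCloneShallowArrayNonEmpty sizes the allocation from the boilerplate's capacity rather than from a baked-in literal length: extra = capacity * element_size + FixedArray::kHeaderSize is folded into a single allocation, and the backing store is an inner pointer into it (HInnerAllocatedObject). The sketch below restates that layout in plain C++; the size constants are placeholders standing in for JSArray::kSize, AllocationMemento::kSize and FixedArray::kHeaderSize, and the JSArray header stores are omitted.

#include <cstdlib>
#include <cstring>

static const size_t kJSArraySize = 4 * sizeof(void*);          // placeholder
static const size_t kMementoSize = 2 * sizeof(void*);          // placeholder
static const size_t kFixedArrayHeaderSize = 2 * sizeof(void*); // placeholder

// Returns the new array object; its backing store lives at the tail of the
// same allocation, which is what HInnerAllocatedObject expresses in the stub.
static void* CloneNonEmpty(const void* boilerplate_elements,
                           size_t capacity, size_t element_size,
                           bool track_allocation_site) {
  size_t array_size = kJSArraySize;
  if (track_allocation_site) array_size += kMementoSize;  // memento follows array
  // extra = capacity * element_size + FixedArray::kHeaderSize, as in the
  // patch; because it depends on the runtime capacity rather than a constant
  // length, the stub no longer needs to be specialized per literal length.
  size_t extra = capacity * element_size + kFixedArrayHeaderSize;
  char* object = static_cast<char*>(std::malloc(array_size + extra));
  char* elements = object + array_size;  // "inner-allocated" elements
  // Copy the elements array header and contents from the boilerplate.
  std::memcpy(elements, boilerplate_elements,
              kFixedArrayHeaderSize + capacity * element_size);
  return object;
}

int main() {
  char boilerplate[2 * sizeof(void*) + 4 * sizeof(int)] = {0};
  void* clone = CloneNonEmpty(boilerplate, 4, sizeof(int), true);
  std::free(clone);
  return 0;
}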
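Finally, the reason the stub is now "unified" is visible in the minor-key change in code-stubs.h: before the patch, every (allocation-site mode, clone mode, length) triple produced and cached a separate stub; afterwards only the allocation-site bit remains, so at most two stub instances ever exist. The sketch below uses a minimal stand-in for V8's BitField template; the shift/size values echo the removed code above.

#include <cstdint>
#include <cstdio>

// Minimal stand-in for V8's BitField<T, shift, size> template.
template <class T, int shift, int size>
struct BitField {
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
};

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS,
            CLONE_ANY_ELEMENTS };

typedef BitField<AllocationSiteMode, 0, 1> AllocationSiteModeBits;
typedef BitField<Mode, 1, 4> ModeBits;   // removed by the patch
typedef BitField<int, 5, 4> LengthBits;  // removed by the patch

// Old key: one cached stub per (site mode, clone mode, length) triple.
static uint32_t OldMinorKey(AllocationSiteMode m, Mode mode, int length) {
  return AllocationSiteModeBits::encode(m) | ModeBits::encode(mode) |
         LengthBits::encode(length);
}

// New key: the stub is length- and kind-independent, so only the
// allocation-site bit remains.
static uint32_t NewMinorKey(AllocationSiteMode m) {
  return AllocationSiteModeBits::encode(m);
}

int main() {
  std::printf("old=%u new=%u\n",
              OldMinorKey(TRACK_ALLOCATION_SITE, CLONE_ELEMENTS, 3),
              NewMinorKey(TRACK_ALLOCATION_SITE));
  return 0;
}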