From 71789ea1e505cc32ad50ec5c20f82c833c073010 Mon Sep 17 00:00:00 2001 From: "danno@chromium.org" Date: Mon, 12 May 2014 07:49:11 +0000 Subject: [PATCH] Reland r20974: Unify and simplify the FastCloneShallowArrayStub - Don't bake in length/capacity into full codegen calls of stubs, allowing boilerplates to increase their capacity without regenerating code. - Unify all variants of the clone stub into a single, length-independent version. - Various tweaks to make sure that the clone stub doesn't spill and therefore need an eager stack frame. - Handle all lengths of array literals in the fast case. R=mvstanton@chromium.org Committed: https://code.google.com/p/v8/source/detail?r=21230 Review URL: https://codereview.chromium.org/272513004 git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21253 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/code-stubs-arm.cc | 14 ++ src/arm/full-codegen-arm.cc | 25 +--- src/arm64/code-stubs-arm64.cc | 14 ++ src/arm64/full-codegen-arm64.cc | 27 +--- src/code-stubs-hydrogen.cc | 92 ++++++------ src/code-stubs.cc | 5 +- src/code-stubs.h | 56 ++----- src/compiler.h | 10 ++ src/counters.h | 1 + src/hydrogen-gvn.cc | 3 +- src/hydrogen-instructions.h | 5 + src/hydrogen.cc | 324 ++++++++++++++++++++++++++-------------- src/hydrogen.h | 37 ++++- src/ia32/code-stubs-ia32.cc | 14 ++ src/ia32/full-codegen-ia32.cc | 35 +---- src/lithium.cc | 4 + src/mips/code-stubs-mips.cc | 14 ++ src/mips/full-codegen-mips.cc | 25 +--- src/x64/code-stubs-x64.cc | 14 ++ src/x64/full-codegen-x64.cc | 34 +---- 20 files changed, 403 insertions(+), 350 deletions(-) diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 910714c..afea875 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -58,6 +58,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( static Register registers[] = { r3, r2, r1 }; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers; + static Representation representations[] = { + Representation::Tagged(), + Representation::Smi(), + Representation::Tagged() }; + descriptor->register_param_representations_ = representations; descriptor->deoptimization_handler_ = Runtime::FunctionForId( Runtime::kHiddenCreateArrayLiteralStubBailout)->entry; @@ -201,6 +206,11 @@ static void InitializeArrayConstructorDescriptor( descriptor->stack_parameter_count_ = r0; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; @@ -228,6 +238,10 @@ static void InitializeInternalArrayConstructorDescriptor( descriptor->stack_parameter_count_ = r0; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index baea557..221526b 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -1792,33 +1792,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); __ 
mov(r2, Operand(Smi::FromInt(expr->literal_index()))); __ mov(r1, Operand(constant_elements)); - if (has_fast_elements && constant_elements_values->map() == - isolate()->heap()->fixed_cow_array_map()) { - FastCloneShallowArrayStub stub( - isolate(), - FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, - allocation_site_mode, - length); - __ CallStub(&stub); - __ IncrementCounter( - isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2); - } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || - length > FastCloneShallowArrayStub::kMaximumClonedLength) { + if (expr->depth() > 1) { __ mov(r0, Operand(Smi::FromInt(flags))); __ Push(r3, r2, r1, r0); __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); } else { - ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; - - if (has_fast_elements) { - mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; - } - - FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode, - length); + FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); __ CallStub(&stub); } diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index dc9d63c..0bf89d8 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -65,6 +65,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( static Register registers[] = { x3, x2, x1 }; descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); descriptor->register_params_ = registers; + static Representation representations[] = { + Representation::Tagged(), + Representation::Smi(), + Representation::Tagged() }; + descriptor->register_param_representations_ = representations; descriptor->deoptimization_handler_ = Runtime::FunctionForId( Runtime::kHiddenCreateArrayLiteralStubBailout)->entry; @@ -230,6 +235,11 @@ static void InitializeArrayConstructorDescriptor( descriptor->register_param_count_ = sizeof(registers_variable_args) / sizeof(registers_variable_args[0]); descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; @@ -276,6 +286,10 @@ static void InitializeInternalArrayConstructorDescriptor( descriptor->register_param_count_ = sizeof(registers_variable_args) / sizeof(registers_variable_args[0]); descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc index 0196e69..af00758 100644 --- a/src/arm64/full-codegen-arm64.cc +++ b/src/arm64/full-codegen-arm64.cc @@ -1795,35 +1795,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); __ Mov(x2, Smi::FromInt(expr->literal_index())); __ Mov(x1, Operand(constant_elements)); - if (has_fast_elements && constant_elements_values->map() == - isolate()->heap()->fixed_cow_array_map()) { - FastCloneShallowArrayStub stub( - isolate(), - FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, - 
allocation_site_mode, - length); - __ CallStub(&stub); - __ IncrementCounter( - isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11); - } else if ((expr->depth() > 1) || Serializer::enabled(isolate()) || - length > FastCloneShallowArrayStub::kMaximumClonedLength) { + if (expr->depth() > 1) { __ Mov(x0, Smi::FromInt(flags)); __ Push(x3, x2, x1, x0); __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); } else { - ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; - - if (has_fast_elements) { - mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; - } - - FastCloneShallowArrayStub stub(isolate(), - mode, - allocation_site_mode, - length); + FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); __ CallStub(&stub); } diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc index a35da69..83b461d 100644 --- a/src/code-stubs-hydrogen.cc +++ b/src/code-stubs-hydrogen.cc @@ -127,9 +127,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() { bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid(); HInstruction* stack_parameter_count = NULL; for (int i = 0; i < param_count; ++i) { - Representation r = descriptor_->IsParameterCountRegister(i) - ? Representation::Integer32() - : Representation::Tagged(); + Representation r = descriptor_->register_param_representations_ == NULL + ? Representation::Tagged() + : descriptor_->register_param_representations_[i]; HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r); start_environment->Bind(i, param); parameters_[i] = param; @@ -330,8 +330,10 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); - FastCloneShallowArrayStub::Mode mode = casted_stub()->mode(); - int length = casted_stub()->length(); + + // This stub is very performance sensitive; the generated code must be tuned + // so that it doesn't build an eager frame. 
+ info()->MarkMustNotHaveEagerFrame(); HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0), GetParameter(1), @@ -346,46 +348,40 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() { AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add<HLoadNamedField>( allocation_site, static_cast<HValue*>(NULL), access); - HValue* push_value; - if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) { - HValue* elements = AddLoadElements(boilerplate); - - IfBuilder if_fixed_cow(this); - if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map()); - if_fixed_cow.Then(); - push_value = BuildCloneShallowArray(boilerplate, - allocation_site, - alloc_site_mode, - FAST_ELEMENTS, - 0/*copy-on-write*/); - environment()->Push(push_value); - if_fixed_cow.Else(); - - IfBuilder if_fixed(this); - if_fixed.If<HCompareMap>(elements, factory->fixed_array_map()); - if_fixed.Then(); - push_value = BuildCloneShallowArray(boilerplate, - allocation_site, - alloc_site_mode, - FAST_ELEMENTS, - length); - environment()->Push(push_value); - if_fixed.Else(); - push_value = BuildCloneShallowArray(boilerplate, - allocation_site, - alloc_site_mode, - FAST_DOUBLE_ELEMENTS, - length); - environment()->Push(push_value); - } else { - ElementsKind elements_kind = casted_stub()->ComputeElementsKind(); - push_value = BuildCloneShallowArray(boilerplate, - allocation_site, - alloc_site_mode, - elements_kind, - length); - environment()->Push(push_value); - } + HValue* elements = AddLoadElements(boilerplate); + HValue* capacity = AddLoadFixedArrayLength(elements); + IfBuilder zero_capacity(this); + zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(), + Token::EQ); + zero_capacity.Then(); + Push(BuildCloneShallowArrayEmpty(boilerplate, + allocation_site, + alloc_site_mode)); + zero_capacity.Else(); + IfBuilder if_fixed_cow(this); + if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map()); + if_fixed_cow.Then(); + Push(BuildCloneShallowArrayCow(boilerplate, + allocation_site, + alloc_site_mode, + FAST_ELEMENTS)); + if_fixed_cow.Else(); + IfBuilder if_fixed(this); + if_fixed.If<HCompareMap>(elements, factory->fixed_array_map()); + if_fixed.Then(); + Push(BuildCloneShallowArrayNonEmpty(boilerplate, + allocation_site, + alloc_site_mode, + FAST_ELEMENTS)); + + if_fixed.Else(); + Push(BuildCloneShallowArrayNonEmpty(boilerplate, + allocation_site, + alloc_site_mode, + FAST_DOUBLE_ELEMENTS)); + if_fixed.End(); + if_fixed_cow.End(); + zero_capacity.End(); checker.ElseDeopt("Uninitialized boilerplate literals"); checker.End(); @@ -644,6 +640,9 @@ HValue* CodeStubGraphBuilderBase::BuildArrayConstructor( HValue* result = NULL; switch (argument_class) { case NONE: + // This stub is very performance sensitive; the generated code must be + // tuned so that it doesn't build an eager frame. + info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: @@ -667,6 +666,9 @@ HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( HValue* result = NULL; switch (argument_class) { case NONE: + // This stub is very performance sensitive; the generated code must be + // tuned so that it doesn't build an eager frame. 
+ info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: diff --git a/src/code-stubs.cc b/src/code-stubs.cc index 24f60ed..64fb3eb 100644 --- a/src/code-stubs.cc +++ b/src/code-stubs.cc @@ -22,6 +22,7 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor() hint_stack_parameter_count_(-1), function_mode_(NOT_JS_FUNCTION_STUB_MODE), register_params_(NULL), + register_param_representations_(NULL), deoptimization_handler_(NULL), handler_arguments_mode_(DONT_PASS_ARGUMENTS), miss_handler_(), @@ -733,9 +734,7 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) { // static void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) { - FastCloneShallowArrayStub stub(isolate, - FastCloneShallowArrayStub::CLONE_ELEMENTS, - DONT_TRACK_ALLOCATION_SITE, 0); + FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE); InstallDescriptor(isolate, &stub); } diff --git a/src/code-stubs.h b/src/code-stubs.h index a6c2294..4180e27 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -277,6 +277,11 @@ struct CodeStubInterfaceDescriptor { int hint_stack_parameter_count_; StubFunctionMode function_mode_; Register* register_params_; + // Specifies Representations for the stub's parameters. Points to an array + // of Representations of the same length as the number of parameters to the + // stub, or NULL (the default value), in which case each parameter's + // Representation is assumed to be Tagged(). + Representation* register_param_representations_; Address deoptimization_handler_; HandlerArgumentsMode handler_arguments_mode_; @@ -581,50 +586,18 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub { class FastCloneShallowArrayStub : public HydrogenCodeStub { public: // Maximum length of copied elements array. - static const int kMaximumClonedLength = 8; - enum Mode { - CLONE_ELEMENTS, - CLONE_DOUBLE_ELEMENTS, - COPY_ON_WRITE_ELEMENTS, - CLONE_ANY_ELEMENTS, - LAST_CLONE_MODE = CLONE_ANY_ELEMENTS - }; - - static const int kFastCloneModeCount = LAST_CLONE_MODE + 1; + static const int kMaximumInlinedCloneLength = 8; FastCloneShallowArrayStub(Isolate* isolate, - Mode mode, - AllocationSiteMode allocation_site_mode, - int length) + AllocationSiteMode allocation_site_mode) : HydrogenCodeStub(isolate), - mode_(mode), - allocation_site_mode_(allocation_site_mode), - length_((mode == COPY_ON_WRITE_ELEMENTS) ? 
0 : length) { - ASSERT_GE(length_, 0); - ASSERT_LE(length_, kMaximumClonedLength); - } + allocation_site_mode_(allocation_site_mode) {} - Mode mode() const { return mode_; } - int length() const { return length_; } AllocationSiteMode allocation_site_mode() const { return allocation_site_mode_; } - ElementsKind ComputeElementsKind() const { - switch (mode()) { - case CLONE_ELEMENTS: - case COPY_ON_WRITE_ELEMENTS: - return FAST_ELEMENTS; - case CLONE_DOUBLE_ELEMENTS: - return FAST_DOUBLE_ELEMENTS; - case CLONE_ANY_ELEMENTS: - /*fall-through*/; - } - UNREACHABLE(); - return LAST_ELEMENTS_KIND; - } - - virtual Handle<Code> GenerateCode() V8_OVERRIDE; + virtual Handle<Code> GenerateCode(); virtual void InitializeInterfaceDescriptor( CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE; @@ -632,22 +605,13 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub { static void InstallDescriptors(Isolate* isolate); private: - Mode mode_; AllocationSiteMode allocation_site_mode_; - int length_; class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {}; - class ModeBits: public BitField<Mode, 1, 4> {}; - class LengthBits: public BitField<int, 5, 4> {}; // Ensure data fits within available bits. - STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1); - STATIC_ASSERT(kFastCloneModeCount < 16); - STATIC_ASSERT(kMaximumClonedLength < 16); Major MajorKey() { return FastCloneShallowArray; } int NotMissMinorKey() { - return AllocationSiteModeBits::encode(allocation_site_mode_) - | ModeBits::encode(mode_) - | LengthBits::encode(length_); + return AllocationSiteModeBits::encode(allocation_site_mode_); } }; diff --git a/src/compiler.h b/src/compiler.h index 24a8a9f..fd26d24 100644 --- a/src/compiler.h +++ b/src/compiler.h @@ -143,6 +143,14 @@ class CompilationInfo { return RequiresFrame::decode(flags_); } + void MarkMustNotHaveEagerFrame() { + flags_ |= MustNotHaveEagerFrame::encode(true); + } + + bool GetMustNotHaveEagerFrame() const { + return MustNotHaveEagerFrame::decode(flags_); + } + void SetParseRestriction(ParseRestriction restriction) { flags_ = ParseRestricitonField::update(flags_, restriction); } @@ -368,6 +376,8 @@ class CompilationInfo { class ParseRestricitonField: public BitField {}; // If the function requires a frame (for unspecified reasons) class RequiresFrame: public BitField {}; + // If the function cannot build a frame (for unspecified reasons) + class MustNotHaveEagerFrame: public BitField {}; unsigned flags_; diff --git a/src/counters.h b/src/counters.h index 9f1cd37..19e19bd 100644 --- a/src/counters.h +++ b/src/counters.h @@ -381,6 +381,7 @@ class HistogramTimerScope BASE_EMBEDDED { SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \ SC(call_normal_stubs, V8.CallNormalStubs) \ SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \ + SC(inlined_copied_elements, V8.InlinedCopiedElements) \ SC(arguments_adaptors, V8.ArgumentsAdaptors) \ SC(compilation_cache_hits, V8.CompilationCacheHits) \ SC(compilation_cache_misses, V8.CompilationCacheMisses) \ diff --git a/src/hydrogen-gvn.cc b/src/hydrogen-gvn.cc index f9d1b40..b32b909 100644 --- a/src/hydrogen-gvn.cc +++ b/src/hydrogen-gvn.cc @@ -863,7 +863,8 @@ void HGlobalValueNumberingPhase::AnalyzeGraph() { stream.OutputToStdOut(); } } - if (instr->CheckFlag(HValue::kUseGVN)) { + if (instr->CheckFlag(HValue::kUseGVN) && + !instr->CheckFlag(HValue::kCantBeReplaced)) { ASSERT(!instr->HasObservableSideEffects()); HInstruction* other = map->Lookup(instr); if (other != NULL) { diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h index fcf4655..34eafa9 100644 --- 
a/src/hydrogen-instructions.h +++ b/src/hydrogen-instructions.h @@ -619,6 +619,10 @@ class HValue : public ZoneObject { // flag. kUint32, kHasNoObservableSideEffects, + // Indicates an instruction shouldn't be replaced by optimization; this flag + // is useful to set in cases where recomputing a value is cheaper than + // extending the value's live range and spilling it. + kCantBeReplaced, // Indicates the instruction is live during dead code elimination. kIsLive, @@ -6257,6 +6261,7 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> { virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE; bool CanBeReplacedWith(HValue* other) const { + if (CheckFlag(HValue::kCantBeReplaced)) return false; if (!type().Equals(other->type())) return false; if (!representation().Equals(other->representation())) return false; if (!other->IsLoadNamedField()) return true; diff --git a/src/hydrogen.cc b/src/hydrogen.cc index 60730ab..ba0fcab 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -2388,15 +2388,26 @@ HInstruction* HGraphBuilder::AddElementAccess( } -HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) { +HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object, + HValue* dependency) { return Add<HLoadNamedField>( - object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer()); + object, dependency, HObjectAccess::ForElementsPointer()); } -HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) { +HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength( + HValue* array, + HValue* dependency) { return Add<HLoadNamedField>( - object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength()); + array, dependency, HObjectAccess::ForFixedArrayLength()); +} + + +HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array, + ElementsKind kind, + HValue* dependency) { + return Add<HLoadNamedField>( + array, dependency, HObjectAccess::ForArrayLength(kind)); } @@ -2429,9 +2440,8 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object, HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader( new_kind, new_capacity); - BuildCopyElements(elements, kind, - new_elements, new_kind, - length, new_capacity); + BuildCopyElements(object, elements, kind, new_elements, + new_kind, length, new_capacity); Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), new_elements); @@ -2444,8 +2454,8 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, ElementsKind elements_kind, HValue* from, HValue* to) { - // Fast elements kinds need to be initialized in case statements below cause - // a garbage collection. + // Fast elements kinds need to be initialized in case the statements below + // cause a garbage collection. Factory* factory = isolate()->factory(); double nan_double = FixedDoubleArray::hole_nan_as_double(); @@ -2453,6 +2463,10 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, ? Add<HConstant>(factory->the_hole_value()) : Add<HConstant>(nan_double); + if (to == NULL) { + to = AddLoadFixedArrayLength(elements); + } + // Special loop unfolding case static const int kLoopUnfoldLimit = 8; STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit); @@ -2478,104 +2492,144 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, Add<HStoreKeyed>(elements, key, hole, elements_kind); } } else { - LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); + // Carefully loop backwards so that the "from" remains live through the loop + // rather than the "to". 
This often corresponds to keeping the length live rather + // than the capacity, which helps register allocation, since length is used + // more often than capacity after filling with holes. + LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement); + + HValue* key = builder.BeginBody(to, from, Token::GT); - HValue* key = builder.BeginBody(from, to, Token::LT); + HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1()); + adjusted_key->ClearFlag(HValue::kCanOverflow); - Add<HStoreKeyed>(elements, key, hole, elements_kind); + Add<HStoreKeyed>(elements, adjusted_key, hole, elements_kind); builder.EndBody(); } } -void HGraphBuilder::BuildCopyElements(HValue* from_elements, +void HGraphBuilder::BuildCopyElements(HValue* array, + HValue* from_elements, ElementsKind from_elements_kind, HValue* to_elements, ElementsKind to_elements_kind, HValue* length, HValue* capacity) { - bool pre_fill_with_holes = + int constant_capacity = -1; + if (capacity != NULL && + capacity->IsConstant() && + HConstant::cast(capacity)->HasInteger32Value()) { + int constant_candidate = HConstant::cast(capacity)->Integer32Value(); + if (constant_candidate <= + FastCloneShallowArrayStub::kMaximumInlinedCloneLength) { + constant_capacity = constant_candidate; + } + } + + if (constant_capacity != -1) { + // Unroll the copy loop for small constant capacities. + for (int i = 0; i < constant_capacity; i++) { + HValue* key_constant = Add<HConstant>(i); + HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant, + static_cast<HValue*>(NULL), + from_elements_kind); + Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind); + } + } else { + bool pre_fill_with_holes = IsFastDoubleElementsKind(from_elements_kind) && IsFastObjectElementsKind(to_elements_kind); - if (pre_fill_with_holes) { - // If the copy might trigger a GC, make sure that the FixedArray is - // pre-initialized with holes to make sure that it's always in a consistent - // state. - BuildFillElementsWithHole(to_elements, to_elements_kind, - graph()->GetConstant0(), capacity); - } + if (pre_fill_with_holes) { + // If the copy might trigger a GC, make sure that the FixedArray is + // pre-initialized with holes to make sure that it's always in a + // consistent state. + BuildFillElementsWithHole(to_elements, to_elements_kind, + graph()->GetConstant0(), NULL); + } else if (capacity == NULL || !length->Equals(capacity)) { + BuildFillElementsWithHole(to_elements, to_elements_kind, + length, NULL); + } + + if (capacity == NULL) { + capacity = AddLoadFixedArrayLength(to_elements); + } + + LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement); - LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); + HValue* key = builder.BeginBody(length, graph()->GetConstant0(), + Token::GT); - HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT); + key = AddUncasted<HSub>(key, graph()->GetConstant1()); + key->ClearFlag(HValue::kCanOverflow); - HValue* element = Add<HLoadKeyed>(from_elements, key, - static_cast<HValue*>(NULL), - from_elements_kind, - ALLOW_RETURN_HOLE); + HValue* element = Add<HLoadKeyed>(from_elements, key, + static_cast<HValue*>(NULL), + from_elements_kind, + ALLOW_RETURN_HOLE); - ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) && - IsFastSmiElementsKind(to_elements_kind)) + ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) && + IsFastSmiElementsKind(to_elements_kind)) ? 
FAST_HOLEY_ELEMENTS : to_elements_kind; - if (IsHoleyElementsKind(from_elements_kind) && - from_elements_kind != to_elements_kind) { - IfBuilder if_hole(this); - if_hole.If<HCompareHoleAndBranch>(element); - if_hole.Then(); - HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind) ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double()) : graph()->GetConstantHole(); - Add<HStoreKeyed>(to_elements, key, hole_constant, kind); - if_hole.Else(); - HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); - store->SetFlag(HValue::kAllowUndefinedAsNaN); - if_hole.End(); - } else { - HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); - store->SetFlag(HValue::kAllowUndefinedAsNaN); - } - - builder.EndBody(); + if (IsHoleyElementsKind(from_elements_kind) && + from_elements_kind != to_elements_kind) { + IfBuilder if_hole(this); + if_hole.If<HCompareHoleAndBranch>(element); + if_hole.Then(); + HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind) ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double()) : graph()->GetConstantHole(); + Add<HStoreKeyed>(to_elements, key, hole_constant, kind); + if_hole.Else(); + HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); + store->SetFlag(HValue::kAllowUndefinedAsNaN); + if_hole.End(); + } else { + HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); + store->SetFlag(HValue::kAllowUndefinedAsNaN); + } - if (!pre_fill_with_holes && length != capacity) { - // Fill unused capacity with the hole. - BuildFillElementsWithHole(to_elements, to_elements_kind, - key, capacity); + builder.EndBody(); } -} - -HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate, - HValue* allocation_site, - AllocationSiteMode mode, - ElementsKind kind, - int length) { - NoObservableSideEffectsScope no_effects(this); + Counters* counters = isolate()->counters(); + AddIncrementCounter(counters->inlined_copied_elements()); +} +HValue* HGraphBuilder::BuildCloneShallowArrayCommon( + HValue* boilerplate, + HValue* allocation_site, + HValue* extra_size, + HValue** return_elements, + AllocationSiteMode mode) { // All sizes here are multiples of kPointerSize. int array_size = JSArray::kSize; if (mode == TRACK_ALLOCATION_SITE) { - size += AllocationMemento::kSize; + array_size += AllocationMemento::kSize; + } + + HValue* size_in_bytes = Add<HConstant>(array_size); + if (extra_size != NULL) { + size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes); + size_in_bytes->ClearFlag(HValue::kCanOverflow); } - HValue* size_in_bytes = Add<HConstant>(size); HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); // Copy the JS array part. - for (int i = 0; i < JSArray::kSize; i += kPointerSize) { - if ((i != JSArray::kElementsOffset) || (length == 0)) { - HObjectAccess access = HObjectAccess::ForJSArrayOffset(i); - Add<HStoreNamedField>( - object, access, Add<HLoadNamedField>( - boilerplate, static_cast<HValue*>(NULL), access)); - } - } + HValue* map = Add<HLoadNamedField>(boilerplate, + static_cast<HValue*>(NULL), HObjectAccess::ForMap()); + Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(), + Add<HConstant>(isolate()->factory()->empty_fixed_array()), + INITIALIZING_STORE); + Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map, + INITIALIZING_STORE); // Create an allocation site info if requested. if (mode == TRACK_ALLOCATION_SITE) { @@ -2583,54 +2637,102 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate, object, Add<HConstant>(JSArray::kSize), allocation_site); } - if (length > 0) { - // We have to initialize the elements pointer if allocation folding is - // turned off. 
- if (!FLAG_use_gvn || !FLAG_use_allocation_folding) { - HConstant* empty_fixed_array = Add<HConstant>( - isolate()->factory()->empty_fixed_array()); - Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), - empty_fixed_array, INITIALIZING_STORE); - } - - HValue* boilerplate_elements = AddLoadElements(boilerplate); - HValue* object_elements; - if (IsFastDoubleElementsKind(kind)) { - HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length)); - object_elements = Add<HAllocate>(elems_size, HType::Tagged(), - NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE); - } else { - HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length)); - object_elements = Add<HAllocate>(elems_size, HType::Tagged(), - NOT_TENURED, FIXED_ARRAY_TYPE); - } - Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), - object_elements); - - // Copy the elements array header. - for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) { - HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i); - Add<HStoreNamedField>( - object_elements, access, Add<HLoadNamedField>( - boilerplate_elements, static_cast<HValue*>(NULL), access)); - } - - // Copy the elements array contents. - // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold - // copying loops with constant length up to a given boundary and use this - // helper here instead. - for (int i = 0; i < length; i++) { - HValue* key_constant = Add<HConstant>(i); - HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant, - static_cast<HValue*>(NULL), kind); - Add<HStoreKeyed>(object_elements, key_constant, value, kind); - } + if (extra_size != NULL) { + HValue* elements = Add<HInnerAllocatedObject>(object, + Add<HConstant>(array_size)); + if (return_elements != NULL) *return_elements = elements; } return object; } +HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode, + ElementsKind kind) { + HValue* result = BuildCloneShallowArrayCommon(boilerplate, + allocation_site, NULL, NULL, mode); + + HValue* elements = AddLoadElements(boilerplate); + HObjectAccess access = HObjectAccess::ForElementsPointer(); + Add<HStoreNamedField>(result, access, elements, INITIALIZING_STORE); + + HValue* length = AddLoadArrayLength(boilerplate, kind); + access = HObjectAccess::ForArrayLength(kind); + Add<HStoreNamedField>(result, access, length, INITIALIZING_STORE); + + return result; +} + + +HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode) { + HValue* result = BuildCloneShallowArrayCommon(boilerplate, + allocation_site, NULL, NULL, mode); + + HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS); + Add<HStoreNamedField>(result, access, graph()->GetConstant0(), + INITIALIZING_STORE); + access = HObjectAccess::ForElementsPointer(); + Add<HStoreNamedField>(result, access, + Add<HConstant>(isolate()->factory()->empty_fixed_array()), + INITIALIZING_STORE); + + return result; +} + + +HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode, + ElementsKind kind) { + int elements_kind_size = IsFastDoubleElementsKind(kind) ? 
kDoubleSize : kPointerSize; + + HValue* boilerplate_elements = AddLoadElements(boilerplate); + HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements); + HValue* extra = AddUncasted<HMul>(capacity, + Add<HConstant>(elements_kind_size)); + extra->ClearFlag(HValue::kCanOverflow); + extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize)); + extra->ClearFlag(HValue::kCanOverflow); + HValue* elements = NULL; + HValue* result = BuildCloneShallowArrayCommon(boilerplate, + allocation_site, extra, &elements, mode); + Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), + elements, INITIALIZING_STORE); + + // The allocation for the cloned array above causes register pressure on + // machines with low register counts. Force a reload of the boilerplate + // elements here to free up a register for the allocation and avoid + // unnecessary spillage. + boilerplate_elements = AddLoadElements(boilerplate); + boilerplate_elements->SetFlag(HValue::kCantBeReplaced); + + // Copy the elements array header. + for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) { + HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i); + Add<HStoreNamedField>(elements, access, + Add<HLoadNamedField>(boilerplate_elements, + static_cast<HValue*>(NULL), access), + INITIALIZING_STORE); + } + + // And set the length of the result. + HValue* length = Add<HLoadNamedField>(boilerplate, static_cast<HValue*>(NULL), + HObjectAccess::ForArrayLength(kind)); + Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), + length, INITIALIZING_STORE); + + BuildCopyElements(result, boilerplate_elements, kind, elements, + kind, length, NULL); + + return result; +} + + void HGraphBuilder::BuildCompareNil( HValue* value, Type* type, diff --git a/src/hydrogen.h b/src/hydrogen.h index 060ccb9..42a63fd 100644 --- a/src/hydrogen.h +++ b/src/hydrogen.h @@ -1397,7 +1397,8 @@ class HGraphBuilder { store_map->SkipWriteBarrier(); return store_map; } - HLoadNamedField* AddLoadElements(HValue* object); + HLoadNamedField* AddLoadElements(HValue* object, + HValue* dependency = NULL); bool MatchRotateRight(HValue* left, HValue* right, @@ -1413,7 +1414,12 @@ Maybe<int> fixed_right_arg, HAllocationMode allocation_mode); - HLoadNamedField* AddLoadFixedArrayLength(HValue *object); + HLoadNamedField* AddLoadFixedArrayLength(HValue *object, + HValue *dependency = NULL); + + HLoadNamedField* AddLoadArrayLength(HValue *object, + ElementsKind kind, + HValue *dependency = NULL); HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin); @@ -1753,18 +1759,33 @@ class HGraphBuilder { HValue* from, HValue* to); - void BuildCopyElements(HValue* from_elements, + void BuildCopyElements(HValue* array, + HValue* from_elements, ElementsKind from_elements_kind, HValue* to_elements, ElementsKind to_elements_kind, HValue* length, HValue* capacity); - HValue* BuildCloneShallowArray(HValue* boilerplate, - HValue* allocation_site, - AllocationSiteMode mode, - ElementsKind kind, - int length); + HValue* BuildCloneShallowArrayCommon(HValue* boilerplate, + HValue* allocation_site, + HValue* extra_size, + HValue** return_elements, + AllocationSiteMode mode); + + HValue* BuildCloneShallowArrayCow(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode, + ElementsKind kind); + + HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode); + + HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate, + HValue* allocation_site, + AllocationSiteMode mode, + ElementsKind kind); HValue* BuildElementIndexHash(HValue* index); diff --git a/src/ia32/code-stubs-ia32.cc 
b/src/ia32/code-stubs-ia32.cc index 2fde5ac..03c4361 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -63,6 +63,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( static Register registers[] = { eax, ebx, ecx }; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers; + static Representation representations[] = { + Representation::Tagged(), + Representation::Smi(), + Representation::Tagged() }; + descriptor->register_param_representations_ = representations; descriptor->deoptimization_handler_ = Runtime::FunctionForId( Runtime::kHiddenCreateArrayLiteralStubBailout)->entry; @@ -194,6 +199,11 @@ static void InitializeArrayConstructorDescriptor( descriptor->stack_parameter_count_ = eax; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; @@ -221,6 +231,10 @@ static void InitializeInternalArrayConstructorDescriptor( descriptor->stack_parameter_count_ = eax; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 65abb46..640f0a5 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -1729,24 +1729,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; } - Heap* heap = isolate()->heap(); - if (has_constant_fast_elements && - constant_elements_values->map() == heap->fixed_cow_array_map()) { - // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot - // change, so it's possible to specialize the stub in advance. - __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1); - __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); - __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset)); - __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); - __ mov(ecx, Immediate(constant_elements)); - FastCloneShallowArrayStub stub( - isolate(), - FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, - allocation_site_mode, - length); - __ CallStub(&stub); - } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || - length > FastCloneShallowArrayStub::kMaximumClonedLength) { + if (expr->depth() > 1) { __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset)); __ push(Immediate(Smi::FromInt(expr->literal_index()))); @@ -1754,25 +1737,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ push(Immediate(Smi::FromInt(flags))); __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); } else { - ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; - - // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot - // change, so it's possible to specialize the stub in advance. 
- if (has_constant_fast_elements) { - mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; - } - __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset)); __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ecx, Immediate(constant_elements)); - FastCloneShallowArrayStub stub(isolate(), - mode, - allocation_site_mode, - length); + FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); __ CallStub(&stub); } diff --git a/src/lithium.cc b/src/lithium.cc index 2265353..c6d6413 100644 --- a/src/lithium.cc +++ b/src/lithium.cc @@ -5,6 +5,7 @@ #include "v8.h" #include "lithium.h" #include "scopes.h" +#include "serialize.h" #if V8_TARGET_ARCH_IA32 #include "ia32/lithium-ia32.h" @@ -449,6 +450,9 @@ Handle<Code> LChunk::Codegen() { CodeEndLinePosInfoRecordEvent(*code, jit_handler_data)); CodeGenerator::PrintCode(code, info()); + ASSERT(!(Serializer::enabled(info()->isolate()) && + info()->GetMustNotHaveEagerFrame() && + generator.NeedsEagerFrame())); return code; } assembler.AbortedCodeGeneration(); diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc index a746d76..6e033b9 100644 --- a/src/mips/code-stubs-mips.cc +++ b/src/mips/code-stubs-mips.cc @@ -59,6 +59,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( static Register registers[] = { a3, a2, a1 }; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers; + static Representation representations[] = { + Representation::Tagged(), + Representation::Smi(), + Representation::Tagged() }; + descriptor->register_param_representations_ = representations; descriptor->deoptimization_handler_ = Runtime::FunctionForId( Runtime::kHiddenCreateArrayLiteralStubBailout)->entry; @@ -202,6 +207,11 @@ static void InitializeArrayConstructorDescriptor( descriptor->stack_parameter_count_ = a0; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; @@ -229,6 +239,10 @@ static void InitializeInternalArrayConstructorDescriptor( descriptor->stack_parameter_count_ = a0; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc index c51f9bc..13032ca 100644 --- a/src/mips/full-codegen-mips.cc +++ b/src/mips/full-codegen-mips.cc @@ -1805,33 +1805,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); __ li(a1, Operand(constant_elements)); - if (has_fast_elements && constant_elements_values->map() == - isolate()->heap()->fixed_cow_array_map()) { - FastCloneShallowArrayStub stub( - isolate(), - FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, - allocation_site_mode, - length); - __ CallStub(&stub); - __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), - 1, a1, a2); - } else if (expr->depth() > 1 || 
Serializer::enabled(isolate()) || - length > FastCloneShallowArrayStub::kMaximumClonedLength) { + if (expr->depth() > 1) { __ li(a0, Operand(Smi::FromInt(flags))); __ Push(a3, a2, a1, a0); __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); } else { - ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; - - if (has_fast_elements) { - mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; - } - - FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode, - length); + FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); __ CallStub(&stub); } diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index f65a94a..0e53eb3 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -59,6 +59,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( static Register registers[] = { rax, rbx, rcx }; descriptor->register_param_count_ = 3; descriptor->register_params_ = registers; + static Representation representations[] = { + Representation::Tagged(), + Representation::Smi(), + Representation::Tagged() }; + descriptor->register_param_representations_ = representations; descriptor->deoptimization_handler_ = Runtime::FunctionForId( Runtime::kHiddenCreateArrayLiteralStubBailout)->entry; @@ -188,6 +193,11 @@ static void InitializeArrayConstructorDescriptor( descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; descriptor->stack_parameter_count_ = rax; descriptor->register_param_count_ = 3; + static Representation representations[] = { + Representation::Tagged(), + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; descriptor->register_params_ = registers_variable_args; } @@ -216,6 +226,10 @@ static void InitializeInternalArrayConstructorDescriptor( descriptor->stack_parameter_count_ = rax; descriptor->register_param_count_ = 2; descriptor->register_params_ = registers_variable_args; + static Representation representations[] = { + Representation::Tagged(), + Representation::Integer32() }; + descriptor->register_param_representations_ = representations; } descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count; diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 5400773..1f03fbf 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -1766,24 +1766,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; } - Heap* heap = isolate()->heap(); - if (has_constant_fast_elements && - constant_elements_values->map() == heap->fixed_cow_array_map()) { - // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot - // change, so it's possible to specialize the stub in advance. 
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1); - __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); - __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); - __ Move(rbx, Smi::FromInt(expr->literal_index())); - __ Move(rcx, constant_elements); - FastCloneShallowArrayStub stub( - isolate(), - FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, - allocation_site_mode, - length); - __ CallStub(&stub); - } else if (expr->depth() > 1 || Serializer::enabled(isolate()) || - length > FastCloneShallowArrayStub::kMaximumClonedLength) { + if (expr->depth() > 1) { __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); __ Push(Smi::FromInt(expr->literal_index())); @@ -1791,24 +1774,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ Push(Smi::FromInt(flags)); __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); } else { - ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; - - // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot - // change, so it's possible to specialize the stub in advance. - if (has_constant_fast_elements) { - mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; - } - __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); __ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rcx, constant_elements); - FastCloneShallowArrayStub stub(isolate(), - mode, - allocation_site_mode, length); + FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); __ CallStub(&stub); } -- 2.7.4
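Postscript: the following standalone C++ sketch models the runtime dispatch that the unified stub now performs. It is illustrative only and not V8 code; Boilerplate, CloneShallowArray, and the vector-backed elements store are invented stand-ins for the boilerplate JSArray and its FixedArray/FixedDoubleArray backing stores.

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Invented stand-in for a boilerplate JSArray; V8's real backing stores
    // (FixedArray, FixedDoubleArray, COW maps) are not modeled here.
    struct Boilerplate {
      bool copy_on_write;            // elements carry the fixed_cow_array_map
      std::vector<double> elements;  // capacity == elements.size()
    };

    // Mirrors FastCloneShallowArrayStub::kMaximumInlinedCloneLength: constant
    // capacities up to this bound are copied with straight-line (unrolled) code.
    static const int kMaximumInlinedCloneLength = 8;

    std::vector<double> CloneShallowArray(const Boilerplate& boilerplate) {
      // BuildCloneShallowArrayEmpty: zero capacity needs no elements store.
      if (boilerplate.elements.empty()) return std::vector<double>();
      // BuildCloneShallowArrayCow: share the copy-on-write elements store;
      // sharing is approximated by a plain value copy in this model.
      if (boilerplate.copy_on_write) return boilerplate.elements;
      // BuildCloneShallowArrayNonEmpty: allocate, then copy. BuildCopyElements
      // unrolls this loop when the capacity is a known small constant.
      std::vector<double> clone(boilerplate.elements.size());
      for (std::size_t i = 0; i < boilerplate.elements.size(); ++i) {
        clone[i] = boilerplate.elements[i];
      }
      return clone;
    }

    int main() {
      Boilerplate b = {false, std::vector<double>()};
      b.elements.push_back(1.0);
      b.elements.push_back(2.0);
      std::vector<double> c = CloneShallowArray(b);
      assert(c.size() == 2 && c[1] == 2.0);
      (void)kMaximumInlinedCloneLength;
      return 0;
    }

Because the stub no longer bakes a Mode and length into its minor key, one compiled stub covers every array literal whose boilerplate is initialized, and boilerplates can grow their capacity without invalidating previously generated full-codegen calls.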
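The backwards loop order used in BuildFillElementsWithHole and BuildCopyElements is easier to see in scalar form. The function below is hypothetical (the name is invented); only the loop shape mirrors the patch: iterate with a post-decremented key and index with key - 1, so the value that stays live across the loop is the length, which is still needed afterwards, rather than a separate ascending counter.

    // Scalar model of the kPostDecrement loops in the patch.
    void CopyBackwards(const double* from, double* to, int length) {
      for (int key = length; key > 0; --key) {  // LoopBuilder::kPostDecrement
        int adjusted_key = key - 1;             // AddUncasted<HSub>(key, 1)
        to[adjusted_key] = from[adjusted_key];
      }
    }

    int main() {
      double src[3] = {1.0, 2.0, 3.0};
      double dst[3] = {0.0, 0.0, 0.0};
      CopyBackwards(src, dst, 3);
      return (dst[0] == 1.0 && dst[2] == 3.0) ? 0 : 1;
    }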
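Finally, the register_param_representations_ mechanism reduces to a lookup that defaults to Tagged. The struct below is a simplified, hypothetical mirror of CodeStubInterfaceDescriptor: the two field names follow the patch, everything else is invented for illustration.

    enum class Representation { kTagged, kSmi, kInteger32 };

    struct StubDescriptor {
      int register_param_count_;
      const Representation* register_param_representations_;

      Representation RepresentationOf(int i) const {
        // NULL (nullptr here) means "every register parameter is Tagged",
        // matching the default added to CodeStubGraphBuilderBase::BuildGraph().
        return register_param_representations_ == nullptr
                   ? Representation::kTagged
                   : register_param_representations_[i];
      }
    };

    int main() {
      // Mirrors the FastCloneShallowArrayStub registration in the patch:
      // literals array (Tagged), literal index (Smi), constant elements (Tagged).
      static const Representation kParams[] = {
          Representation::kTagged, Representation::kSmi,
          Representation::kTagged};
      StubDescriptor d = {3, kParams};
      if (d.RepresentationOf(1) != Representation::kSmi) return 1;
      StubDescriptor defaulted = {2, nullptr};
      return defaulted.RepresentationOf(0) == Representation::kTagged ? 0 : 1;
    }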