From 77e53fc88453071b65c70583c72afed42206aaf1 Mon Sep 17 00:00:00 2001
From: "weiliang.lin@intel.com"
Date: Fri, 12 Sep 2014 01:39:25 +0000
Subject: [PATCH] X87: Encapsulate megamorphic load/tail-call in hydrogen

port r23772.

original commit message:

  To aid vector-based load ic work, we need to be able to handle the
  megamorphic load case in hydrogen. A simple approach is to wrap the probe
  activity in a hydrogen instruction. The instruction is novel in that it
  always tail-calls away.

BUG=
R=weiliang.lin@intel.com

Review URL: https://codereview.chromium.org/564683002

Patch from Jing Bao .

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23889 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/ic/x87/ic-x87.cc           |  20 +--
 src/ic/x87/stub-cache-x87.cc   |  20 ++-
 src/x87/lithium-codegen-x87.cc |  33 ++++-
 src/x87/lithium-x87.cc         |  13 ++
 src/x87/lithium-x87.h          | 311 ++++++++++++++++++++++-------------------
 5 files changed, 223 insertions(+), 174 deletions(-)

diff --git a/src/ic/x87/ic-x87.cc b/src/ic/x87/ic-x87.cc
index 1e3200f..7a798d7 100644
--- a/src/ic/x87/ic-x87.cc
+++ b/src/ic/x87/ic-x87.cc
@@ -825,24 +825,6 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
 }
 
 
-void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
-  // The return address is on the stack.
-  Register receiver = LoadDescriptor::ReceiverRegister();
-  Register name = LoadDescriptor::NameRegister();
-  DCHECK(receiver.is(edx));
-  DCHECK(name.is(ecx));
-
-  // Probe the stub cache.
-  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
-      Code::ComputeHandlerFlags(Code::LOAD_IC));
-  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, receiver, name, ebx,
-                                               eax);
-
-  // Cache miss: Jump to runtime.
-  GenerateMiss(masm);
-}
-
-
 void LoadIC::GenerateNormal(MacroAssembler* masm) {
   Register dictionary = eax;
   DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
@@ -923,7 +905,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
   masm->isolate()->stub_cache()->GenerateProbe(
-      masm, flags, StoreDescriptor::ReceiverRegister(),
+      masm, flags, false, StoreDescriptor::ReceiverRegister(),
       StoreDescriptor::NameRegister(), ebx, no_reg);
 
   // Cache miss: Jump to runtime.
diff --git a/src/ic/x87/stub-cache-x87.cc b/src/ic/x87/stub-cache-x87.cc
index 8b4f0fd..0291ef3 100644
--- a/src/ic/x87/stub-cache-x87.cc
+++ b/src/ic/x87/stub-cache-x87.cc
@@ -16,8 +16,8 @@ namespace internal {
 
 
 static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
-                       Code::Flags flags, StubCache::Table table, Register name,
-                       Register receiver,
+                       Code::Flags flags, bool leave_frame,
+                       StubCache::Table table, Register name, Register receiver,
                        // Number of the cache entry pointer-size scaled.
                        Register offset, Register extra) {
   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
@@ -56,6 +56,8 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
     }
 #endif
 
+    if (leave_frame) __ leave();
+
     // Jump to the first instruction in the code stub.
     __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
     __ jmp(extra);
@@ -98,6 +100,8 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
     __ pop(offset);
     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
 
+    if (leave_frame) __ leave();
+
     // Jump to the first instruction in the code stub.
     __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
     __ jmp(offset);
 
@@ -110,9 +114,9 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
 
 
 void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
-                              Register receiver, Register name,
-                              Register scratch, Register extra, Register extra2,
-                              Register extra3) {
+                              bool leave_frame, Register receiver,
+                              Register name, Register scratch, Register extra,
+                              Register extra2, Register extra3) {
   Label miss;
 
   // Assert that code is valid. The multiplying code relies on the entry size
@@ -155,7 +159,8 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
   DCHECK(kCacheIndexShift == kPointerSizeLog2);
 
   // Probe the primary table.
-  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
+  ProbeTable(isolate(), masm, flags, leave_frame, kPrimary, name, receiver,
+             offset, extra);
 
   // Primary miss: Compute hash for secondary probe.
   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
@@ -167,7 +172,8 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
   __ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);
 
   // Probe the secondary table.
-  ProbeTable(isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
+  ProbeTable(isolate(), masm, flags, leave_frame, kSecondary, name, receiver,
+             offset, extra);
 
   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
diff --git a/src/x87/lithium-codegen-x87.cc b/src/x87/lithium-codegen-x87.cc
index 07c9fb0..df22249 100644
--- a/src/x87/lithium-codegen-x87.cc
+++ b/src/x87/lithium-codegen-x87.cc
@@ -11,6 +11,7 @@
 #include "src/codegen.h"
 #include "src/deoptimizer.h"
 #include "src/hydrogen-osr.h"
+#include "src/ic/stub-cache.h"
 #include "src/x87/lithium-codegen-x87.h"
 
 namespace v8 {
@@ -3277,11 +3278,9 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
 
   // Load the result.
   __ mov(result,
-         BuildFastArrayOperand(instr->elements(),
-                               instr->key(),
+         BuildFastArrayOperand(instr->elements(), instr->key(),
                                instr->hydrogen()->key()->representation(),
-                               FAST_ELEMENTS,
-                               instr->base_offset()));
+                               FAST_ELEMENTS, instr->base_offset()));
 
   // Check for the hole value.
   if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -3577,6 +3576,32 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
 }
 
 
+void LCodeGen::DoTailCallThroughMegamorphicCache(
+    LTailCallThroughMegamorphicCache* instr) {
+  Register receiver = ToRegister(instr->receiver());
+  Register name = ToRegister(instr->name());
+  DCHECK(receiver.is(LoadDescriptor::ReceiverRegister()));
+  DCHECK(name.is(LoadDescriptor::NameRegister()));
+
+  Register scratch = ebx;
+  Register extra = eax;
+  DCHECK(!scratch.is(receiver) && !scratch.is(name));
+  DCHECK(!extra.is(receiver) && !extra.is(name));
+
+  // Important for the tail-call.
+  bool must_teardown_frame = NeedsEagerFrame();
+
+  // The probe will tail call to a handler if found.
+  isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
+                                         must_teardown_frame, receiver, name,
+                                         scratch, extra);
+
+  // Tail call to miss if we ended up here.
+ if (must_teardown_frame) __ leave(); + LoadIC::GenerateMiss(masm()); +} + + void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { DCHECK(ToRegister(instr->result()).is(eax)); diff --git a/src/x87/lithium-x87.cc b/src/x87/lithium-x87.cc index 392335b..f93ed62 100644 --- a/src/x87/lithium-x87.cc +++ b/src/x87/lithium-x87.cc @@ -1135,6 +1135,19 @@ LInstruction* LChunkBuilder::DoCallWithDescriptor( } +LInstruction* LChunkBuilder::DoTailCallThroughMegamorphicCache( + HTailCallThroughMegamorphicCache* instr) { + LOperand* context = UseFixed(instr->context(), esi); + LOperand* receiver_register = + UseFixed(instr->receiver(), LoadDescriptor::ReceiverRegister()); + LOperand* name_register = + UseFixed(instr->name(), LoadDescriptor::NameRegister()); + // Not marked as call. It can't deoptimize, and it never returns. + return new (zone()) LTailCallThroughMegamorphicCache( + context, receiver_register, name_register); +} + + LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) { LOperand* context = UseFixed(instr->context(), esi); LOperand* function = UseFixed(instr->function(), edi); diff --git a/src/x87/lithium-x87.h b/src/x87/lithium-x87.h index e54ba02..233eaf2 100644 --- a/src/x87/lithium-x87.h +++ b/src/x87/lithium-x87.h @@ -21,149 +21,150 @@ class RCodeVisualizer; // Forward declarations. class LCodeGen; -#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \ - V(AccessArgumentsAt) \ - V(AddI) \ - V(AllocateBlockContext) \ - V(Allocate) \ - V(ApplyArguments) \ - V(ArgumentsElements) \ - V(ArgumentsLength) \ - V(ArithmeticD) \ - V(ArithmeticT) \ - V(BitI) \ - V(BoundsCheck) \ - V(Branch) \ - V(CallJSFunction) \ - V(CallWithDescriptor) \ - V(CallFunction) \ - V(CallNew) \ - V(CallNewArray) \ - V(CallRuntime) \ - V(CallStub) \ - V(CheckInstanceType) \ - V(CheckMaps) \ - V(CheckMapValue) \ - V(CheckNonSmi) \ - V(CheckSmi) \ - V(CheckValue) \ - V(ClampDToUint8) \ - V(ClampIToUint8) \ - V(ClampTToUint8NoSSE2) \ - V(ClassOfTestAndBranch) \ - V(ClobberDoubles) \ - V(CompareMinusZeroAndBranch) \ - V(CompareNumericAndBranch) \ - V(CmpObjectEqAndBranch) \ - V(CmpHoleAndBranch) \ - V(CmpMapAndBranch) \ - V(CmpT) \ - V(ConstantD) \ - V(ConstantE) \ - V(ConstantI) \ - V(ConstantS) \ - V(ConstantT) \ - V(ConstructDouble) \ - V(Context) \ - V(DateField) \ - V(DebugBreak) \ - V(DeclareGlobals) \ - V(Deoptimize) \ - V(DivByConstI) \ - V(DivByPowerOf2I) \ - V(DivI) \ - V(DoubleBits) \ - V(DoubleToI) \ - V(DoubleToSmi) \ - V(Drop) \ - V(Dummy) \ - V(DummyUse) \ - V(FlooringDivByConstI) \ - V(FlooringDivByPowerOf2I) \ - V(FlooringDivI) \ - V(ForInCacheArray) \ - V(ForInPrepareMap) \ - V(FunctionLiteral) \ - V(GetCachedArrayIndex) \ - V(Goto) \ - V(HasCachedArrayIndexAndBranch) \ - V(HasInstanceTypeAndBranch) \ - V(InnerAllocatedObject) \ - V(InstanceOf) \ - V(InstanceOfKnownGlobal) \ - V(InstructionGap) \ - V(Integer32ToDouble) \ - V(InvokeFunction) \ - V(IsConstructCallAndBranch) \ - V(IsObjectAndBranch) \ - V(IsStringAndBranch) \ - V(IsSmiAndBranch) \ - V(IsUndetectableAndBranch) \ - V(Label) \ - V(LazyBailout) \ - V(LoadContextSlot) \ - V(LoadFieldByIndex) \ - V(LoadFunctionPrototype) \ - V(LoadGlobalCell) \ - V(LoadGlobalGeneric) \ - V(LoadKeyed) \ - V(LoadKeyedGeneric) \ - V(LoadNamedField) \ - V(LoadNamedGeneric) \ - V(LoadRoot) \ - V(MapEnumLength) \ - V(MathAbs) \ - V(MathClz32) \ - V(MathExp) \ - V(MathFloor) \ - V(MathFround) \ - V(MathLog) \ - V(MathMinMax) \ - V(MathPowHalf) \ - V(MathRound) \ - V(MathSqrt) \ - V(ModByConstI) \ - V(ModByPowerOf2I) \ - V(ModI) \ - V(MulI) \ - 
V(NumberTagD) \ - V(NumberTagI) \ - V(NumberTagU) \ - V(NumberUntagD) \ - V(OsrEntry) \ - V(Parameter) \ - V(Power) \ - V(PushArgument) \ - V(RegExpLiteral) \ - V(Return) \ - V(SeqStringGetChar) \ - V(SeqStringSetChar) \ - V(ShiftI) \ - V(SmiTag) \ - V(SmiUntag) \ - V(StackCheck) \ - V(StoreCodeEntry) \ - V(StoreContextSlot) \ - V(StoreFrameContext) \ - V(StoreGlobalCell) \ - V(StoreKeyed) \ - V(StoreKeyedGeneric) \ - V(StoreNamedField) \ - V(StoreNamedGeneric) \ - V(StringAdd) \ - V(StringCharCodeAt) \ - V(StringCharFromCode) \ - V(StringCompareAndBranch) \ - V(SubI) \ - V(TaggedToI) \ - V(ThisFunction) \ - V(ToFastProperties) \ - V(TransitionElementsKind) \ - V(TrapAllocationMemento) \ - V(Typeof) \ - V(TypeofIsAndBranch) \ - V(Uint32ToDouble) \ - V(UnknownOSRValue) \ +#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \ + V(AccessArgumentsAt) \ + V(AddI) \ + V(AllocateBlockContext) \ + V(Allocate) \ + V(ApplyArguments) \ + V(ArgumentsElements) \ + V(ArgumentsLength) \ + V(ArithmeticD) \ + V(ArithmeticT) \ + V(BitI) \ + V(BoundsCheck) \ + V(Branch) \ + V(CallJSFunction) \ + V(CallWithDescriptor) \ + V(CallFunction) \ + V(CallNew) \ + V(CallNewArray) \ + V(CallRuntime) \ + V(CallStub) \ + V(CheckInstanceType) \ + V(CheckMaps) \ + V(CheckMapValue) \ + V(CheckNonSmi) \ + V(CheckSmi) \ + V(CheckValue) \ + V(ClampDToUint8) \ + V(ClampIToUint8) \ + V(ClampTToUint8NoSSE2) \ + V(ClassOfTestAndBranch) \ + V(ClobberDoubles) \ + V(CompareMinusZeroAndBranch) \ + V(CompareNumericAndBranch) \ + V(CmpObjectEqAndBranch) \ + V(CmpHoleAndBranch) \ + V(CmpMapAndBranch) \ + V(CmpT) \ + V(ConstantD) \ + V(ConstantE) \ + V(ConstantI) \ + V(ConstantS) \ + V(ConstantT) \ + V(ConstructDouble) \ + V(Context) \ + V(DateField) \ + V(DebugBreak) \ + V(DeclareGlobals) \ + V(Deoptimize) \ + V(DivByConstI) \ + V(DivByPowerOf2I) \ + V(DivI) \ + V(DoubleBits) \ + V(DoubleToI) \ + V(DoubleToSmi) \ + V(Drop) \ + V(Dummy) \ + V(DummyUse) \ + V(FlooringDivByConstI) \ + V(FlooringDivByPowerOf2I) \ + V(FlooringDivI) \ + V(ForInCacheArray) \ + V(ForInPrepareMap) \ + V(FunctionLiteral) \ + V(GetCachedArrayIndex) \ + V(Goto) \ + V(HasCachedArrayIndexAndBranch) \ + V(HasInstanceTypeAndBranch) \ + V(InnerAllocatedObject) \ + V(InstanceOf) \ + V(InstanceOfKnownGlobal) \ + V(InstructionGap) \ + V(Integer32ToDouble) \ + V(InvokeFunction) \ + V(IsConstructCallAndBranch) \ + V(IsObjectAndBranch) \ + V(IsStringAndBranch) \ + V(IsSmiAndBranch) \ + V(IsUndetectableAndBranch) \ + V(Label) \ + V(LazyBailout) \ + V(LoadContextSlot) \ + V(LoadFieldByIndex) \ + V(LoadFunctionPrototype) \ + V(LoadGlobalCell) \ + V(LoadGlobalGeneric) \ + V(LoadKeyed) \ + V(LoadKeyedGeneric) \ + V(LoadNamedField) \ + V(LoadNamedGeneric) \ + V(LoadRoot) \ + V(MapEnumLength) \ + V(MathAbs) \ + V(MathClz32) \ + V(MathExp) \ + V(MathFloor) \ + V(MathFround) \ + V(MathLog) \ + V(MathMinMax) \ + V(MathPowHalf) \ + V(MathRound) \ + V(MathSqrt) \ + V(ModByConstI) \ + V(ModByPowerOf2I) \ + V(ModI) \ + V(MulI) \ + V(NumberTagD) \ + V(NumberTagI) \ + V(NumberTagU) \ + V(NumberUntagD) \ + V(OsrEntry) \ + V(Parameter) \ + V(Power) \ + V(PushArgument) \ + V(RegExpLiteral) \ + V(Return) \ + V(SeqStringGetChar) \ + V(SeqStringSetChar) \ + V(ShiftI) \ + V(SmiTag) \ + V(SmiUntag) \ + V(StackCheck) \ + V(StoreCodeEntry) \ + V(StoreContextSlot) \ + V(StoreFrameContext) \ + V(StoreGlobalCell) \ + V(StoreKeyed) \ + V(StoreKeyedGeneric) \ + V(StoreNamedField) \ + V(StoreNamedGeneric) \ + V(StringAdd) \ + V(StringCharCodeAt) \ + V(StringCharFromCode) \ + V(StringCompareAndBranch) \ + V(SubI) \ 
+ V(TaggedToI) \ + V(TailCallThroughMegamorphicCache) \ + V(ThisFunction) \ + V(ToFastProperties) \ + V(TransitionElementsKind) \ + V(TrapAllocationMemento) \ + V(Typeof) \ + V(TypeofIsAndBranch) \ + V(Uint32ToDouble) \ + V(UnknownOSRValue) \ V(WrapReceiver) @@ -496,6 +497,27 @@ class LCallStub FINAL : public LTemplateInstruction<1, 1, 0> { }; +class LTailCallThroughMegamorphicCache FINAL + : public LTemplateInstruction<0, 3, 0> { + public: + explicit LTailCallThroughMegamorphicCache(LOperand* context, + LOperand* receiver, + LOperand* name) { + inputs_[0] = context; + inputs_[1] = receiver; + inputs_[2] = name; + } + + LOperand* context() { return inputs_[0]; } + LOperand* receiver() { return inputs_[1]; } + LOperand* name() { return inputs_[2]; } + + DECLARE_CONCRETE_INSTRUCTION(TailCallThroughMegamorphicCache, + "tail-call-through-megamorphic-cache") + DECLARE_HYDROGEN_ACCESSOR(TailCallThroughMegamorphicCache) +}; + + class LUnknownOSRValue FINAL : public LTemplateInstruction<1, 0, 0> { public: virtual bool HasInterestingComment(LCodeGen* gen) const OVERRIDE { @@ -1909,9 +1931,10 @@ class LCallWithDescriptor FINAL : public LTemplateResultInstruction<1> { LOperand* target() const { return inputs_[0]; } + DECLARE_HYDROGEN_ACCESSOR(CallWithDescriptor) + private: DECLARE_CONCRETE_INSTRUCTION(CallWithDescriptor, "call-with-descriptor") - DECLARE_HYDROGEN_ACCESSOR(CallWithDescriptor) virtual void PrintDataTo(StringStream* stream) OVERRIDE; -- 2.7.4
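
For context, the probe that StubCache::GenerateProbe emits, and that the new
LTailCallThroughMegamorphicCache instruction wraps, boils down to: hash the
(name, receiver map) pair into a primary table, fall back to a secondary table
on a mismatch, and on a hit optionally tear down the current frame before
tail-calling the cached handler; on a miss, fall through to the IC miss stub.
The standalone C++ below is a minimal model of that control flow. It is a
sketch only: StubCacheSketch, Entry, Put, Probe, the hash functions, and the
table sizes are made-up stand-ins, not V8 code, and the real probe is emitted
as machine code that jumps to a handler Code object rather than calling a
std::function.

// Illustrative sketch only -- not V8 code. Models the primary/secondary
// stub-cache probe and the "leave frame, then tail-call" behaviour that the
// patch threads through GenerateProbe via the new leave_frame flag.
#include <array>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

using Handler = std::function<void()>;  // stands in for a handler code object

struct Entry {
  std::string name;        // property name (part of the key)
  std::uintptr_t map = 0;  // receiver map (part of the key)
  Handler handler;         // "code" to run on a hit
};

class StubCacheSketch {
 public:
  void Put(const std::string& name, std::uintptr_t map, Handler handler) {
    primary_[PrimaryIndex(name, map)] = Entry{name, map, std::move(handler)};
  }

  // Returns true after "tail-calling" the handler on a hit; false on a miss,
  // which corresponds to falling through to the IC miss stub.
  bool Probe(const std::string& name, std::uintptr_t map, bool leave_frame) {
    const Entry* e = &primary_[PrimaryIndex(name, map)];
    if (e->name != name || e->map != map) {
      e = &secondary_[SecondaryIndex(name, map)];
      if (e->name != name || e->map != map) return false;  // cache miss
    }
    if (leave_frame) {
      // Generated code would emit `leave` here so the handler returns
      // directly to our caller; nothing to model in plain C++.
    }
    e->handler();  // the generated code jumps here and never comes back
    return true;
  }

 private:
  // Toy sizes and hashes; the real tables are larger and the hashes depend on
  // the code flags being probed for.
  static constexpr std::size_t kPrimarySize = 64;
  static constexpr std::size_t kSecondarySize = 16;

  static std::size_t PrimaryIndex(const std::string& name, std::uintptr_t map) {
    return (std::hash<std::string>{}(name) ^ map) % kPrimarySize;
  }
  static std::size_t SecondaryIndex(const std::string& name, std::uintptr_t map) {
    return (std::hash<std::string>{}(name) + map) % kSecondarySize;
  }

  std::array<Entry, kPrimarySize> primary_;
  std::array<Entry, kSecondarySize> secondary_;
};

int main() {
  StubCacheSketch cache;
  cache.Put("length", 0x1234, [] { std::cout << "handler ran\n"; });
  if (!cache.Probe("length", 0x1234, /*leave_frame=*/true)) {
    std::cout << "miss: fall through to the runtime\n";
  }
}

The leave_frame / must_teardown_frame plumbing exists because optimized code
may have built a stack frame (NeedsEagerFrame()); since the probe tail-calls
and never returns, that frame has to be popped with `leave` before the jump so
that the handler, or the miss stub, returns directly to the original caller.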