From 1fa4285e1ce16cfadf8c40a0993491ec4e2bbbe0 Mon Sep 17 00:00:00 2001
From: danno <danno@chromium.org>
Date: Wed, 1 Jul 2015 23:20:03 -0700
Subject: [PATCH] [turbofan] Enable tail calls for %_CallRuntime.

This involves:
- Enabling the tail call optimization reducer in all cases.
- Adding an additional flag to CallFunctionParameters to mark call sites
  that are tail-call enabled.
- Only setting the tail-call flag for %_CallFunction.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1216933011

Cr-Commit-Position: refs/heads/master@{#29436}
---
 src/compiler/arm/code-generator-arm.cc        | 12 +--
 src/compiler/arm/instruction-selector-arm.cc  |  6 +-
 src/compiler/arm64/code-generator-arm64.cc    | 12 +--
 .../arm64/instruction-selector-arm64.cc       |  6 +-
 src/compiler/ia32/code-generator-ia32.cc      | 17 ++--
 .../ia32/instruction-selector-ia32.cc         |  7 +-
 src/compiler/js-generic-lowering.cc           |  3 +
 src/compiler/js-intrinsic-lowering.cc         | 21 +++++
 src/compiler/js-intrinsic-lowering.h          |  3 +
 src/compiler/js-operator.cc                   | 17 ++--
 src/compiler/js-operator.h                    | 13 ++-
 src/compiler/linkage.cc                       | 58 +++++++++++++
 src/compiler/linkage.h                        |  3 +
 src/compiler/mips/code-generator-mips.cc      | 14 ++--
 .../mips/instruction-selector-mips.cc         |  6 +-
 src/compiler/mips64/code-generator-mips64.cc  | 14 ++--
 .../mips64/instruction-selector-mips64.cc     |  6 +-
 src/compiler/pipeline.cc                      |  3 +-
 src/compiler/x64/code-generator-x64.cc        | 19 +++--
 src/compiler/x64/instruction-selector-x64.cc  |  6 +-
 test/mjsunit/call-runtime-tail.js             | 81 +++++++++++++++++++
 21 files changed, 244 insertions(+), 83 deletions(-)
 create mode 100644 test/mjsunit/call-runtime-tail.js
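The change is easiest to see at the JavaScript level: a deep self-recursive call made through the %_CallFunction intrinsic can now be emitted as a tail call, reusing the caller's frame instead of growing the stack. A minimal sketch of that pattern, modeled on the mjsunit test added at the end of this patch (it assumes a d8 shell run with the test's flags, --allow-natives-syntax --turbo; the names here are placeholders):

var n = 100000;
var spin = function() {
  "use strict";
  if (n-- == 0) return this;
  // %_CallFunction(receiver, ..., callee) forwards the receiver and the
  // arguments unchanged, which is the shape TurboFan can now turn into a
  // tail call, so this deep recursion no longer overflows the stack.
  return %_CallFunction(this, spin);
};
%OptimizeFunctionOnNextCall(spin);
spin.call(new Object());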
diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc
index b9cc354b2..2b6054607 100644
--- a/src/compiler/arm/code-generator-arm.cc
+++ b/src/compiler/arm/code-generator-arm.cc
@@ -304,10 +304,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   int stack_slots = frame()->GetSpillSlotCount();
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ LeaveFrame(StackFrame::MANUAL);
-    int pop_count = descriptor->IsJSFunctionCall()
-                        ? static_cast<int>(descriptor->JSParameterCount())
-                        : 0;
-    __ Drop(pop_count);
   }
 }
 
@@ -1053,8 +1049,12 @@ void CodeGenerator::AssembleReturn() {
       __ LeaveFrame(StackFrame::MANUAL);
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ Drop(pop_count);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count != 0) {
+        __ Drop(pop_count);
+      }
       __ Ret();
     }
   } else {
diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc
index b53245446..d9f8f8b1d 100644
--- a/src/compiler/arm/instruction-selector-arm.cc
+++ b/src/compiler/arm/instruction-selector-arm.cc
@@ -1166,9 +1166,7 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
     CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
@@ -1177,8 +1175,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
     // heuristics in the register allocator for where to emit constants.
     InitializeCallBuffer(node, &buffer, true, false);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index 0dab3c743..c3e9af6a2 100644
--- a/src/compiler/arm64/code-generator-arm64.cc
+++ b/src/compiler/arm64/code-generator-arm64.cc
@@ -351,10 +351,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ Mov(jssp, fp);
     __ Pop(fp, lr);
-    int pop_count = descriptor->IsJSFunctionCall()
-                        ? static_cast<int>(descriptor->JSParameterCount())
-                        : 0;
-    __ Drop(pop_count);
   }
 }
 
@@ -1184,8 +1180,12 @@ void CodeGenerator::AssembleReturn() {
       __ Pop(fp, lr);
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ Drop(pop_count);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count != 0) {
+        __ Drop(pop_count);
+      }
       __ Ret();
     }
   } else {
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index 3eb37e1ab..b304abcc4 100644
--- a/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/src/compiler/arm64/instruction-selector-arm64.cc
@@ -1470,9 +1470,7 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
     CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
@@ -1481,8 +1479,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
     // heuristics in the register allocator for where to emit constants.
     InitializeCallBuffer(node, &buffer, true, false);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/src/compiler/ia32/code-generator-ia32.cc b/src/compiler/ia32/code-generator-ia32.cc
index 7272fdee9..4690a8cc0 100644
--- a/src/compiler/ia32/code-generator-ia32.cc
+++ b/src/compiler/ia32/code-generator-ia32.cc
@@ -290,13 +290,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ mov(esp, ebp);
     __ pop(ebp);
-    int32_t bytes_to_pop =
-        descriptor->IsJSFunctionCall()
-            ? static_cast<int32_t>(descriptor->JSParameterCount() *
-                                   kPointerSize)
-            : 0;
-    __ pop(Operand(esp, bytes_to_pop));
-    __ add(esp, Immediate(bytes_to_pop));
   }
 }
 
@@ -1348,8 +1341,14 @@ void CodeGenerator::AssembleReturn() {
       __ pop(ebp);  // Pop caller's frame pointer.
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ Ret(pop_count * kPointerSize, ebx);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count == 0) {
+        __ ret(0);
+      } else {
+        __ Ret(pop_count * kPointerSize, ebx);
+      }
     }
   } else {
     __ ret(0);
diff --git a/src/compiler/ia32/instruction-selector-ia32.cc b/src/compiler/ia32/instruction-selector-ia32.cc
index 2720910ca..d9ff42259 100644
--- a/src/compiler/ia32/instruction-selector-ia32.cc
+++ b/src/compiler/ia32/instruction-selector-ia32.cc
@@ -906,16 +906,13 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
     CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
     InitializeCallBuffer(node, &buffer, true, true);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc
index d907c3d14..da42aba52 100644
--- a/src/compiler/js-generic-lowering.cc
+++ b/src/compiler/js-generic-lowering.cc
@@ -529,6 +529,9 @@ void JSGenericLowering::LowerJSCallFunction(Node* node) {
   CallFunctionStub stub(isolate(), arg_count, p.flags());
   CallInterfaceDescriptor d = stub.GetCallInterfaceDescriptor();
   CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);
+  if (p.AllowTailCalls()) {
+    flags |= CallDescriptor::kSupportsTailCalls;
+  }
   CallDescriptor* desc = Linkage::GetStubCallDescriptor(
       isolate(), zone(), d, static_cast<int>(p.arity() - 1), flags);
   Node* stub_code = jsgraph()->HeapConstant(stub.GetCode());
diff --git a/src/compiler/js-intrinsic-lowering.cc b/src/compiler/js-intrinsic-lowering.cc
index 0039d1d4c..01552559e 100644
--- a/src/compiler/js-intrinsic-lowering.cc
+++ b/src/compiler/js-intrinsic-lowering.cc
@@ -94,6 +94,8 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
       return ReduceGetCallerJSFunction(node);
     case Runtime::kInlineThrowNotDateError:
       return ReduceThrowNotDateError(node);
+    case Runtime::kInlineCallFunction:
+      return ReduceCallFunction(node);
     default:
       break;
   }
@@ -513,6 +515,21 @@ Reduction JSIntrinsicLowering::ReduceThrowNotDateError(Node* node) {
 }
 
 
+Reduction JSIntrinsicLowering::ReduceCallFunction(Node* node) {
+  CallRuntimeParameters params = OpParameter<CallRuntimeParameters>(node->op());
+  size_t arity = params.arity();
+  node->set_op(javascript()->CallFunction(arity, NO_CALL_FUNCTION_FLAGS, STRICT,
+                                          VectorSlotPair(), ALLOW_TAIL_CALLS));
+  Node* function = node->InputAt(static_cast<int>(arity - 1));
+  while (--arity != 0) {
+    node->ReplaceInput(static_cast<int>(arity),
+                       node->InputAt(static_cast<int>(arity - 1)));
+  }
+  node->ReplaceInput(0, function);
+  return Changed(node);
+}
+
+
 Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
                                       Node* b) {
   node->set_op(op);
@@ -549,6 +566,10 @@ CommonOperatorBuilder* JSIntrinsicLowering::common() const {
   return jsgraph()->common();
 }
 
+JSOperatorBuilder* JSIntrinsicLowering::javascript() const {
+  return jsgraph_->javascript();
+}
+
 
 MachineOperatorBuilder* JSIntrinsicLowering::machine() const {
   return jsgraph()->machine();
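ReduceCallFunction above rewrites a %_CallFunction intrinsic node into an ordinary JSCallFunction node: at the JavaScript level the intrinsic takes the receiver first and the callee last, while JSCallFunction expects the callee as input 0, so the loop rotates the node's inputs by one position. A rough JavaScript-level picture of the equivalence (the function and values below are placeholders, not taken from the patch; assumes --allow-natives-syntax):

function f(a, b) { return this.name + a + b; }
var recv = {name: "r"};
// Intrinsic form: receiver first, then the arguments, callee last.
var x = %_CallFunction(recv, 1, 2, f);
// Equivalent explicit-receiver call: callee first.
var y = f.call(recv, 1, 2);
// Both produce "r12"; the reduction moves f from the last input to input 0
// and marks the resulting JSCallFunction operator as ALLOW_TAIL_CALLS.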
diff --git a/src/compiler/js-intrinsic-lowering.h b/src/compiler/js-intrinsic-lowering.h
index 5dd46156b..61cf9c481 100644
--- a/src/compiler/js-intrinsic-lowering.h
+++ b/src/compiler/js-intrinsic-lowering.h
@@ -15,6 +15,7 @@ namespace compiler {
 
 // Forward declarations.
 class CommonOperatorBuilder;
+class JSOperatorBuilder;
 class JSGraph;
 class MachineOperatorBuilder;
 
@@ -56,6 +57,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
   Reduction ReduceGetTypeFeedbackVector(Node* node);
   Reduction ReduceGetCallerJSFunction(Node* node);
   Reduction ReduceThrowNotDateError(Node* node);
+  Reduction ReduceCallFunction(Node* node);
 
   Reduction Change(Node* node, const Operator* op);
   Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
@@ -65,6 +67,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
   Graph* graph() const;
   JSGraph* jsgraph() const { return jsgraph_; }
   CommonOperatorBuilder* common() const;
+  JSOperatorBuilder* javascript() const;
   MachineOperatorBuilder* machine() const;
   DeoptimizationMode mode() const { return mode_; }
   SimplifiedOperatorBuilder* simplified() { return &simplified_; }
diff --git a/src/compiler/js-operator.cc b/src/compiler/js-operator.cc
index d496dff3c..1966724a8 100644
--- a/src/compiler/js-operator.cc
+++ b/src/compiler/js-operator.cc
@@ -30,7 +30,11 @@ size_t hash_value(VectorSlotPair const& p) {
 
 
 std::ostream& operator<<(std::ostream& os, CallFunctionParameters const& p) {
-  return os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
+  os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
+  if (p.AllowTailCalls()) {
+    os << ", ALLOW_TAIL_CALLS";
+  }
+  return os;
 }
 
 
@@ -470,10 +474,13 @@ CACHED_OP_LIST_WITH_LANGUAGE_MODE(CACHED_WITH_LANGUAGE_MODE)
 #undef CACHED_WITH_LANGUAGE_MODE
 
 
-const Operator* JSOperatorBuilder::CallFunction(
-    size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
-    VectorSlotPair const& feedback) {
-  CallFunctionParameters parameters(arity, flags, language_mode, feedback);
+const Operator* JSOperatorBuilder::CallFunction(size_t arity,
+                                                CallFunctionFlags flags,
+                                                LanguageMode language_mode,
+                                                VectorSlotPair const& feedback,
+                                                TailCallMode tail_call_mode) {
+  CallFunctionParameters parameters(arity, flags, language_mode, feedback,
+                                    tail_call_mode);
   return new (zone()) Operator1<CallFunctionParameters>(   // --
       IrOpcode::kJSCallFunction, Operator::kNoProperties,  // opcode
       "JSCallFunction",                                    // name
diff --git a/src/compiler/js-operator.h b/src/compiler/js-operator.h
index 3a7d2ba91..d70c8e209 100644
--- a/src/compiler/js-operator.h
+++ b/src/compiler/js-operator.h
@@ -45,6 +45,7 @@ bool operator!=(VectorSlotPair const&, VectorSlotPair const&);
 
 size_t hash_value(VectorSlotPair const&);
 
+enum TailCallMode { NO_TAIL_CALLS, ALLOW_TAIL_CALLS };
 
 // Defines the arity and the call flags for a JavaScript function call. This is
 // used as a parameter by JSCallFunction operators.
@@ -52,10 +53,12 @@ class CallFunctionParameters final {
  public:
   CallFunctionParameters(size_t arity, CallFunctionFlags flags,
                          LanguageMode language_mode,
-                         VectorSlotPair const& feedback)
+                         VectorSlotPair const& feedback,
+                         TailCallMode tail_call_mode)
       : bit_field_(ArityField::encode(arity) | FlagsField::encode(flags) |
                    LanguageModeField::encode(language_mode)),
-        feedback_(feedback) {}
+        feedback_(feedback),
+        tail_call_mode_(tail_call_mode) {}
 
   size_t arity() const { return ArityField::decode(bit_field_); }
   CallFunctionFlags flags() const { return FlagsField::decode(bit_field_); }
@@ -72,6 +75,8 @@ class CallFunctionParameters final {
     return !(*this == that);
   }
 
+  bool AllowTailCalls() const { return tail_call_mode_ == ALLOW_TAIL_CALLS; }
+
  private:
   friend size_t hash_value(CallFunctionParameters const& p) {
     return base::hash_combine(p.bit_field_, p.feedback_);
@@ -83,6 +88,7 @@ class CallFunctionParameters final {
 
   const uint32_t bit_field_;
   const VectorSlotPair feedback_;
+  bool tail_call_mode_;
 };
 
 size_t hash_value(CallFunctionParameters const&);
@@ -415,7 +421,8 @@ class JSOperatorBuilder final : public ZoneObject {
 
   const Operator* CallFunction(
       size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
-      VectorSlotPair const& feedback = VectorSlotPair());
+      VectorSlotPair const& feedback = VectorSlotPair(),
+      TailCallMode tail_call_mode = NO_TAIL_CALLS);
   const Operator* CallRuntime(Runtime::FunctionId id, size_t arity);
   const Operator* CallConstruct(int arguments);
diff --git a/src/compiler/linkage.cc b/src/compiler/linkage.cc
index 9338a5bed..93bf21a84 100644
--- a/src/compiler/linkage.cc
+++ b/src/compiler/linkage.cc
@@ -4,6 +4,7 @@
 
 #include "src/code-stubs.h"
 #include "src/compiler.h"
+#include "src/compiler/common-operator.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/node.h"
 #include "src/compiler/pipeline.h"
@@ -48,6 +49,63 @@ bool CallDescriptor::HasSameReturnLocationsAs(
 }
 
 
+bool CallDescriptor::CanTailCall(const Node* node) const {
+  // Tail calling is currently allowed if return locations match and all
+  // parameters are either in registers or on the stack but match exactly in
+  // number and content.
+  CallDescriptor const* other = OpParameter<CallDescriptor const*>(node);
+  if (!HasSameReturnLocationsAs(other)) return false;
+  size_t current_input = 0;
+  size_t other_input = 0;
+  size_t stack_parameter = 0;
+  while (true) {
+    if (other_input >= other->InputCount()) {
+      while (current_input < InputCount()) {
+        if (!GetInputLocation(current_input).is_register()) {
+          return false;
+        }
+        ++current_input;
+      }
+      return true;
+    }
+    if (current_input >= InputCount()) {
+      while (other_input < other->InputCount()) {
+        if (!other->GetInputLocation(other_input).is_register()) {
+          return false;
+        }
+        ++other_input;
+      }
+      return true;
+    }
+    if (GetInputLocation(current_input).is_register()) {
+      ++current_input;
+      continue;
+    }
+    if (other->GetInputLocation(other_input).is_register()) {
+      ++other_input;
+      continue;
+    }
+    if (GetInputLocation(current_input) !=
+        other->GetInputLocation(other_input)) {
+      return false;
+    }
+    Node* input = node->InputAt(static_cast<int>(other_input));
+    if (input->opcode() != IrOpcode::kParameter) {
+      return false;
+    }
+    size_t param_index = ParameterIndexOf(input->op());
+    if (param_index != stack_parameter) {
+      return false;
+    }
+    ++stack_parameter;
+    ++current_input;
+    ++other_input;
+  }
+  UNREACHABLE();
+  return false;
+}
+
+
 CallDescriptor* Linkage::ComputeIncoming(Zone* zone, CompilationInfo* info) {
   if (info->code_stub() != NULL) {
     // Use the code stub interface descriptor.
diff --git a/src/compiler/linkage.h b/src/compiler/linkage.h
index ea130d7dd..e403f63bb 100644
--- a/src/compiler/linkage.h
+++ b/src/compiler/linkage.h
@@ -20,6 +20,7 @@ class CallInterfaceDescriptor;
 
 namespace compiler {
 
+class Node;
 class OsrHelper;
 
 // Describes the location for a parameter or a return value to a call.
@@ -167,6 +168,8 @@ class CallDescriptor final : public ZoneObject {
 
   bool HasSameReturnLocationsAs(const CallDescriptor* other) const;
 
+  bool CanTailCall(const Node* call) const;
+
  private:
   friend class Linkage;
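CanTailCall above is deliberately conservative: besides matching return locations, any stack-passed argument of the outgoing call must be one of the caller's own parameters, forwarded in the same position, so no stack shuffling is needed. In terms of the intrinsic, and following the cases exercised by the mjsunit test at the end of this patch, a sketch (not part of the patch; names are placeholders, assumes --allow-natives-syntax):

var who = {};
var callee = function(px) { return this; };
var caller = function(px) {
  "use strict";
  // Accepted: this and px are forwarded unchanged, callee passed last.
  return %_CallFunction(this, px, callee);
  // Rejected shapes, emitted as ordinary (stack-growing) calls:
  //   %_CallFunction(px, this, callee);             // arguments swapped
  //   %_CallFunction(this, px, undefined, callee);  // extra argument
  //   %_CallFunction(this, callee);                 // missing argument
};
%OptimizeFunctionOnNextCall(caller);
caller.call(who, 1);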
diff --git a/src/compiler/mips/code-generator-mips.cc b/src/compiler/mips/code-generator-mips.cc
index a7806ccae..4d87f2c24 100644
--- a/src/compiler/mips/code-generator-mips.cc
+++ b/src/compiler/mips/code-generator-mips.cc
@@ -399,10 +399,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   int stack_slots = frame()->GetSpillSlotCount();
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ LeaveFrame(StackFrame::MANUAL);
-    int pop_count = descriptor->IsJSFunctionCall()
-                        ? static_cast<int>(descriptor->JSParameterCount())
-                        : 0;
-    __ Drop(pop_count);
   }
 }
 
@@ -1157,8 +1153,14 @@ void CodeGenerator::AssembleReturn() {
       __ Pop(ra, fp);
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ DropAndRet(pop_count);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count != 0) {
+        __ DropAndRet(pop_count);
+      } else {
+        __ Ret();
+      }
     }
   } else {
     __ Ret();
diff --git a/src/compiler/mips/instruction-selector-mips.cc b/src/compiler/mips/instruction-selector-mips.cc
index 8ecc25e9e..52be8a9a2 100644
--- a/src/compiler/mips/instruction-selector-mips.cc
+++ b/src/compiler/mips/instruction-selector-mips.cc
@@ -589,16 +589,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
     CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
     InitializeCallBuffer(node, &buffer, true, false);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/src/compiler/mips64/code-generator-mips64.cc b/src/compiler/mips64/code-generator-mips64.cc
index 534ebfb09..b3f8088e5 100644
--- a/src/compiler/mips64/code-generator-mips64.cc
+++ b/src/compiler/mips64/code-generator-mips64.cc
@@ -399,10 +399,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   int stack_slots = frame()->GetSpillSlotCount();
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ LeaveFrame(StackFrame::MANUAL);
-    int pop_count = descriptor->IsJSFunctionCall()
-                        ? static_cast<int>(descriptor->JSParameterCount())
-                        : 0;
-    __ Drop(pop_count);
   }
 }
 
@@ -1230,8 +1226,14 @@ void CodeGenerator::AssembleReturn() {
       __ Pop(ra, fp);
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ DropAndRet(pop_count);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count != 0) {
+        __ DropAndRet(pop_count);
+      } else {
+        __ Ret();
+      }
     }
   } else {
     __ Ret();
diff --git a/src/compiler/mips64/instruction-selector-mips64.cc b/src/compiler/mips64/instruction-selector-mips64.cc
index 4127c312f..d4dbfe03a 100644
--- a/src/compiler/mips64/instruction-selector-mips64.cc
+++ b/src/compiler/mips64/instruction-selector-mips64.cc
@@ -738,16 +738,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
     CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
     InitializeCallBuffer(node, &buffer, true, false);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/src/compiler/pipeline.cc b/src/compiler/pipeline.cc
index 6f85651a2..7ced8e67b 100644
--- a/src/compiler/pipeline.cc
+++ b/src/compiler/pipeline.cc
@@ -703,8 +703,7 @@ struct GenericLoweringPhase {
     AddReducer(data, &graph_reducer, &common_reducer);
     AddReducer(data, &graph_reducer, &generic_lowering);
     AddReducer(data, &graph_reducer, &select_lowering);
-    // TODO(turbofan): TCO is currently limited to stubs.
-    if (data->info()->IsStub()) AddReducer(data, &graph_reducer, &tco);
+    AddReducer(data, &graph_reducer, &tco);
     graph_reducer.ReduceGraph();
   }
 };
diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc
index 50eb64cce..bdce08320 100644
--- a/src/compiler/x64/code-generator-x64.cc
+++ b/src/compiler/x64/code-generator-x64.cc
@@ -538,13 +538,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
   if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
     __ movq(rsp, rbp);
     __ popq(rbp);
-    int32_t bytes_to_pop =
-        descriptor->IsJSFunctionCall()
-            ? static_cast<int32_t>(descriptor->JSParameterCount() *
-                                   kPointerSize)
-            : 0;
-    __ popq(Operand(rsp, bytes_to_pop));
-    __ addq(rsp, Immediate(bytes_to_pop));
   }
 }
 
@@ -1578,11 +1571,17 @@ void CodeGenerator::AssembleReturn() {
       __ popq(rbp);  // Pop caller's frame pointer.
       int pop_count = descriptor->IsJSFunctionCall()
                           ? static_cast<int>(descriptor->JSParameterCount())
-                          : 0;
-      __ Ret(pop_count * kPointerSize, rbx);
+                          : (info()->IsStub()
+                                 ? info()->code_stub()->GetStackParameterCount()
+                                 : 0);
+      if (pop_count == 0) {
+        __ Ret();
+      } else {
+        __ Ret(pop_count * kPointerSize, rbx);
+      }
     }
   } else {
-    __ ret(0);
+    __ Ret();
   }
 }
diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc
index 35785c37a..b245fa8f8 100644
--- a/src/compiler/x64/instruction-selector-x64.cc
+++ b/src/compiler/x64/instruction-selector-x64.cc
@@ -1112,16 +1112,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
   DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
 
   // TODO(turbofan): Relax restriction for stack parameters.
-  if (descriptor->UsesOnlyRegisters() &&
-      descriptor->HasSameReturnLocationsAs(
-          linkage()->GetIncomingDescriptor())) {
+  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);
 
     // Compute InstructionOperands for inputs and outputs.
     InitializeCallBuffer(node, &buffer, true, true);
 
-    DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
     // Select the appropriate opcode based on the call type.
     InstructionCode opcode;
     switch (descriptor->kind()) {
diff --git a/test/mjsunit/call-runtime-tail.js b/test/mjsunit/call-runtime-tail.js
new file mode 100644
index 000000000..6ad107dcb
--- /dev/null
+++ b/test/mjsunit/call-runtime-tail.js
@@ -0,0 +1,81 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --nostress-opt --turbo
+
+var p0 = new Object();
+var p1 = new Object();
+var p2 = new Object();
+
+// Ensure 1 parameter passed straight-through is handled correctly
+var count1 = 100000;
+tailee1 = function() {
+  "use strict";
+  if (count1-- == 0) {
+    return this;
+  }
+  return %_CallFunction(this, tailee1);
+};
+
+%OptimizeFunctionOnNextCall(tailee1);
+assertEquals(p0, tailee1.call(p0));
+
+// Ensure 2 parameters passed straight-through trigger a tail call are handled
+// correctly and don't cause a stack overflow.
+var count2 = 100000;
+tailee2 = function(px) {
+  "use strict";
+  assertEquals(p2, px);
+  assertEquals(p1, this);
+  count2 = ((count2 | 0) - 1) | 0;
+  if ((count2 | 0) === 0) {
+    return this;
+  }
+  return %_CallFunction(this, px, tailee2);
+};
+
+%OptimizeFunctionOnNextCall(tailee2);
+assertEquals(p1, tailee2.call(p1, p2));
+
+// Ensure swapped 2 parameters don't trigger a tail call (parameter swizzling
+// for the tail call isn't supported yet).
+var count3 = 100000;
+tailee3 = function(px) {
+  "use strict";
+  if (count3-- == 0) {
+    return this;
+  }
+  return %_CallFunction(px, this, tailee3);
+};
+
+%OptimizeFunctionOnNextCall(tailee3);
+assertThrows(function() { tailee3.call(p1, p2); });
+
+// Ensure too many parameters defeats the tail call optimization (currently
+// unsupported).
+var count4 = 1000000;
+tailee4 = function(px) {
+  "use strict";
+  if (count4-- == 0) {
+    return this;
+  }
+  return %_CallFunction(this, px, undefined, tailee4);
+};
+
+%OptimizeFunctionOnNextCall(tailee4);
+assertThrows(function() { tailee4.call(p1, p2); });
+
+// Ensure too few parameters defeats the tail call optimization (currently
+// unsupported).
+var count5 = 1000000;
+tailee5 = function(px) {
+  "use strict";
+  if (count5-- == 0) {
+    return this;
+  }
+  return %_CallFunction(this, tailee5);
+};
+
+%OptimizeFunctionOnNextCall(tailee5);
+assertThrows(function() { tailee5.call(p1, p2); });
-- 
2.34.1