int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
- int pop_count = descriptor->IsJSFunctionCall()
- ? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
}
}
__ LeaveFrame(StackFrame::MANUAL);
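+ // Stubs may also have stack parameters that must be popped on return.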
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count != 0) {
+ __ Drop(pop_count);
+ }
__ Ret();
}
} else {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ Mov(jssp, fp);
__ Pop(fp, lr);
- int pop_count = descriptor->IsJSFunctionCall()
- ? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
}
}
__ Pop(fp, lr);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count != 0) {
+ __ Drop(pop_count);
+ }
__ Ret();
}
} else {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ mov(esp, ebp);
__ pop(ebp);
- int32_t bytes_to_pop =
- descriptor->IsJSFunctionCall()
- ? static_cast<int32_t>(descriptor->JSParameterCount() *
- kPointerSize)
- : 0;
- __ pop(Operand(esp, bytes_to_pop));
- __ add(esp, Immediate(bytes_to_pop));
}
}
__ pop(ebp); // Pop caller's frame pointer.
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Ret(pop_count * kPointerSize, ebx);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count == 0) {
+ __ ret(0);
+ } else {
+ __ Ret(pop_count * kPointerSize, ebx);
+ }
}
} else {
__ ret(0);
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, true);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
CallFunctionStub stub(isolate(), arg_count, p.flags());
CallInterfaceDescriptor d = stub.GetCallInterfaceDescriptor();
CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);
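+ // If the operator permits tail calls, mark the call descriptor accordingly.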
+ if (p.AllowTailCalls()) {
+ flags |= CallDescriptor::kSupportsTailCalls;
+ }
CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), zone(), d, static_cast<int>(p.arity() - 1), flags);
Node* stub_code = jsgraph()->HeapConstant(stub.GetCode());
return ReduceGetCallerJSFunction(node);
case Runtime::kInlineThrowNotDateError:
return ReduceThrowNotDateError(node);
+ case Runtime::kInlineCallFunction:
+ return ReduceCallFunction(node);
default:
break;
}
}
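+// Lowers %_CallFunction(receiver, arg1, ..., argN, function) into a
+// JSCallFunction node that is marked as allowing tail calls.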
+Reduction JSIntrinsicLowering::ReduceCallFunction(Node* node) {
+ CallRuntimeParameters params = OpParameter<CallRuntimeParameters>(node->op());
+ size_t arity = params.arity();
+ node->set_op(javascript()->CallFunction(arity, NO_CALL_FUNCTION_FLAGS, STRICT,
+ VectorSlotPair(), ALLOW_TAIL_CALLS));
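+ // The intrinsic passes the callee as its last argument, whereas
+ // JSCallFunction expects it as input 0, so rotate the inputs right by one.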
+ Node* function = node->InputAt(static_cast<int>(arity - 1));
+ while (--arity != 0) {
+ node->ReplaceInput(static_cast<int>(arity),
+ node->InputAt(static_cast<int>(arity - 1)));
+ }
+ node->ReplaceInput(0, function);
+ return Changed(node);
+}
+
+
Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
Node* b) {
node->set_op(op);
return jsgraph()->common();
}
+JSOperatorBuilder* JSIntrinsicLowering::javascript() const {
+ return jsgraph_->javascript();
+}
+
MachineOperatorBuilder* JSIntrinsicLowering::machine() const {
return jsgraph()->machine();
// Forward declarations.
class CommonOperatorBuilder;
+class JSOperatorBuilder;
class JSGraph;
class MachineOperatorBuilder;
Reduction ReduceGetTypeFeedbackVector(Node* node);
Reduction ReduceGetCallerJSFunction(Node* node);
Reduction ReduceThrowNotDateError(Node* node);
+ Reduction ReduceCallFunction(Node* node);
Reduction Change(Node* node, const Operator* op);
Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
Graph* graph() const;
JSGraph* jsgraph() const { return jsgraph_; }
CommonOperatorBuilder* common() const;
+ JSOperatorBuilder* javascript() const;
MachineOperatorBuilder* machine() const;
DeoptimizationMode mode() const { return mode_; }
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
std::ostream& operator<<(std::ostream& os, CallFunctionParameters const& p) {
- return os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
+ os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
+ if (p.AllowTailCalls()) {
+ os << ", ALLOW_TAIL_CALLS";
+ }
+ return os;
}
#undef CACHED_WITH_LANGUAGE_MODE
-const Operator* JSOperatorBuilder::CallFunction(
- size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
- VectorSlotPair const& feedback) {
- CallFunctionParameters parameters(arity, flags, language_mode, feedback);
+const Operator* JSOperatorBuilder::CallFunction(size_t arity,
+ CallFunctionFlags flags,
+ LanguageMode language_mode,
+ VectorSlotPair const& feedback,
+ TailCallMode tail_call_mode) {
+ CallFunctionParameters parameters(arity, flags, language_mode, feedback,
+ tail_call_mode);
return new (zone()) Operator1<CallFunctionParameters>( // --
IrOpcode::kJSCallFunction, Operator::kNoProperties, // opcode
"JSCallFunction", // name
size_t hash_value(VectorSlotPair const&);
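+// Signals whether a JSCallFunction operator may be turned into a tail call.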
+enum TailCallMode { NO_TAIL_CALLS, ALLOW_TAIL_CALLS };
// Defines the arity and the call flags for a JavaScript function call. This is
// used as a parameter by JSCallFunction operators.
public:
CallFunctionParameters(size_t arity, CallFunctionFlags flags,
LanguageMode language_mode,
- VectorSlotPair const& feedback)
+ VectorSlotPair const& feedback,
+ TailCallMode tail_call_mode)
: bit_field_(ArityField::encode(arity) | FlagsField::encode(flags) |
LanguageModeField::encode(language_mode)),
- feedback_(feedback) {}
+ feedback_(feedback),
+ tail_call_mode_(tail_call_mode) {}
size_t arity() const { return ArityField::decode(bit_field_); }
CallFunctionFlags flags() const { return FlagsField::decode(bit_field_); }
return !(*this == that);
}
+ bool AllowTailCalls() const { return tail_call_mode_ == ALLOW_TAIL_CALLS; }
+
private:
friend size_t hash_value(CallFunctionParameters const& p) {
return base::hash_combine(p.bit_field_, p.feedback_);
const uint32_t bit_field_;
const VectorSlotPair feedback_;
+ const TailCallMode tail_call_mode_;
};
size_t hash_value(CallFunctionParameters const&);
const Operator* CallFunction(
size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
- VectorSlotPair const& feedback = VectorSlotPair());
+ VectorSlotPair const& feedback = VectorSlotPair(),
+ TailCallMode tail_call_mode = NO_TAIL_CALLS);
const Operator* CallRuntime(Runtime::FunctionId id, size_t arity);
const Operator* CallConstruct(int arguments);
#include "src/code-stubs.h"
#include "src/compiler.h"
+#include "src/compiler/common-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node.h"
#include "src/compiler/pipeline.h"
}
+bool CallDescriptor::CanTailCall(const Node* node) const {
+ // Tail calling is currently allowed if return locations match and all
+ // parameters are either in registers or on the stack but match exactly in
+ // number and content.
+ CallDescriptor const* other = OpParameter<CallDescriptor const*>(node);
+ if (!HasSameReturnLocationsAs(other)) return false;
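+ // Walk the input locations of both descriptors in parallel: register inputs
+ // are skipped, while stack inputs must match pairwise and be fed by the
+ // caller's own parameters, in order.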
+ size_t current_input = 0;
+ size_t other_input = 0;
+ size_t stack_parameter = 0;
+ while (true) {
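+ // The outgoing call has no more inputs; all remaining incoming inputs
+ // must be passed in registers.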
+ if (other_input >= other->InputCount()) {
+ while (current_input < InputCount()) {
+ if (!GetInputLocation(current_input).is_register()) {
+ return false;
+ }
+ ++current_input;
+ }
+ return true;
+ }
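+ // The incoming descriptor is exhausted; all remaining outgoing inputs
+ // must be passed in registers.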
+ if (current_input >= InputCount()) {
+ while (other_input < other->InputCount()) {
+ if (!other->GetInputLocation(other_input).is_register()) {
+ return false;
+ }
+ ++other_input;
+ }
+ return true;
+ }
+ if (GetInputLocation(current_input).is_register()) {
+ ++current_input;
+ continue;
+ }
+ if (other->GetInputLocation(other_input).is_register()) {
+ ++other_input;
+ continue;
+ }
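+ // Both sides pass this input on the stack: the slots must match, and the
+ // value must be the caller's own parameter forwarded in its original
+ // position.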
+ if (GetInputLocation(current_input) !=
+ other->GetInputLocation(other_input)) {
+ return false;
+ }
+ Node* input = node->InputAt(static_cast<int>(other_input));
+ if (input->opcode() != IrOpcode::kParameter) {
+ return false;
+ }
+ size_t param_index = ParameterIndexOf(input->op());
+ if (param_index != stack_parameter) {
+ return false;
+ }
+ ++stack_parameter;
+ ++current_input;
+ ++other_input;
+ }
+ UNREACHABLE();
+ return false;
+}
+
+
CallDescriptor* Linkage::ComputeIncoming(Zone* zone, CompilationInfo* info) {
if (info->code_stub() != NULL) {
// Use the code stub interface descriptor.
namespace compiler {
+class Node;
class OsrHelper;
// Describes the location for a parameter or a return value to a call.
bool HasSameReturnLocationsAs(const CallDescriptor* other) const;
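+ // Returns true if the given call node can be emitted as a tail call from
+ // code with this (incoming) descriptor.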
+ bool CanTailCall(const Node* call) const;
+
private:
friend class Linkage;
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
- int pop_count = descriptor->IsJSFunctionCall()
- ? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
}
}
__ Pop(ra, fp);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ DropAndRet(pop_count);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count != 0) {
+ __ DropAndRet(pop_count);
+ } else {
+ __ Ret();
+ }
}
} else {
__ Ret();
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
- int pop_count = descriptor->IsJSFunctionCall()
- ? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Drop(pop_count);
}
}
__ Pop(ra, fp);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ DropAndRet(pop_count);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count != 0) {
+ __ DropAndRet(pop_count);
+ } else {
+ __ Ret();
+ }
}
} else {
__ Ret();
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
AddReducer(data, &graph_reducer, &common_reducer);
AddReducer(data, &graph_reducer, &generic_lowering);
AddReducer(data, &graph_reducer, &select_lowering);
- // TODO(turbofan): TCO is currently limited to stubs.
- if (data->info()->IsStub()) AddReducer(data, &graph_reducer, &tco);
+ AddReducer(data, &graph_reducer, &tco);
graph_reducer.ReduceGraph();
}
};
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ movq(rsp, rbp);
__ popq(rbp);
- int32_t bytes_to_pop =
- descriptor->IsJSFunctionCall()
- ? static_cast<int32_t>(descriptor->JSParameterCount() *
- kPointerSize)
- : 0;
- __ popq(Operand(rsp, bytes_to_pop));
- __ addq(rsp, Immediate(bytes_to_pop));
}
}
__ popq(rbp); // Pop caller's frame pointer.
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
- : 0;
- __ Ret(pop_count * kPointerSize, rbx);
+ : (info()->IsStub()
+ ? info()->code_stub()->GetStackParameterCount()
+ : 0);
+ if (pop_count == 0) {
+ __ Ret();
+ } else {
+ __ Ret(pop_count * kPointerSize, rbx);
+ }
}
} else {
- __ ret(0);
+ __ Ret();
}
}
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
- if (descriptor->UsesOnlyRegisters() &&
- descriptor->HasSameReturnLocationsAs(
- linkage()->GetIncomingDescriptor())) {
+ if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, true);
- DCHECK_EQ(0u, buffer.pushed_nodes.size());
-
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
--- /dev/null
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --nostress-opt --turbo
+
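+// %_CallFunction(receiver, arg1, ..., argN, function) calls |function| with
+// the given receiver and arguments; TurboFan may emit such a call as a tail
+// call when the parameters are passed straight through.
+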
+var p0 = new Object();
+var p1 = new Object();
+var p2 = new Object();
+
+// Ensure 1 parameter passed straight through is handled correctly.
+var count1 = 100000;
+tailee1 = function() {
+ "use strict";
+ if (count1-- == 0) {
+ return this;
+ }
+ return %_CallFunction(this, tailee1);
+};
+
+%OptimizeFunctionOnNextCall(tailee1);
+assertEquals(p0, tailee1.call(p0));
+
+// Ensure that 2 parameters passed straight through trigger a tail call, are
+// handled correctly, and don't cause a stack overflow.
+var count2 = 100000;
+tailee2 = function(px) {
+ "use strict";
+ assertEquals(p2, px);
+ assertEquals(p1, this);
+ count2 = ((count2 | 0) - 1) | 0;
+ if ((count2 | 0) === 0) {
+ return this;
+ }
+ return %_CallFunction(this, px, tailee2);
+};
+
+%OptimizeFunctionOnNextCall(tailee2);
+assertEquals(p1, tailee2.call(p1, p2));
+
+// Ensure that swapping the 2 parameters doesn't trigger a tail call (parameter
+// swizzling for the tail call isn't supported yet); the recursion then
+// overflows the stack.
+var count3 = 100000;
+tailee3 = function(px) {
+ "use strict";
+ if (count3-- == 0) {
+ return this;
+ }
+ return %_CallFunction(px, this, tailee3);
+};
+
+%OptimizeFunctionOnNextCall(tailee3);
+assertThrows(function() { tailee3.call(p1, p2); });
+
+// Ensure that passing too many parameters defeats the tail call optimization
+// (currently unsupported).
+var count4 = 1000000;
+tailee4 = function(px) {
+ "use strict";
+ if (count4-- == 0) {
+ return this;
+ }
+ return %_CallFunction(this, px, undefined, tailee4);
+};
+
+%OptimizeFunctionOnNextCall(tailee4);
+assertThrows(function() { tailee4.call(p1, p2); });
+
+// Ensure that passing too few parameters defeats the tail call optimization
+// (currently unsupported).
+var count5 = 1000000;
+tailee5 = function(px) {
+ "use strict";
+ if (count5-- == 0) {
+ return this;
+ }
+ return %_CallFunction(this, tailee5);
+};
+
+%OptimizeFunctionOnNextCall(tailee5);
+assertThrows(function() { tailee5.call(p1, p2); });