PARSE_INFO_GETTER(bool, is_eval)
PARSE_INFO_GETTER(bool, is_native)
PARSE_INFO_GETTER(bool, is_module)
-PARSE_INFO_GETTER(LanguageMode, language_mode)
+PARSE_INFO_GETTER_WITH_DEFAULT(LanguageMode, language_mode, STRICT)
PARSE_INFO_GETTER_WITH_DEFAULT(Handle<JSFunction>, closure,
Handle<JSFunction>::null())
PARSE_INFO_GETTER(FunctionLiteral*, function)
} while (0)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
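+ // A frame is only constructed for JS calls or when spill slots are in
+ // use, so there is nothing to tear down otherwise.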
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ LeaveFrame(StackFrame::MANUAL);
+ int pop_count = descriptor->IsJSFunctionCall()
+ ? static_cast<int>(descriptor->JSParameterCount())
+ : 0;
+ __ Drop(pop_count);
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
ArmOperandConverter i(this, instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (instr->InputAt(0)->IsImmediate()) {
+ __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+ RelocInfo::CODE_TARGET);
+ } else {
+ __ add(ip, i.InputRegister(0),
+ Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(ip);
+ }
+ DCHECK_EQ(LeaveCC, i.OutputSBit());
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+ __ cmp(cp, kScratchReg);
+ __ Assert(eq, kWrongFunctionContext);
+ }
+ AssembleDeconstructActivationRecord();
+ __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+ __ Jump(ip);
+ DCHECK_EQ(LeaveCC, i.OutputSBit());
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
ArmOperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
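+ // A tail call defines no outputs: the callee's results are returned
+ // directly to this function's caller.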
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
} while (0)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ Mov(jssp, fp);
+ __ Pop(fp, lr);
+ int pop_count = descriptor->IsJSFunctionCall()
+ ? static_cast<int>(descriptor->JSParameterCount())
+ : 0;
+ __ Drop(pop_count);
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Arm64OperandConverter i(this, instr);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (instr->InputAt(0)->IsImmediate()) {
+ __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+ RelocInfo::CODE_TARGET);
+ } else {
+ Register target = i.InputRegister(0);
+ __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
+ __ Jump(target);
+ }
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ UseScratchRegisterScope scope(masm());
+ Register temp = scope.AcquireX();
+ __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
+ __ cmp(cp, temp);
+ __ Assert(eq, kWrongFunctionContext);
+ }
+ AssembleDeconstructActivationRecord();
+ __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+ __ Jump(x10);
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
break;
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
Arm64OperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
// to tear down a stack frame.
void AssembleReturn();
+ // Generates code to deconstruct the caller's frame, including arguments.
+ void AssembleDeconstructActivationRecord();
+
// ===========================================================================
// ============== Architecture-specific gap resolver methods. ================
// ===========================================================================
} while (false)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ mov(esp, ebp);
+ __ pop(ebp);
+ int32_t bytes_to_pop =
+ descriptor->IsJSFunctionCall()
+ ? static_cast<int32_t>(descriptor->JSParameterCount() *
+ kPointerSize)
+ : 0;
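+ // Move the return address past the arguments: pop it into the slot
+ // just above the last argument, then point esp at that slot.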
+ __ pop(Operand(esp, bytes_to_pop));
+ __ add(esp, Immediate(bytes_to_pop));
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
IA32OperandConverter i(this, instr);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (HasImmediateInput(instr, 0)) {
+ Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
+ __ jmp(code, RelocInfo::CODE_TARGET);
+ } else {
+ Register reg = i.InputRegister(0);
+ __ jmp(Operand(reg, Code::kHeaderSize - kHeapObjectTag));
+ }
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
+ __ Assert(equal, kWrongFunctionContext);
+ }
+ AssembleDeconstructActivationRecord();
+ __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
break;
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
IA32OperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
// Most opcodes specify a single instruction.
#define ARCH_OPCODE_LIST(V) \
V(ArchCallCodeObject) \
+ V(ArchTailCallCodeObject) \
V(ArchCallJSFunction) \
+ V(ArchTailCallJSFunction) \
V(ArchJmp) \
V(ArchLookupSwitch) \
V(ArchTableSwitch) \
DCHECK_EQ(IrOpcode::kCall, input->opcode());
BasicBlock* success = block->SuccessorAt(0);
BasicBlock* exception = block->SuccessorAt(1);
- return VisitCall(input, exception), VisitGoto(success);
+ return VisitCall(input, exception, NORMAL_CALL), VisitGoto(success);
}
case BasicBlock::kBranch: {
DCHECK_EQ(IrOpcode::kBranch, input->opcode());
}
case BasicBlock::kReturn: {
DCHECK_EQ(IrOpcode::kReturn, input->opcode());
- return VisitReturn(input->InputAt(0));
+ return VisitReturn(input);
}
case BasicBlock::kDeoptimize: {
// If the result itself is a return, return its input.
return VisitConstant(node);
}
case IrOpcode::kCall:
- return VisitCall(node, nullptr);
+ return VisitCall(node, nullptr, NORMAL_CALL);
case IrOpcode::kFrameState:
case IrOpcode::kStateValues:
return;
}
-void InstructionSelector::VisitReturn(Node* value) {
+namespace {
+
+// Returns the call node if the given return node is part of a tail call,
+// nullptr otherwise.
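+// In graph terms, the shape matched below is (for a call that can throw)
+// Return(value: Call, effect: Call, control: IfSuccess(Call)); a call that
+// cannot throw instead shares its control input with the Return.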
+Node* TryMatchTailCall(Node* ret) {
+ // The value which is returned must be the result of a potential tail call,
+ // there must be no try/catch/finally around the call, and there must be no
+ // effects between the call and the return.
+ Node* call = NodeProperties::GetValueInput(ret, 0);
+ if (call->opcode() != IrOpcode::kCall ||
+ !OpParameter<const CallDescriptor*>(call)->SupportsTailCalls() ||
+ NodeProperties::IsExceptionalCall(call) ||
+ NodeProperties::GetEffectInput(ret, 0) != call) {
+ return nullptr;
+ }
+ // Furthermore, control has to flow via an IfSuccess from the call (for calls
+ // which can throw), or the return and the call have to use the same control
+ // input (for calls which can't throw).
+ Node* control = NodeProperties::GetControlInput(ret, 0);
+ bool found = (control->opcode() == IrOpcode::kIfSuccess)
+ ? (NodeProperties::GetControlInput(control, 0) == call)
+ : (control == NodeProperties::GetControlInput(call, 0));
+ return found ? call : nullptr;
+}
+
+} // namespace
+
+
+void InstructionSelector::VisitReturn(Node* node) {
+ if (FLAG_turbo_tail_calls) {
+ Node* call = TryMatchTailCall(node);
+ if (call != nullptr) {
+ const CallDescriptor* desc = OpParameter<const CallDescriptor*>(call);
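+ // Only emit a tail call when no stack manipulation would be needed and
+ // the return value locations of caller and callee agree.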
+ if (desc->UsesOnlyRegisters() &&
+ desc->HasSameReturnLocationsAs(linkage()->GetIncomingDescriptor())) {
+ return VisitCall(call, nullptr, TAIL_CALL);
+ }
+ }
+ }
+ Node* value = NodeProperties::GetValueInput(node, 0);
DCHECK_NOT_NULL(value);
OperandGenerator g(this);
Emit(kArchRet, g.NoOutput(),
#undef DECLARE_UNIMPLEMENTED_SELECTOR
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
UNIMPLEMENTED();
}
void VisitPhi(Node* node);
void VisitProjection(Node* node);
void VisitConstant(Node* node);
- void VisitCall(Node* call, BasicBlock* handler);
+ void VisitCall(Node* call, BasicBlock* handler, CallMode call_mode);
void VisitGoto(BasicBlock* target);
void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
void VisitSwitch(Node* node, const SwitchInfo& sw);
context = jsgraph()->HeapConstant(Handle<Context>(function->context()));
}
node->ReplaceInput(index, context);
- CallDescriptor* desc = Linkage::GetJSCallDescriptor(
- zone(), false, 1 + arg_count, FlagsForNode(node));
+ CallDescriptor::Flags flags = FlagsForNode(node);
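+ // Only strict-mode calls are marked as supporting tail calls; a
+ // sloppy-mode callee may still need the caller's frame (e.g. via
+ // Function.prototype.caller or fn.arguments).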
+ if (is_strict(p.language_mode())) flags |= CallDescriptor::kSupportsTailCalls;
+ CallDescriptor* desc =
+ Linkage::GetJSCallDescriptor(zone(), false, 1 + arg_count, flags);
node->set_op(common()->Call(desc));
return true;
}
// The target for C calls is always an address (i.e. machine pointer).
MachineType target_type = kMachPtr;
LinkageLocation target_loc = LinkageLocation::AnyRegister();
- return new (zone) CallDescriptor( // --
- CallDescriptor::kCallAddress, // kind
- target_type, // target MachineType
- target_loc, // target location
- msig, // machine_sig
- locations.Build(), // location_sig
- 0, // js_parameter_count
- Operator::kNoProperties, // properties
- LinkageTraits::CCalleeSaveRegisters(), CallDescriptor::kNoFlags,
+ return new (zone) CallDescriptor( // --
+ CallDescriptor::kCallAddress, // kind
+ target_type, // target MachineType
+ target_loc, // target location
+ msig, // machine_sig
+ locations.Build(), // location_sig
+ 0, // js_parameter_count
+ Operator::kNoProperties, // properties
+ LinkageTraits::CCalleeSaveRegisters(), // callee-saved registers
+ CallDescriptor::kNoFlags, // flags
"c-call");
}
// TODO(svenpanne) Output properties etc. and be less cryptic.
return os << d.kind() << ":" << d.debug_name() << ":r" << d.ReturnCount()
<< "j" << d.JSParameterCount() << "i" << d.InputCount() << "f"
- << d.FrameStateCount();
+ << d.FrameStateCount() << "t" << d.SupportsTailCalls();
+}
+
+
+bool CallDescriptor::HasSameReturnLocationsAs(
+ const CallDescriptor* other) const {
+ if (ReturnCount() != other->ReturnCount()) return false;
+ for (size_t i = 0; i < ReturnCount(); ++i) {
+ if (GetReturnLocation(i) != other->GetReturnLocation(i)) return false;
+ }
+ return true;
}
}
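+// A call site qualifies for the simple tail-call path only if no parameters
+// or return values are passed on the stack.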
+bool CallDescriptor::UsesOnlyRegisters() const {
+ for (size_t i = 0; i < InputCount(); ++i) {
+ if (!GetInputLocation(i).is_register()) return false;
+ }
+ for (size_t i = 0; i < ReturnCount(); ++i) {
+ if (!GetReturnLocation(i).is_register()) return false;
+ }
+ return true;
+}
+
+
//==============================================================================
// Provide unimplemented methods on unsupported architectures, to at least link.
//==============================================================================
public:
explicit LinkageLocation(int location) : location_(location) {}
+ bool is_register() const {
+ return 0 <= location_ && location_ <= ANY_REGISTER;
+ }
+
static const int16_t ANY_REGISTER = 1023;
static const int16_t MAX_STACK_SLOT = 32767;
static LinkageLocation AnyRegister() { return LinkageLocation(ANY_REGISTER); }
+ bool operator==(const LinkageLocation& other) const {
+ return location_ == other.location_;
+ }
+
+ bool operator!=(const LinkageLocation& other) const {
+ return !(*this == other);
+ }
+
private:
friend class CallDescriptor;
friend class OperandGenerator;
kPatchableCallSite = 1u << 1,
kNeedsNopAfterCall = 1u << 2,
kHasExceptionHandler = 1u << 3,
+ kSupportsTailCalls = 1u << 4,
kPatchableCallSiteWithNop = kPatchableCallSite | kNeedsNopAfterCall
};
typedef base::Flags<Flag> Flags;
Flags flags() const { return flags_; }
bool NeedsFrameState() const { return flags() & kNeedsFrameState; }
+ bool SupportsTailCalls() const { return flags() & kSupportsTailCalls; }
LinkageLocation GetReturnLocation(size_t index) const {
return location_sig_->GetReturn(index);
const char* debug_name() const { return debug_name_; }
+ bool UsesOnlyRegisters() const;
+
+ bool HasSameReturnLocationsAs(const CallDescriptor* other) const;
+
private:
friend class Linkage;
} while (0)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ mov(sp, fp);
+ __ Pop(ra, fp);
+ int pop_count = descriptor->IsJSFunctionCall()
+ ? static_cast<int>(descriptor->JSParameterCount())
+ : 0;
+ __ Drop(pop_count);
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
MipsOperandConverter i(this, instr);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (instr->InputAt(0)->IsImmediate()) {
+ __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+ RelocInfo::CODE_TARGET);
+ } else {
+ __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
+ __ Jump(at);
+ }
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+ __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
+ }
+
+ AssembleDeconstructActivationRecord();
+ __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+ __ Jump(at);
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
break;
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
MipsOperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
} while (0)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ mov(sp, fp);
+ __ Pop(ra, fp);
+ int pop_count = descriptor->IsJSFunctionCall()
+ ? static_cast<int>(descriptor->JSParameterCount())
+ : 0;
+ __ Drop(pop_count);
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
MipsOperandConverter i(this, instr);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (instr->InputAt(0)->IsImmediate()) {
+ __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+ RelocInfo::CODE_TARGET);
+ } else {
+ __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
+ __ Jump(at);
+ }
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+ __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
+ }
+
+ AssembleDeconstructActivationRecord();
+ __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+ __ Jump(at);
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
break;
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
Mips64OperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
+ InstructionOperand* first_output =
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
- buffer.instruction_args.size(), &buffer.instruction_args.front());
-
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
// static
+bool NodeProperties::IsExceptionalCall(Node* node) {
+ for (Node* const use : node->uses()) {
+ if (use->opcode() == IrOpcode::kIfException) return true;
+ }
+ return false;
+}
+
+
+// static
void NodeProperties::ReplaceContextInput(Node* node, Node* context) {
node->ReplaceInput(FirstContextIndex(node), context);
}
return IrOpcode::IsPhiOpcode(node->opcode());
}
+ static bool IsExceptionalCall(Node* node);
// ---------------------------------------------------------------------------
// Miscellaneous mutators.
} while (0)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ int pop_count = descriptor->IsJSFunctionCall()
+ ? static_cast<int>(descriptor->JSParameterCount())
+ : 0;
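+ // On PPC, LeaveFrame also drops the pushed arguments via its stack
+ // adjustment argument.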
+ __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
PPCOperandConverter i(this, instr);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (HasRegisterInput(instr, 0)) {
+ __ addi(ip, i.InputRegister(0),
+ Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(ip);
+ } else {
+ __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+ RelocInfo::CODE_TARGET);
+ }
+ DCHECK_EQ(LeaveRC, i.OutputRCBit());
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ LoadP(kScratchReg,
+ FieldMemOperand(func, JSFunction::kContextOffset));
+ __ cmp(cp, kScratchReg);
+ __ Assert(eq, kWrongFunctionContext);
+ }
+ AssembleDeconstructActivationRecord();
+ __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+ __ Jump(ip);
+ DCHECK_EQ(LeaveRC, i.OutputRCBit());
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
DCHECK_EQ(LeaveRC, i.OutputRCBit());
UNREACHABLE();
break;
}
-}
+} // NOLINT(readability/fn_size)
// Assembles branches after an instruction.
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
PPCOperandGenerator g(this);
- const CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node);
+ const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
FrameStateDescriptor* frame_state_descriptor = NULL;
if (descriptor->NeedsFrameState()) {
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
BuildBlocksForSuccessors(node);
break;
case IrOpcode::kCall:
- if (IsExceptionalCall(node)) {
+ if (NodeProperties::IsExceptionalCall(node)) {
BuildBlocksForSuccessors(node);
}
break;
ConnectThrow(node);
break;
case IrOpcode::kCall:
- if (IsExceptionalCall(node)) {
+ if (NodeProperties::IsExceptionalCall(node)) {
scheduler_->UpdatePlacement(node, Scheduler::kFixed);
ConnectCall(node);
}
}
}
- bool IsExceptionalCall(Node* node) {
- for (Node* const use : node->uses()) {
- if (use->opcode() == IrOpcode::kIfException) return true;
- }
- return false;
- }
-
bool IsFinalMerge(Node* node) {
return (node->opcode() == IrOpcode::kMerge &&
node == scheduler_->graph_->end()->InputAt(0));
} while (false)
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+ CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+ int stack_slots = frame()->GetSpillSlotCount();
+ if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+ __ movq(rsp, rbp);
+ __ popq(rbp);
+ int32_t bytes_to_pop =
+ descriptor->IsJSFunctionCall()
+ ? static_cast<int32_t>(descriptor->JSParameterCount() *
+ kPointerSize)
+ : 0;
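+ // Move the return address past the arguments: pop it into the slot
+ // just above the last argument, then point rsp at that slot.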
+ __ popq(Operand(rsp, bytes_to_pop));
+ __ addq(rsp, Immediate(bytes_to_pop));
+ }
+}
+
+
// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
X64OperandConverter i(this, instr);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallCodeObject: {
+ AssembleDeconstructActivationRecord();
+ if (HasImmediateInput(instr, 0)) {
+ Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
+ __ jmp(code, RelocInfo::CODE_TARGET);
+ } else {
+ Register reg = i.InputRegister(0);
+ int entry = Code::kHeaderSize - kHeapObjectTag;
+ __ jmp(Operand(reg, entry));
+ }
+ break;
+ }
case kArchCallJSFunction: {
EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
RecordCallPosition(instr);
break;
}
+ case kArchTailCallJSFunction: {
+ Register func = i.InputRegister(0);
+ if (FLAG_debug_code) {
+ // Check the function's context matches the context argument.
+ __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
+ __ Assert(equal, kWrongFunctionContext);
+ }
+ AssembleDeconstructActivationRecord();
+ __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
+ break;
+ }
case kArchJmp:
AssembleArchJump(i.InputRpo(0));
break;
}
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+ CallMode call_mode) {
X64OperandGenerator g(this);
const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
}
// Select the appropriate opcode based on the call type.
+ bool is_tail_call = call_mode == TAIL_CALL;
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
- opcode = kArchCallCodeObject;
+ opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
break;
}
case CallDescriptor::kCallJSFunction:
- opcode = kArchCallJSFunction;
+ opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
break;
default:
UNREACHABLE();
opcode |= MiscField::encode(flags);
// Emit the call instruction.
+ size_t size = is_tail_call ? 0 : buffer.outputs.size();
InstructionOperand* first_output =
- buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+ size > 0 ? &buffer.outputs.front() : nullptr;
Instruction* call_instr =
- Emit(opcode, buffer.outputs.size(), first_output,
- buffer.instruction_args.size(), &buffer.instruction_args.front());
+ Emit(opcode, size, first_output, buffer.instruction_args.size(),
+ &buffer.instruction_args.front());
call_instr->MarkAsCall();
}
"stress loop peeling optimization")
DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
+DEFINE_BOOL(turbo_tail_calls, false,
+ "enable tail call optimization in TurboFan")
DEFINE_INT(typed_array_max_size_in_heap, 64,
"threshold for in-heap typed array")
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
+enum CallMode { NORMAL_CALL, TAIL_CALL };
+
enum VisitMode {
VISIT_ALL,
VISIT_ALL_IN_SCAVENGE,
};
-enum CallMode { NORMAL_CALL, TAIL_CALL };
-
-
class HCallWithDescriptor final : public HInstruction {
public:
static HCallWithDescriptor* New(Isolate* isolate, Zone* zone, HValue* context,
#include "test/unittests/compiler/instruction-selector-unittest.h"
+#include "src/code-stubs.h"
#include "src/compiler/graph.h"
#include "src/compiler/schedule.h"
#include "src/flags.h"
EXPECT_EQ(index, s.size());
}
+
+// -----------------------------------------------------------------------------
+// Tail calls.
+
+TARGET_TEST_F(InstructionSelectorTest, TailCall) {
+ for (int mode = 0; mode < 2; ++mode) {
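+ // Build the graph twice: once with a descriptor that supports tail
+ // calls (effective only when --turbo-tail-calls is on) and once
+ // without.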
+ bool supports_tail_calls = FLAG_turbo_tail_calls && (mode == 0);
+
+ StreamBuilder m(this, kMachAnyTagged);
+ Node* start = m.graph()->start();
+ Node* undefined = m.UndefinedConstant();
+
+ StringLengthStub stub(isolate());
+ CallDescriptor* desc = Linkage::GetStubCallDescriptor(
+ isolate(), zone(), stub.GetCallInterfaceDescriptor(), 0,
+ supports_tail_calls ? CallDescriptor::kSupportsTailCalls
+ : CallDescriptor::kNoFlags,
+ Operator::kNoProperties);
+ Node* stub_node = m.NewNode(m.common()->HeapConstant(
+ Unique<Code>::CreateUninitialized(stub.GetCode())));
+
+ Node* call = m.NewNode(m.common()->Call(desc), stub_node, undefined,
+ undefined, undefined, undefined, undefined);
+ call->AppendInput(zone(), start); // effect
+ call->AppendInput(zone(), start); // control
+
+ m.Return(call);
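+ // m.Return() created the Return node; wire up its effect and control
+ // inputs by hand so the tail call matcher sees the expected shape.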
+ Node* ret = *call->uses().begin();
+ ret->AppendInput(zone(), call); // effect
+ ret->AppendInput(zone(), start); // control
+
+ Stream s = m.Build(kAllInstructions);
+ if (supports_tail_calls) {
+ ASSERT_EQ(3U, s.size());
+ EXPECT_EQ(kArchNop, s[0]->arch_opcode());
+ EXPECT_EQ(kArchTailCallCodeObject, s[1]->arch_opcode());
+ EXPECT_EQ(kArchNop, s[2]->arch_opcode());
+ } else {
+ ASSERT_EQ(4U, s.size());
+ EXPECT_EQ(kArchNop, s[0]->arch_opcode());
+ EXPECT_EQ(kArchCallCodeObject, s[1]->arch_opcode());
+ EXPECT_EQ(kArchRet, s[2]->arch_opcode());
+ EXPECT_EQ(kArchNop, s[3]->arch_opcode());
+ }
+ }
+}
+
} // namespace compiler
} // namespace internal
} // namespace v8