Detect simple tail calls
author    svenpanne <svenpanne@chromium.org>
          Thu, 30 Apr 2015 09:10:21 +0000 (02:10 -0700)
committer Commit bot <commit-bot@chromium.org>
          Thu, 30 Apr 2015 09:10:28 +0000 (09:10 +0000)
This CL contains the first steps towards tail call optimization:

  * Structurally detect tail calls during instruction selection by
    looking for matching return/call combinations.

  * Add new architecture-specific instructions for tail calls, which
    jump instead of calling and take care of the frame adjustment.

  * Move some code around.

Currently we restrict tail calls to callees that use only registers for
their arguments and return value, and to call sites that are explicitly
marked as safe for tail calls. This excludes, among other things, call
sites in sloppy-mode JS functions and our IC machinery (both generally
need access to the caller's frame).
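
Put together, the check at each return site amounts to something like the
following sketch (the real logic is in InstructionSelector::VisitReturn and
TryMatchTailCall in the instruction-selector.cc hunk below):

  // Sketch only: tail-call eligibility, composed from the predicates this
  // CL introduces (names as in the diff below).
  bool CanEmitTailCall(Node* ret, Linkage* linkage) {
    Node* call = TryMatchTailCall(ret);  // structural match; also checks
                                         // CallDescriptor::SupportsTailCalls()
    if (call == nullptr) return false;
    const CallDescriptor* desc = OpParameter<const CallDescriptor*>(call);
    return desc->UsesOnlyRegisters() &&  // no stack-based args or returns
           desc->HasSameReturnLocationsAs(linkage->GetIncomingDescriptor());
  }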

All of this is guarded by the flag --turbo-tail-calls (currently off by
default), so the optimization can easily be toggled.

Review URL: https://codereview.chromium.org/1108563002

Cr-Commit-Position: refs/heads/master@{#28150}

30 files changed:
src/compiler.cc
src/compiler/arm/code-generator-arm.cc
src/compiler/arm/instruction-selector-arm.cc
src/compiler/arm64/code-generator-arm64.cc
src/compiler/arm64/instruction-selector-arm64.cc
src/compiler/code-generator.h
src/compiler/ia32/code-generator-ia32.cc
src/compiler/ia32/instruction-selector-ia32.cc
src/compiler/instruction-codes.h
src/compiler/instruction-selector.cc
src/compiler/instruction-selector.h
src/compiler/js-generic-lowering.cc
src/compiler/linkage-impl.h
src/compiler/linkage.cc
src/compiler/linkage.h
src/compiler/mips/code-generator-mips.cc
src/compiler/mips/instruction-selector-mips.cc
src/compiler/mips64/code-generator-mips64.cc
src/compiler/mips64/instruction-selector-mips64.cc
src/compiler/node-properties.cc
src/compiler/node-properties.h
src/compiler/ppc/code-generator-ppc.cc
src/compiler/ppc/instruction-selector-ppc.cc
src/compiler/scheduler.cc
src/compiler/x64/code-generator-x64.cc
src/compiler/x64/instruction-selector-x64.cc
src/flag-definitions.h
src/globals.h
src/hydrogen-instructions.h
test/unittests/compiler/instruction-selector-unittest.cc

diff --git a/src/compiler.cc b/src/compiler.cc
index ef27b81..0f7f36e 100644
@@ -62,7 +62,7 @@ PARSE_INFO_GETTER(Handle<Script>, script)
 PARSE_INFO_GETTER(bool, is_eval)
 PARSE_INFO_GETTER(bool, is_native)
 PARSE_INFO_GETTER(bool, is_module)
-PARSE_INFO_GETTER(LanguageMode, language_mode)
+PARSE_INFO_GETTER_WITH_DEFAULT(LanguageMode, language_mode, STRICT)
 PARSE_INFO_GETTER_WITH_DEFAULT(Handle<JSFunction>, closure,
                                Handle<JSFunction>::null())
 PARSE_INFO_GETTER(FunctionLiteral*, function)
diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc
index 663cfb0..306c347 100644
@@ -299,6 +299,19 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
   } while (0)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ LeaveFrame(StackFrame::MANUAL);
+    int pop_count = descriptor->IsJSFunctionCall()
+                        ? static_cast<int>(descriptor->JSParameterCount())
+                        : 0;
+    __ Drop(pop_count);
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   ArmOperandConverter i(this, instr);
@@ -318,6 +331,19 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (instr->InputAt(0)->IsImmediate()) {
+        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+                RelocInfo::CODE_TARGET);
+      } else {
+        __ add(ip, i.InputRegister(0),
+               Operand(Code::kHeaderSize - kHeapObjectTag));
+        __ Jump(ip);
+      }
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -333,6 +359,20 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+        __ cmp(cp, kScratchReg);
+        __ Assert(eq, kWrongFunctionContext);
+      }
+      AssembleDeconstructActivationRecord();
+      __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+      __ Jump(ip);
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       DCHECK_EQ(LeaveCC, i.OutputSBit());
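
A note on the teardown-then-jump sequence above (the same pattern recurs for
every architecture in this CL): AssembleDeconstructActivationRecord restores
the caller's frame state before the jump, so the tail-called code simply takes
over the current activation. Roughly, on ARM:

  // Sketch of the effect just before the Jump:
  //   LeaveFrame(StackFrame::MANUAL)  -- sp := fp, then restore the caller's
  //                                      fp and the return address into lr
  //   Drop(pop_count)                 -- for JS function calls, pop the
  //                                      receiver and parameters; else nothing
  // Afterwards lr holds the original caller's return address, so when the
  // tail-called code returns, it returns directly to our caller.
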
diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc
index 73ea636..af55951 100644
@@ -1080,7 +1080,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   ArmOperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -1112,14 +1113,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1128,11 +1130,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index 51106c2..dc045af 100644
@@ -343,6 +343,20 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
   } while (0)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ Mov(jssp, fp);
+    __ Pop(fp, lr);
+    int pop_count = descriptor->IsJSFunctionCall()
+                        ? static_cast<int>(descriptor->JSParameterCount())
+                        : 0;
+    __ Drop(pop_count);
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   Arm64OperandConverter i(this, instr);
@@ -361,6 +375,18 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (instr->InputAt(0)->IsImmediate()) {
+        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+                RelocInfo::CODE_TARGET);
+      } else {
+        Register target = i.InputRegister(0);
+        __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
+        __ Jump(target);
+      }
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -377,6 +403,21 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        UseScratchRegisterScope scope(masm());
+        Register temp = scope.AcquireX();
+        __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
+        __ cmp(cp, temp);
+        __ Assert(eq, kWrongFunctionContext);
+      }
+      AssembleDeconstructActivationRecord();
+      __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+      __ Jump(x10);
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index dce7f04..874e93c 100644
@@ -1204,7 +1204,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   Arm64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -1260,14 +1261,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1276,11 +1278,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/code-generator.h b/src/compiler/code-generator.h
index 9e49653..4ffb3dd 100644
@@ -93,6 +93,9 @@ class CodeGenerator final : public GapResolver::Assembler {
   // to tear down a stack frame.
   void AssembleReturn();
 
+  // Generates code to deconstruct the caller's frame, including arguments.
+  void AssembleDeconstructActivationRecord();
+
   // ===========================================================================
   // ============== Architecture-specific gap resolver methods. ================
   // ===========================================================================
diff --git a/src/compiler/ia32/code-generator-ia32.cc b/src/compiler/ia32/code-generator-ia32.cc
index ffd1c00..0262c2a 100644
@@ -284,6 +284,23 @@ class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
   } while (false)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ mov(esp, ebp);
+    __ pop(ebp);
+    int32_t bytes_to_pop =
+        descriptor->IsJSFunctionCall()
+            ? static_cast<int32_t>(descriptor->JSParameterCount() *
+                                   kPointerSize)
+            : 0;
+    __ pop(Operand(esp, bytes_to_pop));
+    __ add(esp, Immediate(bytes_to_pop));
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   IA32OperandConverter i(this, instr);
@@ -301,6 +318,17 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (HasImmediateInput(instr, 0)) {
+        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
+        __ jmp(code, RelocInfo::CODE_TARGET);
+      } else {
+        Register reg = i.InputRegister(0);
+        __ jmp(Operand(reg, Code::kHeaderSize - kHeapObjectTag));
+      }
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -313,6 +341,17 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset));
+        __ Assert(equal, kWrongFunctionContext);
+      }
+      AssembleDeconstructActivationRecord();
+      __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
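
On ia32 there is no link register to receive the return address, so
AssembleDeconstructActivationRecord above relocates it on the stack; the
pop/add pair is easy to misread. A sketch of its effect (the x64 version
further down is analogous):

  // For a pop into a memory operand, the effective address is computed
  // after esp has been incremented, so:
  //
  //   before:  esp   -> [ return address ]
  //            esp+4 -> [ arguments ]        <- bytes_to_pop bytes
  //
  //   pop(Operand(esp, bytes_to_pop))        -- move the return address to
  //                                             the slot above the arguments
  //   add(esp, Immediate(bytes_to_pop))      -- point esp at it again
  //
  //   after:   esp   -> [ return address ]   (arguments gone, as if the
  //                                            caller had pushed none)
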
diff --git a/src/compiler/ia32/instruction-selector-ia32.cc b/src/compiler/ia32/instruction-selector-ia32.cc
index 72dd48e..24817f4 100644
@@ -815,7 +815,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   IA32OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -849,14 +850,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -865,11 +867,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/instruction-codes.h b/src/compiler/instruction-codes.h
index d5c04ab..7b42eb7 100644
@@ -35,7 +35,9 @@ namespace compiler {
 // Most opcodes specify a single instruction.
 #define ARCH_OPCODE_LIST(V) \
   V(ArchCallCodeObject)     \
+  V(ArchTailCallCodeObject) \
   V(ArchCallJSFunction)     \
+  V(ArchTailCallJSFunction) \
   V(ArchJmp)                \
   V(ArchLookupSwitch)       \
   V(ArchTableSwitch)        \
diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc
index 3d6c1fe..ed44ade 100644
@@ -463,7 +463,7 @@ void InstructionSelector::VisitControl(BasicBlock* block) {
       DCHECK_EQ(IrOpcode::kCall, input->opcode());
       BasicBlock* success = block->SuccessorAt(0);
       BasicBlock* exception = block->SuccessorAt(1);
-      return VisitCall(input, exception), VisitGoto(success);
+      return VisitCall(input, exception, NORMAL_CALL), VisitGoto(success);
     }
     case BasicBlock::kBranch: {
       DCHECK_EQ(IrOpcode::kBranch, input->opcode());
@@ -506,7 +506,7 @@ void InstructionSelector::VisitControl(BasicBlock* block) {
     }
     case BasicBlock::kReturn: {
       DCHECK_EQ(IrOpcode::kReturn, input->opcode());
-      return VisitReturn(input->InputAt(0));
+      return VisitReturn(input);
     }
     case BasicBlock::kDeoptimize: {
       // If the result itself is a return, return its input.
@@ -583,7 +583,7 @@ void InstructionSelector::VisitNode(Node* node) {
       return VisitConstant(node);
     }
     case IrOpcode::kCall:
-      return VisitCall(node, nullptr);
+      return VisitCall(node, nullptr, NORMAL_CALL);
     case IrOpcode::kFrameState:
     case IrOpcode::kStateValues:
       return;
@@ -988,7 +988,46 @@ void InstructionSelector::VisitGoto(BasicBlock* target) {
 }
 
 
-void InstructionSelector::VisitReturn(Node* value) {
+namespace {
+
+// Returns the call node if the given return node is part of a tail call,
+// nullptr otherwise.
+Node* TryMatchTailCall(Node* ret) {
+  // The returned value must be the result of a potential tail call; there
+  // must be no try/catch/finally around the call, and no effects between
+  // the call and the return.
+  Node* call = NodeProperties::GetValueInput(ret, 0);
+  if (call->opcode() != IrOpcode::kCall ||
+      !OpParameter<const CallDescriptor*>(call)->SupportsTailCalls() ||
+      NodeProperties::IsExceptionalCall(call) ||
+      NodeProperties::GetEffectInput(ret, 0) != call) {
+    return nullptr;
+  }
+  // Furthermore, control has to flow via an IfSuccess from the call (for calls
+  // which can throw), or the return and the call have to use the same control
+  // input (for calls which can't throw).
+  Node* control = NodeProperties::GetControlInput(ret, 0);
+  bool found = (control->opcode() == IrOpcode::kIfSuccess)
+                   ? (NodeProperties::GetControlInput(control, 0) == call)
+                   : (control == NodeProperties::GetControlInput(call, 0));
+  return found ? call : nullptr;
+}
+
+}  // namespace
+
+
+void InstructionSelector::VisitReturn(Node* node) {
+  if (FLAG_turbo_tail_calls) {
+    Node* call = TryMatchTailCall(node);
+    if (call != nullptr) {
+      const CallDescriptor* desc = OpParameter<const CallDescriptor*>(call);
+      if (desc->UsesOnlyRegisters() &&
+          desc->HasSameReturnLocationsAs(linkage()->GetIncomingDescriptor())) {
+        return VisitCall(call, nullptr, TAIL_CALL);
+      }
+    }
+  }
+  Node* value = NodeProperties::GetValueInput(node, 0);
   DCHECK_NOT_NULL(value);
   OperandGenerator g(this);
   Emit(kArchRet, g.NoOutput(),
@@ -1119,7 +1158,8 @@ MACHINE_OP_LIST(DECLARE_UNIMPLEMENTED_SELECTOR)
 #undef DECLARE_UNIMPLEMENTED_SELECTOR
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   UNIMPLEMENTED();
 }
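
For orientation, the two control shapes that TryMatchTailCall above accepts,
restated as a sketch:

  //   (a) callee may throw:     Call -> IfSuccess -> Return
  //   (b) callee cannot throw:  Call and Return share the same control input
  //
  // In both shapes the Return's value and effect inputs must be the Call
  // itself, the call's descriptor must have kSupportsTailCalls set, and the
  // call must have no IfException use.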
 
diff --git a/src/compiler/instruction-selector.h b/src/compiler/instruction-selector.h
index b305408..767e19d 100644
@@ -197,7 +197,7 @@ class InstructionSelector final {
   void VisitPhi(Node* node);
   void VisitProjection(Node* node);
   void VisitConstant(Node* node);
-  void VisitCall(Node* call, BasicBlock* handler);
+  void VisitCall(Node* call, BasicBlock* handler, CallMode call_mode);
   void VisitGoto(BasicBlock* target);
   void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
   void VisitSwitch(Node* node, const SwitchInfo& sw);
diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc
index 3ebb6bc..8c5c9b9 100644
@@ -491,8 +491,10 @@ bool JSGenericLowering::TryLowerDirectJSCall(Node* node) {
     context = jsgraph()->HeapConstant(Handle<Context>(function->context()));
   }
   node->ReplaceInput(index, context);
-  CallDescriptor* desc = Linkage::GetJSCallDescriptor(
-      zone(), false, 1 + arg_count, FlagsForNode(node));
+  CallDescriptor::Flags flags = FlagsForNode(node);
+  if (is_strict(p.language_mode())) flags |= CallDescriptor::kSupportsTailCalls;
+  CallDescriptor* desc =
+      Linkage::GetJSCallDescriptor(zone(), false, 1 + arg_count, flags);
   node->set_op(common()->Call(desc));
   return true;
 }
diff --git a/src/compiler/linkage-impl.h b/src/compiler/linkage-impl.h
index 98f8f1c..1791a43 100644
@@ -215,15 +215,16 @@ class LinkageHelper {
     // The target for C calls is always an address (i.e. machine pointer).
     MachineType target_type = kMachPtr;
     LinkageLocation target_loc = LinkageLocation::AnyRegister();
-    return new (zone) CallDescriptor(  // --
-        CallDescriptor::kCallAddress,  // kind
-        target_type,                   // target MachineType
-        target_loc,                    // target location
-        msig,                          // machine_sig
-        locations.Build(),             // location_sig
-        0,                             // js_parameter_count
-        Operator::kNoProperties,       // properties
-        LinkageTraits::CCalleeSaveRegisters(), CallDescriptor::kNoFlags,
+    return new (zone) CallDescriptor(           // --
+        CallDescriptor::kCallAddress,           // kind
+        target_type,                            // target MachineType
+        target_loc,                             // target location
+        msig,                                   // machine_sig
+        locations.Build(),                      // location_sig
+        0,                                      // js_parameter_count
+        Operator::kNoProperties,                // properties
+        LinkageTraits::CCalleeSaveRegisters(),  // callee-saved registers
+        CallDescriptor::kNoFlags,               // flags
         "c-call");
   }
 
diff --git a/src/compiler/linkage.cc b/src/compiler/linkage.cc
index 29d4388..0288f1a 100644
@@ -34,7 +34,17 @@ std::ostream& operator<<(std::ostream& os, const CallDescriptor& d) {
   // TODO(svenpanne) Output properties etc. and be less cryptic.
   return os << d.kind() << ":" << d.debug_name() << ":r" << d.ReturnCount()
             << "j" << d.JSParameterCount() << "i" << d.InputCount() << "f"
-            << d.FrameStateCount();
+            << d.FrameStateCount() << "t" << d.SupportsTailCalls();
+}
+
+
+bool CallDescriptor::HasSameReturnLocationsAs(
+    const CallDescriptor* other) const {
+  if (ReturnCount() != other->ReturnCount()) return false;
+  for (size_t i = 0; i < ReturnCount(); ++i) {
+    if (GetReturnLocation(i) != other->GetReturnLocation(i)) return false;
+  }
+  return true;
 }
 
 
@@ -141,6 +151,17 @@ bool Linkage::NeedsFrameState(Runtime::FunctionId function) {
 }
 
 
+bool CallDescriptor::UsesOnlyRegisters() const {
+  for (size_t i = 0; i < InputCount(); ++i) {
+    if (!GetInputLocation(i).is_register()) return false;
+  }
+  for (size_t i = 0; i < ReturnCount(); ++i) {
+    if (!GetReturnLocation(i).is_register()) return false;
+  }
+  return true;
+}
+
+
 //==============================================================================
 // Provide unimplemented methods on unsupported architectures, to at least link.
 //==============================================================================
diff --git a/src/compiler/linkage.h b/src/compiler/linkage.h
index fa34adb..aa68068 100644
@@ -27,11 +27,23 @@ class LinkageLocation {
  public:
   explicit LinkageLocation(int location) : location_(location) {}
 
+  bool is_register() const {
+    return 0 <= location_ && location_ <= ANY_REGISTER;
+  }
+
   static const int16_t ANY_REGISTER = 1023;
   static const int16_t MAX_STACK_SLOT = 32767;
 
   static LinkageLocation AnyRegister() { return LinkageLocation(ANY_REGISTER); }
 
+  bool operator==(const LinkageLocation& other) const {
+    return location_ == other.location_;
+  }
+
+  bool operator!=(const LinkageLocation& other) const {
+    return !(*this == other);
+  }
+
  private:
   friend class CallDescriptor;
   friend class OperandGenerator;
@@ -61,6 +73,7 @@ class CallDescriptor final : public ZoneObject {
     kPatchableCallSite = 1u << 1,
     kNeedsNopAfterCall = 1u << 2,
     kHasExceptionHandler = 1u << 3,
+    kSupportsTailCalls = 1u << 4,
     kPatchableCallSiteWithNop = kPatchableCallSite | kNeedsNopAfterCall
   };
   typedef base::Flags<Flag> Flags;
@@ -108,6 +121,7 @@ class CallDescriptor final : public ZoneObject {
   Flags flags() const { return flags_; }
 
   bool NeedsFrameState() const { return flags() & kNeedsFrameState; }
+  bool SupportsTailCalls() const { return flags() & kSupportsTailCalls; }
 
   LinkageLocation GetReturnLocation(size_t index) const {
     return location_sig_->GetReturn(index);
@@ -137,6 +151,10 @@ class CallDescriptor final : public ZoneObject {
 
   const char* debug_name() const { return debug_name_; }
 
+  bool UsesOnlyRegisters() const;
+
+  bool HasSameReturnLocationsAs(const CallDescriptor* other) const;
+
  private:
   friend class Linkage;
 
diff --git a/src/compiler/mips/code-generator-mips.cc b/src/compiler/mips/code-generator-mips.cc
index befd07e..dfee358 100644
@@ -394,6 +394,20 @@ FPUCondition FlagsConditionToConditionCmpD(bool& predicate,
   } while (0)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ mov(sp, fp);
+    __ Pop(ra, fp);
+    int pop_count = descriptor->IsJSFunctionCall()
+                        ? static_cast<int>(descriptor->JSParameterCount())
+                        : 0;
+    __ Drop(pop_count);
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   MipsOperandConverter i(this, instr);
@@ -412,6 +426,17 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (instr->InputAt(0)->IsImmediate()) {
+        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+                RelocInfo::CODE_TARGET);
+      } else {
+        __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
+        __ Jump(at);
+      }
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -426,6 +451,19 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
+      }
+
+      AssembleDeconstructActivationRecord();
+      __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+      __ Jump(at);
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
diff --git a/src/compiler/mips/instruction-selector-mips.cc b/src/compiler/mips/instruction-selector-mips.cc
index f5b107b..04b33ce 100644
@@ -499,7 +499,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   MipsOperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -534,14 +535,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -550,11 +552,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/mips64/code-generator-mips64.cc b/src/compiler/mips64/code-generator-mips64.cc
index b2b2670..63c7957 100644
@@ -394,6 +394,20 @@ FPUCondition FlagsConditionToConditionCmpD(bool& predicate,
   } while (0)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ mov(sp, fp);
+    __ Pop(ra, fp);
+    int pop_count = descriptor->IsJSFunctionCall()
+                        ? static_cast<int>(descriptor->JSParameterCount())
+                        : 0;
+    __ Drop(pop_count);
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   MipsOperandConverter i(this, instr);
@@ -412,6 +426,17 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (instr->InputAt(0)->IsImmediate()) {
+        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+                RelocInfo::CODE_TARGET);
+      } else {
+        __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
+        __ Jump(at);
+      }
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -426,6 +451,19 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
+        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
+      }
+
+      AssembleDeconstructActivationRecord();
+      __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+      __ Jump(at);
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
diff --git a/src/compiler/mips64/instruction-selector-mips64.cc b/src/compiler/mips64/instruction-selector-mips64.cc
index 5b32e10..96a67eb 100644
@@ -648,7 +648,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   Mips64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -683,14 +684,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -699,10 +701,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
+  InstructionOperand* first_output =
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
-
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/node-properties.cc b/src/compiler/node-properties.cc
index 8956915..9e665d1 100644
@@ -119,6 +119,15 @@ bool NodeProperties::IsControlEdge(Edge edge) {
 
 
 // static
+bool NodeProperties::IsExceptionalCall(Node* node) {
+  for (Node* const use : node->uses()) {
+    if (use->opcode() == IrOpcode::kIfException) return true;
+  }
+  return false;
+}
+
+
+// static
 void NodeProperties::ReplaceContextInput(Node* node, Node* context) {
   node->ReplaceInput(FirstContextIndex(node), context);
 }
diff --git a/src/compiler/node-properties.h b/src/compiler/node-properties.h
index 78a8cf1..0f25051 100644
@@ -72,6 +72,7 @@ class NodeProperties final {
     return IrOpcode::IsPhiOpcode(node->opcode());
   }
 
+  static bool IsExceptionalCall(Node* node);
 
   // ---------------------------------------------------------------------------
   // Miscellaneous mutators.
diff --git a/src/compiler/ppc/code-generator-ppc.cc b/src/compiler/ppc/code-generator-ppc.cc
index 95d4fa6..bdebd30 100644
@@ -581,6 +581,18 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
   } while (0)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    int pop_count = descriptor->IsJSFunctionCall()
+                        ? static_cast<int>(descriptor->JSParameterCount())
+                        : 0;
+    __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   PPCOperandConverter i(this, instr);
@@ -601,6 +613,19 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       DCHECK_EQ(LeaveRC, i.OutputRCBit());
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (HasRegisterInput(instr, 0)) {
+        __ addi(ip, i.InputRegister(0),
+                Operand(Code::kHeaderSize - kHeapObjectTag));
+        __ Jump(ip);
+      } else {
+        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
+                RelocInfo::CODE_TARGET);
+      }
+      DCHECK_EQ(LeaveRC, i.OutputRCBit());
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -617,6 +642,21 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       DCHECK_EQ(LeaveRC, i.OutputRCBit());
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ LoadP(kScratchReg,
+                 FieldMemOperand(func, JSFunction::kContextOffset));
+        __ cmp(cp, kScratchReg);
+        __ Assert(eq, kWrongFunctionContext);
+      }
+      AssembleDeconstructActivationRecord();
+      __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
+      __ Jump(ip);
+      DCHECK_EQ(LeaveRC, i.OutputRCBit());
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       DCHECK_EQ(LeaveRC, i.OutputRCBit());
@@ -1085,7 +1125,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       UNREACHABLE();
       break;
   }
-}
+}  // NOLINT(readability/fn_size)
 
 
 // Assembles branches after an instruction.
diff --git a/src/compiler/ppc/instruction-selector-ppc.cc b/src/compiler/ppc/instruction-selector-ppc.cc
index 46516e4..4670685 100644
@@ -1434,9 +1434,10 @@ void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   PPCOperandGenerator g(this);
-  const CallDescriptor* descriptor = OpParameter<CallDescriptor*>(node);
+  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
   FrameStateDescriptor* frame_state_descriptor = NULL;
   if (descriptor->NeedsFrameState()) {
@@ -1466,14 +1467,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1482,11 +1484,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/compiler/scheduler.cc b/src/compiler/scheduler.cc
index 9851dba..589094c 100644
@@ -318,7 +318,7 @@ class CFGBuilder : public ZoneObject {
         BuildBlocksForSuccessors(node);
         break;
       case IrOpcode::kCall:
-        if (IsExceptionalCall(node)) {
+        if (NodeProperties::IsExceptionalCall(node)) {
           BuildBlocksForSuccessors(node);
         }
         break;
@@ -354,7 +354,7 @@ class CFGBuilder : public ZoneObject {
         ConnectThrow(node);
         break;
       case IrOpcode::kCall:
-        if (IsExceptionalCall(node)) {
+        if (NodeProperties::IsExceptionalCall(node)) {
           scheduler_->UpdatePlacement(node, Scheduler::kFixed);
           ConnectCall(node);
         }
@@ -519,13 +519,6 @@ class CFGBuilder : public ZoneObject {
     }
   }
 
-  bool IsExceptionalCall(Node* node) {
-    for (Node* const use : node->uses()) {
-      if (use->opcode() == IrOpcode::kIfException) return true;
-    }
-    return false;
-  }
-
   bool IsFinalMerge(Node* node) {
     return (node->opcode() == IrOpcode::kMerge &&
             node == scheduler_->graph_->end()->InputAt(0));
diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc
index 9241d83..0f1e959 100644
@@ -527,6 +527,23 @@ class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
   } while (false)
 
 
+void CodeGenerator::AssembleDeconstructActivationRecord() {
+  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  int stack_slots = frame()->GetSpillSlotCount();
+  if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
+    __ movq(rsp, rbp);
+    __ popq(rbp);
+    int32_t bytes_to_pop =
+        descriptor->IsJSFunctionCall()
+            ? static_cast<int32_t>(descriptor->JSParameterCount() *
+                                   kPointerSize)
+            : 0;
+    __ popq(Operand(rsp, bytes_to_pop));
+    __ addq(rsp, Immediate(bytes_to_pop));
+  }
+}
+
+
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   X64OperandConverter i(this, instr);
@@ -545,6 +562,18 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallCodeObject: {
+      AssembleDeconstructActivationRecord();
+      if (HasImmediateInput(instr, 0)) {
+        Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
+        __ jmp(code, RelocInfo::CODE_TARGET);
+      } else {
+        Register reg = i.InputRegister(0);
+        int entry = Code::kHeaderSize - kHeapObjectTag;
+        __ jmp(Operand(reg, entry));
+      }
+      break;
+    }
     case kArchCallJSFunction: {
       EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
@@ -557,6 +586,17 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       RecordCallPosition(instr);
       break;
     }
+    case kArchTailCallJSFunction: {
+      Register func = i.InputRegister(0);
+      if (FLAG_debug_code) {
+        // Check the function's context matches the context argument.
+        __ cmpp(rsi, FieldOperand(func, JSFunction::kContextOffset));
+        __ Assert(equal, kWrongFunctionContext);
+      }
+      AssembleDeconstructActivationRecord();
+      __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
+      break;
+    }
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc
index e0e97ac..c0a6175 100644
@@ -1019,7 +1019,8 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
+                                    CallMode call_mode) {
   X64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
@@ -1052,14 +1053,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   }
 
   // Select the appropriate opcode based on the call type.
+  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = kArchCallCodeObject;
+      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = kArchCallJSFunction;
+      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1068,11 +1070,12 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
+  size_t size = is_tail_call ? 0 : buffer.outputs.size();
   InstructionOperand* first_output =
-      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
+      size > 0 ? &buffer.outputs.front() : nullptr;
   Instruction* call_instr =
-      Emit(opcode, buffer.outputs.size(), first_output,
-           buffer.instruction_args.size(), &buffer.instruction_args.front());
+      Emit(opcode, size, first_output, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
   call_instr->MarkAsCall();
 }
 
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index c16b2d6..cfa1ff2 100644
@@ -423,6 +423,8 @@ DEFINE_BOOL(turbo_stress_loop_peeling, false,
             "stress loop peeling optimization")
 DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
 DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
+DEFINE_BOOL(turbo_tail_calls, false,
+            "enable tail call optimization in TurboFan")
 
 DEFINE_INT(typed_array_max_size_in_heap, 64,
            "threshold for in-heap typed array")
diff --git a/src/globals.h b/src/globals.h
index e71314f..f70ddf3 100644
@@ -454,6 +454,8 @@ enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
 
+enum CallMode { NORMAL_CALL, TAIL_CALL };
+
 enum VisitMode {
   VISIT_ALL,
   VISIT_ALL_IN_SCAVENGE,
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 771197d..6f56c76 100644
@@ -2242,9 +2242,6 @@ class HCallJSFunction final : public HCall<1> {
 };
 
 
-enum CallMode { NORMAL_CALL, TAIL_CALL };
-
-
 class HCallWithDescriptor final : public HInstruction {
  public:
   static HCallWithDescriptor* New(Isolate* isolate, Zone* zone, HValue* context,
diff --git a/test/unittests/compiler/instruction-selector-unittest.cc b/test/unittests/compiler/instruction-selector-unittest.cc
index 645b0f6..4f0aba1 100644
@@ -4,6 +4,7 @@
 
 #include "test/unittests/compiler/instruction-selector-unittest.h"
 
+#include "src/code-stubs.h"
 #include "src/compiler/graph.h"
 #include "src/compiler/schedule.h"
 #include "src/flags.h"
@@ -590,6 +591,53 @@ TARGET_TEST_F(InstructionSelectorTest,
   EXPECT_EQ(index, s.size());
 }
 
+
+// -----------------------------------------------------------------------------
+// Tail calls.
+
+TARGET_TEST_F(InstructionSelectorTest, TailCall) {
+  for (int mode = 0; mode < 2; ++mode) {
+    bool supports_tail_calls = FLAG_turbo_tail_calls && (mode == 0);
+
+    StreamBuilder m(this, kMachAnyTagged);
+    Node* start = m.graph()->start();
+    Node* undefined = m.UndefinedConstant();
+
+    StringLengthStub stub(isolate());
+    CallDescriptor* desc = Linkage::GetStubCallDescriptor(
+        isolate(), zone(), stub.GetCallInterfaceDescriptor(), 0,
+        supports_tail_calls ? CallDescriptor::kSupportsTailCalls
+                            : CallDescriptor::kNoFlags,
+        Operator::kNoProperties);
+    Node* stub_node = m.NewNode(m.common()->HeapConstant(
+        Unique<Code>::CreateUninitialized(stub.GetCode())));
+
+    Node* call = m.NewNode(m.common()->Call(desc), stub_node, undefined,
+                           undefined, undefined, undefined, undefined);
+    call->AppendInput(zone(), start);  // effect
+    call->AppendInput(zone(), start);  // control
+
+    m.Return(call);
+    Node* ret = *call->uses().begin();
+    ret->AppendInput(zone(), call);   // effect
+    ret->AppendInput(zone(), start);  // control
+
+    Stream s = m.Build(kAllInstructions);
+    if (supports_tail_calls) {
+      ASSERT_EQ(3U, s.size());
+      EXPECT_EQ(kArchNop, s[0]->arch_opcode());
+      EXPECT_EQ(kArchTailCallCodeObject, s[1]->arch_opcode());
+      EXPECT_EQ(kArchNop, s[2]->arch_opcode());
+    } else {
+      ASSERT_EQ(4U, s.size());
+      EXPECT_EQ(kArchNop, s[0]->arch_opcode());
+      EXPECT_EQ(kArchCallCodeObject, s[1]->arch_opcode());
+      EXPECT_EQ(kArchRet, s[2]->arch_opcode());
+      EXPECT_EQ(kArchNop, s[3]->arch_opcode());
+    }
+  }
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8