[turbofan] Fix tail call optimization.
author: bmeurer <bmeurer@chromium.org>
Tue, 5 May 2015 09:42:59 +0000 (02:42 -0700)
committer: Commit bot <commit-bot@chromium.org>
Tue, 5 May 2015 09:43:02 +0000 (09:43 +0000)
Tail calls are matched on the graph, with a dedicated tail call
optimization that is actually testable. The instruction selection can
still fall back to a regular call if the platform constraints don't allow to
emit a tail call (i.e. the return locations of caller and callee differ
or the callee takes non-register parameters, which is a restriction that
will be removed in the future).

Also explicitly limit tail call optimization to stubs for now and drop
the global flag.

BUG=v8:4076
LOG=n

Review URL: https://codereview.chromium.org/1114163005

Cr-Commit-Position: refs/heads/master@{#28219}

34 files changed:
BUILD.gn
src/compiler/arm/instruction-selector-arm.cc
src/compiler/arm64/instruction-selector-arm64.cc
src/compiler/common-operator.cc
src/compiler/common-operator.h
src/compiler/ia32/instruction-selector-ia32.cc
src/compiler/instruction-selector.cc
src/compiler/instruction-selector.h
src/compiler/machine-type.h
src/compiler/mips/instruction-selector-mips.cc
src/compiler/mips64/instruction-selector-mips64.cc
src/compiler/node.cc
src/compiler/node.h
src/compiler/opcodes.h
src/compiler/pipeline.cc
src/compiler/schedule.cc
src/compiler/schedule.h
src/compiler/scheduler.cc
src/compiler/tail-call-optimization.cc [new file with mode: 0644]
src/compiler/tail-call-optimization.h [new file with mode: 0644]
src/compiler/typer.cc
src/compiler/verifier.cc
src/compiler/x64/instruction-selector-x64.cc
src/flag-definitions.h
src/globals.h
src/hydrogen-instructions.h
test/unittests/compiler/instruction-selector-unittest.cc
test/unittests/compiler/node-test-utils.cc
test/unittests/compiler/node-test-utils.h
test/unittests/compiler/node-unittest.cc
test/unittests/compiler/scheduler-unittest.cc
test/unittests/compiler/tail-call-optimization-unittest.cc [new file with mode: 0644]
test/unittests/unittests.gyp
tools/gyp/v8.gyp

index 424738cff2bc39089f6052984519db5a48076340..233499b71025ebfa04fb9171015f7c44fdf3a19f 100644 (file)
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -678,6 +678,8 @@ source_set("v8_base") {
     "src/compiler/source-position.h",
     "src/compiler/state-values-utils.cc",
     "src/compiler/state-values-utils.h",
+    "src/compiler/tail-call-optimization.cc",
+    "src/compiler/tail-call-optimization.h",
     "src/compiler/typer.cc",
     "src/compiler/typer.h",
     "src/compiler/value-numbering-reducer.cc",
index af55951f64e51831a4a119dcc1300527f8e374b9..76a0acb410f0b8fcec908285b8935c94df6e8751 100644 (file)
@@ -1080,12 +1080,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   ArmOperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor =
         GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
@@ -1094,12 +1093,11 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
 
   // Compute InstructionOperands for inputs and outputs.
-  // TODO(turbofan): on ARM64 it's probably better to use the code object in a
+  // TODO(turbofan): on ARM it's probably better to use the code object in a
   // register if there are multiple uses of it. Improve constant pool and the
   // heuristics in the register allocator for where to emit constants.
   InitializeCallBuffer(node, &buffer, true, false);
 
-  // TODO(dcarney): might be possible to use claim/poke instead
   // Push any stack arguments.
   for (Node* node : base::Reversed(buffer.pushed_nodes)) {
     Emit(kArmPush, g.NoOutput(), g.UseRegister(node));
@@ -1107,21 +1105,20 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
 
   // Pass label of exception handler block.
   CallDescriptor::Flags flags = descriptor->flags();
-  if (handler != nullptr) {
+  if (handler) {
     flags |= CallDescriptor::kHasExceptionHandler;
     buffer.instruction_args.push_back(g.Label(handler));
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+      opcode = kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1130,13 +1127,95 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  ArmOperandGenerator g(this);
+  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    // TODO(turbofan): on ARM it's probably better to use the code object in a
+    // register if there are multiple uses of it. Improve constant pool and the
+    // heuristics in the register allocator for where to emit constants.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor =
+        descriptor->NeedsFrameState()
+            ? GetFrameStateDescriptor(
+                  node->InputAt(static_cast<int>(descriptor->InputCount())))
+            : nullptr;
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    // TODO(turbofan): on ARM it's probably better to use the code object in a
+    // register if there are multiple uses of it. Improve constant pool and the
+    // heuristics in the register allocator for where to emit constants.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    // Push any stack arguments.
+    for (Node* node : base::Reversed(buffer.pushed_nodes)) {
+      Emit(kArmPush, g.NoOutput(), g.UseRegister(node));
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject: {
+        opcode = kArchCallCodeObject;
+        break;
+      }
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t const output_count = buffer.outputs.size();
+    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index 874e93cb551fc8c1997a9a755af2d1d1629e50c1..f653520c36e95c6cd3524fd267af9affea43b26d 100644 (file)
@@ -1204,12 +1204,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   Arm64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor =
         GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
@@ -1261,15 +1260,14 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+      opcode = kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1278,13 +1276,120 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  Arm64OperandGenerator g(this);
+  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
+    // register if there are multiple uses of it. Improve constant pool and the
+    // heuristics in the register allocator for where to emit constants.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor = nullptr;
+    if (descriptor->NeedsFrameState()) {
+      frame_state_descriptor =
+          GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
+    }
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
+    // register if there are multiple uses of it. Improve constant pool and the
+    // heuristics in the register allocator for where to emit constants.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    // Push the arguments to the stack.
+    bool pushed_count_uneven = buffer.pushed_nodes.size() & 1;
+    int aligned_push_count = buffer.pushed_nodes.size();
+    // TODO(dcarney): claim and poke probably take small immediates,
+    //                loop here or whatever.
+    // Bump the stack pointer(s).
+    if (aligned_push_count > 0) {
+      // TODO(dcarney): it would be better to bump the csp here only
+      //                and emit paired stores with increment for non c frames.
+      Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(aligned_push_count));
+    }
+    // Move arguments to the stack.
+    {
+      int slot = buffer.pushed_nodes.size() - 1;
+      // Emit the uneven pushes.
+      if (pushed_count_uneven) {
+        Node* input = buffer.pushed_nodes[slot];
+        Emit(kArm64Poke, g.NoOutput(), g.UseRegister(input),
+             g.TempImmediate(slot));
+        slot--;
+      }
+      // Now all pushes can be done in pairs.
+      for (; slot >= 0; slot -= 2) {
+        Emit(kArm64PokePair, g.NoOutput(),
+             g.UseRegister(buffer.pushed_nodes[slot]),
+             g.UseRegister(buffer.pushed_nodes[slot - 1]),
+             g.TempImmediate(slot));
+      }
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject: {
+        opcode = kArchCallCodeObject;
+        break;
+      }
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t const output_count = buffer.outputs.size();
+    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index 0c1361511a42a98f9fa16231488b21a2989cbf89..3a7ce3993dcdcb0ff7c46123cd0d626a1212b2b7 100644 (file)
@@ -662,9 +662,9 @@ const Operator* CommonOperatorBuilder::FrameState(
 const Operator* CommonOperatorBuilder::Call(const CallDescriptor* descriptor) {
   class CallOperator final : public Operator1<const CallDescriptor*> {
    public:
-    CallOperator(const CallDescriptor* descriptor, const char* mnemonic)
+    explicit CallOperator(const CallDescriptor* descriptor)
         : Operator1<const CallDescriptor*>(
-              IrOpcode::kCall, descriptor->properties(), mnemonic,
+              IrOpcode::kCall, descriptor->properties(), "Call",
               descriptor->InputCount() + descriptor->FrameStateCount(),
               Operator::ZeroIfPure(descriptor->properties()),
               Operator::ZeroIfEliminatable(descriptor->properties()),
@@ -676,7 +676,25 @@ const Operator* CommonOperatorBuilder::Call(const CallDescriptor* descriptor) {
       os << "[" << *parameter() << "]";
     }
   };
-  return new (zone()) CallOperator(descriptor, "Call");
+  return new (zone()) CallOperator(descriptor);
+}
+
+
+const Operator* CommonOperatorBuilder::TailCall(
+    const CallDescriptor* descriptor) {
+  class TailCallOperator final : public Operator1<const CallDescriptor*> {
+   public:
+    explicit TailCallOperator(const CallDescriptor* descriptor)
+        : Operator1<const CallDescriptor*>(
+              IrOpcode::kTailCall, descriptor->properties(), "TailCall",
+              descriptor->InputCount() + descriptor->FrameStateCount(), 1, 1, 0,
+              0, 1, descriptor) {}
+
+    void PrintParameter(std::ostream& os) const override {
+      os << "[" << *parameter() << "]";
+    }
+  };
+  return new (zone()) TailCallOperator(descriptor);
 }
 
 
index a453b8159d62d242255eecfccbd30d4f6cfa4cb2..bad44f51acea9b8451862d108966cc63386553ed 100644 (file)
@@ -232,6 +232,7 @@ class CommonOperatorBuilder final : public ZoneObject {
       OutputFrameStateCombine state_combine,
       MaybeHandle<JSFunction> jsfunction = MaybeHandle<JSFunction>());
   const Operator* Call(const CallDescriptor* descriptor);
+  const Operator* TailCall(const CallDescriptor* descriptor);
   const Operator* Projection(size_t index);
 
   // Constructs a new merge or phi operator with the same opcode as {op}, but
index 24817f4fe455ff197113143adee44932db64346d..1f569ad5066184b66c8b13ad9b3a65123f3d4e3f 100644 (file)
@@ -815,13 +815,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   IA32OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
-
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor =
         GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
@@ -844,21 +842,20 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
 
   // Pass label of exception handler block.
   CallDescriptor::Flags flags = descriptor->flags();
-  if (handler != nullptr) {
+  if (handler) {
     flags |= CallDescriptor::kHasExceptionHandler;
     buffer.instruction_args.push_back(g.Label(handler));
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+      opcode = kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -867,13 +864,93 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  IA32OperandGenerator g(this);
+  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, true);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor =
+        descriptor->NeedsFrameState()
+            ? GetFrameStateDescriptor(
+                  node->InputAt(static_cast<int>(descriptor->InputCount())))
+            : nullptr;
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, true);
+
+    // Push any stack arguments.
+    for (Node* node : base::Reversed(buffer.pushed_nodes)) {
+      // TODO(titzer): Handle pushing double parameters.
+      InstructionOperand value =
+          g.CanBeImmediate(node)
+              ? g.UseImmediate(node)
+              : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node);
+      Emit(kIA32Push, g.NoOutput(), value);
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t output_count = buffer.outputs.size();
+    auto* outputs = &buffer.outputs.front();
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index ff162e9c77b531c783945a555c2045a049367a19..b53b31e7bed9ee2ae6c52f470fa5724cc832ffb1 100644 (file)
@@ -9,7 +9,6 @@
 #include "src/base/adapters.h"
 #include "src/compiler/instruction-selector-impl.h"
 #include "src/compiler/node-matchers.h"
-#include "src/compiler/node-properties.h"
 #include "src/compiler/pipeline.h"
 #include "src/compiler/schedule.h"
 #include "src/compiler/state-values-utils.h"
@@ -278,7 +277,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
                                                bool call_code_immediate,
                                                bool call_address_immediate) {
   OperandGenerator g(this);
-  DCHECK_EQ(call->op()->ValueOutputCount(),
+  DCHECK_LE(call->op()->ValueOutputCount(),
             static_cast<int>(buffer->descriptor->ReturnCount()));
   DCHECK_EQ(
       call->op()->ValueInputCount(),
@@ -464,7 +463,11 @@ void InstructionSelector::VisitControl(BasicBlock* block) {
       DCHECK_EQ(IrOpcode::kCall, input->opcode());
       BasicBlock* success = block->SuccessorAt(0);
       BasicBlock* exception = block->SuccessorAt(1);
-      return VisitCall(input, exception, NORMAL_CALL), VisitGoto(success);
+      return VisitCall(input, exception), VisitGoto(success);
+    }
+    case BasicBlock::kTailCall: {
+      DCHECK_EQ(IrOpcode::kTailCall, input->opcode());
+      return VisitTailCall(input);
     }
     case BasicBlock::kBranch: {
       DCHECK_EQ(IrOpcode::kBranch, input->opcode());
@@ -507,7 +510,7 @@ void InstructionSelector::VisitControl(BasicBlock* block) {
     }
     case BasicBlock::kReturn: {
       DCHECK_EQ(IrOpcode::kReturn, input->opcode());
-      return VisitReturn(input);
+      return VisitReturn(input->InputAt(0));
     }
     case BasicBlock::kDeoptimize: {
       // If the result itself is a return, return its input.
@@ -584,7 +587,7 @@ void InstructionSelector::VisitNode(Node* node) {
       return VisitConstant(node);
     }
     case IrOpcode::kCall:
-      return VisitCall(node, nullptr, NORMAL_CALL);
+      return VisitCall(node);
     case IrOpcode::kFrameState:
     case IrOpcode::kStateValues:
       return;
@@ -989,46 +992,7 @@ void InstructionSelector::VisitGoto(BasicBlock* target) {
 }
 
 
-namespace {
-
-// Returns the call node if the given return node is part of a tail call,
-// nullptr otherwise.
-Node* TryMatchTailCall(Node* ret) {
-  // The value which is returned must be the result of a potential tail call,
-  // there must be no try/catch/finally around the call, and there must be no
-  // effects between the call and the return.
-  Node* call = NodeProperties::GetValueInput(ret, 0);
-  if (call->opcode() != IrOpcode::kCall ||
-      !OpParameter<const CallDescriptor*>(call)->SupportsTailCalls() ||
-      NodeProperties::IsExceptionalCall(call) ||
-      NodeProperties::GetEffectInput(ret, 0) != call) {
-    return nullptr;
-  }
-  // Furthermore, control has to flow via an IfSuccess from the call (for calls
-  // which can throw), or the return and the call have to use the same control
-  // input (for calls which can't throw).
-  Node* control = NodeProperties::GetControlInput(ret, 0);
-  bool found = (control->opcode() == IrOpcode::kIfSuccess)
-                   ? (NodeProperties::GetControlInput(control, 0) == call)
-                   : (control == NodeProperties::GetControlInput(call, 0));
-  return found ? call : nullptr;
-}
-
-}  // namespace
-
-
-void InstructionSelector::VisitReturn(Node* node) {
-  if (FLAG_turbo_tail_calls) {
-    Node* call = TryMatchTailCall(node);
-    if (call != nullptr) {
-      const CallDescriptor* desc = OpParameter<const CallDescriptor*>(call);
-      if (desc->UsesOnlyRegisters() &&
-          desc->HasSameReturnLocationsAs(linkage()->GetIncomingDescriptor())) {
-        return VisitCall(call, nullptr, TAIL_CALL);
-      }
-    }
-  }
-  Node* value = NodeProperties::GetValueInput(node, 0);
+void InstructionSelector::VisitReturn(Node* value) {
   DCHECK_NOT_NULL(value);
   OperandGenerator g(this);
   Emit(kArchRet, g.NoOutput(),
@@ -1159,12 +1123,14 @@ MACHINE_OP_LIST(DECLARE_UNIMPLEMENTED_SELECTOR)
 #undef DECLARE_UNIMPLEMENTED_SELECTOR
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   UNIMPLEMENTED();
 }
 
 
+void InstructionSelector::VisitTailCall(Node* node) { UNIMPLEMENTED(); }
+
+
 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                       BasicBlock* fbranch) {
   UNIMPLEMENTED();
index 81c0c15ee97a0fa7b48a7e3868ba79d2b189c5a1..9a0744720a099295361e8ab603bb7f6277d0bfcc 100644 (file)
@@ -201,7 +201,8 @@ class InstructionSelector final {
   void VisitPhi(Node* node);
   void VisitProjection(Node* node);
   void VisitConstant(Node* node);
-  void VisitCall(Node* call, BasicBlock* handler, CallMode call_mode);
+  void VisitCall(Node* call, BasicBlock* handler = nullptr);
+  void VisitTailCall(Node* call);
   void VisitGoto(BasicBlock* target);
   void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
   void VisitSwitch(Node* node, const SwitchInfo& sw);
index 0b56b7ab47da18b3d6edfbc051476cfac4883bc1..02719f270f01d7aeba5bf2f2c65818f4ff924b06 100644 (file)
@@ -113,6 +113,7 @@ inline int ElementSizeOf(MachineType machine_type) {
 }
 
 typedef Signature<MachineType> MachineSignature;
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
index 5f64dbd16188b406d6e40c6960b56ccede1a2c67..afddd0a5089a2c2704e9285f5ef6fc4023054fd3 100644 (file)
@@ -499,12 +499,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   MipsOperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor =
         GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
@@ -529,21 +528,20 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
 
   // Pass label of exception handler block.
   CallDescriptor::Flags flags = descriptor->flags();
-  if (handler != nullptr) {
+  if (handler) {
     flags |= CallDescriptor::kHasExceptionHandler;
     buffer.instruction_args.push_back(g.Label(handler));
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+      opcode = kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -552,13 +550,96 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  MipsOperandGenerator g(this);
+  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor =
+        descriptor->NeedsFrameState()
+            ? GetFrameStateDescriptor(
+                  node->InputAt(static_cast<int>(descriptor->InputCount())))
+            : nullptr;
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, false);
+    // Possibly align stack here for functions.
+    int push_count = buffer.pushed_nodes.size();
+    if (push_count > 0) {
+      Emit(kMipsStackClaim, g.NoOutput(),
+           g.TempImmediate(push_count << kPointerSizeLog2));
+    }
+    int slot = buffer.pushed_nodes.size() - 1;
+    for (Node* node : base::Reversed(buffer.pushed_nodes)) {
+      Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(node),
+           g.TempImmediate(slot << kPointerSizeLog2));
+      slot--;
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject: {
+        opcode = kArchCallCodeObject;
+        break;
+      }
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t const output_count = buffer.outputs.size();
+    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index 96a67eb87f4c151c9dfc57f4ba5e846147df7d6b..2f434954786f2bd60b27f3c9a67ecc0b09e3b970 100644 (file)
@@ -648,12 +648,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   Mips64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor =
         GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
@@ -678,21 +677,20 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
 
   // Pass label of exception handler block.
   CallDescriptor::Flags flags = descriptor->flags();
-  if (handler != nullptr) {
+  if (handler) {
     flags |= CallDescriptor::kHasExceptionHandler;
     buffer.instruction_args.push_back(g.Label(handler));
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
     case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+      opcode = kArchCallCodeObject;
       break;
     }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -701,13 +699,96 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  Mips64OperandGenerator g(this);
+  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor =
+        descriptor->NeedsFrameState()
+            ? GetFrameStateDescriptor(
+                  node->InputAt(static_cast<int>(descriptor->InputCount())))
+            : nullptr;
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, false);
+
+    int push_count = buffer.pushed_nodes.size();
+    if (push_count > 0) {
+      Emit(kMips64StackClaim, g.NoOutput(),
+           g.TempImmediate(push_count << kPointerSizeLog2));
+    }
+    int slot = buffer.pushed_nodes.size() - 1;
+    for (Node* node : base::Reversed(buffer.pushed_nodes)) {
+      Emit(kMips64StoreToStackSlot, g.NoOutput(), g.UseRegister(node),
+           g.TempImmediate(slot << kPointerSizeLog2));
+      slot--;
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject: {
+        opcode = kArchCallCodeObject;
+        break;
+      }
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t const output_count = buffer.outputs.size();
+    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index 1a9c326f20be0f37fd5aa36ba049f5bf8133cc51..724c9f173ec55bff234a8de7db7eddcfd06b7c75 100644 (file)
@@ -138,6 +138,21 @@ void Node::ReplaceUses(Node* that) {
 }
 
 
+bool Node::OwnedBy(Node const* owner1, Node const* owner2) const {
+  unsigned mask = 0;
+  for (Use* use = first_use_; use; use = use->next) {
+    if (use->from == owner1) {
+      mask |= 1;
+    } else if (use->from == owner2) {
+      mask |= 2;
+    } else {
+      return false;
+    }
+  }
+  return mask == 3;
+}
+
+
 void Node::Input::Update(Node* new_to) {
   Node* old_to = this->to;
   if (new_to == old_to) return;  // Nothing to do.
index 54c01aa3b8d6e1537992133d635c5f951b2a4d04..46dd041f1fcad8127b667a91c8885e0a360a9ddb 100644 (file)
@@ -146,6 +146,9 @@ class Node final {
     return first_use_ && first_use_->from == owner && !first_use_->next;
   }
 
+  // Returns true if {owner1} and {owner2} are the only users of {this} node.
+  bool OwnedBy(Node const* owner1, Node const* owner2) const;
+
  private:
   struct Use final : public ZoneObject {
     Node* from;
index 5609970a9cc3ac947956ffbbbdec8fd98003b13e..83de619fba0987eac89cb48fda2f167236d192d4 100644 (file)
@@ -21,6 +21,7 @@
   V(Merge)                 \
   V(Deoptimize)            \
   V(Return)                \
+  V(TailCall)              \
   V(OsrNormalEntry)        \
   V(OsrLoopEntry)          \
   V(Throw)                 \
index 5e51c454cda8b73d296b443015f17a472b88c2e2..a3fe262258830fc2730f455c1d1ccdd76eea40d1 100644 (file)
@@ -44,6 +44,7 @@
 #include "src/compiler/select-lowering.h"
 #include "src/compiler/simplified-lowering.h"
 #include "src/compiler/simplified-operator-reducer.h"
+#include "src/compiler/tail-call-optimization.h"
 #include "src/compiler/typer.h"
 #include "src/compiler/value-numbering-reducer.h"
 #include "src/compiler/verifier.h"
@@ -688,9 +689,12 @@ struct GenericLoweringPhase {
     JSGenericLowering generic(data->info()->is_typing_enabled(),
                               data->jsgraph());
     SelectLowering select(data->jsgraph()->graph(), data->jsgraph()->common());
+    TailCallOptimization tco(data->common(), data->graph());
     GraphReducer graph_reducer(data->graph(), temp_zone);
     AddReducer(data, &graph_reducer, &generic);
     AddReducer(data, &graph_reducer, &select);
+    // TODO(turbofan): TCO is currently limited to stubs.
+    if (data->info()->IsStub()) AddReducer(data, &graph_reducer, &tco);
     graph_reducer.ReduceGraph();
   }
 };
index f30e5f61ceb9d944a8afdbe0c16ba2a5822f4a62..adb80a7b081b7c3ceba52752f589d669e25241bc 100644 (file)
@@ -108,6 +108,8 @@ std::ostream& operator<<(std::ostream& os, const BasicBlock::Control& c) {
       return os << "switch";
     case BasicBlock::kDeoptimize:
       return os << "deoptimize";
+    case BasicBlock::kTailCall:
+      return os << "tailcall";
     case BasicBlock::kReturn:
       return os << "return";
     case BasicBlock::kThrow:
@@ -233,6 +235,14 @@ void Schedule::AddSwitch(BasicBlock* block, Node* sw, BasicBlock** succ_blocks,
 }
 
 
+void Schedule::AddTailCall(BasicBlock* block, Node* input) {
+  DCHECK_EQ(BasicBlock::kNone, block->control());
+  block->set_control(BasicBlock::kTailCall);
+  SetControlInput(block, input);
+  if (block != end()) AddSuccessor(block, end());
+}
+
+
 void Schedule::AddReturn(BasicBlock* block, Node* input) {
   DCHECK_EQ(BasicBlock::kNone, block->control());
   block->set_control(BasicBlock::kReturn);
index 027a01dd4af03a9d46bdff6f52bed68672f5a1f5..37ce76299ed53858411f38493ee78275c0efeb61 100644 (file)
@@ -37,6 +37,7 @@ class BasicBlock final : public ZoneObject {
     kBranch,      // Branch if true to first successor, otherwise second.
     kSwitch,      // Table dispatch to one of the successor blocks.
     kDeoptimize,  // Return a value from this method.
+    kTailCall,    // Tail call another method from this method.
     kReturn,      // Return a value from this method.
     kThrow        // Throw an exception.
   };
@@ -220,6 +221,9 @@ class Schedule final : public ZoneObject {
   // BasicBlock building: add a deoptimize at the end of {block}.
   void AddDeoptimize(BasicBlock* block, Node* input);
 
+  // BasicBlock building: add a tailcall at the end of {block}.
+  void AddTailCall(BasicBlock* block, Node* input);
+
   // BasicBlock building: add a return at the end of {block}.
   void AddReturn(BasicBlock* block, Node* input);
 
index 3a2af0234614794baed8bd80677891aac9432847..066b4a13776444b54ca5aea936f9bcdd3894266b 100644 (file)
@@ -345,6 +345,10 @@ class CFGBuilder : public ZoneObject {
         scheduler_->UpdatePlacement(node, Scheduler::kFixed);
         ConnectDeoptimize(node);
         break;
+      case IrOpcode::kTailCall:
+        scheduler_->UpdatePlacement(node, Scheduler::kFixed);
+        ConnectTailCall(node);
+        break;
       case IrOpcode::kReturn:
         scheduler_->UpdatePlacement(node, Scheduler::kFixed);
         ConnectReturn(node);
@@ -487,6 +491,13 @@ class CFGBuilder : public ZoneObject {
     }
   }
 
+  void ConnectTailCall(Node* call) {
+    Node* call_control = NodeProperties::GetControlInput(call);
+    BasicBlock* call_block = FindPredecessorBlock(call_control);
+    TraceConnect(call, call_block, NULL);
+    schedule_->AddTailCall(call_block, call);
+  }
+
   void ConnectReturn(Node* ret) {
     Node* return_control = NodeProperties::GetControlInput(ret);
     BasicBlock* return_block = FindPredecessorBlock(return_control);
diff --git a/src/compiler/tail-call-optimization.cc b/src/compiler/tail-call-optimization.cc
new file mode 100644 (file)
index 0000000..a88ebe3
--- /dev/null
@@ -0,0 +1,83 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/tail-call-optimization.h"
+
+#include "src/compiler/common-operator.h"
+#include "src/compiler/graph.h"
+#include "src/compiler/linkage.h"
+#include "src/compiler/node-properties.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+Reduction TailCallOptimization::Reduce(Node* node) {
+  if (node->opcode() != IrOpcode::kReturn) return NoChange();
+  // The value which is returned must be the result of a potential tail call,
+  // there must be no try/catch/finally around the Call, and there must be no
+  // other effect between the Call and the Return nodes.
+  Node* const call = NodeProperties::GetValueInput(node, 0);
+  if (call->opcode() == IrOpcode::kCall &&
+      OpParameter<CallDescriptor const*>(call)->SupportsTailCalls() &&
+      NodeProperties::GetEffectInput(node) == call &&
+      !NodeProperties::IsExceptionalCall(call)) {
+    Node* const control = NodeProperties::GetControlInput(node);
+    if (control->opcode() == IrOpcode::kIfSuccess &&
+        call->OwnedBy(node, control) && control->OwnedBy(node)) {
+      // Furthermore, control has to flow via an IfSuccess from the Call, so
+      // the Return node value and effect depends directly on the Call node,
+      // and indirectly control depends on the Call via an IfSuccess.
+
+      // Value1 ... ValueN Effect Control
+      //   ^          ^      ^       ^
+      //   |          |      |       |
+      //   |          +--+ +-+       |
+      //   +----------+  | |  +------+
+      //               \ | | /
+      //             Call[Descriptor]
+      //                ^ ^ ^
+      //                | | |
+      //              +-+ | |
+      //              |   | |
+      //              | +-+ |
+      //              | | IfSuccess
+      //              | |  ^
+      //              | |  |
+      //              Return
+      //                ^
+      //                |
+
+      // The resulting graph looks like this:
+
+      // Value1 ... ValueN Effect Control
+      //   ^          ^      ^       ^
+      //   |          |      |       |
+      //   |          +--+ +-+       |
+      //   +----------+  | |  +------+
+      //               \ | | /
+      //           TailCall[Descriptor]
+      //                 ^
+      //                 |
+
+      DCHECK_EQ(call, NodeProperties::GetControlInput(control, 0));
+      DCHECK_EQ(3, node->InputCount());
+      node->set_op(
+          common()->TailCall(OpParameter<CallDescriptor const*>(call)));
+      node->ReplaceInput(0, NodeProperties::GetEffectInput(call));
+      node->ReplaceInput(1, NodeProperties::GetControlInput(call));
+      node->RemoveInput(2);
+      for (int index = 0; index < call->op()->ValueInputCount(); ++index) {
+        node->InsertInput(graph()->zone(), index,
+                          NodeProperties::GetValueInput(call, index));
+      }
+      return Changed(node);
+    }
+  }
+  return NoChange();
+}
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
diff --git a/src/compiler/tail-call-optimization.h b/src/compiler/tail-call-optimization.h
new file mode 100644 (file)
index 0000000..b5d4f96
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_TAIL_CALL_OPTIMIZATION_H_
+#define V8_COMPILER_TAIL_CALL_OPTIMIZATION_H_
+
+#include "src/compiler/graph-reducer.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// Forward declarations.
+class CommonOperatorBuilder;
+class Graph;
+
+
+// Performs tail call optimization by replacing certain combinations of Return
+// and Call nodes with a single TailCall.
+class TailCallOptimization final : public Reducer {
+ public:
+  TailCallOptimization(CommonOperatorBuilder* common, Graph* graph)
+      : common_(common), graph_(graph) {}
+
+  Reduction Reduce(Node* node) final;
+
+ private:
+  CommonOperatorBuilder* common() const { return common_; }
+  Graph* graph() const { return graph_; }
+
+  CommonOperatorBuilder* const common_;
+  Graph* const graph_;
+};
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_TAIL_CALL_OPTIMIZATION_H_
index 7f176fe1ff48eb6aadb03b7d4e9fc863c850043a..d1897750337964b861b51e60ab9866ae4f0c38b9 100644 (file)
@@ -252,6 +252,7 @@ class Typer::Visitor : public Reducer {
       DECLARE_CASE(Merge)
       DECLARE_CASE(Deoptimize)
       DECLARE_CASE(Return)
+      DECLARE_CASE(TailCall)
       DECLARE_CASE(OsrNormalEntry)
       DECLARE_CASE(OsrLoopEntry)
       DECLARE_CASE(Throw)
@@ -295,6 +296,7 @@ class Typer::Visitor : public Reducer {
       DECLARE_CASE(Merge)
       DECLARE_CASE(Deoptimize)
       DECLARE_CASE(Return)
+      DECLARE_CASE(TailCall)
       DECLARE_CASE(OsrNormalEntry)
       DECLARE_CASE(OsrLoopEntry)
       DECLARE_CASE(Throw)
index b86bba93b05dbdb8c78527fad098ea6bd5a79e40..4dd0b6470648e6799ef4ce50c657cc7f9b9064bd 100644 (file)
@@ -430,6 +430,9 @@ void Verifier::Visitor::Check(Node* node) {
     case IrOpcode::kCall:
       // TODO(rossberg): what are the constraints on these?
       break;
+    case IrOpcode::kTailCall:
+      // TODO(bmeurer): what are the constraints on these?
+      break;
 
     // JavaScript operators
     // --------------------
index c0a6175eef3c7aa340bcf3a302c85265680d8be9..ccacc038d514e9d73e6c5e30c397d375ecf39290 100644 (file)
@@ -1019,12 +1019,11 @@ void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
 }
 
 
-void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
-                                    CallMode call_mode) {
+void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
   X64OperandGenerator g(this);
   const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
 
-  FrameStateDescriptor* frame_state_descriptor = NULL;
+  FrameStateDescriptor* frame_state_descriptor = nullptr;
   if (descriptor->NeedsFrameState()) {
     frame_state_descriptor = GetFrameStateDescriptor(
         node->InputAt(static_cast<int>(descriptor->InputCount())));
@@ -1047,21 +1046,19 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
 
   // Pass label of exception handler block.
   CallDescriptor::Flags flags = descriptor->flags();
-  if (handler != nullptr) {
+  if (handler) {
     flags |= CallDescriptor::kHasExceptionHandler;
     buffer.instruction_args.push_back(g.Label(handler));
   }
 
   // Select the appropriate opcode based on the call type.
-  bool is_tail_call = call_mode == TAIL_CALL;
   InstructionCode opcode;
   switch (descriptor->kind()) {
-    case CallDescriptor::kCallCodeObject: {
-      opcode = is_tail_call ? kArchTailCallCodeObject : kArchCallCodeObject;
+    case CallDescriptor::kCallCodeObject:
+      opcode = kArchCallCodeObject;
       break;
-    }
     case CallDescriptor::kCallJSFunction:
-      opcode = is_tail_call ? kArchTailCallJSFunction : kArchCallJSFunction;
+      opcode = kArchCallJSFunction;
       break;
     default:
       UNREACHABLE();
@@ -1070,13 +1067,93 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler,
   opcode |= MiscField::encode(flags);
 
   // Emit the call instruction.
-  size_t size = is_tail_call ? 0 : buffer.outputs.size();
-  InstructionOperand* first_output =
-      size > 0 ? &buffer.outputs.front() : nullptr;
-  Instruction* call_instr =
-      Emit(opcode, size, first_output, buffer.instruction_args.size(),
-           &buffer.instruction_args.front());
-  call_instr->MarkAsCall();
+  size_t const output_count = buffer.outputs.size();
+  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
+  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+       &buffer.instruction_args.front())->MarkAsCall();
+}
+
+
+void InstructionSelector::VisitTailCall(Node* node) {
+  X64OperandGenerator g(this);
+  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
+  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
+  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
+
+  // TODO(turbofan): Relax restriction for stack parameters.
+  if (descriptor->UsesOnlyRegisters() &&
+      descriptor->HasSameReturnLocationsAs(
+          linkage()->GetIncomingDescriptor())) {
+    CallBuffer buffer(zone(), descriptor, nullptr);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, true);
+
+    DCHECK_EQ(0u, buffer.pushed_nodes.size());
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchTailCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchTailCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the tailcall instruction.
+    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
+         &buffer.instruction_args.front());
+  } else {
+    FrameStateDescriptor* frame_state_descriptor =
+        descriptor->NeedsFrameState()
+            ? GetFrameStateDescriptor(
+                  node->InputAt(static_cast<int>(descriptor->InputCount())))
+            : nullptr;
+
+    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
+
+    // Compute InstructionOperands for inputs and outputs.
+    InitializeCallBuffer(node, &buffer, true, true);
+
+    // Push any stack arguments.
+    for (Node* node : base::Reversed(buffer.pushed_nodes)) {
+      // TODO(titzer): Handle pushing double parameters.
+      InstructionOperand value =
+          g.CanBeImmediate(node)
+              ? g.UseImmediate(node)
+              : IsSupported(ATOM) ? g.UseRegister(node) : g.Use(node);
+      Emit(kX64Push, g.NoOutput(), value);
+    }
+
+    // Select the appropriate opcode based on the call type.
+    InstructionCode opcode;
+    switch (descriptor->kind()) {
+      case CallDescriptor::kCallCodeObject:
+        opcode = kArchCallCodeObject;
+        break;
+      case CallDescriptor::kCallJSFunction:
+        opcode = kArchCallJSFunction;
+        break;
+      default:
+        UNREACHABLE();
+        return;
+    }
+    opcode |= MiscField::encode(descriptor->flags());
+
+    // Emit the call instruction.
+    size_t output_count = buffer.outputs.size();
+    auto* outputs = &buffer.outputs.front();
+    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+         &buffer.instruction_args.front())->MarkAsCall();
+    Emit(kArchRet, 0, nullptr, output_count, outputs);
+  }
 }
 
 
index 25cb5f22e7c5375f651365cf38b529d6afee9d47..0a4b74bc3da90f542f5601ab38e933c2d716bf42 100644 (file)
@@ -424,8 +424,6 @@ DEFINE_BOOL(turbo_stress_loop_peeling, false,
             "stress loop peeling optimization")
 DEFINE_BOOL(turbo_cf_optimization, true, "optimize control flow in TurboFan")
 DEFINE_BOOL(turbo_frame_elision, true, "elide frames in TurboFan")
-DEFINE_BOOL(turbo_tail_calls, false,
-            "enable tail call optimization in TurboFan")
 
 DEFINE_INT(typed_array_max_size_in_heap, 64,
            "threshold for in-heap typed array")
index f70ddf3cddfef07ffb523752d5ee05ed1ba90355..e71314f9c1af9558e6e249e16cdaab7dca870397 100644 (file)
@@ -454,8 +454,6 @@ enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
 
-enum CallMode { NORMAL_CALL, TAIL_CALL };
-
 enum VisitMode {
   VISIT_ALL,
   VISIT_ALL_IN_SCAVENGE,
index e49e86024f29e1ced61783a8d16173703e612a50..5fa3b7802a9808935bdcdb9b92359cb5e8d83444 100644 (file)
@@ -2242,6 +2242,9 @@ class HCallJSFunction final : public HCall<1> {
 };
 
 
+enum CallMode { NORMAL_CALL, TAIL_CALL };
+
+
 class HCallWithDescriptor final : public HInstruction {
  public:
   static HCallWithDescriptor* New(Isolate* isolate, Zone* zone, HValue* context,
index ab2623091ad509755e9282a2d8493651d70280f5..dfdb4c2b1f85bebc47afe1bf934523b20f4f5464 100644 (file)
@@ -4,7 +4,6 @@
 
 #include "test/unittests/compiler/instruction-selector-unittest.h"
 
-#include "src/code-stubs.h"
 #include "src/compiler/graph.h"
 #include "src/compiler/schedule.h"
 #include "src/flags.h"
@@ -593,53 +592,6 @@ TARGET_TEST_F(InstructionSelectorTest,
   EXPECT_EQ(index, s.size());
 }
 
-
-// -----------------------------------------------------------------------------
-// Tail calls.
-
-TARGET_TEST_F(InstructionSelectorTest, TailCall) {
-  for (int mode = 0; mode < 2; ++mode) {
-    bool supports_tail_calls = FLAG_turbo_tail_calls && (mode == 0);
-
-    StreamBuilder m(this, kMachAnyTagged);
-    Node* start = m.graph()->start();
-    Node* undefined = m.UndefinedConstant();
-
-    StringLengthStub stub(isolate());
-    CallDescriptor* desc = Linkage::GetStubCallDescriptor(
-        isolate(), zone(), stub.GetCallInterfaceDescriptor(), 0,
-        supports_tail_calls ? CallDescriptor::kSupportsTailCalls
-                            : CallDescriptor::kNoFlags,
-        Operator::kNoProperties);
-    Node* stub_node = m.NewNode(m.common()->HeapConstant(
-        Unique<Code>::CreateUninitialized(stub.GetCode())));
-
-    Node* call = m.NewNode(m.common()->Call(desc), stub_node, undefined,
-                           undefined, undefined, undefined, undefined);
-    call->AppendInput(zone(), start);  // effect
-    call->AppendInput(zone(), start);  // control
-
-    m.Return(call);
-    Node* ret = *call->uses().begin();
-    ret->AppendInput(zone(), call);   // effect
-    ret->AppendInput(zone(), start);  // control
-
-    Stream s = m.Build(kAllInstructions);
-    if (supports_tail_calls) {
-      ASSERT_EQ(3U, s.size());
-      EXPECT_EQ(kArchNop, s[0]->arch_opcode());
-      EXPECT_EQ(kArchTailCallCodeObject, s[1]->arch_opcode());
-      EXPECT_EQ(kArchNop, s[2]->arch_opcode());
-    } else {
-      ASSERT_EQ(4U, s.size());
-      EXPECT_EQ(kArchNop, s[0]->arch_opcode());
-      EXPECT_EQ(kArchCallCodeObject, s[1]->arch_opcode());
-      EXPECT_EQ(kArchRet, s[2]->arch_opcode());
-      EXPECT_EQ(kArchNop, s[3]->arch_opcode());
-    }
-  }
-}
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
index 44ec06a3db4097851e19545144f96dd6f15b4bcb..fd0a06ba68ceba34a19c04bfc6bee67dbe64c172 100644 (file)
@@ -659,6 +659,64 @@ class IsCallMatcher final : public NodeMatcher {
 };
 
 
+class IsTailCallMatcher final : public NodeMatcher {
+ public:
+  IsTailCallMatcher(const Matcher<CallDescriptor const*>& descriptor_matcher,
+                    const std::vector<Matcher<Node*>>& value_matchers,
+                    const Matcher<Node*>& effect_matcher,
+                    const Matcher<Node*>& control_matcher)
+      : NodeMatcher(IrOpcode::kTailCall),
+        descriptor_matcher_(descriptor_matcher),
+        value_matchers_(value_matchers),
+        effect_matcher_(effect_matcher),
+        control_matcher_(control_matcher) {}
+
+  void DescribeTo(std::ostream* os) const final {
+    NodeMatcher::DescribeTo(os);
+    for (size_t i = 0; i < value_matchers_.size(); ++i) {
+      if (i == 0) {
+        *os << " whose value0 (";
+      } else {
+        *os << "), value" << i << " (";
+      }
+      value_matchers_[i].DescribeTo(os);
+    }
+    *os << "), effect (";
+    effect_matcher_.DescribeTo(os);
+    *os << ") and control (";
+    control_matcher_.DescribeTo(os);
+    *os << ")";
+  }
+
+  bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
+    if (!NodeMatcher::MatchAndExplain(node, listener) ||
+        !PrintMatchAndExplain(OpParameter<CallDescriptor const*>(node),
+                              "descriptor", descriptor_matcher_, listener)) {
+      return false;
+    }
+    for (size_t i = 0; i < value_matchers_.size(); ++i) {
+      std::ostringstream ost;
+      ost << "value" << i;
+      if (!PrintMatchAndExplain(
+              NodeProperties::GetValueInput(node, static_cast<int>(i)),
+              ost.str(), value_matchers_[i], listener)) {
+        return false;
+      }
+    }
+    return (PrintMatchAndExplain(NodeProperties::GetEffectInput(node), "effect",
+                                 effect_matcher_, listener) &&
+            PrintMatchAndExplain(NodeProperties::GetControlInput(node),
+                                 "control", control_matcher_, listener));
+  }
+
+ private:
+  const Matcher<CallDescriptor const*> descriptor_matcher_;
+  const std::vector<Matcher<Node*>> value_matchers_;
+  const Matcher<Node*> effect_matcher_;
+  const Matcher<Node*> control_matcher_;
+};
+
+
 class IsAllocateMatcher final : public NodeMatcher {
  public:
   IsAllocateMatcher(const Matcher<Node*>& size_matcher,
@@ -1497,6 +1555,19 @@ Matcher<Node*> IsCall(
 }
 
 
+// Builds a Matcher that accepts a kTailCall node whose call descriptor, two
+// value inputs, effect input and control input satisfy the given sub-matchers
+// (dispatches to IsTailCallMatcher).
+Matcher<Node*> IsTailCall(
+    const Matcher<CallDescriptor const*>& descriptor_matcher,
+    const Matcher<Node*>& value0_matcher, const Matcher<Node*>& value1_matcher,
+    const Matcher<Node*>& effect_matcher,
+    const Matcher<Node*>& control_matcher) {
+  // Collect the two fixed value matchers into the vector form that
+  // IsTailCallMatcher expects.
+  std::vector<Matcher<Node*>> value_matchers;
+  value_matchers.push_back(value0_matcher);
+  value_matchers.push_back(value1_matcher);
+  return MakeMatcher(new IsTailCallMatcher(descriptor_matcher, value_matchers,
+                                           effect_matcher, control_matcher));
+}
+
+
 Matcher<Node*> IsAllocate(const Matcher<Node*>& size_matcher,
                           const Matcher<Node*>& effect_matcher,
                           const Matcher<Node*>& control_matcher) {
index 085df3e8490e0dcb060707bc5fbcd18cd0eee019..7d7f07076ad6172285e65de948c99ae49dee529e 100644 (file)
@@ -117,6 +117,11 @@ Matcher<Node*> IsCall(
     const Matcher<Node*>& value4_matcher, const Matcher<Node*>& value5_matcher,
     const Matcher<Node*>& value6_matcher, const Matcher<Node*>& effect_matcher,
     const Matcher<Node*>& control_matcher);
+Matcher<Node*> IsTailCall(
+    const Matcher<CallDescriptor const*>& descriptor_matcher,
+    const Matcher<Node*>& value0_matcher, const Matcher<Node*>& value1_matcher,
+    const Matcher<Node*>& effect_matcher,
+    const Matcher<Node*>& control_matcher);
 
 Matcher<Node*> IsBooleanNot(const Matcher<Node*>& value_matcher);
 Matcher<Node*> IsNumberEqual(const Matcher<Node*>& lhs_matcher,
index 1a6c1bdf37fc7d32c0140bb42421ccf1247abc0e..6b61bd5c1c122afe73e0d6cddc23b8f50e7ef44f 100644 (file)
@@ -122,6 +122,7 @@ TEST_F(NodeTest, OwnedBy) {
   EXPECT_FALSE(n0->OwnedBy(n1));
   EXPECT_FALSE(n0->OwnedBy(n2));
   EXPECT_TRUE(n1->OwnedBy(n2));
+  EXPECT_TRUE(n0->OwnedBy(n1, n2));
   n2->ReplaceInput(0, n2);
   EXPECT_TRUE(n0->OwnedBy(n1));
   EXPECT_TRUE(n1->OwnedBy(n2));
index 5ebe270677dfe26880364e1de93745639554aea0..22b04ef9ec17d4e8ff01ce66191626a61443eea4 100644 (file)
@@ -138,6 +138,8 @@ const Operator kIntAdd(IrOpcode::kInt32Add, Operator::kPure, "Int32Add", 2, 0,
                        0, 1, 0, 0);
 const Operator kMockCall(IrOpcode::kCall, Operator::kNoProperties, "MockCall",
                          0, 0, 1, 1, 0, 2);
+const Operator kMockTailCall(IrOpcode::kTailCall, Operator::kNoProperties,
+                             "MockTailCall", 1, 1, 1, 0, 0, 1);
 
 }  // namespace
 
@@ -2406,6 +2408,20 @@ TARGET_TEST_F(SchedulerTest, CallException) {
 }
 
 
+// Checks that the scheduler copes with a TailCall node: the tail call takes a
+// parameter as value input and the start node as both effect and control, and
+// feeds directly into End (a tail call terminates its block).
+TARGET_TEST_F(SchedulerTest, TailCall) {
+  Node* start = graph()->NewNode(common()->Start(1));
+  graph()->SetStart(start);
+
+  Node* p0 = graph()->NewNode(common()->Parameter(0), start);
+  Node* call = graph()->NewNode(&kMockTailCall, p0, start, start);
+  Node* end = graph()->NewNode(common()->End(), call);
+
+  graph()->SetEnd(end);
+
+  // NOTE(review): the argument appears to be the expected count verified by
+  // ComputeAndVerifySchedule -- confirm against the fixture helper.
+  ComputeAndVerifySchedule(4);
+}
+
+
 TARGET_TEST_F(SchedulerTest, Switch) {
   Node* start = graph()->NewNode(common()->Start(1));
   graph()->SetStart(start);
diff --git a/test/unittests/compiler/tail-call-optimization-unittest.cc b/test/unittests/compiler/tail-call-optimization-unittest.cc
new file mode 100644 (file)
index 0000000..5ac1f8e
--- /dev/null
@@ -0,0 +1,160 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/linkage.h"
+#include "src/compiler/tail-call-optimization.h"
+#include "test/unittests/compiler/graph-unittest.h"
+#include "test/unittests/compiler/node-test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// Test fixture that runs the TailCallOptimization reducer on a single node of
+// the test graph.
+class TailCallOptimizationTest : public GraphTest {
+ public:
+  explicit TailCallOptimizationTest(int num_parameters = 1)
+      : GraphTest(num_parameters) {}
+  ~TailCallOptimizationTest() override {}
+
+ protected:
+  // Applies the tail call optimization to {node} and returns the resulting
+  // Reduction (Changed() iff a TailCall replacement was made).
+  Reduction Reduce(Node* node) {
+    TailCallOptimization tco(common(), graph());
+    return tco.Reduce(node);
+  }
+};
+
+
+// A code-object call whose descriptor lacks kSupportsTailCalls must not be
+// turned into a tail call, even though the Return directly returns the call.
+TEST_F(TailCallOptimizationTest, CallCodeObject0) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  // Note: flags are kNoFlags, i.e. tail calls are not supported here.
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallCodeObject, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kNoFlags);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Reduction r = Reduce(ret);
+  ASSERT_FALSE(r.Changed());
+}
+
+
+// Even with kSupportsTailCalls, a code-object call that has an IfException
+// use (its exceptional continuation is live) must not become a tail call.
+TEST_F(TailCallOptimizationTest, CallCodeObject1) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallCodeObject, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kSupportsTailCalls);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  // The IfException projection keeps the call's exception path alive.
+  Node* if_exception = graph()->NewNode(common()->IfException(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Node* end = graph()->NewNode(common()->End(), if_exception);
+  graph()->SetEnd(end);
+  Reduction r = Reduce(ret);
+  ASSERT_FALSE(r.Changed());
+}
+
+
+// With kSupportsTailCalls set and the call consumed only by Return (through
+// IfSuccess), the Return is replaced by a TailCall carrying the call's
+// descriptor, value inputs, effect and control.
+TEST_F(TailCallOptimizationTest, CallCodeObject2) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallCodeObject, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kSupportsTailCalls);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Reduction r = Reduce(ret);
+  ASSERT_TRUE(r.Changed());
+  EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,
+                                          graph()->start(), graph()->start()));
+}
+
+
+// A JS-function call whose descriptor lacks kSupportsTailCalls must not be
+// turned into a tail call, even though the Return directly returns the call.
+TEST_F(TailCallOptimizationTest, CallJSFunction0) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  // Note: flags are kNoFlags, i.e. tail calls are not supported here.
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallJSFunction, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kNoFlags);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Reduction r = Reduce(ret);
+  ASSERT_FALSE(r.Changed());
+}
+
+
+// Even with kSupportsTailCalls, a JS-function call that has an IfException
+// use (its exceptional continuation is live) must not become a tail call.
+TEST_F(TailCallOptimizationTest, CallJSFunction1) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallJSFunction, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kSupportsTailCalls);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  // The IfException projection keeps the call's exception path alive.
+  Node* if_exception = graph()->NewNode(common()->IfException(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Node* end = graph()->NewNode(common()->End(), if_exception);
+  graph()->SetEnd(end);
+  Reduction r = Reduce(ret);
+  ASSERT_FALSE(r.Changed());
+}
+
+
+// With kSupportsTailCalls set and the call consumed only by Return (through
+// IfSuccess), the Return is replaced by a TailCall carrying the call's
+// descriptor, value inputs, effect and control.
+TEST_F(TailCallOptimizationTest, CallJSFunction2) {
+  MachineType kMachineSignature[] = {kMachAnyTagged, kMachAnyTagged};
+  LinkageLocation kLocationSignature[] = {LinkageLocation(0),
+                                          LinkageLocation(1)};
+  const CallDescriptor* kCallDescriptor = new (zone()) CallDescriptor(
+      CallDescriptor::kCallJSFunction, kMachAnyTagged, LinkageLocation(0),
+      new (zone()) MachineSignature(1, 1, kMachineSignature),
+      new (zone()) LocationSignature(1, 1, kLocationSignature), 0,
+      Operator::kNoProperties, 0, CallDescriptor::kSupportsTailCalls);
+  Node* p0 = Parameter(0);
+  Node* p1 = Parameter(1);
+  Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
+                                graph()->start(), graph()->start());
+  Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
+  Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
+  Reduction r = Reduce(ret);
+  ASSERT_TRUE(r.Changed());
+  EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,
+                                          graph()->start(), graph()->start()));
+}
+
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
index 24e6e59d8d1b982fd03cdb7938b72b55220fd347..1230df62852db98e2a73dcf7ff5cc972ba93402d 100644 (file)
@@ -78,6 +78,7 @@
         'compiler/simplified-operator-reducer-unittest.cc',
         'compiler/simplified-operator-unittest.cc',
         'compiler/state-values-utils-unittest.cc',
+        'compiler/tail-call-optimization-unittest.cc',
         'compiler/typer-unittest.cc',
         'compiler/value-numbering-reducer-unittest.cc',
         'compiler/zone-pool-unittest.cc',
index b34af8c8c511ddd0388a972f53ec166b0ea5b5c4..288c294491ecf4cce7bcc8ce1b9481d58501f8ba 100644 (file)
         '../../src/compiler/source-position.h',
         '../../src/compiler/state-values-utils.cc',
         '../../src/compiler/state-values-utils.h',
+        '../../src/compiler/tail-call-optimization.cc',
+        '../../src/compiler/tail-call-optimization.h',
         '../../src/compiler/typer.cc',
         '../../src/compiler/typer.h',
         '../../src/compiler/value-numbering-reducer.cc',