From: Matthias Braun
Date: Tue, 13 Sep 2016 19:27:38 +0000 (+0000)
Subject: AArch64: Cleanup tailcall CC check, enable swiftcc.
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=1af1414d4d268b5b8580a56101be3e434f8c9291;p=platform%2Fupstream%2Fllvm.git

AArch64: Cleanup tailcall CC check, enable swiftcc.

Clean up/change the code that checks for possible tailcall conventions
so it looks the same as the one in the X86 target. This makes the
distinction between calling conventions that can guarantee tailcalls
and the ones that may tailcall more obvious.

- Add Swift to the mayTailCall list.
- PreserveMost seemed to be incorrectly part of the guaranteed tail
  call list; move it to the mayTailCall list.

llvm-svn: 281376
---

diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
index 732baca..3812863 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
@@ -2764,15 +2764,29 @@ SDValue AArch64TargetLowering::LowerCallResult(
   return Chain;
 }
 
+/// Return true if the calling convention is one that we can guarantee TCO for.
+static bool canGuaranteeTCO(CallingConv::ID CC) {
+  return CC == CallingConv::Fast;
+}
+
+/// Return true if we might ever do TCO for calls with this calling convention.
+static bool mayTailCallThisCC(CallingConv::ID CC) {
+  switch (CC) {
+  case CallingConv::C:
+  case CallingConv::PreserveMost:
+  case CallingConv::Swift:
+    return true;
+  default:
+    return canGuaranteeTCO(CC);
+  }
+}
+
 bool AArch64TargetLowering::isEligibleForTailCallOptimization(
     SDValue Callee, CallingConv::ID CalleeCC, bool isVarArg,
     const SmallVectorImpl<ISD::OutputArg> &Outs,
     const SmallVectorImpl<SDValue> &OutVals,
     const SmallVectorImpl<ISD::InputArg> &Ins, SelectionDAG &DAG) const {
-  // For CallingConv::C this function knows whether the ABI needs
-  // changing. That's not true for other conventions so they will have to opt in
-  // manually.
-  if (!IsTailCallConvention(CalleeCC) && CalleeCC != CallingConv::C)
+  if (!mayTailCallThisCC(CalleeCC))
     return false;
 
   MachineFunction &MF = DAG.getMachineFunction();
@@ -2789,9 +2803,8 @@ bool AArch64TargetLowering::isEligibleForTailCallOptimization(
     if (i->hasByValAttr())
       return false;
 
-  if (getTargetMachine().Options.GuaranteedTailCallOpt) {
-    return IsTailCallConvention(CalleeCC) && CCMatch;
-  }
+  if (getTargetMachine().Options.GuaranteedTailCallOpt)
+    return canGuaranteeTCO(CalleeCC) && CCMatch;
 
   // Externally-defined functions with weak linkage should not be
   // tail-called on AArch64 when the OS does not support dynamic
@@ -2909,11 +2922,6 @@ bool AArch64TargetLowering::DoesCalleeRestoreStack(CallingConv::ID CallCC,
   return CallCC == CallingConv::Fast && TailCallOpt;
 }
 
-bool AArch64TargetLowering::IsTailCallConvention(CallingConv::ID CallCC) const {
-  return CallCC == CallingConv::Fast ||
-         CallCC == CallingConv::PreserveMost;
-}
-
 /// LowerCall - Lower a call to a callseq_start + CALL + callseq_end chain,
 /// and add input and output parameter nodes.
 SDValue
diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.h b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
index 69cfcbe..2d75b9f 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.h
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
@@ -460,8 +460,6 @@ private:
 
   bool DoesCalleeRestoreStack(CallingConv::ID CallCC, bool TailCallOpt) const;
 
-  bool IsTailCallConvention(CallingConv::ID CallCC) const;
-
   void saveVarArgRegisters(CCState &CCInfo, SelectionDAG &DAG, const SDLoc &DL,
                            SDValue &Chain) const;
 
diff --git a/llvm/test/CodeGen/AArch64/arm64-call-tailcalls.ll b/llvm/test/CodeGen/AArch64/arm64-call-tailcalls.ll
index 6621db2..7a91f05 100644
--- a/llvm/test/CodeGen/AArch64/arm64-call-tailcalls.ll
+++ b/llvm/test/CodeGen/AArch64/arm64-call-tailcalls.ll
@@ -89,3 +89,12 @@ declare void @foo() nounwind
 declare i32 @a(i32)
 declare i32 @b(i32)
 declare i32 @c(i32)
+
+; CHECK-LABEL: tswift:
+; CHECK: b _swiftfunc
+define swiftcc i32 @tswift(i32 %a) nounwind {
+  %res = tail call i32 @swiftfunc(i32 %a)
+  ret i32 %res
+}
+
+declare swiftcc i32 @swiftfunc(i32) nounwind
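
A sketch of the PreserveMost bullet above (a hypothetical companion test, not
part of the commit; it assumes the RUN-line style and Mach-O triple of
arm64-call-tailcalls.ll, and the function names are invented): after this
change a preserve_mostcc call is only *eligible* for TCO via mayTailCallThisCC,
so it may still be lowered to a tail call without -tailcallopt, but it is no
longer on the guaranteed list, which now contains only fastcc via
canGuaranteeTCO.

; RUN: llc -mtriple=arm64-apple-ios < %s | FileCheck %s

; Without -tailcallopt, preserve_mostcc now goes through mayTailCallThisCC
; and is tail-called opportunistically once the eligibility checks pass
; (matching caller/callee conventions, no byval arguments, etc.).
; CHECK-LABEL: tpreserve:
; CHECK: b _preservefunc
define preserve_mostcc i32 @tpreserve(i32 %a) nounwind {
  %res = tail call preserve_mostcc i32 @preservefunc(i32 %a)
  ret i32 %res
}

declare preserve_mostcc i32 @preservefunc(i32) nounwind

; With -tailcallopt, isEligibleForTailCallOptimization instead returns
; canGuaranteeTCO(CalleeCC) && CCMatch, which is false for preserve_mostcc,
; so the same call is no longer a guaranteed tail call.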