From 2e37557fdeed381e8e790e20702ededb13c9cef3 Mon Sep 17 00:00:00 2001
From: Liqiang Tao
Date: Fri, 16 Sep 2022 22:15:15 +0800
Subject: [PATCH] StackProtector: ensure stack checks are inserted before the
 tail call

The IR stack protector pass should insert stack checks before tail
calls, not only before musttail calls, so that the `sspreq` attribute
and the `tail call` marker, which are emitted by opt, can both be
honored by llc.

Reviewed By: compnerd

Differential Revision: https://reviews.llvm.org/D133860
---
 llvm/lib/CodeGen/StackProtector.cpp                |   8 +-
 .../CodeGen/AArch64/stack-protector-musttail.ll    |  39 ++++++++
 .../ARM/Windows/stack-protector-musttail.ll        |  33 ++++++-
 llvm/test/CodeGen/X86/stack-protector-musttail.ll  | 105 +++++++++++++++++++++
 llvm/test/CodeGen/X86/tailcc-ssp.ll                |  35 ++++---
 5 files changed, 204 insertions(+), 16 deletions(-)
 create mode 100644 llvm/test/CodeGen/X86/stack-protector-musttail.ll

diff --git a/llvm/lib/CodeGen/StackProtector.cpp b/llvm/lib/CodeGen/StackProtector.cpp
index 510a8e3..ff828f5 100644
--- a/llvm/lib/CodeGen/StackProtector.cpp
+++ b/llvm/lib/CodeGen/StackProtector.cpp
@@ -471,18 +471,18 @@ bool StackProtector::InsertStackProtectors() {
   // instrumentation has already been generated.
   HasIRCheck = true;
 
-  // If we're instrumenting a block with a musttail call, the check has to be
+  // If we're instrumenting a block with a tail call, the check has to be
   // inserted before the call rather than between it and the return. The
-  // verifier guarantees that a musttail call is either directly before the
+  // verifier guarantees that a tail call is either directly before the
   // return or with a single correct bitcast of the return value in between so
   // we don't need to worry about many situations here.
   Instruction *CheckLoc = RI;
   Instruction *Prev = RI->getPrevNonDebugInstruction();
-  if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isMustTailCall())
+  if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isTailCall())
     CheckLoc = Prev;
   else if (Prev) {
     Prev = Prev->getPrevNonDebugInstruction();
-    if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isMustTailCall())
+    if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isTailCall())
       CheckLoc = Prev;
   }
 
diff --git a/llvm/test/CodeGen/AArch64/stack-protector-musttail.ll b/llvm/test/CodeGen/AArch64/stack-protector-musttail.ll
index 8a2e095..72fa9fe 100644
--- a/llvm/test/CodeGen/AArch64/stack-protector-musttail.ll
+++ b/llvm/test/CodeGen/AArch64/stack-protector-musttail.ll
@@ -64,3 +64,42 @@ define i8* @caller2() ssp {
   %res = bitcast i64* %tmp to i8*
   ret i8* %res
 }
+
+define void @caller3() ssp {
+; CHECK-LABEL: define void @caller3()
+; Prologue:
+; CHECK: @llvm.stackguard
+
+; CHECK: [[GUARD:%.*]] = call i8* @llvm.stackguard()
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: tail call void @callee()
+; CHECK-NEXT: ret void
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  tail call void @callee()
+  ret void
+}
+
+define i8* @caller4() ssp {
+; CHECK-LABEL: define i8* @caller4()
+; Prologue:
+; CHECK: @llvm.stackguard
+
+; CHECK: [[GUARD:%.*]] = call i8* @llvm.stackguard()
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: [[TMP:%.*]] = tail call i64* @callee2()
+; CHECK-NEXT: [[RES:%.*]] = bitcast i64* [[TMP]] to i8*
+; CHECK-NEXT: ret i8* [[RES]]
+
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  %tmp = tail call i64* @callee2()
+  %res = bitcast i64* %tmp to i8*
+  ret i8* %res
+}
diff --git a/llvm/test/CodeGen/ARM/Windows/stack-protector-musttail.ll b/llvm/test/CodeGen/ARM/Windows/stack-protector-musttail.ll
index 93b601c..74829f9 100644
--- a/llvm/test/CodeGen/ARM/Windows/stack-protector-musttail.ll
+++ b/llvm/test/CodeGen/ARM/Windows/stack-protector-musttail.ll
@@ -34,7 +34,6 @@ retblock:
   ret void
 }
 
-
 declare i64* @callee2()
 
 define i8* @caller2() sspreq {
@@ -54,3 +53,35 @@ define i8* @caller2() sspreq {
   %res = bitcast i64* %tmp to i8*
   ret i8* %res
 }
+
+define void @caller3() sspreq {
+; CHECK-LABEL: define void @caller3()
+; Prologue:
+
+; CHECK: call void @__security_check_cookie
+
+; CHECK: tail call void @callee()
+; CHECK-NEXT: ret void
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  tail call void @callee()
+  ret void
+}
+
+define i8* @caller4() sspreq {
+; CHECK-LABEL: define i8* @caller4()
+; Prologue:
+; CHECK: @llvm.stackguard
+
+; CHECK: call void @__security_check_cookie
+
+; CHECK: [[TMP:%.*]] = tail call i64* @callee2()
+; CHECK-NEXT: [[RES:%.*]] = bitcast i64* [[TMP]] to i8*
+; CHECK-NEXT: ret i8* [[RES]]
+
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  %tmp = tail call i64* @callee2()
+  %res = bitcast i64* %tmp to i8*
+  ret i8* %res
+}
diff --git a/llvm/test/CodeGen/X86/stack-protector-musttail.ll b/llvm/test/CodeGen/X86/stack-protector-musttail.ll
new file mode 100644
index 0000000..aa6945a
--- /dev/null
+++ b/llvm/test/CodeGen/X86/stack-protector-musttail.ll
@@ -0,0 +1,105 @@
+; RUN: llc -mtriple=x86_64-linux-gnu -fast-isel %s -o - -start-before=stack-protector -stop-after=stack-protector | FileCheck %s
+
+@var = global [2 x i64]* null
+
+declare void @callee()
+
+define void @caller1() sspreq {
+; CHECK-LABEL: define void @caller1()
+; Prologue:
+; CHECK: @llvm.stackprotector
+
+; CHECK: [[GUARD:%.*]] = load volatile i8*, i8*
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: musttail call void @callee()
+; CHECK-NEXT: ret void
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  musttail call void @callee()
+  ret void
+}
+
+define void @justret() sspreq {
+; CHECK-LABEL: define void @justret()
+; Prologue:
+; CHECK: @llvm.stackprotector
+
+; CHECK: [[GUARD:%.*]] = load volatile i8*, i8*
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: ret void
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  br label %retblock
+
+retblock:
+  ret void
+}
+
+
+declare i64* @callee2()
+
+define i8* @caller2() sspreq {
+; CHECK-LABEL: define i8* @caller2()
+; Prologue:
+; CHECK: @llvm.stackprotector
+
+; CHECK: [[GUARD:%.*]] = load volatile i8*, i8*
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: [[TMP:%.*]] = musttail call i64* @callee2()
+; CHECK-NEXT: [[RES:%.*]] = bitcast i64* [[TMP]] to i8*
+; CHECK-NEXT: ret i8* [[RES]]
+
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  %tmp = musttail call i64* @callee2()
+  %res = bitcast i64* %tmp to i8*
+  ret i8* %res
+}
+
+define void @caller3() sspreq {
+; CHECK-LABEL: define void @caller3()
+; Prologue:
+; CHECK: @llvm.stackprotector
+
+; CHECK: [[GUARD:%.*]] = load volatile i8*, i8*
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: tail call void @callee()
+; CHECK-NEXT: ret void
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  tail call void @callee()
+  ret void
+}
+
+define i8* @caller4() sspreq {
+; CHECK-LABEL: define i8* @caller4()
+; Prologue:
+; CHECK: @llvm.stackprotector
+
+; CHECK: [[GUARD:%.*]] = load volatile i8*, i8*
+; CHECK: [[TOKEN:%.*]] = load volatile i8*, i8** {{%.*}}
+; CHECK: [[TST:%.*]] = icmp eq i8* [[GUARD]], [[TOKEN]]
+; CHECK: br i1 [[TST]]
+
+; CHECK: [[TMP:%.*]] = tail call i64* @callee2()
+; CHECK-NEXT: [[RES:%.*]] = bitcast i64* [[TMP]] to i8*
+; CHECK-NEXT: ret i8* [[RES]]
+
+  %var = alloca [2 x i64]
+  store [2 x i64]* %var, [2 x i64]** @var
+  %tmp = tail call i64* @callee2()
+  %res = bitcast i64* %tmp to i8*
+  ret i8* %res
+}
diff --git a/llvm/test/CodeGen/X86/tailcc-ssp.ll b/llvm/test/CodeGen/X86/tailcc-ssp.ll
index 33617b4..bb9b442 100644
--- a/llvm/test/CodeGen/X86/tailcc-ssp.ll
+++ b/llvm/test/CodeGen/X86/tailcc-ssp.ll
@@ -1,12 +1,18 @@
-; RUN: llc -mtriple=x86_64-windows-msvc %s -o - -verify-machineinstrs | FileCheck %s
+; RUN: llc -mtriple=x86_64-windows-msvc %s -o - -verify-machineinstrs | FileCheck %s -check-prefix=WINDOWS
+; RUN: llc -mtriple=x86_64-linux-gnu %s -o - -verify-machineinstrs | FileCheck %s -check-prefix=LINUX
 
 declare void @h(ptr, i64, ptr)
 
 define tailcc void @tailcall_frame(ptr %0, i64 %1) sspreq {
-; CHECK-LABEL: tailcall_frame:
-; CHECK: callq __security_check_cookie
-; CHECK: xorl %ecx, %ecx
-; CHECK: jmp h
+; WINDOWS-LABEL: tailcall_frame:
+; WINDOWS: callq __security_check_cookie
+; WINDOWS: xorl %ecx, %ecx
+; WINDOWS: jmp h
+
+; LINUX-LABEL: tailcall_frame:
+; LINUX: jne
+; LINUX: jmp h
+; LINUX: callq __stack_chk_fail
 
   tail call tailcc void @h(ptr null, i64 0, ptr null)
   ret void
@@ -14,12 +20,19 @@ define tailcc void @tailcall_frame(ptr %0, i64 %1) sspreq {
 declare void @bar()
 
 define void @tailcall_unrelated_frame() sspreq {
-; CHECK-LABEL: tailcall_unrelated_frame:
-; CHECK: subq [[STACK:\$.*]], %rsp
-; CHECK: callq bar
-; CHECK: callq __security_check_cookie
-; CHECK: addq [[STACK]], %rsp
-; CHECK: jmp bar
+; WINDOWS-LABEL: tailcall_unrelated_frame:
+; WINDOWS: subq [[STACK:\$.*]], %rsp
+; WINDOWS: callq bar
+; WINDOWS: callq __security_check_cookie
+; WINDOWS: addq [[STACK]], %rsp
+; WINDOWS: jmp bar
+
+; LINUX-LABEL: tailcall_unrelated_frame:
+; LINUX: callq bar
+; LINUX: jne
+; LINUX: jmp bar
+; LINUX: callq __stack_chk_fail
+
   call void @bar()
   tail call void @bar()
  ret void
-- 
2.7.4
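
Editor's note, not part of the patch: the sketch below illustrates the
transformation this change enables. It is a minimal example under the same
pipeline as the new X86 test's RUN line
(llc -start-before=stack-protector -stop-after=stack-protector); the function
and value names are hypothetical, and the post-pass IR is abridged.

Input: an sspreq function whose return is preceded by a plain `tail call`
(previously the pass only hoisted the check above `musttail` calls):

    @var = global [2 x i64]* null

    declare void @callee()

    define void @caller() sspreq {
      %buf = alloca [2 x i64]                  ; sspreq forces a guard; this
                                               ; gives it something to protect
      store [2 x i64]* %buf, [2 x i64]** @var  ; the slot's address escapes
      tail call void @callee()                 ; plain tail call, not musttail
      ret void
    }

After the pass, the guard comparison sits before the tail call rather than
between it and the ret, so the call can still be lowered as a jump. Abridged
output, with value names illustrative (the SP_return / CallStackCheckFailBlk
labels follow the names StackProtector.cpp uses):

      %guard = call i8* @llvm.stackguard()
      %token = load volatile i8*, i8** %StackGuardSlot
      %ok = icmp eq i8* %guard, %token
      br i1 %ok, label %SP_return, label %CallStackCheckFailBlk

    SP_return:
      tail call void @callee()
      ret void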