From bc2fe4a0d675e7559de5734df0a4a40170389acc Mon Sep 17 00:00:00 2001
From: Philip Reames
Date: Mon, 23 May 2022 10:10:08 -0700
Subject: [PATCH] [RISCV] Add basic fault-first load coverage for VSETVLI
 insertion

Simplified version of a test taken from D123581.
---
 llvm/test/CodeGen/RISCV/rvv/vsetvli-insert.ll | 34 +++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert.ll b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert.ll
index 267a57c..4e0364e 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert.ll
@@ -493,6 +493,34 @@ entry:
   ret i64 %vl
 }
 
+; Fault first loads can modify VL.
+; TODO: The first and third VSETVLIs are redundant here.
+define <vscale x 1 x i64> @vleNff(i64* %str, i64 %n, i64 %x) {
+; CHECK-LABEL: vleNff:
+; CHECK:       # %bb.0: # %entry
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, mu
+; CHECK-NEXT:    vle64ff.v v8, (a0)
+; CHECK-NEXT:    csrr a0, vl
+; CHECK-NEXT:    vsetvli zero, a0, e64, m1, tu, mu
+; CHECK-NEXT:    vadd.vx v8, v8, a2
+; CHECK-NEXT:    ret
+entry:
+  %0 = tail call i64 @llvm.riscv.vsetvli.i64(i64 %n, i64 0, i64 2)
+  %1 = bitcast i64* %str to <vscale x 1 x i64>*
+  %2 = tail call { <vscale x 1 x i64>, i64 } @llvm.riscv.vleff.nxv1i64.i64(<vscale x 1 x i64> undef, <vscale x 1 x i64>* %1, i64 %0)
+  %3 = extractvalue { <vscale x 1 x i64>, i64 } %2, 0
+  %4 = extractvalue { <vscale x 1 x i64>, i64 } %2, 1
+  %5 = tail call <vscale x 1 x i64> @llvm.riscv.vadd.nxv1i64.i64.i64(<vscale x 1 x i64> %3, <vscale x 1 x i64> %3, i64 %x, i64 %4)
+  ret <vscale x 1 x i64> %5
+}
+
+declare { <vscale x 1 x i64>, i64 } @llvm.riscv.vleff.nxv1i64.i64(
+  <vscale x 1 x i64>, <vscale x 1 x i64>* nocapture, i64)
+
+declare <vscale x 1 x i1> @llvm.riscv.vmseq.nxv1i64.i64.i64(
+  <vscale x 1 x i64>, i64, i64)
+
 declare <vscale x 1 x i64> @llvm.riscv.vadd.mask.nxv1i64.nxv1i64(
   <vscale x 1 x i64>,
   <vscale x 1 x i64>,
@@ -501,6 +529,12 @@ declare <vscale x 1 x i64> @llvm.riscv.vadd.mask.nxv1i64.nxv1i64(
   i64,
   i64);
 
+declare <vscale x 1 x i64> @llvm.riscv.vadd.nxv1i64.i64.i64(
+  <vscale x 1 x i64>,
+  <vscale x 1 x i64>,
+  i64,
+  i64);
+
 declare <vscale x 1 x double> @llvm.riscv.vfadd.mask.nxv1f64.f64(
   <vscale x 1 x double>,
   <vscale x 1 x double>,
-- 
2.7.4
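
Background for the "Fault first loads can modify VL" comment: a fault-only-first
load (vle*ff.v) may trap only on element 0; a fault on any later element is
suppressed and instead truncates VL to the number of elements that loaded
successfully, which software reads back via "csrr rd, vl". This is why the
VSETVLI insertion pass must treat these loads as writers of VL. The canonical
use is a strlen-style scan past a possibly unmapped page boundary; the sketch
below follows the fashion of the fault-only-first example in the RISC-V V
spec, with register choices and loop structure as illustrative assumptions
rather than anything taken from this patch:

    # strlen-style loop using a fault-only-first load (illustrative sketch)
    strlen_rvv:
        mv      a3, a0                        # a3 = running pointer
    1:
        vsetvli a1, zero, e8, m8, ta, ma      # request up to VLMAX bytes
        vle8ff.v v8, (a3)                     # faults past element 0 shrink VL
        csrr    a1, vl                        # VL may now be < requested
        vmseq.vi v0, v8, 0                    # find NUL among loaded bytes
        vfirst.m a2, v0                       # index of first NUL, or -1
        add     a3, a3, a1                    # advance by bytes actually read
        bltz    a2, 1b                        # no NUL yet: keep scanning
        sub     a3, a3, a1                    # back up to start of last chunk
        add     a3, a3, a2                    # address of the NUL byte
        sub     a0, a3, a0                    # length = NUL address - start
        ret

The "csrr a1, vl" after the load is the pattern the new test exercises: the
compiler must re-read VL rather than assume the value it last requested, which
is also why the third VSETVLI in the CHECK lines above is merely redundant
(VL already holds the fault-truncated count) rather than wrong.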