From 5398dd04bf62db100639d96c84a8c41041f4ad01 Mon Sep 17 00:00:00 2001 From: Karol Herbst Date: Fri, 25 Nov 2022 01:56:07 +0100 Subject: [PATCH] nir/lower_int64: fix shift lowering Starting with !19748 lowered 64 bit shifts were showing wrong results for shifts with insignificant bits set. nir shifts are defined to only look at the least significant bits. The lowering has to take this into account. So there are two things going on: 1. the `ieq` and `uge` further down depend on `y` being masked. 2. the calculation of `reverse_count` actually depends on a masked `y` as well, due to the `(iabs (iadd y -32))` giving a different result for shifts > 31; Fixes: 41f3e9e5f5d ("nir: Implement lowering of 64-bit shift operations") Signed-off-by: Karol Herbst Reviewed-by: Rhys Perry Reviewed-by: Ian Romanick Part-of: --- src/compiler/nir/nir_lower_int64.c | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/compiler/nir/nir_lower_int64.c b/src/compiler/nir/nir_lower_int64.c index e73e3fa..58feca4 100644 --- a/src/compiler/nir/nir_lower_int64.c +++ b/src/compiler/nir/nir_lower_int64.c @@ -170,6 +170,8 @@ lower_ishl64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) * * uint64_t lshift(uint64_t x, int c) * { + * c %= 64; + * * if (c == 0) return x; * * uint32_t lo = LO(x), hi = HI(x); @@ -187,6 +189,7 @@ lower_ishl64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) */ nir_ssa_def *x_lo = nir_unpack_64_2x32_split_x(b, x); nir_ssa_def *x_hi = nir_unpack_64_2x32_split_y(b, x); + y = nir_iand_imm(b, y, 0x3f); nir_ssa_def *reverse_count = nir_iabs(b, nir_iadd(b, y, nir_imm_int(b, -32))); nir_ssa_def *lo_shifted = nir_ishl(b, x_lo, y); @@ -212,6 +215,8 @@ lower_ishr64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) * * uint64_t arshift(uint64_t x, int c) * { + * c %= 64; + * * if (c == 0) return x; * * uint32_t lo = LO(x); @@ -231,6 +236,7 @@ lower_ishr64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) */ nir_ssa_def *x_lo = nir_unpack_64_2x32_split_x(b, x); nir_ssa_def *x_hi = 
nir_unpack_64_2x32_split_y(b, x); + y = nir_iand_imm(b, y, 0x3f); nir_ssa_def *reverse_count = nir_iabs(b, nir_iadd(b, y, nir_imm_int(b, -32))); nir_ssa_def *lo_shifted = nir_ushr(b, x_lo, y); @@ -256,6 +262,8 @@ lower_ushr64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) * * uint64_t rshift(uint64_t x, int c) * { + * c %= 64; + * * if (c == 0) return x; * * uint32_t lo = LO(x), hi = HI(x); @@ -274,6 +282,7 @@ lower_ushr64(nir_builder *b, nir_ssa_def *x, nir_ssa_def *y) nir_ssa_def *x_lo = nir_unpack_64_2x32_split_x(b, x); nir_ssa_def *x_hi = nir_unpack_64_2x32_split_y(b, x); + y = nir_iand_imm(b, y, 0x3f); nir_ssa_def *reverse_count = nir_iabs(b, nir_iadd(b, y, nir_imm_int(b, -32))); nir_ssa_def *lo_shifted = nir_ushr(b, x_lo, y); -- 2.7.4