[RISCV] Pass ZB_Undefined to countTrailingZeros/countLeadingZeros. NFC
author Craig Topper <craig.topper@sifive.com>
Tue, 6 Dec 2022 22:00:04 +0000 (14:00 -0800)
committer Craig Topper <craig.topper@sifive.com>
Tue, 6 Dec 2022 22:57:28 +0000 (14:57 -0800)
We know the input is not zero, so we can simplify the generated code.
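
For context, here is a minimal sketch, not LLVM's actual MathExtras.h code, of why the ZeroBehavior argument matters: with the default ZB_Width the helper has to guard against a zero input and return the bit width, while ZB_Undefined lets it forward straight to the builtin. The stand-in name countTrailingZeros64 is hypothetical; only the enumerator names mirror the real API of this era.

    #include <cstdint>

    // Illustrative stand-in for the countTrailingZeros helper of this era;
    // ZB_Undefined / ZB_Width mirror the real ZeroBehavior enumerators.
    enum ZeroBehavior { ZB_Undefined, ZB_Width };

    unsigned countTrailingZeros64(uint64_t Val, ZeroBehavior ZB) {
      if (ZB == ZB_Width && Val == 0)
        return 64;                   // extra compare + branch on every call
      return __builtin_ctzll(Val);   // ZB_Undefined: a single ctz, no zero guard
    }

At each call site below, the guarding condition (!isInt<32>(Val), (Val & 0xfff) != 0, and Val > 0, respectively) already rules out a zero argument, so the zero check is dead weight and ZB_Undefined lets the compiler drop it.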

llvm/lib/Target/RISCV/MCTargetDesc/RISCVMatInt.cpp

index 6a1a6df..9b9c23d 100644
@@ -110,7 +110,7 @@ generateInstSeqImpl(int64_t Val, const FeatureBitset &ActiveFeatures) {
 
   // Val might now be valid for LUI without needing a shift.
   if (!isInt<32>(Val)) {
-    ShiftAmount = findFirstSet((uint64_t)Val);
+    ShiftAmount = findFirstSet((uint64_t)Val, ZB_Undefined);
     Val >>= ShiftAmount;
 
     // If the remaining bits don't fit in 12 bits, we might be able to reduce the
@@ -180,7 +180,7 @@ InstSeq generateInstSeq(int64_t Val, const FeatureBitset &ActiveFeatures) {
   // or ADDIW. If there are trailing zeros, try generating a sign extended
   // constant with no trailing zeros and use a final SLLI to restore them.
   if ((Val & 0xfff) != 0 && (Val & 1) == 0 && Res.size() >= 2) {
-    unsigned TrailingZeros = countTrailingZeros((uint64_t)Val);
+    unsigned TrailingZeros = countTrailingZeros((uint64_t)Val, ZB_Undefined);
     int64_t ShiftedVal = Val >> TrailingZeros;
     // If we can use C.LI+C.SLLI instead of LUI+ADDI(W) prefer that since
     // its more compressible. But only if LUI+ADDI(W) isn't fusable.
@@ -201,7 +201,7 @@ InstSeq generateInstSeq(int64_t Val, const FeatureBitset &ActiveFeatures) {
   if (Val > 0 && Res.size() > 2) {
     assert(ActiveFeatures[RISCV::Feature64Bit] &&
            "Expected RV32 to only need 2 instructions");
-    unsigned LeadingZeros = countLeadingZeros((uint64_t)Val);
+    unsigned LeadingZeros = countLeadingZeros((uint64_t)Val, ZB_Undefined);
     uint64_t ShiftedVal = (uint64_t)Val << LeadingZeros;
 
     {