amdgcn: remove obsolete assembler workarounds
author: Andrew Stubbs <ams@codesourcery.com>
Sat, 11 Jun 2022 23:16:24 +0000 (00:16 +0100)
committer: Andrew Stubbs <ams@codesourcery.com>
Mon, 27 Jun 2022 12:47:45 +0000 (13:47 +0100)
This nonsense is no longer required, now that the minimum supported
assembler version is LLVM 13.0.1.

gcc/ChangeLog:

* config/gcn/gcn.md (*movbi): Remove assembler bug workarounds.
(jump): Likewise.
(movdi_symbol_save_scc): Likewise.

gcc/config/gcn/gcn.md

index 53e846e..033c170 100644 (file)
        we emit bytes directly as a workaround.  */
     switch (which_alternative) {
     case 0:
-      if (REG_P (operands[1]) && REGNO (operands[1]) == SCC_REG)
-       return "; s_mov_b32\t%0,%1 is not supported by the assembler.\;"
-              ".byte\t0xfd\;"
-              ".byte\t0x0\;"
-              ".byte\t0x80|%R0\;"
-              ".byte\t0xbe";
-      else
-       return "s_mov_b32\t%0, %1";
+      return "s_mov_b32\t%0, %1";
     case 1:
       if (REG_P (operands[1]) && REGNO (operands[1]) == SCC_REG)
        return "; v_mov_b32\t%0, %1\;"
     case 4:
       return "v_cmp_ne_u32\tvcc, 0, %1";
     case 5:
-      if (REGNO (operands[1]) == SCC_REG)
-       return "; s_mov_b32\t%0, %1 is not supported by the assembler.\;"
-              ".byte\t0xfd\;"
-              ".byte\t0x0\;"
-              ".byte\t0xea\;"
-              ".byte\t0xbe\;"
-              "s_mov_b32\tvcc_hi, 0";
-      else
-       return "s_mov_b32\tvcc_lo, %1\;"
-              "s_mov_b32\tvcc_hi, 0";
+      return "s_mov_b32\tvcc_lo, %1\;"
+            "s_mov_b32\tvcc_hi, 0";
     case 6:
       return "s_load_dword\t%0, %A1\;s_waitcnt\tlgkmcnt(0)";
     case 7:
       return "s_branch\t%0";
     else
       /* !!! This sequence clobbers EXEC_SAVE_REG and CC_SAVE_REG.  */
-      return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
-            ".long\t0xbe9600fd\;"
+      return "s_mov_b32\ts22, scc\;"
             "s_getpc_b64\ts[20:21]\;"
             "s_add_u32\ts20, s20, %0@rel32@lo+4\;"
             "s_addc_u32\ts21, s21, %0@rel32@hi+4\;"
          }
        else
          return "s_cbranch%c1\t.Lskip%=\;"
-                "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
-                ".byte\t0xfd\;"
-                ".byte\t0x0\;"
-                ".byte\t0x80|22\;"
-                ".byte\t0xbe\;"
+                "s_mov_b32\ts22, scc\;"
                 "s_getpc_b64\ts[20:21]\;"
                 "s_add_u32\ts20, s20, %0@rel32@lo+4\;"
                 "s_addc_u32\ts21, s21, %0@rel32@hi+4\;"
 
     if (SYMBOL_REF_P (operands[1])
        && SYMBOL_REF_WEAK (operands[1]))
-       return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
-              ".long\t0xbe9600fd\;"
+       return "s_mov_b32\ts22, scc\;"
               "s_getpc_b64\t%0\;"
               "s_add_u32\t%L0, %L0, %1@gotpcrel32@lo+4\;"
               "s_addc_u32\t%H0, %H0, %1@gotpcrel32@hi+4\;"
               "s_cmpk_lg_u32\ts22, 0\;"
               "s_waitcnt\tlgkmcnt(0)";
 
-    return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
-          ".long\t0xbe9600fd\;"
+    return "s_mov_b32\ts22, scc\;"
           "s_getpc_b64\t%0\;"
           "s_add_u32\t%L0, %L0, %1@rel32@lo+4\;"
           "s_addc_u32\t%H0, %H0, %1@rel32@hi+4\;"