LoongArch: Simplify "BLT foo, zero" with BLTZ
author		WANG Xuerui <git@xen0n.name>
		Tue, 26 Jul 2022 15:57:20 +0000 (23:57 +0800)
committer	Huacai Chen <chenhuacai@loongson.cn>
		Fri, 29 Jul 2022 10:22:32 +0000 (18:22 +0800)
Support for this syntactic sugar has been present in the upstream binutils
port from the beginning. Use it for shorter lines and better consistency;
the generated code should be identical.
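
For reference, a minimal standalone sketch of the equivalence (hypothetical
file and symbol names, GAS syntax with ABI register names): "bltz" is pure
assembler sugar, so the two branches below encode the same BLT instruction.

    /* bltz-demo.S: hypothetical example, not part of this patch.
     * "bltz rj, off" is sugar for "blt rj, $zero, off", so both branches
     * below assemble to the same instruction word.
     */
            .text
            .globl  demo
    demo:
            bltz    $a0, 1f                 # alias form (after this patch)
            blt     $a0, $zero, 1f          # canonical form (before this patch)
    1:
            jr      $ra                     # return to caller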

Signed-off-by: WANG Xuerui <git@xen0n.name>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
arch/loongarch/include/asm/atomic.h
arch/loongarch/mm/tlbex.S

diff --git a/arch/loongarch/include/asm/atomic.h b/arch/loongarch/include/asm/atomic.h
index 0869bec..dc2ae4f 100644
--- a/arch/loongarch/include/asm/atomic.h
+++ b/arch/loongarch/include/asm/atomic.h
@@ -158,7 +158,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
                "1:     ll.w    %1, %2          # atomic_sub_if_positive\n"
                "       addi.w  %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
-               "       blt     %0, $zero, 2f                           \n"
+               "       bltz    %0, 2f                                  \n"
                "       sc.w    %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "2:                                                     \n"
@@ -171,7 +171,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
                "1:     ll.w    %1, %2          # atomic_sub_if_positive\n"
                "       sub.w   %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
-               "       blt     %0, $zero, 2f                           \n"
+               "       bltz    %0, 2f                                  \n"
                "       sc.w    %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "2:                                                     \n"
@@ -321,7 +321,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
                "1:     ll.d    %1, %2  # atomic64_sub_if_positive      \n"
                "       addi.d  %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
-               "       blt     %0, $zero, 2f                           \n"
+               "       bltz    %0, 2f                                  \n"
                "       sc.d    %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "2:                                                     \n"
@@ -334,7 +334,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
                "1:     ll.d    %1, %2  # atomic64_sub_if_positive      \n"
                "       sub.d   %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
-               "       blt     %0, $zero, 2f                           \n"
+               "       bltz    %0, 2f                                  \n"
                "       sc.d    %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "2:                                                     \n"
diff --git a/arch/loongarch/mm/tlbex.S b/arch/loongarch/mm/tlbex.S
index 4d16e27..9ca1e3f 100644
--- a/arch/loongarch/mm/tlbex.S
+++ b/arch/loongarch/mm/tlbex.S
@@ -47,7 +47,7 @@ SYM_FUNC_START(handle_tlb_load)
         * The vmalloc handling is not in the hotpath.
         */
        csrrd   t0, LOONGARCH_CSR_BADV
-       blt     t0, zero, vmalloc_load
+       bltz    t0, vmalloc_load
        csrrd   t1, LOONGARCH_CSR_PGDL
 
 vmalloc_done_load:
@@ -210,7 +210,7 @@ SYM_FUNC_START(handle_tlb_store)
         * The vmalloc handling is not in the hotpath.
         */
        csrrd   t0, LOONGARCH_CSR_BADV
-       blt     t0, zero, vmalloc_store
+       bltz    t0, vmalloc_store
        csrrd   t1, LOONGARCH_CSR_PGDL
 
 vmalloc_done_store:
@@ -378,7 +378,7 @@ SYM_FUNC_START(handle_tlb_modify)
         * The vmalloc handling is not in the hotpath.
         */
        csrrd   t0, LOONGARCH_CSR_BADV
-       blt     t0, zero, vmalloc_modify
+       bltz    t0, vmalloc_modify
        csrrd   t1, LOONGARCH_CSR_PGDL
 
 vmalloc_done_modify: