MIPS: Fix gigaton of warnings building with microMIPS.
author		Ralf Baechle <ralf@linux-mips.org>
		Sun, 30 Mar 2014 11:20:10 +0000 (13:20 +0200)
committer	Ralf Baechle <ralf@linux-mips.org>
		Mon, 31 Mar 2014 16:17:12 +0000 (18:17 +0200)
With binutils 2.24 the attempt to switch from microMIPS mode to MIPS III
mode through .set mips3 results in *lots* of warnings like

{standard input}: Assembler messages:
{standard input}:397: Warning: the 64-bit MIPS architecture does not support the `smartmips' extension

during a kernel build.  Fixed by using .set arch=r4000 instead.

This breaks support for building the kernel with binutils 2.13, which was
supported for 32-bit kernels only anyway, and with 2.14, which was a bad
vintage for MIPS.
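
For illustration, this is the shape of the change applied throughout (a
minimal, hypothetical sketch -- example_atomic_inc is not taken from any of
the files below): LL/SC sequences that used to force the ISA with .set mips3
now select the R4000 architecture instead, which the assembler accepts from
both classic MIPS and microMIPS code.  The prefetch macros in asm.h get the
analogous .set arch=r5000 treatment.

	/* Hypothetical example only -- not part of this patch. */
	static inline void example_atomic_inc(int *p)
	{
		int tmp;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	arch=r4000			\n" /* was: .set mips3 */
		"1:	ll	%0, %1		# load-linked	\n"
		"	addiu	%0, %0, 1			\n"
		"	sc	%0, %1		# store-cond	\n"
		"	beqz	%0, 1b				\n"
		"	.set	pop				\n"
		: "=&r" (tmp), "+m" (*p));
	}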

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
18 files changed:
arch/mips/alchemy/common/sleeper.S
arch/mips/include/asm/asm.h
arch/mips/include/asm/atomic.h
arch/mips/include/asm/bitops.h
arch/mips/include/asm/cmpxchg.h
arch/mips/include/asm/futex.h
arch/mips/include/asm/io.h
arch/mips/include/asm/local.h
arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
arch/mips/include/asm/r4kcache.h
arch/mips/include/asm/stackframe.h
arch/mips/kernel/bmips_vec.S
arch/mips/kernel/genex.S
arch/mips/kernel/idle.c
arch/mips/kernel/r4k_fpu.S
arch/mips/kernel/r4k_switch.S
arch/mips/kernel/syscall.c
arch/mips/pmcs-msp71xx/msp_setup.c

arch/mips/alchemy/common/sleeper.S
index 706d933..c73d812 100644
@@ -95,7 +95,7 @@ LEAF(alchemy_sleep_au1000)
 
        /* cache following instructions, as memory gets put to sleep */
        la      t0, 1f
-       .set    mips3
+       .set    arch=r4000
        cache   0x14, 0(t0)
        cache   0x14, 32(t0)
        cache   0x14, 64(t0)
@@ -121,7 +121,7 @@ LEAF(alchemy_sleep_au1550)
 
        /* cache following instructions, as memory gets put to sleep */
        la      t0, 1f
-       .set    mips3
+       .set    arch=r4000
        cache   0x14, 0(t0)
        cache   0x14, 32(t0)
        cache   0x14, 64(t0)
@@ -163,7 +163,7 @@ LEAF(alchemy_sleep_au1300)
        la      t1, 4f
        subu    t2, t1, t0
 
-       .set    mips3
+       .set    arch=r4000
 
 1:     cache   0x14, 0(t0)
        subu    t2, t2, 32
arch/mips/include/asm/asm.h
index b153e79..7c26b28 100644
@@ -146,7 +146,7 @@ symbol              =       value
 
 #define PREF(hint,addr)                                        \
                .set    push;                           \
-               .set    mips4;                          \
+               .set    arch=r5000;                     \
                pref    hint, addr;                     \
                .set    pop
 
@@ -159,7 +159,7 @@ symbol              =       value
 
 #define PREFX(hint,addr)                               \
                .set    push;                           \
-               .set    mips4;                          \
+               .set    arch=r5000;                     \
                prefx   hint, addr;                     \
                .set    pop
 
arch/mips/include/asm/atomic.h
index 7eed2f2..e8eb3d5 100644
@@ -53,7 +53,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
@@ -66,7 +66,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       ll      %0, %1          # atomic_add    \n"
                        "       addu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
@@ -96,7 +96,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
@@ -109,7 +109,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       ll      %0, %1          # atomic_sub    \n"
                        "       subu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
@@ -139,7 +139,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
@@ -153,7 +153,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       ll      %1, %2  # atomic_add_return     \n"
                        "       addu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
@@ -188,7 +188,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
@@ -205,7 +205,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       ll      %1, %2  # atomic_sub_return     \n"
                        "       subu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
@@ -248,7 +248,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
@@ -266,7 +266,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                int temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
@@ -420,7 +420,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
@@ -433,7 +433,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       lld     %0, %1          # atomic64_add  \n"
                        "       daddu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
@@ -463,7 +463,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
@@ -476,7 +476,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       lld     %0, %1          # atomic64_sub  \n"
                        "       dsubu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
@@ -506,7 +506,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
@@ -520,7 +520,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       lld     %1, %2  # atomic64_add_return   \n"
                        "       daddu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
@@ -556,7 +556,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
@@ -571,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       lld     %1, %2  # atomic64_sub_return   \n"
                        "       dsubu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
@@ -615,7 +615,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
@@ -633,7 +633,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
arch/mips/include/asm/bitops.h
index 71305a8..6a65d49 100644
@@ -79,7 +79,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
@@ -101,7 +101,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
@@ -131,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
@@ -153,7 +153,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                           \n"
+               "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
@@ -211,7 +211,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL "%0, %1          # change_bit    \n"
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
@@ -244,7 +244,7 @@ static inline int test_and_set_bit(unsigned long nr,
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
@@ -260,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
@@ -298,7 +298,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
@@ -314,7 +314,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
@@ -353,7 +353,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
@@ -386,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
@@ -427,7 +427,7 @@ static inline int test_and_change_bit(unsigned long nr,
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
@@ -443,7 +443,7 @@ static inline int test_and_change_bit(unsigned long nr,
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
arch/mips/include/asm/cmpxchg.h
index 466069b..eefcaa3 100644
@@ -22,11 +22,11 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                unsigned long dummy;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
                "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
@@ -38,11 +38,11 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       ll      %0, %3          # xchg_u32      \n"
                        "       .set    mips0                           \n"
                        "       move    %2, %z4                         \n"
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=m" (*m), "=&r" (dummy)
@@ -74,7 +74,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                unsigned long dummy;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
@@ -88,7 +88,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 
                do {
                        __asm__ __volatile__(
-                       "       .set    mips3                           \n"
+                       "       .set    arch=r4000                      \n"
                        "       lld     %0, %3          # xchg_u64      \n"
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
@@ -145,12 +145,12 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
@@ -162,12 +162,12 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
arch/mips/include/asm/futex.h
index 1fe2cd6..194cda0 100644
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "1:     ll      %1, %4  # __futex_atomic_op     \n"     \
                "       .set    mips0                           \n"     \
                "       " insn  "                               \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "2:     sc      $1, %2                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                __WEAK_LLSC_MB                                          \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "1:     "user_ll("%1", "%4")" # __futex_atomic_op\n"    \
                "       .set    mips0                           \n"     \
                "       " insn  "                               \n"     \
-               "       .set    mips3                           \n"     \
+               "       .set    arch=r4000                      \n"     \
                "2:     "user_sc("$1", "%2")"                   \n"     \
                "       beqz    $1, 1b                          \n"     \
                __WEAK_LLSC_MB                                          \
@@ -147,12 +147,12 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "# futex_atomic_cmpxchg_inatomic                        \n"
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     ll      %1, %3                                  \n"
                "       bne     %1, %z4, 3f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z5                                 \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "2:     sc      $1, %2                                  \n"
                "       beqzl   $1, 1b                                  \n"
                __WEAK_LLSC_MB
@@ -174,12 +174,12 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "# futex_atomic_cmpxchg_inatomic                        \n"
                "       .set    push                                    \n"
                "       .set    noat                                    \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:     "user_ll("%1", "%3")"                           \n"
                "       bne     %1, %z4, 3f                             \n"
                "       .set    mips0                                   \n"
                "       move    $1, %z5                                 \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "2:     "user_sc("$1", "%2")"                           \n"
                "       beqz    $1, 1b                                  \n"
                __WEAK_LLSC_MB
arch/mips/include/asm/io.h
index e221d1d..933b50e 100644
@@ -331,7 +331,7 @@ static inline void pfx##write##bwlq(type val,                               \
                if (irq)                                                \
                        local_irq_save(__flags);                        \
                __asm__ __volatile__(                                   \
-                       ".set   mips3"          "\t\t# __writeq""\n\t"  \
+                       ".set   arch=r4000"     "\t\t# __writeq""\n\t"  \
                        "dsll32 %L0, %L0, 0"                    "\n\t"  \
                        "dsrl32 %L0, %L0, 0"                    "\n\t"  \
                        "dsll32 %M0, %M0, 0"                    "\n\t"  \
@@ -361,7 +361,7 @@ static inline type pfx##read##bwlq(const volatile void __iomem *mem)        \
                if (irq)                                                \
                        local_irq_save(__flags);                        \
                __asm__ __volatile__(                                   \
-                       ".set   mips3"          "\t\t# __readq" "\n\t"  \
+                       ".set   arch=r4000"     "\t\t# __readq" "\n\t"  \
                        "ld     %L0, %1"                        "\n\t"  \
                        "dsra32 %M0, %L0, 0"                    "\n\t"  \
                        "sll    %L0, %L0, 0"                    "\n\t"  \
arch/mips/include/asm/local.h
index d44622c..46dfc3c 100644
@@ -33,7 +33,7 @@ static __inline__ long local_add_return(long i, local_t * l)
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
@@ -47,7 +47,7 @@ static __inline__ long local_add_return(long i, local_t * l)
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
@@ -78,7 +78,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
@@ -92,7 +92,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
                unsigned long temp;
 
                __asm__ __volatile__(
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
index 2dbc7a8..fc946c8 100644
@@ -76,7 +76,7 @@ static inline void set_value_reg32(volatile u32 *const addr,
 
        __asm__ __volatile__(
        "       .set    push                            \n"
-       "       .set    mips3                           \n"
+       "       .set    arch=r4000                      \n"
        "1:     ll      %0, %1  # set_value_reg32       \n"
        "       and     %0, %2                          \n"
        "       or      %0, %3                          \n"
@@ -98,7 +98,7 @@ static inline void set_reg32(volatile u32 *const addr,
 
        __asm__ __volatile__(
        "       .set    push                            \n"
-       "       .set    mips3                           \n"
+       "       .set    arch=r4000                      \n"
        "1:     ll      %0, %1          # set_reg32     \n"
        "       or      %0, %2                          \n"
        "       sc      %0, %1                          \n"
@@ -119,7 +119,7 @@ static inline void clear_reg32(volatile u32 *const addr,
 
        __asm__ __volatile__(
        "       .set    push                            \n"
-       "       .set    mips3                           \n"
+       "       .set    arch=r4000                      \n"
        "1:     ll      %0, %1          # clear_reg32   \n"
        "       and     %0, %2                          \n"
        "       sc      %0, %1                          \n"
@@ -140,7 +140,7 @@ static inline void toggle_reg32(volatile u32 *const addr,
 
        __asm__ __volatile__(
        "       .set    push                            \n"
-       "       .set    mips3                           \n"
+       "       .set    arch=r4000                      \n"
        "1:     ll      %0, %1          # toggle_reg32  \n"
        "       xor     %0, %2                          \n"
        "       sc      %0, %1                          \n"
@@ -216,7 +216,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
 #define custom_read_reg32(address, tmp)                                \
        __asm__ __volatile__(                                   \
        "       .set    push                            \n"     \
-       "       .set    mips3                           \n"     \
+       "       .set    arch=r4000                      \n"     \
        "1:     ll      %0, %1  #custom_read_reg32      \n"     \
        "       .set    pop                             \n"     \
        : "=r" (tmp), "=m" (*address)                           \
@@ -225,7 +225,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
 #define custom_write_reg32(address, tmp)                       \
        __asm__ __volatile__(                                   \
        "       .set    push                            \n"     \
-       "       .set    mips3                           \n"     \
+       "       .set    arch=r4000                      \n"     \
        "       sc      %0, %1  #custom_write_reg32     \n"     \
        "       "__beqz"%0, 1b                          \n"     \
        "       nop                                     \n"     \
arch/mips/include/asm/r4kcache.h
index 69c2ada..ca64cbe 100644
@@ -36,7 +36,7 @@
        __asm__ __volatile__(                                           \
        "       .set    push                                    \n"     \
        "       .set    noreorder                               \n"     \
-       "       .set    mips3\n\t                               \n"     \
+       "       .set    arch=r4000                              \n"     \
        "       cache   %0, %1                                  \n"     \
        "       .set    pop                                     \n"     \
        :                                                               \
@@ -204,7 +204,7 @@ static inline void flush_scache_line(unsigned long addr)
        __asm__ __volatile__(                                   \
        "       .set    push                    \n"             \
        "       .set    noreorder               \n"             \
-       "       .set    mips3                   \n"             \
+       "       .set    arch=r4000              \n"             \
        "1:     cache   %0, (%1)                \n"             \
        "2:     .set    pop                     \n"             \
        "       .section __ex_table,\"a\"       \n"             \
arch/mips/include/asm/stackframe.h
index 4857e2c..d301e10 100644
 
                .macro  RESTORE_SP_AND_RET
                LONG_L  sp, PT_R29(sp)
-               .set    mips3
+               .set    arch=r4000
                eret
                .set    mips0
                .endm
arch/mips/kernel/bmips_vec.S
index a5bf73d..290c23b 100644
@@ -122,7 +122,7 @@ NESTED(bmips_reset_nmi_vec, PT_SIZE, sp)
        jr      k0
 
        RESTORE_ALL
-       .set    mips3
+       .set    arch=r4000
        eret
 
 /***********************************************************************
arch/mips/kernel/genex.S
index 7365cd6..a9ce340 100644
@@ -67,7 +67,7 @@ NESTED(except_vec3_generic, 0, sp)
  */
 NESTED(except_vec3_r4000, 0, sp)
        .set    push
-       .set    mips3
+       .set    arch=r4000
        .set    noat
        mfc0    k1, CP0_CAUSE
        li      k0, 31<<2
@@ -139,7 +139,7 @@ LEAF(__r4k_wait)
        nop
        nop
 #endif
-       .set    mips3
+       .set    arch=r4000
        wait
        /* end of rollback region (the region size must be power of two) */
 1:
@@ -577,7 +577,7 @@ isrdhwr:
        ori     k1, _THREAD_MASK
        xori    k1, _THREAD_MASK
        LONG_L  v1, TI_TP_VALUE(k1)
-       .set    mips3
+       .set    arch=r4000
        eret
        .set    mips0
 #endif
arch/mips/kernel/idle.c
index 9f904ed..837ff27 100644
@@ -64,7 +64,7 @@ void r4k_wait_irqoff(void)
        if (!need_resched())
                __asm__(
                "       .set    push            \n"
-               "       .set    mips3           \n"
+               "       .set    arch=r4000      \n"
                "       wait                    \n"
                "       .set    pop             \n");
        local_irq_enable();
@@ -82,7 +82,7 @@ static void rm7k_wait_irqoff(void)
        if (!need_resched())
                __asm__(
                "       .set    push                                    \n"
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "       .set    noat                                    \n"
                "       mfc0    $1, $12                                 \n"
                "       sync                                            \n"
@@ -103,7 +103,7 @@ static void au1k_wait(void)
        unsigned long c0status = read_c0_status() | 1;  /* irqs on */
 
        __asm__(
-       "       .set    mips3                   \n"
+       "       .set    arch=r4000                      \n"
        "       cache   0x14, 0(%0)             \n"
        "       cache   0x14, 32(%0)            \n"
        "       sync                            \n"
arch/mips/kernel/r4k_fpu.S
index 752b50a..0cfa7a5 100644
@@ -31,7 +31,7 @@
        .endm
 
        .set    noreorder
-       .set    mips3
+       .set    arch=r4000
 
 LEAF(_save_fp_context)
        cfc1    t1, fcr31
arch/mips/kernel/r4k_switch.S
index f938ecd..abacac7 100644
@@ -294,7 +294,7 @@ LEAF(_init_fpu)
 1:     .set    pop
 #endif /* CONFIG_CPU_MIPS32_R2 */
 #else
-       .set    mips3
+       .set    arch=r4000
        dmtc1   t1, $f0
        dmtc1   t1, $f2
        dmtc1   t1, $f4
arch/mips/kernel/syscall.c
index b79d13f..4a4f9dd 100644
@@ -110,7 +110,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
 
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__ (
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "       li      %[err], 0                               \n"
                "1:     ll      %[old], (%[addr])                       \n"
                "       move    %[tmp], %[new]                          \n"
@@ -135,7 +135,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__ (
-               "       .set    mips3                                   \n"
+               "       .set    arch=r4000                              \n"
                "       li      %[err], 0                               \n"
                "1:     ll      %[old], (%[addr])                       \n"
                "       move    %[tmp], %[new]                          \n"
arch/mips/pmcs-msp71xx/msp_setup.c
index 396b296..7e98076 100644
@@ -49,7 +49,7 @@ void msp7120_reset(void)
        /* Cache the reset code of this function */
        __asm__ __volatile__ (
                "       .set    push                            \n"
-               "       .set    mips3                           \n"
+               "       .set    arch=r4000                      \n"
                "       la      %0,startpoint                   \n"
                "       la      %1,endpoint                     \n"
                "       .set    pop                             \n"