x86: add user_atomic_cmpxchg_inatomic at uaccess.h
author Qiaowei Ren <qiaowei.ren@intel.com>
Sat, 14 Dec 2013 06:25:02 +0000 (14:25 +0800)
committer H. Peter Anvin <hpa@linux.intel.com>
Mon, 16 Dec 2013 17:07:57 +0000 (09:07 -0800)
This patch adds user_atomic_cmpxchg_inatomic(), which uses the CMPXCHG
instruction against a user space address.

This generalizes the already existing futex_atomic_cmpxchg_inatomic()
so it can be used in other contexts.  This will be used in the
upcoming support for Intel MPX (Memory Protection Extensions).
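
For illustration only (not part of this patch), a caller in kernel context
might use the new helper roughly as follows; the wrapper name
try_cmpxchg_user_u32(), its retry policy, and the -EAGAIN return are
hypothetical, and <linux/uaccess.h> is assumed to be included.  The helper
returns 0 when the CMPXCHG instruction executed (or -EFAULT on a fault) and
stores the value actually found at the user address in *uval, so the caller
can tell whether the exchange really happened:

	/*
	 * Hypothetical usage sketch, not part of the patch.  The "inatomic"
	 * suffix means the call may run with page faults disabled; a fault
	 * is reported as -EFAULT rather than being handled.
	 */
	static int try_cmpxchg_user_u32(u32 __user *uaddr, u32 old, u32 new)
	{
		u32 cur;
		int ret;

		pagefault_disable();
		ret = user_atomic_cmpxchg_inatomic(&cur, uaddr, old, new);
		pagefault_enable();

		if (ret)
			return ret;	/* -EFAULT: user address not accessible */
		return (cur == old) ? 0 : -EAGAIN;	/* value changed under us */
	}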

[ hpa: replaced #ifdef inside a macro with IS_ENABLED() ]

Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
Link: http://lkml.kernel.org/r/1387002303-6620-1-git-send-email-qiaowei.ren@intel.com
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <a.p.zijlstra@chello.nl>
arch/x86/include/asm/uaccess.h

index 8ec57c0..48ff838 100644
@@ -525,6 +525,98 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
+extern void __cmpxchg_wrong_size(void)
+       __compiletime_error("Bad argument size for cmpxchg");
+
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)      \
+({                                                                     \
+       int __ret = 0;                                                  \
+       __typeof__(ptr) __uval = (uval);                                \
+       __typeof__(*(ptr)) __old = (old);                               \
+       __typeof__(*(ptr)) __new = (new);                               \
+       switch (size) {                                                 \
+       case 1:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "q" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 2:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 4:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 8:                                                         \
+       {                                                               \
+               if (!IS_ENABLED(CONFIG_X86_64))                         \
+                       __cmpxchg_wrong_size();                         \
+                                                                       \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       default:                                                        \
+               __cmpxchg_wrong_size();                                 \
+       }                                                               \
+       *__uval = __old;                                                \
+       __ret;                                                          \
+})
+
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)              \
+({                                                                     \
+       access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ?                \
+               __user_atomic_cmpxchg_inatomic((uval), (ptr),           \
+                               (old), (new), sizeof(*(ptr))) :         \
+               -EFAULT;                                                \
+})
+
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */