1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_X86_FUTEX_H
3 #define _ASM_X86_FUTEX_H
7 #include <linux/futex.h>
8 #include <linux/uaccess.h>
11 #include <asm/errno.h>
12 #include <asm/processor.h>
/*
 * unsafe_atomic_op1 - run a single read-modify-write instruction ("insn",
 * e.g. xchgl or lock xaddl) on the user word *uaddr, with oparg as the
 * source operand (constraint "0" ties it to the %0/oldval register).
 * A fault on the instruction at label 1: is handled via the exception
 * table entry, which resumes at 2: with -EFAULT placed in the register
 * bound to %1 (ret) by EX_TYPE_EFAULT_REG.
 *
 * NOTE(review): this chunk is missing the macro's surrounding lines
 * (the "do {" opener, the 2: landing label, the ret check / goto
 * "label", and the *oval store) — confirm against the full header.
 * Must be used between user_access_begin()/user_access_end().
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label) \
	int oldval = 0, ret; \
	asm volatile("1:\t" insn "\n" \
		     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %1) \
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
		     : "0" (oparg), "1" (0)); \
/*
 * unsafe_atomic_op2 - apply a two-operand op (or/and/xor, supplied as
 * "insn") to the user word *uaddr using a cmpxchg loop:
 *   1:  load *uaddr into %0 (oldval, forced into eax by "=&a")
 *   2:  copy it to the scratch register %3 (tem)
 *       insn combines oparg (%4) into tem (done on a line not visible
 *       in this chunk)
 *   3:  lock cmpxchgl tries to publish tem; eax holds the expected old
 *       value, as cmpxchg requires
 * Faults at the loads (1:) or the cmpxchg (3:) are redirected by the
 * two exception-table entries to label 4: with -EFAULT in %1 (ret).
 *
 * NOTE(review): the retry-on-contention branch, the 4: landing label,
 * the ret check, and the *oval store are outside this view — confirm
 * against the full header.  Must be used between
 * user_access_begin()/user_access_end().
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label) \
	int oldval = 0, ret, tem; \
	asm volatile("1:\tmovl %2, %0\n" \
		     "2:\tmovl\t%0, %3\n" \
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
		     _ASM_EXTABLE_TYPE_REG(1b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     _ASM_EXTABLE_TYPE_REG(3b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     : "=&a" (oldval), "=&r" (ret), \
		       "+m" (*uaddr), "=&r" (tem) \
		     : "r" (oparg), "1" (0)); \
/*
 * arch_futex_atomic_op_inuser - atomically perform futex operation @op
 * with argument @oparg on a user-space word, returning the previous
 * value through @oval.
 *
 * Opens a user access window with user_access_begin() (bailing out if
 * the address cannot be accessed), then dispatches to the matching
 * unsafe_atomic_op helper:
 *   FUTEX_OP_SET  -> xchgl          (plain exchange; xchg with a memory
 *                                    operand is implicitly locked)
 *   FUTEX_OP_ADD  -> lock xaddl
 *   FUTEX_OP_OR   -> orl  via cmpxchg loop
 *   FUTEX_OP_ANDN -> andl with ~oparg (and-not) via cmpxchg loop
 *   FUTEX_OP_XOR  -> xorl via cmpxchg loop
 * All helpers jump to the Efault label on a user-memory fault.
 *
 * NOTE(review): this chunk omits the @uaddr parameter line, the
 * switch/case labels, the default/-ENOSYS path, the Efault handler,
 * user_access_end() and the return — verify against the full header.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
	if (!user_access_begin(uaddr, sizeof(u32)))
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				  uaddr, oparg, Efault);
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		/* ANDN: complement oparg so the andl clears the requested bits */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user word.
 *
 * If *uaddr == @oldval, atomically store @newval; the value previously
 * read from *uaddr is returned through @uval (eax output of cmpxchgl,
 * bound by "=a").  @oldval is preloaded into eax via the "1" input
 * constraint, as the cmpxchg instruction requires.
 *
 * A fault on the locked cmpxchg (label 1:) is fixed up by the exception
 * table entry: execution resumes at 2: with -EFAULT written into the
 * register holding %0 (ret).
 *
 * NOTE(review): the declaration of ret, the *uval store, the 2: landing
 * label, user_access_end() and the return statement are not visible in
 * this chunk — confirm against the full header.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
	if (!user_access_begin(uaddr, sizeof(u32)))
		     "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
		     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
		     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		     : "r" (newval), "1" (oldval)
103 #endif /* _ASM_X86_FUTEX_H */