1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_X86_FUTEX_H
3 #define _ASM_X86_FUTEX_H
4
5 #ifdef __KERNEL__
6
7 #include <linux/futex.h>
8 #include <linux/uaccess.h>
9
10 #include <asm/asm.h>
11 #include <asm/errno.h>
12 #include <asm/processor.h>
13 #include <asm/smap.h>
14
/*
 * Execute a futex op that maps to a single atomic x86 instruction
 * (xchgl / lock xaddl) on the user word at @uaddr.
 *
 * @insn is an instruction template operating on %0 (register,
 * pre-loaded with @oparg via the "0" matching constraint) and %2
 * (the *uaddr memory operand).  The instruction atomically updates
 * the user word and leaves its previous value in %0.
 *
 * Fault handling: a fault at 1: is redirected by the extable entry
 * to the .fixup code at 3:, which loads -EFAULT (%3) into ret (%1,
 * pre-loaded with 0 via the "1" constraint) and resumes at 2:.
 * On fault the macro jumps to @label; on success it stores the old
 * value into *@oval.
 *
 * Must run inside a user_access_begin()/user_access_end() region
 * (hence "unsafe_"); the caller provides @label for the fault exit.
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
31
32
/*
 * Execute a read-modify-write futex op that has no single-instruction
 * atomic x86 form (OR/ANDN/XOR) as a cmpxchg retry loop:
 *
 *   1: load *uaddr into oldval (%eax, via the "=&a" constraint)
 *   2: tem = oldval, then apply @insn: tem = tem <op> oparg
 *   3: lock cmpxchgl tem, *uaddr  -- succeeds only if *uaddr is
 *      still oldval; otherwise %eax gets the new current value and
 *      the jnz retries from 2:
 *
 * @insn operates on %3 (tem) with %4 (oparg), e.g. "orl %4, %3".
 *
 * Fault handling: faults at the load (1:) or the cmpxchg (3:) are
 * redirected to the .fixup code at 5:, which sets ret (%1, pre-loaded
 * with 0) to -EFAULT (%5) and resumes at 4:.  On fault the macro
 * jumps to @label; on success *@oval receives the pre-op value.
 *
 * Must run inside a user_access_begin()/user_access_end() region.
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
55
/*
 * arch_futex_atomic_op_inuser - atomically apply a futex op to a user word
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg: operand for the operation
 * @oval:  out parameter, receives the word's value before the operation
 * @uaddr: user-space address of the futex word
 *
 * Returns 0 on success, -EFAULT if @uaddr is inaccessible or the access
 * faults, -ENOSYS for an unrecognized @op.
 *
 * SET and ADD each map to a single atomic instruction (xchgl, which
 * needs no LOCK prefix, and lock xaddl); the bitwise ops use the
 * cmpxchg retry loop in unsafe_atomic_op2().  ANDN is implemented as
 * AND with the complemented operand.  All accesses sit between
 * user_access_begin()/user_access_end() (SMAP window), and every exit
 * path — including the Efault fault exit taken from inside the macros —
 * closes that window.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				  uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN oparg == AND ~oparg */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:
	/* Reached via goto from inside the unsafe_atomic_op*() macros. */
	user_access_end();
	return -EFAULT;
}
89
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange a user futex word
 * @uval:   out parameter, receives the value actually read from *@uaddr
 * @uaddr:  user-space address of the futex word
 * @oldval: expected current value of the word
 * @newval: value to store if *@uaddr == @oldval
 *
 * Returns 0 if the access succeeded (the caller determines whether the
 * exchange happened by checking *@uval == @oldval), or -EFAULT if
 * @uaddr is inaccessible or the access faults.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	int ret = 0;

	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;
	/*
	 * lock cmpxchgl: %eax ("1", pre-loaded with oldval) is compared
	 * with *uaddr; on match, newval (%4) is stored.  Either way %eax
	 * ends up holding the value that was in memory ("=a" (oldval)).
	 * A fault at 1: is redirected to 3:, which sets ret = -EFAULT
	 * (ret was pre-loaded with 0 via "+r") and resumes at 2:.
	 */
	asm volatile("\n"
		"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		"2:\n"
		"\t.section .fixup, \"ax\"\n"
		"3:\tmov %3, %0\n"
		"\tjmp 2b\n"
		"\t.previous\n"
		_ASM_EXTABLE_UA(1b, 3b)
		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "1" (oldval)
		: "memory"
	);
	user_access_end();
	*uval = oldval;
	return ret;
}
113
114 #endif
115 #endif /* _ASM_X86_FUTEX_H */
116