/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_FUTEX_H
#define __ASM_CSKY_FUTEX_H

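/*
 * UP kernels can fall back on the generic, uaccess-based futex helpers;
 * SMP needs the ldex.w/stex.w (load/store-exclusive) versions below.
 */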
#ifndef CONFIG_SMP
#include <asm-generic/futex.h>
#else
#include <linux/atomic.h>
#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

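/*
 * Apply "insn" to the user word at uaddr inside a load-exclusive/
 * store-exclusive retry loop: stex.w leaves zero in %[t] when the
 * exclusive store fails, so bez loops back to reload.  Faults at the
 * user accesses (labels 1 and 2) are redirected via __ex_table to the
 * fixup at label 3, which returns -EFAULT in ret.
 */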
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	u32 tmp;						\
								\
	__atomic_pre_full_fence();				\
								\
	__asm__ __volatile__ (					\
	"1:	ldex.w	%[ov], %[u]		\n"		\
	"	"insn"				\n"		\
	"2:	stex.w	%[t], %[u]		\n"		\
	"	bez	%[t], 1b		\n"		\
	"	br	4f			\n"		\
	"3:	mov	%[r], %[e]		\n"		\
	"4:					\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.balign 4			\n"		\
	"	.long	1b, 3b			\n"		\
	"	.long	2b, 3b			\n"		\
	"	.previous			\n"		\
	: [r] "+r" (ret), [ov] "=&r" (oldval),			\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)			\
	: [op] "Jr" (oparg), [e] "jr" (-EFAULT)			\
	: "memory");						\
								\
	__atomic_post_full_fence();				\
}

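/*
 * Atomically perform the futex operation on the user word at uaddr and
 * return its previous value through oval.  Note that FUTEX_OP_ANDN is
 * implemented as an AND with the complemented operand.
 */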
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[t], %[ov]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %[t], %[ov], %[op]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
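
/*
 * Atomically compare the user word at uaddr with oldval and, only on a
 * match, store newval: cmpne/bt skips the store-exclusive when the
 * comparison fails.  The value actually read is returned through uval.
 */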
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__atomic_pre_full_fence();

	__asm__ __volatile__ (
	"1:	ldex.w	%[v], %[u]		\n"
	"	cmpne	%[v], %[ov]		\n"
	"	bt	4f			\n"
	"	mov	%[t], %[nv]		\n"
	"2:	stex.w	%[t], %[u]		\n"
	"	bez	%[t], 1b		\n"
	"	br	4f			\n"
	"3:	mov	%[r], %[e]		\n"
	"4:					\n"
	"	.section __ex_table,\"a\"	\n"
	"	.balign 4			\n"
	"	.long	1b, 3b			\n"
	"	.long	2b, 3b			\n"
	"	.previous			\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr),
	  [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "Jr" (-EFAULT)
	: "memory");

	__atomic_post_full_fence();

	*uval = val;
	return ret;
}

#endif /* CONFIG_SMP */
#endif /* __ASM_CSKY_FUTEX_H */