1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16#ifndef __ASM_FUTEX_H
17#define __ASM_FUTEX_H
18
19#ifdef __KERNEL__
20
21#include <linux/futex.h>
22#include <linux/uaccess.h>
23
24#include <asm/errno.h>
25
/*
 * __futex_atomic_op - one atomic read-modify-write on a user futex word,
 * implemented as an LDXR/STLXR exclusive-access loop.
 *
 * insn:   assembly fragment that computes the new value into %w3 (tmp)
 *         from the loaded old value (%w1) and the operand (%w4)
 * ret:    output; 0 on success, -EFAULT if a user access faulted
 * oldval: output; receives the value loaded from the futex word
 * uaddr:  user-space address of the futex word (caller has already
 *         masked it via __uaccess_mask_ptr)
 * tmp:    scratch variable holding the computed new value
 * oparg:  operand fed to insn
 *
 * Operand map: %0=ret, %1=oldval, %2=*uaddr, %3=tmp, %4=oparg, %5=-EFAULT.
 * The store-exclusive result lands in %w0; "cbnz %w0, 1b" retries until
 * the exclusive pair succeeds, leaving ret == 0.  stlxr gives release
 * ordering and the trailing "dmb ish" completes the full barrier the
 * futex API expects.  Faults at labels 1 (load) or 2 (store) are routed
 * by the exception table to fixup label 4, which writes -EFAULT to ret.
 * NOTE(review): the retry loop is unbounded; with pagefaults disabled a
 * contended line could in principle spin here — confirm against current
 * upstream, which bounds the loop.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg) \
do { \
	uaccess_enable(); \
	asm volatile( \
"	prfm	pstl1strm, %2\n" \
"1:	ldxr	%w1, %2\n" \
	insn "\n" \
"2:	stlxr	%w0, %w3, %2\n" \
"	cbnz	%w0, 1b\n" \
"	dmb	ish\n" \
"3:\n" \
"	.pushsection .fixup,\"ax\"\n" \
"	.align	2\n" \
"4:	mov	%w0, %w5\n" \
"	b	3b\n" \
"	.popsection\n" \
	_ASM_EXTABLE(1b, 4b) \
	_ASM_EXTABLE(2b, 4b) \
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp) \
	: "r" (oparg), "Ir" (-EFAULT) \
	: "memory"); \
	uaccess_disable(); \
} while (0)
49
/*
 * arch_futex_atomic_op_inuser - atomically apply a FUTEX_OP_* operation
 * to the user futex word at _uaddr and return the previous value.
 *
 * @op:     one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg:  operand for the operation (complemented here for ANDN so the
 *          asm fragment can use a plain "and")
 * @oval:   out: previous value of the futex word, written only on success
 * @_uaddr: user-space address of the futex word
 *
 * Returns 0 on success, -EFAULT if the user access faulted, or -ENOSYS
 * for an unknown op.  Runs with pagefaults disabled, so a faulting
 * access is reported via the exception table rather than being handled.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	/* Mask the user pointer before dereference (presumably speculation
	 * hardening — see __uaccess_mask_ptr). */
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		/* new = oparg */
		__futex_atomic_op("mov	%w3, %w4",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		/* new = old + oparg */
		__futex_atomic_op("add	%w3, %w1, %w4",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		/* new = old | oparg */
		__futex_atomic_op("orr	%w3, %w1, %w4",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* new = old & ~oparg (operand pre-inverted) */
		__futex_atomic_op("and	%w3, %w1, %w4",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		/* new = old ^ oparg */
		__futex_atomic_op("eor	%w3, %w1, %w4",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* Only expose the old value if the atomic op fully succeeded. */
	if (!ret)
		*oval = oldval;

	return ret;
}
90
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex
 * word: if *_uaddr == oldval, atomically store newval.
 *
 * @uval:   out: value loaded from the futex word.  Written
 *          unconditionally; if the very first load faulted, val was never
 *          assigned and *uval is unspecified — callers must check the
 *          return value first.
 * @_uaddr: user-space address of the futex word
 * @oldval: expected current value
 * @newval: value to store if the comparison succeeds
 *
 * Returns 0 on success (including a failed comparison — the caller
 * detects that via *uval != oldval) or -EFAULT on a bad user access.
 *
 * Operand map: %0=ret, %1=val, %2=*uaddr, %3=tmp, %4=oldval, %5=newval,
 * %6=-EFAULT.  "sub/cbnz" bails out to label 3 on mismatch without
 * storing; otherwise stlxr retries until the exclusive pair succeeds,
 * and "dmb ish" completes the full barrier.  Faults at labels 1 or 2 go
 * through the exception table to fixup 4, which sets ret = -EFAULT.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val, tmp;
	u32 __user *uaddr;

	/* Explicit range check: unlike the op_inuser path, this can be
	 * reached without a prior access_ok on the address. */
	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/* Mask the pointer before dereference (presumably speculation
	 * hardening — see __uaccess_mask_ptr). */
	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w4\n"
"	cbnz	%w3, 3f\n"
"2:	stlxr	%w3, %w5, %2\n"
"	cbnz	%w3, 1b\n"
"	dmb	ish\n"
"3:\n"
"	.pushsection .fixup,\"ax\"\n"
"4:	mov	%w0, %w6\n"
"	b	3b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 4b)
	_ASM_EXTABLE(2b, 4b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT)
	: "memory");
	uaccess_disable();

	*uval = val;
	return ret;
}
127
128#endif
129#endif
130