/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (c) 2006  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 2018  Jim Wilson (jimw@sifive.com)
 */
7#ifndef _ASM_RISCV_FUTEX_H
8#define _ASM_RISCV_FUTEX_H
9
10#include <linux/futex.h>
11#include <linux/uaccess.h>
12#include <linux/errno.h>
13#include <asm/asm.h>
14
15
#ifndef CONFIG_MMU
/*
 * Without an MMU there is no separate user address space to unlock, so the
 * user-access toggles used around the inline asm below compile to nothing.
 */
#define __enable_user_access() do { } while (0)
#define __disable_user_access() do { } while (0)
#endif
20
/*
 * __futex_atomic_op() - execute one atomic memory operation on a user word.
 *
 * @insn:   asm template for a single AMO instruction; it reads %z[op] (oparg)
 *          and the user word %[u], and writes the old value to %[ov].
 * @ret:    int lvalue; left untouched on success, set to -EFAULT on a fault
 *          (so callers must pre-initialize it to 0).
 * @oldval: int lvalue receiving the previous value of *uaddr.
 * @uaddr:  user-space address operated on.
 * @oparg:  operand for the AMO ("Jr" constraint: register, or x0 for 0).
 *
 * User access is enabled only for the duration of the instruction.  If the
 * AMO at label 1 faults, the exception table entry (1b -> 3b) diverts to the
 * .fixup stub, which loads -EFAULT into @ret and jumps back to label 2,
 * clobbering @tmp as the scratch register for the jump.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	uintptr_t tmp;						\
	__enable_user_access();					\
	__asm__ __volatile__ (					\
	"1:	" insn "				\n"	\
	"2:						\n"	\
	"	.section .fixup,\"ax\"			\n"	\
	"	.balign 4				\n"	\
	"3:	li %[r],%[e]				\n"	\
	"	jump 2b,%[t]				\n"	\
	"	.previous				\n"	\
	"	.section __ex_table,\"a\"		\n"	\
	"	.balign " RISCV_SZPTR "			\n"	\
	"	" RISCV_PTR " 1b, 3b			\n"	\
	"	.previous				\n"	\
	: [r] "+r" (ret), [ov] "=&r" (oldval),		\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)		\
	: [op] "Jr" (oparg), [e] "i" (-EFAULT)		\
	: "memory");					\
	__disable_user_access();			\
}
43
44static inline int
45arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
46{
47 int oldval = 0, ret = 0;
48
49 if (!access_ok(uaddr, sizeof(u32)))
50 return -EFAULT;
51
52 switch (op) {
53 case FUTEX_OP_SET:
54 __futex_atomic_op("amoswap.w.aqrl %[ov],%z[op],%[u]",
55 ret, oldval, uaddr, oparg);
56 break;
57 case FUTEX_OP_ADD:
58 __futex_atomic_op("amoadd.w.aqrl %[ov],%z[op],%[u]",
59 ret, oldval, uaddr, oparg);
60 break;
61 case FUTEX_OP_OR:
62 __futex_atomic_op("amoor.w.aqrl %[ov],%z[op],%[u]",
63 ret, oldval, uaddr, oparg);
64 break;
65 case FUTEX_OP_ANDN:
66 __futex_atomic_op("amoand.w.aqrl %[ov],%z[op],%[u]",
67 ret, oldval, uaddr, ~oparg);
68 break;
69 case FUTEX_OP_XOR:
70 __futex_atomic_op("amoxor.w.aqrl %[ov],%z[op],%[u]",
71 ret, oldval, uaddr, oparg);
72 break;
73 default:
74 ret = -ENOSYS;
75 }
76
77 if (!ret)
78 *oval = oldval;
79
80 return ret;
81}
82
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange a user futex word.
 *
 * @uval:   out parameter; receives the value read from *uaddr (written even
 *          when the comparison fails, so callers can inspect it).
 * @uaddr:  user-space address of the 32-bit futex word.
 * @oldval: expected current value; the store happens only if *uaddr matches.
 * @newval: value stored on a successful comparison.
 *
 * Implemented as an LR/SC loop: lr.w at label 1 loads the word, a mismatch
 * branches past the store to label 3, sc.w at label 2 retries from 1 while
 * the reservation is lost (bnez on the sc result).  Both the load and the
 * store have __ex_table entries (1b/2b -> 4b); on a fault the .fixup stub
 * sets ret to -EFAULT and jumps back to label 3 using @tmp as scratch.
 *
 * Returns 0 on success or comparison failure, -EFAULT on a fault.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;
	uintptr_t tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__enable_user_access();
	__asm__ __volatile__ (
	"1:	lr.w.aqrl %[v],%[u]			\n"
	"	bne %[v],%z[ov],3f			\n"
	"2:	sc.w.aqrl %[t],%z[nv],%[u]		\n"
	"	bnez %[t],1b				\n"
	"3:						\n"
	"	.section .fixup,\"ax\"			\n"
	"	.balign 4				\n"
	"4:	li %[r],%[e]				\n"
	"	jump 3b,%[t]				\n"
	"	.previous				\n"
	"	.section __ex_table,\"a\"		\n"
	"	.balign " RISCV_SZPTR "			\n"
	"	" RISCV_PTR " 1b, 4b			\n"
	"	" RISCV_PTR " 2b, 4b			\n"
	"	.previous				\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr), [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "i" (-EFAULT)
	: "memory");
	__disable_user_access();

	*uval = val;
	return ret;
}
119
120#endif
121