/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are slower than
 * regular operations.
 *
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_ATOMIC_H
#define __ASM_AVR32_ATOMIC_H

#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)

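/*
 * All operations below are built on the same lock-flag retry loop:
 * ssrf 5 sets the L (lock) flag in the status register, stcond performs
 * the store only if L is still set and records the outcome in the Z
 * flag, and L is cleared whenever an interrupt or exception is taken.
 * A failed stcond therefore means the load/modify/store sequence was
 * disturbed, and brne restarts it from the top.
 */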
#define ATOMIC_OP_RETURN(op, asm_op, asm_con)				\
static inline int __atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int result;							\
									\
	asm volatile(							\
		"/* atomic_" #op "_return */\n"				\
		"1:	ssrf	5\n"					\
		"	ld.w	%0, %2\n"				\
		"	" #asm_op "	%0, %3\n"			\
		"	stcond	%1, %0\n"				\
		"	brne	1b"					\
		: "=&r" (result), "=o" (v->counter)			\
		: "m" (v->counter), #asm_con (i)			\
		: "cc");						\
									\
	return result;							\
}

ATOMIC_OP_RETURN(sub, sub, rKs21)
ATOMIC_OP_RETURN(add, add, r)

#define ATOMIC_OP(op, asm_op)						\
ATOMIC_OP_RETURN(op, asm_op, r)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	(void)__atomic_##op##_return(i, v);				\
}

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, eor)

#undef ATOMIC_OP
#undef ATOMIC_OP_RETURN
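/*
 * The sub instruction accepts a signed 21-bit immediate (Ks21,
 * -1048576..1048575), while add only takes a register operand, which
 * is why __atomic_sub_return() above uses the rKs21 constraint and
 * why adding a small constant is implemented as subtracting its
 * negation.  The asymmetric range below is chosen so that the
 * negation of every accepted value still fits in Ks21.
 */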
#define IS_21BIT_CONST(i)						\
	(__builtin_constant_p(i) && ((i) >= -1048575) && ((i) <= 1048576))

/*
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v. Returns the resulting value.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	if (IS_21BIT_CONST(i))
		return __atomic_sub_return(-i, v);

	return __atomic_add_return(i, v);
}

/*
 * atomic_sub_return - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v. Returns the resulting value.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	if (IS_21BIT_CONST(i))
		return __atomic_sub_return(i, v);

	return __atomic_add_return(-i, v);
}

/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int tmp, old = atomic_read(v);

	if (IS_21BIT_CONST(a)) {
		asm volatile(
			"/* __atomic_sub_unless */\n"
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	cp.w	%0, %4\n"
			"	breq	1f\n"
			"	sub	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b\n"
			"1:"
			: "=&r"(tmp), "=o"(v->counter)
			: "m"(v->counter), "rKs21"(-a), "rKs21"(u)
			: "cc", "memory");
	} else {
		asm volatile(
			"/* __atomic_add_unless */\n"
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	cp.w	%0, %4\n"
			"	breq	1f\n"
			"	add	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b\n"
			"1:"
			: "=&r"(tmp), "=o"(v->counter)
			: "m"(v->counter), "r"(a), "ir"(u)
			: "cc", "memory");
	}

	return old;
}
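/*
 * Note that the value returned above is sampled with atomic_read()
 * before the store-conditional loop runs; callers such as the generic
 * atomic_add_unless() compare it against @u to tell whether the
 * addition took place.
 */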

#undef IS_21BIT_CONST

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 */
static inline int atomic_sub_if_positive(int i, atomic_t *v)
{
	int result;

	asm volatile(
		"/* atomic_sub_if_positive */\n"
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	sub	%0, %3\n"
		"	brlt	1f\n"
		"	stcond	%1, %0\n"
		"	brne	1b\n"
		"1:"
		: "=&r"(result), "=o"(v->counter)
		: "m"(v->counter), "ir"(i)
		: "cc", "memory");

	return result;
}
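/*
 * If the subtraction yields a negative result, brlt skips the stcond
 * above, leaving @v untouched; the negative difference is still
 * returned, which is what atomic_dec_if_positive() below relies on.
 */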

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
#define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))

#define atomic_sub(i, v)	(void)atomic_sub_return(i, v)
#define atomic_add(i, v)	(void)atomic_add_return(i, v)
#define atomic_dec(v)		atomic_sub(1, (v))
#define atomic_inc(v)		atomic_add(1, (v))

#define atomic_dec_return(v)	atomic_sub_return(1, v)
#define atomic_inc_return(v)	atomic_add_return(1, v)

#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)

#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
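/*
 * Typical use of the wrappers above, e.g. for reference counting (the
 * object and release function are illustrative only, not part of this
 * API):
 *
 *	atomic_inc(&obj->refcnt);
 *	...
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		release_obj(obj);
 */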

#endif /* __ASM_AVR32_ATOMIC_H */