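/*
 * Atomic operations for the 32-bit TILE architecture, built on
 * out-of-line helper routines (_atomic_xchg(), _atomic_xchg_add(),
 * etc.) that serialize through the hashed lock table declared below.
 */
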
#ifndef _ASM_TILE_ATOMIC_32_H
#define _ASM_TILE_ATOMIC_32_H

#include <asm/barrier.h>
#include <arch/chip.h>

#ifndef __ASSEMBLY__

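/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */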
static inline void atomic_add(int i, atomic_t *v)
{
	_atomic_xchg_add(&v->counter, i);
}

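/**
 * atomic_add_return - add integer and return new value
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @v + @i.
 */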
static inline int atomic_add_return(int i, atomic_t *v)
{
	smp_mb();  /* barrier for proper semantics */
	return _atomic_xchg_add(&v->counter, i) + i;
}

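/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */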
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	smp_mb();  /* barrier for proper semantics */
	return _atomic_xchg_add_unless(&v->counter, a, u);
}

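/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @n: required value
 *
 * Atomically sets the value of @v to @n.
 *
 * A plain store is not sufficient here: it could be lost if it raced
 * with one of the lock-based read-modify-write helpers, so the value
 * is installed with _atomic_xchg() instead.
 */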
static inline void atomic_set(atomic_t *v, int n)
{
	_atomic_xchg(&v->counter, n);
}

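/* A 64-bit atomic type, built on the same lock-based helper routines. */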
typedef struct {
	long long counter;
} atomic64_t;

#define ATOMIC64_INIT(val) { (val) }

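/**
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */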
static inline long long atomic64_read(const atomic64_t *v)
{
	/*
	 * A plain 64-bit load can't be done atomically here, so use the
	 * locked add helper with an addend of zero to get a consistent
	 * snapshot of the counter.
	 */
	return _atomic64_xchg_add((long long *)&v->counter, 0);
}

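/**
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */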
static inline void atomic64_add(long long i, atomic64_t *v)
{
	_atomic64_xchg_add(&v->counter, i);
}

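/**
 * atomic64_add_return - add integer and return new value
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns @v + @i.
 */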
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	smp_mb();  /* barrier for proper semantics */
	return _atomic64_xchg_add(&v->counter, i) + i;
}

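/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if the add was performed, zero otherwise.
 */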
static inline long long atomic64_add_unless(atomic64_t *v, long long a,
					    long long u)
{
	smp_mb();  /* barrier for proper semantics */
	return _atomic64_xchg_add_unless(&v->counter, a, u) != u;
}

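/**
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @n: required value
 *
 * Atomically sets the value of @v to @n.  As with atomic_set(), a plain
 * store could race with the lock-based helpers, so _atomic64_xchg() is
 * used instead.
 */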
static inline void atomic64_set(atomic64_t *v, long long n)
{
	_atomic64_xchg(&v->counter, n);
}

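/* The remaining 64-bit operations are derived from the primitives above. */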
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_return(i, v)	atomic64_add_return(-(i), (v))
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_sub(i, v)		atomic64_add(-(i), (v))
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

#endif /* !__ASSEMBLY__ */
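
/*
 * Internal definitions for the lock-based atomic implementation.
 *
 * atomic_locks[] is a hash table of lock words used to serialize the
 * atomic operations above.  ATOMIC_HASH_SIZE is the number of entries:
 * PAGE_SIZE / 8, so the table of ints occupies half a page.
 */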
#define ATOMIC_HASH_SHIFT	(PAGE_SHIFT - 3)
#define ATOMIC_HASH_SIZE	(1 << ATOMIC_HASH_SHIFT)

#ifndef __ASSEMBLY__
extern int atomic_locks[];
#endif
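
/*
 * Code that can fault while holding one of the atomic locks must keep
 * the pointer to the lock in this fixed register, so that the fault
 * handler can release the lock on its behalf.
 */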
#define ATOMIC_LOCK_REG 20
#define ATOMIC_LOCK_REG_NAME r20

#ifndef __ASSEMBLY__

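/* Called during early setup to initialize the per-cpu atomic lock state. */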
void __init_atomic_per_cpu(void);

#ifdef CONFIG_SMP
/* Allow the page-fault path to release an atomic lock held at fault time. */
void __atomic_fault_unlock(int *lock_ptr);
#endif

/* Return a pointer to the lock word that covers the given address. */
int *__atomic_hashed_lock(volatile void *v);

/*
 * Low-level out-of-line helpers.  The 32-bit variants report their
 * result and any fault via struct __get_user; the 64-bit variants
 * return their result directly.
 */
struct __get_user {
	unsigned long val;
	int err;
};
extern struct __get_user __atomic_cmpxchg(volatile int *p,
					  int *lock, int o, int n);
extern struct __get_user __atomic_xchg(volatile int *p, int *lock, int n);
extern struct __get_user __atomic_xchg_add(volatile int *p, int *lock, int n);
extern struct __get_user __atomic_xchg_add_unless(volatile int *p,
						  int *lock, int o, int n);
extern struct __get_user __atomic_or(volatile int *p, int *lock, int n);
extern struct __get_user __atomic_andn(volatile int *p, int *lock, int n);
extern struct __get_user __atomic_xor(volatile int *p, int *lock, int n);
extern long long __atomic64_cmpxchg(volatile long long *p, int *lock,
				    long long o, long long n);
extern long long __atomic64_xchg(volatile long long *p, int *lock, long long n);
extern long long __atomic64_xchg_add(volatile long long *p, int *lock,
				     long long n);
extern long long __atomic64_xchg_add_unless(volatile long long *p,
					    int *lock, long long o, long long n);

/* Generate a failure return for a bad user-space address. */
struct __get_user __atomic_bad_address(int __user *addr);

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_TILE_ATOMIC_32_H */