#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H
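
/*
 * Atomic operations for resource counting and similar uses where the
 * compiler cannot guarantee atomicity on its own.
 */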
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i) { (i) }
#define ATOMIC64_INIT(i) { (i) }
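
/*
 * ACCESS_ONCE() forces a volatile access so the compiler cannot tear,
 * cache, or re-fetch the counter value.
 */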
#define atomic_read(v) ACCESS_ONCE((v)->counter)
#define atomic64_read(v) ACCESS_ONCE((v)->counter)

#define atomic_set(v,i) (((v)->counter) = (i))
#define atomic64_set(v,i) (((v)->counter) = (i))
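
/*
 * Generate atomic add/sub as a compare-and-exchange loop: re-read the
 * counter and retry until the cmpxchg finds it unchanged.
 */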
#define ATOMIC_OP(op, c_op) \
static __inline__ int \
ia64_atomic_##op (int i, atomic_t *v) \
{ \
        __s32 old, new; \
        CMPXCHG_BUGCHECK_DECL \
 \
        do { \
                CMPXCHG_BUGCHECK(v); \
                old = atomic_read(v); \
                new = old c_op i; \
        } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
        return new; \
}

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)

#undef ATOMIC_OP
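
/*
 * IA-64's fetchadd4/fetchadd8 instructions only accept the immediates
 * -16, -8, -4, -1, 1, 4, 8, and 16, so the fast path is taken only for
 * those compile-time constants; everything else falls back to the
 * cmpxchg loop above.
 */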
#define atomic_add_return(i,v) \
({ \
        int __ia64_aar_i = (i); \
        (__builtin_constant_p(i) \
         && ((__ia64_aar_i == 1) || (__ia64_aar_i == 4) \
             || (__ia64_aar_i == 8) || (__ia64_aar_i == 16) \
             || (__ia64_aar_i == -1) || (__ia64_aar_i == -4) \
             || (__ia64_aar_i == -8) || (__ia64_aar_i == -16))) \
                ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
                : ia64_atomic_add(__ia64_aar_i, v); \
})

#define atomic_sub_return(i,v) \
({ \
        int __ia64_asr_i = (i); \
        (__builtin_constant_p(i) \
         && ((__ia64_asr_i == 1) || (__ia64_asr_i == 4) \
             || (__ia64_asr_i == 8) || (__ia64_asr_i == 16) \
             || (__ia64_asr_i == -1) || (__ia64_asr_i == -4) \
             || (__ia64_asr_i == -8) || (__ia64_asr_i == -16))) \
                ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
                : ia64_atomic_sub(__ia64_asr_i, v); \
})
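
/*
 * 64-bit counterparts of the cmpxchg loop above, operating on the
 * 8-byte atomic64_t counter.
 */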
#define ATOMIC64_OP(op, c_op) \
static __inline__ long \
ia64_atomic64_##op (__s64 i, atomic64_t *v) \
{ \
        __s64 old, new; \
        CMPXCHG_BUGCHECK_DECL \
 \
        do { \
                CMPXCHG_BUGCHECK(v); \
                old = atomic64_read(v); \
                new = old c_op i; \
        } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
        return new; \
}

ATOMIC64_OP(add, +)
ATOMIC64_OP(sub, -)

#undef ATOMIC64_OP

#define atomic64_add_return(i,v) \
({ \
        long __ia64_aar_i = (i); \
        (__builtin_constant_p(i) \
         && ((__ia64_aar_i == 1) || (__ia64_aar_i == 4) \
             || (__ia64_aar_i == 8) || (__ia64_aar_i == 16) \
             || (__ia64_aar_i == -1) || (__ia64_aar_i == -4) \
             || (__ia64_aar_i == -8) || (__ia64_aar_i == -16))) \
                ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
                : ia64_atomic64_add(__ia64_aar_i, v); \
})

#define atomic64_sub_return(i,v) \
({ \
        long __ia64_asr_i = (i); \
        (__builtin_constant_p(i) \
         && ((__ia64_asr_i == 1) || (__ia64_asr_i == 4) \
             || (__ia64_asr_i == 8) || (__ia64_asr_i == 16) \
             || (__ia64_asr_i == -1) || (__ia64_asr_i == -4) \
             || (__ia64_asr_i == -8) || (__ia64_asr_i == -16))) \
                ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
                : ia64_atomic64_sub(__ia64_asr_i, v); \
})
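
/*
 * Expose the generic cmpxchg()/xchg() primitives directly on the
 * counter word.
 */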
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
        (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
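
/*
 * Atomically add @a to @v, unless @v is currently @u.  Returns the old
 * value of @v, so the caller can tell whether the add happened.
 */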
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
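
/*
 * 64-bit variant.  Note the different return convention: this one
 * returns non-zero if the add was performed, not the old value.
 */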
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
        return atomic64_add_return(i, v) < 0;
}
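
/*
 * The remaining operations are all derived from the return-value
 * primitives above.
 */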
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v) (void)atomic_add_return((i), (v))
#define atomic_sub(i,v) (void)atomic_sub_return((i), (v))
#define atomic_inc(v) atomic_add(1, (v))
#define atomic_dec(v) atomic_sub(1, (v))

#define atomic64_add(i,v) (void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v) (void)atomic64_sub_return((i), (v))
#define atomic64_inc(v) atomic64_add(1, (v))
#define atomic64_dec(v) atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */