/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Implementation of atomic bitops using atomic-fetch ops.
 * See Documentation/atomic_bitops.txt for details.
 */
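
/*
 * A clarifying note (not in the original header): BIT_WORD() and
 * BIT_MASK() come from linux/bits.h,
 *
 *	BIT_WORD(nr) == nr / BITS_PER_LONG		(word index of bit nr)
 *	BIT_MASK(nr) == 1UL << (nr % BITS_PER_LONG)	(in-word mask)
 *
 * so each op below offsets 'p' to the word holding bit 'nr' and does
 * an atomic read-modify-write with the mask.
 */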

static __always_inline void
arch_set_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}

static __always_inline void
arch_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}

static __always_inline void
arch_change_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}
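
/*
 * Ordering note (per Documentation/atomic_bitops.txt): the void RMW
 * ops above imply no memory barriers; the value-returning
 * test_and_*() ops below are fully ordered.
 */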

static __always_inline int
arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/*
	 * No READ_ONCE() fast path: test_and_set_bit() must be fully
	 * ordered even when it observes the bit already set, so the
	 * atomic RMW is issued unconditionally.
	 */
	old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

static __always_inline int
arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Likewise, full ordering is required even when the bit is
	 * already clear, so no early-return fast path. */
	old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

static __always_inline int
arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
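
/*
 * Usage sketch (illustrative only; 'flags' and do_something() are
 * hypothetical caller-side names):
 *
 *	static unsigned long flags[BITS_TO_LONGS(64)];
 *
 *	arch_set_bit(3, flags);
 *	if (arch_test_and_clear_bit(3, flags))
 *		do_something();	// bit 3 was set and is now clear
 */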
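
/*
 * The include below layers the KASAN/KCSAN-instrumented wrappers
 * (set_bit(), test_and_set_bit(), ...) on top of the arch_*()
 * primitives defined above.
 */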
#include <asm-generic/bitops/instrumented-atomic.h>

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */