/*
 * 64-bit atomic bit operations for the Tile architecture.
 *
 * NOTE(review): the original file header (copyright/license block) was
 * lost in extraction — restore it from the upstream kernel source.
 */
15#ifndef _ASM_TILE_BITOPS_64_H
16#define _ASM_TILE_BITOPS_64_H
17
18#include <linux/compiler.h>
19#include <asm/cmpxchg.h>
20
21
22
23static inline void set_bit(unsigned nr, volatile unsigned long *addr)
24{
25 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
26 __insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask);
27}
28
29static inline void clear_bit(unsigned nr, volatile unsigned long *addr)
30{
31 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
32 __insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask);
33}
34
/*
 * clear_bit() above does not itself imply a memory barrier; callers
 * that need ordering around it use these explicit full barriers.
 */
#define smp_mb__before_clear_bit() smp_mb()
#define smp_mb__after_clear_bit() smp_mb()
37
38
39static inline void change_bit(unsigned nr, volatile unsigned long *addr)
40{
41 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
42 unsigned long guess, oldval;
43 addr += nr / BITS_PER_LONG;
44 oldval = *addr;
45 do {
46 guess = oldval;
47 oldval = cmpxchg(addr, guess, guess ^ mask);
48 } while (guess != oldval);
49}
50
51
52
53
54
55
56
57
58
59static inline int test_and_set_bit(unsigned nr, volatile unsigned long *addr)
60{
61 int val;
62 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
63 smp_mb();
64 val = (__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask)
65 & mask) != 0;
66 barrier();
67 return val;
68}
69
70
71static inline int test_and_clear_bit(unsigned nr, volatile unsigned long *addr)
72{
73 int val;
74 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
75 smp_mb();
76 val = (__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask)
77 & mask) != 0;
78 barrier();
79 return val;
80}
81
82
83static inline int test_and_change_bit(unsigned nr,
84 volatile unsigned long *addr)
85{
86 unsigned long mask = (1UL << (nr % BITS_PER_LONG));
87 unsigned long guess, oldval;
88 addr += nr / BITS_PER_LONG;
89 oldval = *addr;
90 do {
91 guess = oldval;
92 oldval = cmpxchg(addr, guess, guess ^ mask);
93 } while (guess != oldval);
94 return (oldval & mask) != 0;
95}
96
97#include <asm-generic/bitops/ext2-atomic-setbit.h>
98
99#endif
100