1#ifndef _ASM_X86_ATOMIC_H
2#define _ASM_X86_ATOMIC_H
3
4#include <linux/bitops.h>
5#include <linux/compiler.h>
6#include <linux/types.h>
7#include <asm/processor.h>
8
/*
 * Atomic integer type.  "volatile" forces the compiler to emit a real
 * memory access for every read and write of @counter rather than caching
 * it in a register; the locked instructions below provide the actual
 * cross-CPU atomicity.
 */
typedef struct { volatile int counter; } atomic_t;
10
11
12
13
14
15
/* Static initializer for an atomic_t, e.g.: static atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
17
18
19
20
21
22
23
24static inline int atomic_read(const atomic_t *v)
25{
26 return READ_ONCE((v)->counter);
27}
28
29
30
31
32
33
34
35
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 *
 * NOTE(review): this is a plain store (no WRITE_ONCE()), relying on the
 * "volatile" qualifier of @counter to force a single real store -- the
 * asymmetric counterpart of atomic_read()'s READ_ONCE(); confirm this
 * matches the tree's intended convention.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
40
41
42
43
44
45
46
47
48static inline void atomic_add(int i, atomic_t *v)
49{
50 asm volatile(LOCK_PREFIX "addl %1,%0"
51 : "+m" (v->counter)
52 : "ir" (i));
53}
54
55
56
57
58
59
60
61
62static inline void atomic_sub(int i, atomic_t *v)
63{
64 asm volatile(LOCK_PREFIX "subl %1,%0"
65 : "+m" (v->counter)
66 : "ir" (i));
67}
68
69
70
71
72
73
74
75static inline void atomic_inc(atomic_t *v)
76{
77 asm volatile(LOCK_PREFIX "incl %0"
78 : "+m" (v->counter));
79}
80
81
82
83
84
85
86
87static inline void atomic_dec(atomic_t *v)
88{
89 asm volatile(LOCK_PREFIX "decl %0"
90 : "+m" (v->counter));
91}
92
93
94
95
96
97
98
99
100static inline short int atomic_inc_short(short int *v)
101{
102 asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
103 return *v;
104}
105
106
/*
 * atomic_clear_mask - atomically clear the bits of @mask in the int at @addr
 * ("lock; andl" with the complemented mask).
 *
 * NOTE(review): *(addr) is passed as a plain input "m" operand even though
 * the instruction writes it; the "memory" clobber is what informs the
 * compiler of the modification.  Also, @mask is not cast to unsigned here,
 * unlike atomic_set_mask() below -- confirm the asymmetry is intentional.
 */
#define atomic_clear_mask(mask, addr) \
	asm volatile(LOCK_PREFIX "andl %0,%1" \
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
110
/*
 * atomic_set_mask - atomically set the bits of @mask in the int at @addr
 * ("lock; orl").  @mask is forced to unsigned before use.
 *
 * NOTE(review): as with atomic_clear_mask(), the target is an input-only
 * "m" operand; the "memory" clobber tells the compiler *(addr) changes.
 */
#define atomic_set_mask(mask, addr) \
	asm volatile(LOCK_PREFIX "orl %0,%1" \
		     : : "r" ((unsigned)(mask)), "m" (*(addr)) \
		     : "memory")
115
116#endif
117