1
2
3
4
5
6
7
8
9
10#ifndef __ASM_PROC_SYSTEM_H
11#define __ASM_PROC_SYSTEM_H
12
13
14
15
16#ifdef CONFIG_ARM64
17
18
19
20
21
/*
 * local_irq_save(flags) - AArch64: save the current DAIF exception-mask
 * state into @flags, then mask both IRQ and FIQ.
 *
 * "msr daifset, #3" sets the I (IRQ) and F (FIQ) mask bits; the D and A
 * bits are left unchanged.  @flags must be an integer lvalue wide enough
 * to hold the DAIF value (e.g. unsigned long).  The "memory" clobber
 * stops the compiler from moving memory accesses into or out of the
 * critical section that this macro opens.
 */
#define local_irq_save(flags) \
	({ \
	asm volatile( \
	"mrs	%0, daif\n" \
	"msr	daifset, #3" \
	: "=r" (flags) \
	: \
	: "memory"); \
	})
31
32
33
34
/*
 * local_irq_restore(flags) - AArch64: write @flags back into the DAIF
 * register, restoring the exception-mask state previously captured by
 * local_irq_save().  Note this restores all four mask bits (D, A, I, F),
 * not just the two that local_irq_save() modifies.
 */
#define local_irq_restore(flags) \
	({ \
	asm volatile( \
	"msr	daif, %0" \
	: \
	: "r" (flags) \
	: "memory"); \
	})
43
44
45
46
/*
 * local_irq_enable() - AArch64: unmask IRQ and FIQ unconditionally.
 * "msr daifclr, #3" clears the I and F bits of DAIF.
 */
#define local_irq_enable() \
	({ \
	asm volatile( \
	"msr	daifclr, #3" \
	: \
	: \
	: "memory"); \
	})
55
56
57
58
/*
 * local_irq_disable() - AArch64: mask IRQ and FIQ unconditionally,
 * without saving the previous state.  "msr daifset, #3" sets the I and
 * F bits of DAIF.
 */
#define local_irq_disable() \
	({ \
	asm volatile( \
	"msr	daifset, #3" \
	: \
	: \
	: "memory"); \
	})
67
68#else
69
/*
 * local_irq_save(x) - ARM32: save the current CPSR into @x, then mask
 * IRQs by setting the I bit (#128 == bit 7) and writing the result back
 * via cpsr_c (the control field only, so the condition flags are not
 * touched).  FIQ state is left as-is.  The two "=r" outputs are
 * guaranteed distinct registers, and %0 is fully written by the mrs
 * before %1 is computed from it, so no early-clobber is needed here.
 */
#define local_irq_save(x) \
	({ \
		unsigned long temp; \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ local_irq_save\n" \
"	orr	%1, %0, #128\n" \
"	msr	cpsr_c, %1" \
	: "=r" (x), "=r" (temp) \
	: \
	: "memory"); \
	})
81
82
83
84
/*
 * local_irq_enable() - ARM32: unmask IRQs by clearing the CPSR I bit
 * (#128) with bic and writing the control field back.  The previous
 * mask state is not saved.
 */
#define local_irq_enable() \
	({ \
		unsigned long temp; \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ local_irq_enable\n" \
"	bic	%0, %0, #128\n" \
"	msr	cpsr_c, %0" \
	: "=r" (temp) \
	: \
	: "memory"); \
	})
96
97
98
99
/*
 * local_irq_disable() - ARM32: mask IRQs by setting the CPSR I bit
 * (#128), without saving the previous state.
 */
#define local_irq_disable() \
	({ \
		unsigned long temp; \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ local_irq_disable\n" \
"	orr	%0, %0, #128\n" \
"	msr	cpsr_c, %0" \
	: "=r" (temp) \
	: \
	: "memory"); \
	})
111
112
113
114
/*
 * __stf() - ARM32: enable (unmask) FIQs by clearing the CPSR F bit
 * (#64 == bit 6).  IRQ masking is unaffected.
 */
#define __stf() \
	({ \
		unsigned long temp; \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ stf\n" \
"	bic	%0, %0, #64\n" \
"	msr	cpsr_c, %0" \
	: "=r" (temp) \
	: \
	: "memory"); \
	})
126
127
128
129
/*
 * __clf() - ARM32: disable (mask) FIQs by setting the CPSR F bit (#64).
 * IRQ masking is unaffected.
 */
#define __clf() \
	({ \
		unsigned long temp; \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ clf\n" \
"	orr	%0, %0, #64\n" \
"	msr	cpsr_c, %0" \
	: "=r" (temp) \
	: \
	: "memory"); \
	})
141
142
143
144
/*
 * local_save_flags(x) - ARM32: read the current CPSR into @x without
 * changing any processor state.  Suitable for a later
 * local_irq_restore(x).
 */
#define local_save_flags(x) \
	({ \
	__asm__ __volatile__( \
	"mrs	%0, cpsr		@ local_save_flags\n" \
	: "=r" (x) \
	: \
	: "memory"); \
	})
153
154
155
156
/*
 * local_irq_restore(x) - ARM32: write @x back to the CPSR control field
 * (cpsr_c: mode bits plus the I and F masks); the condition flags are
 * not touched.  Unlike the macros above this is a single bare statement
 * rather than a ({ }) statement-expression, which is sufficient since
 * callers use it as a plain statement.
 */
#define local_irq_restore(x) \
	__asm__ __volatile__( \
	"msr	cpsr_c, %0		@ local_irq_restore\n" \
	: \
	: "r" (x) \
	: "memory")
163
164#endif
165
/*
 * On StrongARM SA-1100/SA-110 the SWP instruction is avoided as
 * unreliable (hence the macro name), and AArch64 removed SWP from the
 * instruction set entirely.  Defining swp_is_buggy makes __xchg() below
 * fall back to a plain load/store sequence with local interrupts
 * masked instead of using SWP/SWPB.
 */
#if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110) || \
	defined(CONFIG_ARM64)
#define swp_is_buggy
#endif
182
/*
 * __xchg() - exchange @x with the value at @ptr, returning the old
 * value.  @size selects the access width: 1 (byte) or 4 (word).
 *
 * Any other size calls the extern __bad_xchg(), which is presumably
 * left undefined so unsupported sizes fail at link time (the usual
 * kernel trick -- NOTE(review): confirm __bad_xchg has no definition).
 *
 * With swp_is_buggy defined (SA-1100/SA-110, or ARM64 which has no SWP
 * instruction) the exchange is a plain read-modify-write bracketed by
 * local_irq_save()/local_irq_restore(): atomic only against interrupts
 * on the local CPU, not against other processors or bus masters.
 * Otherwise the SWP/SWPB instructions do the exchange atomically; the
 * "=&r" early-clobber keeps the result register distinct from the
 * x/ptr inputs, and "memory" orders the access.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	extern void __bad_xchg(volatile void *, int);
	unsigned long ret;
#ifdef swp_is_buggy
	unsigned long flags;	/* saved interrupt state for the fallback path */
#endif

	switch (size) {
#ifdef swp_is_buggy
	case 1:
		/* Byte exchange under masked local interrupts. */
		local_irq_save(flags);
		ret = *(volatile unsigned char *)ptr;
		*(volatile unsigned char *)ptr = x;
		local_irq_restore(flags);
		break;

	case 4:
		/* Word exchange under masked local interrupts. */
		local_irq_save(flags);
		ret = *(volatile unsigned long *)ptr;
		*(volatile unsigned long *)ptr = x;
		local_irq_restore(flags);
		break;
#else
	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
					: "=&r" (ret)
					: "r" (x), "r" (ptr)
					: "memory");
		break;
	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
					: "=&r" (ret)
					: "r" (x), "r" (ptr)
					: "memory");
		break;
#endif
	default: __bad_xchg(ptr, size), ret = 0;
	}

	return ret;
}
223
224#endif
225