/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * ARMv7-M (Cortex-M) cache maintenance support.
 *
 * Unlike ARMv7-A/R, V7M has no CP15 cache operations: all cache
 * maintenance is performed through memory-mapped registers in the
 * System Control Block (BASEADDR_V7M_SCB + V7M_SCB_*), which the
 * macros below wrap.
 */
12#include <linux/linkage.h>
13#include <linux/init.h>
14#include <asm/assembler.h>
15#include <asm/errno.h>
16#include <asm/unwind.h>
17#include <asm/v7m.h>
18
19#include "proc-macros.S"
20
21
@ Generic V7M cache-register read: load the 32-bit memory-mapped
@ register at BASEADDR_V7M_SCB + \reg into \rt (\rt doubles as the
@ address scratch register).
.macro v7m_cache_read, rt, reg
	movw	\rt, #:lower16:BASEADDR_V7M_SCB + \reg
	movt	\rt, #:upper16:BASEADDR_V7M_SCB + \reg
	ldr	\rt, [\rt]
.endm
27
@ Generic V7M cache-operation write: store \rt to the memory-mapped
@ register at BASEADDR_V7M_SCB + \op, using \tmp for the address.
@ \c is an optional condition suffix (defaults to "al" = always) so
@ the whole sequence can be predicated.
.macro v7m_cacheop, rt, tmp, op, c = al
	movw\c	\tmp, #:lower16:BASEADDR_V7M_SCB + \op
	movt\c	\tmp, #:upper16:BASEADDR_V7M_SCB + \op
	str\c	\rt, [\tmp]
.endm
33
34
@ read_ccsidr: read the Cache Size ID register (CCSIDR) into \rt
.macro read_ccsidr, rt
	v7m_cache_read \rt, V7M_SCB_CCSIDR
.endm
38
@ read_clidr: read the Cache Level ID register (CLIDR) into \rt
.macro read_clidr, rt
	v7m_cache_read \rt, V7M_SCB_CLIDR
.endm
42
@ write_csselr: write \rt to the Cache Size Selection register
@ (CSSELR); \tmp is clobbered as the address scratch register
.macro write_csselr, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_CSSELR
.endm
46
47
48
49
@ dcisw: invalidate data cache line by set/way (\rt = set/way word,
@ \tmp clobbered)
.macro dcisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCISW
.endm
53
54
55
56
@ dccisw: clean and invalidate data cache line by set/way
@ (\rt = set/way word, \tmp clobbered)
.macro dccisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCISW
.endm
60
61
62
63
@ dccimvac: clean and invalidate data cache line by MVA to PoC.
@ One variant is generated per ARM condition suffix (plus the bare
@ name) so callers may predicate the operation, e.g. dccimvacne.
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dccimvac\c, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCIMVAC, \c
.endm
.endr
69
70
71
72
@ dcimvac: invalidate data cache line by MVA to PoC.
@ One variant per ARM condition suffix, as for dccimvac above.
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dcimvac\c, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
.endm
.endr
78
79
80
81
@ dccmvau: clean data cache line by MVA to PoU
.macro dccmvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAU
.endm
85
86
87
88
@ dccmvac: clean data cache line by MVA to PoC
.macro dccmvac, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAC
.endm
92
93
94
95
@ icimvau: invalidate instruction cache line by MVA to PoU
.macro icimvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_ICIMVAU
.endm
99
100
101
102
103
@ Invalidate the whole icache via ICIALLU.
@ The value written is ignored by ICIALLU, so \rt doubles as the
@ address scratch register; \rt is zeroed on exit.
.macro invalidate_icache, rt
	v7m_cacheop \rt, \rt, V7M_SCB_ICIALLU
	mov \rt, #0
.endm
108
109
110
111
112
@ Invalidate the branch predictor via BPIALL.
@ The value written is ignored by BPIALL, so \rt doubles as the
@ address scratch register; \rt is zeroed on exit.
.macro invalidate_bp, rt
	v7m_cacheop \rt, \rt, V7M_SCB_BPIALL
	mov \rt, #0
.endm
117
@ v7m_invalidate_l1()
@
@ Invalidate the L1 data cache by set/way without cleaning it first
@ (boot-path helper: prior cache contents must not be written back).
@ Set/way geometry is decoded from CCSIDR for cache level 0.
@ Corrupted registers: r0-r6.
ENTRY(v7m_invalidate_l1)
	mov	r0, #0				@ select cache level 0 (L1 D-cache)

	write_csselr r0, r1
	read_ccsidr r0

	movw	r1, #0x7fff
	and	r2, r1, r0, lsr #13		@ NumSets - 1

	movw	r1, #0x3ff

	and	r3, r1, r0, lsr #3		@ NumWays - 1
	add	r2, r2, #1			@ NumSets

	and	r0, r0, #0x7			@ log2(line size in words) - 2
	add	r0, r0, #4			@ SetShift

	clz	r1, r3				@ WayShift
	add	r4, r3, #1			@ NumWays
1:	sub	r2, r2, #1			@ NumSets--
	mov	r3, r4				@ Temp = NumWays
2:	subs	r3, r3, #1			@ Temp--
	mov	r5, r3, lsl r1
	mov	r6, r2, lsl r0
	orr	r5, r5, r6			@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
	dcisw	r5, r6
	bgt	2b
	cmp	r2, #0
	bgt	1b
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_invalidate_l1)
151
152
153
154
155
156
157
158
159
@ v7m_flush_icache_all()
@
@ Flush the whole I-cache (ICIALLU via invalidate_icache).
@
@ Registers:
@	r0 - set to 0
ENTRY(v7m_flush_icache_all)
	invalidate_icache r0
	ret	lr
ENDPROC(v7m_flush_icache_all)
164
165
166
167
168
169
170
171
@ v7m_flush_dcache_all()
@
@ Clean and invalidate the whole D-cache by set/way, iterating over
@ every cache level reported by CLIDR up to the Level of Coherency.
@
@ Corrupted registers: r0-r7, r9-r11
ENTRY(v7m_flush_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	read_clidr r0
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	finished			@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0				@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask of the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	skip				@ skip if no cache, or just i-cache
#ifdef CONFIG_PREEMPTION
	save_and_disable_irqs_notrace r9	@ make cssr&csidr read atomic
#endif
	write_csselr r10, r1			@ set current cache level
	isb					@ isb to sych the new cssr&csidr
	read_ccsidr r1				@ read the new csidr
#ifdef CONFIG_PREEMPTION
	restore_irqs_notrace r9
#endif
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum number on the way size
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
loop1:
	mov	r9, r7				@ create working copy of max index
loop2:
	lsl	r6, r4, r5
	orr	r11, r10, r6			@ factor way and cache number into r11
	lsl	r6, r9, r2
	orr	r11, r11, r6			@ factor index number into r11
	dccisw	r11, r6				@ clean/invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	loop2
	subs	r4, r4, #1			@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	flush_levels
finished:
	mov	r10, #0				@ switch back to cache level 0
	write_csselr r10, r3			@ select current cache level in cssr
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_flush_dcache_all)
225
226
227
228
229
230
231
232
233
234
235
236
@ v7m_flush_kern_cache_all()
@
@ Flush the entire cache system: D-cache by set/way (via
@ v7m_flush_dcache_all), then invalidate the I-cache in one shot.
@ Callee-saved registers clobbered by the D-cache flush are
@ preserved around the call.
ENTRY(v7m_flush_kern_cache_all)
	stmfd	sp!, {r4-r7, r9-r11, lr}
	bl	v7m_flush_dcache_all
	invalidate_icache r0
	ldmfd	sp!, {r4-r7, r9-r11, lr}
	ret	lr
ENDPROC(v7m_flush_kern_cache_all)
244
245
246
247
248
249
250
251
@ v7m_flush_user_cache_all()
@
@ No-op on V7M: falls through to v7m_flush_user_cache_range below,
@ which simply returns.
ENTRY(v7m_flush_user_cache_all)
	@ FALLTHROUGH to v7m_flush_user_cache_range

@ v7m_flush_user_cache_range(start, end, flags)
@
@ No cache maintenance is performed for user ranges on V7M; the
@ routine returns immediately.
@
@ - start - start address (may not be aligned)
@ - end   - end address (exclusive, may not be aligned)
@ - flags - vm_area_struct flags describing address space
ENTRY(v7m_flush_user_cache_range)
	ret	lr
ENDPROC(v7m_flush_user_cache_all)
ENDPROC(v7m_flush_user_cache_range)
271
272
273
274
275
276
277
278
279
280
281
282
283
284
@ v7m_coherent_kern_range(start, end)
@
@ Ensure the I and D caches are coherent within the specified region;
@ typically used after code has been written to a memory region that
@ will be executed (clean D to PoU, invalidate I to PoU, then
@ invalidate the branch predictor).
@
@ - start - virtual start address of region
@ - end   - virtual end address of region (exclusive)
ENTRY(v7m_coherent_kern_range)
	@ FALLTHROUGH to v7m_coherent_user_range

@ v7m_coherent_user_range(start, end)
@
@ Same operation as v7m_coherent_kern_range; V7M makes no
@ kernel/user distinction here.
ENTRY(v7m_coherent_user_range)
 UNWIND(.fnstart		)
	dcache_line_size r2, r3
	sub	r3, r2, #1			@ D-cache line mask
	bic	r12, r0, r3			@ align start down to line boundary
1:
	dccmvau	r12, r3				@ clean D line to PoU
	add	r12, r12, r2
	cmp	r12, r1
	blo	1b
	dsb	ishst
	icache_line_size r2, r3
	sub	r3, r2, #1			@ I-cache line mask
	bic	r12, r0, r3
2:
	icimvau	r12, r3				@ invalidate I line to PoU
	add	r12, r12, r2
	cmp	r12, r1
	blo	2b
	invalidate_bp r0
	dsb	ishst
	isb
	ret	lr
 UNWIND(.fnend		)
ENDPROC(v7m_coherent_kern_range)
ENDPROC(v7m_coherent_user_range)
331
332
333
334
335
336
337
338
339
340
@ v7m_flush_kern_dcache_area(void *addr, size_t size)
@
@ Ensure that the data held in the region [addr, addr + size) is
@ written back to memory and the corresponding lines invalidated.
@
@ - r0 = addr (kernel virtual address)
@ - r1 = size (bytes)
ENTRY(v7m_flush_kern_dcache_area)
	dcache_line_size r2, r3
	add	r1, r0, r1			@ r1 = end address
	sub	r3, r2, #1			@ line mask
	bic	r0, r0, r3			@ align start down to line boundary
1:
	dccimvac r0, r3		@ clean & invalidate D line / unified line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_flush_kern_dcache_area)
354
355
356
357
358
359
360
361
362
363
364
@ v7m_dma_inv_range(start, end)
@
@ Invalidate the data cache within the specified region before a DMA
@ operation writes into it. Partial lines at either end are
@ clean+invalidated (dccimvac) rather than just invalidated, so data
@ outside the range is not lost.
@
@ - r0 = start (virtual), r1 = end (virtual, exclusive)
v7m_dma_inv_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1			@ line mask
	tst	r0, r3				@ start mis-aligned?
	bic	r0, r0, r3
	dccimvacne r0, r3			@ clean+inv partial leading line
	addne	r0, r0, r2
	subne	r3, r2, #1			@ restore r3, corrupted by v7m's dccimvac
	tst	r1, r3				@ end mis-aligned?
	bic	r1, r1, r3
	dccimvacne r1, r3			@ clean+inv partial trailing line
	cmp	r0, r1
1:
	dcimvaclo r0, r3			@ invalidate whole lines in between
	addlo	r0, r0, r2
	cmplo	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_inv_range)
385
386
387
388
389
390
@ v7m_dma_clean_range(start, end)
@
@ Clean (write back) the data cache within the specified region in
@ preparation for DMA reading from it.
@
@ - r0 = start (virtual), r1 = end (virtual, exclusive)
v7m_dma_clean_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1			@ line mask
	bic	r0, r0, r3			@ align start down to line boundary
1:
	dccmvac r0, r3			@ clean D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_clean_range)
403
404
405
406
407
408
@ v7m_dma_flush_range(start, end)
@
@ Clean and invalidate the data cache within the specified region.
@
@ - r0 = start (virtual), r1 = end (virtual, exclusive)
ENTRY(v7m_dma_flush_range)
	dcache_line_size r2, r3
	sub	r3, r2, #1			@ line mask
	bic	r0, r0, r3			@ align start down to line boundary
1:
	dccimvac r0, r3			@ clean & invalidate D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_flush_range)
421
422
423
424
425
426
427
@ v7m_dma_map_area(start, size, dir)
@
@ Cache maintenance before handing a buffer to a DMA device:
@ invalidate for device-to-memory transfers, clean otherwise.
@
@ - r0 = start (kernel virtual), r1 = size (bytes), r2 = DMA direction
ENTRY(v7m_dma_map_area)
	add	r1, r1, r0			@ r1 = end address
	teq	r2, #DMA_FROM_DEVICE
	beq	v7m_dma_inv_range
	b	v7m_dma_clean_range
ENDPROC(v7m_dma_map_area)
434
435
436
437
438
439
440
@ v7m_dma_unmap_area(start, size, dir)
@
@ Cache maintenance after a DMA transfer completes: invalidate the
@ region unless the transfer was memory-to-device only (in which case
@ nothing needs doing).
@
@ - r0 = start (kernel virtual), r1 = size (bytes), r2 = DMA direction
ENTRY(v7m_dma_unmap_area)
	add	r1, r1, r0			@ r1 = end address
	teq	r2, #DMA_TO_DEVICE
	bne	v7m_dma_inv_range
	ret	lr
ENDPROC(v7m_dma_unmap_area)
447
	@ LoUIS flush is the same as a full flush on V7M
	.globl	v7m_flush_kern_cache_louis
	.equ	v7m_flush_kern_cache_louis, v7m_flush_kern_cache_all

	__INITDATA

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions v7m
455