/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/mm/cache-v7m.S
 *
 *  Based on linux/arch/arm/mm/cache-v7.S
 *
 *  Copyright (C) 2001 Deep Blue Solutions Ltd.
 *  Copyright (C) 2005 ARM Ltd.
 *
 *  ARMv7-M cache maintenance operations.  Unlike ARMv7-A/R, v7-M has no
 *  cache maintenance instructions; all operations are performed by
 *  writing to memory-mapped registers in the System Control Block (SCB).
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/unwind.h>
#include <asm/v7m.h>

#include "proc-macros.S"

@ Generic V7M read macro for memory-mapped cache operations:
@ load the SCB register at offset \reg into \rt.
.macro v7m_cache_read, rt, reg
	movw	\rt, #:lower16:BASEADDR_V7M_SCB + \reg
	movt	\rt, #:upper16:BASEADDR_V7M_SCB + \reg
	ldr	\rt, [\rt]
.endm

@ Generic V7M write macro for memory-mapped cache operations:
@ store \rt to the SCB register at offset \op.  The whole sequence may
@ be predicated by condition \c (default "al"); \tmp is clobbered.
.macro v7m_cacheop, rt, tmp, op, c = al
	movw\c	\tmp, #:lower16:BASEADDR_V7M_SCB + \op
	movt\c	\tmp, #:upper16:BASEADDR_V7M_SCB + \op
	str\c	\rt, [\tmp]
.endm

@ read_ccsidr - read the Cache Size ID Register into \rt
.macro read_ccsidr, rt
	v7m_cache_read \rt, V7M_SCB_CCSIDR
.endm

@ read_clidr - read the Cache Level ID Register into \rt
.macro read_clidr, rt
	v7m_cache_read \rt, V7M_SCB_CLIDR
.endm

@ write_csselr - write \rt to the Cache Size Selection Register
@ (\tmp is clobbered by the address sequence)
.macro write_csselr, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_CSSELR
.endm

@ dcisw - invalidate data cache line by set/way
.macro dcisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCISW
.endm

@ dccisw - clean and invalidate data cache line by set/way
.macro dccisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCISW
.endm

@ dccimvac - clean and invalidate data cache line by MVA to PoC.
@ Generated once per condition-code suffix so callers can predicate
@ the operation (e.g. dccimvacne).
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dccimvac\c, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCIMVAC, \c
.endm
.endr

@ dcimvac - invalidate data cache line by MVA to PoC
.macro dcimvac, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC
.endm

@ dccmvau - clean data cache line by MVA to PoU
.macro dccmvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAU
.endm

@ dccmvac - clean data cache line by MVA to PoC
.macro dccmvac, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAC
.endm

@ icimvau - invalidate instruction cache line by MVA to PoU
.macro icimvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_ICIMVAU
.endm

@ Invalidate the whole I-cache via ICIALLU.  The data written is
@ ignored by the register, so \rt doubles as the address scratch;
@ it is zeroed on return.
.macro invalidate_icache, rt
	v7m_cacheop \rt, \rt, V7M_SCB_ICIALLU
	mov \rt, #0
.endm

@ Invalidate the branch predictor via BPIALL.  The data written is
@ ignored by the register, so \rt doubles as the address scratch;
@ it is zeroed on return.
.macro invalidate_bp, rt
	v7m_cacheop \rt, \rt, V7M_SCB_BPIALL
	mov \rt, #0
.endm

/*
 *	v7m_invalidate_l1()
 *
 *	Invalidate the L1 data cache by set/way without cleaning it first,
 *	for use before the cache holds valid data (e.g. at boot).
 *
 *	CCSIDR layout used below: NumSets-1 in bits [27:13], Associativity-1
 *	in bits [12:3], log2(line words)-2 in bits [2:0].
 *
 *	Corrupted registers: r0-r6
 */
ENTRY(v7m_invalidate_l1)
	mov	r0, #0
	write_csselr r0, r1		@ select level 0 data/unified cache
	read_ccsidr r0

	movw	r1, #0x7fff
	and	r2, r1, r0, lsr #13	@ NumSets - 1

	movw	r1, #0x3ff
	and	r3, r1, r0, lsr #3	@ NumWays - 1
	add	r2, r2, #1		@ NumSets

	and	r0, r0, #0x7		@ line size field
	add	r0, r0, #4		@ SetShift = log2(line bytes)

	clz	r1, r3			@ WayShift
	add	r4, r3, #1		@ NumWays
1:	sub	r2, r2, #1		@ NumSets--
	mov	r3, r4			@ Temp = NumWays
2:	subs	r3, r3, #1		@ Temp--
	mov	r5, r3, lsl r1
	mov	r6, r2, lsl r0
	orr	r5, r5, r6		@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
	dcisw	r5, r6
	bgt	2b
	cmp	r2, #0
	bgt	1b
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_invalidate_l1)

/*
 *	v7m_flush_icache_all()
 *
 *	Invalidate the whole I-cache.
 *
 *	Registers:
 *	r0 - set to 0
 */
ENTRY(v7m_flush_icache_all)
	invalidate_icache r0
	ret	lr
ENDPROC(v7m_flush_icache_all)

/*
 *	v7m_flush_dcache_all()
 *
 *	Flush the whole D-cache: clean+invalidate every level up to the
 *	Level of Coherency by set/way.
 *
 *	Corrupted registers: r0-r7, r9-r11
 */
ENTRY(v7m_flush_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	read_clidr r0
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	finished			@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0				@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask of the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	skip				@ skip if no cache, or just i-cache
#ifdef CONFIG_PREEMPT
	save_and_disable_irqs_notrace r9	@ make cssr&csidr read atomic
#endif
	write_csselr r10, r1			@ set current cache level
	isb					@ isb to sych the new cssr&csidr
	read_ccsidr r1				@ read the new csidr
#ifdef CONFIG_PREEMPT
	restore_irqs_notrace r9
#endif
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum number on the way size
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
loop1:
	mov	r9, r7				@ create working copy of max index
loop2:
	lsl	r6, r4, r5
	orr	r11, r10, r6			@ factor way and cache number into r11
	lsl	r6, r9, r2
	orr	r11, r11, r6			@ factor index number into r11
	dccisw	r11, r6				@ clean/invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	loop2
	subs	r4, r4, #1			@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	flush_levels
finished:
	mov	r10, #0				@ switch back to cache level 0
	write_csselr r10, r3			@ select current cache level in cssr
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_flush_dcache_all)

/*
 *	v7m_flush_kern_cache_all()
 *
 *	Flush the entire cache system.  The data cache is cleaned and
 *	invalidated by set/way working outwards from L1; the instruction
 *	cache is then invalidated in a single operation.
 */
ENTRY(v7m_flush_kern_cache_all)
	stmfd	sp!, {r4-r7, r9-r11, lr}	@ save regs clobbered by dcache flush
	bl	v7m_flush_dcache_all
	invalidate_icache r0
	ldmfd	sp!, {r4-r7, r9-r11, lr}
	ret	lr
ENDPROC(v7m_flush_kern_cache_all)

/*
 *	v7m_flush_user_cache_all()
 *
 *	Flush all cache entries in a particular address space.
 *
 *	No-op on v7-M: user-space cache maintenance is not required here.
 */
ENTRY(v7m_flush_user_cache_all)
	/*FALLTHROUGH*/

/*
 *	v7m_flush_user_cache_range(start, end, flags)
 *
 *	Flush a range of cache entries in the specified address space.
 *
 *	- start - start address (may not be aligned)
 *	- end	- end address (exclusive, may not be aligned)
 *	- flags	- vm_area_struct flags describing address space
 *
 *	No-op on v7-M: simply returns.
 */
ENTRY(v7m_flush_user_cache_range)
	ret	lr
ENDPROC(v7m_flush_user_cache_all)
ENDPROC(v7m_flush_user_cache_range)

/*
 *	v7m_coherent_kern_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region.  This is typically used when code has been written to
 *	a memory region, and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 *
 *	It is assumed that:
 *	- the Icache does not read data from the write buffer
 */
ENTRY(v7m_coherent_kern_range)
	/* FALLTHROUGH */

/*
 *	v7m_coherent_user_range(start,end)
 *
 *	Same contract as v7m_coherent_kern_range; on v7-M there is no
 *	user/kernel cache distinction so the implementation is shared.
 */
ENTRY(v7m_coherent_user_range)
 UNWIND(.fnstart		)
	dcache_line_size r2, r3
	sub	r3, r2, #1		@ line mask
	bic	r12, r0, r3		@ align start down to a D line
1:
	dccmvau	r12, r3			@ clean D line to PoU
	add	r12, r12, r2
	cmp	r12, r1
	blo	1b
	dsb	ishst			@ complete cleans before I-side ops
	icache_line_size r2, r3
	sub	r3, r2, #1
	bic	r12, r0, r3		@ align start down to an I line
2:
	icimvau	r12, r3			@ invalidate I line to PoU
	add	r12, r12, r2
	cmp	r12, r1
	blo	2b
	invalidate_bp r0
	dsb	ishst
	isb
	ret	lr
 UNWIND(.fnend		)
ENDPROC(v7m_coherent_kern_range)
ENDPROC(v7m_coherent_user_range)

/*
 *	v7m_flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure that the data held in the region is written back and
 *	invalidated from the D-cache.
 *
 *	- addr	- kernel address
 *	- size	- region size
 */
ENTRY(v7m_flush_kern_dcache_area)
	dcache_line_size r2, r3
	add	r1, r0, r1		@ end = addr + size
	sub	r3, r2, #1		@ line mask
	bic	r0, r0, r3		@ align start down to a line
1:
	dccimvac r0, r3		@ clean & invalidate D line / unified line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_flush_kern_dcache_area)

/*
 *	v7m_dma_inv_range(start,end)
 *
 *	Invalidate the data cache within the specified region in
 *	preparation for an incoming DMA.  Partially covered lines at
 *	either end are cleaned+invalidated first so adjacent data that
 *	shares the line is not lost.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
v7m_dma_inv_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1		@ line mask
	tst	r0, r3			@ start mid-line?
	bic	r0, r0, r3
	dccimvacne r0, r3		@ writeback+invalidate the partial line
	subne	r3, r2, #1		@ restore r3, corrupted by v7m's dccimvac
	tst	r1, r3			@ end mid-line?
	bic	r1, r1, r3
	dccimvacne r1, r3
1:
	dcimvac r0, r3			@ invalidate D line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_inv_range)

/*
 *	v7m_dma_clean_range(start,end)
 *
 *	Clean (write back) the data cache within the specified region
 *	before an outgoing DMA.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
v7m_dma_clean_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1		@ line mask
	bic	r0, r0, r3		@ align start down to a line
1:
	dccmvac r0, r3			@ clean D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_clean_range)

/*
 *	v7m_dma_flush_range(start,end)
 *
 *	Clean and invalidate the data cache within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(v7m_dma_flush_range)
	dcache_line_size r2, r3
	sub	r3, r2, #1		@ line mask
	bic	r0, r0, r3		@ align start down to a line
1:
	dccimvac r0, r3			@ clean & invalidate D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_flush_range)

/*
 *	v7m_dma_map_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 *
 *	FROM_DEVICE transfers invalidate; all others clean.
 */
ENTRY(v7m_dma_map_area)
	add	r1, r1, r0		@ end = start + size
	teq	r2, #DMA_FROM_DEVICE
	beq	v7m_dma_inv_range
	b	v7m_dma_clean_range
ENDPROC(v7m_dma_map_area)

/*
 *	v7m_dma_unmap_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 *
 *	Everything except TO_DEVICE invalidates; TO_DEVICE is a no-op.
 */
ENTRY(v7m_dma_unmap_area)
	add	r1, r1, r0		@ end = start + size
	teq	r2, #DMA_TO_DEVICE
	bne	v7m_dma_inv_range
	ret	lr
ENDPROC(v7m_dma_unmap_area)

447 .globl v7m_flush_kern_cache_louis
448 .equ v7m_flush_kern_cache_louis, v7m_flush_kern_cache_all
449
450 __INITDATA
451
452 @ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
453 define_cache_functions v7m
454