#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

#define ARMV7_DCACHE_INVAL_RANGE	1
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2

#ifndef CONFIG_SYS_DCACHE_OFF

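/* Whole-D-cache flush/invalidate helpers, implemented outside this file */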
void v7_flush_dcache_all(void);
void v7_invalidate_dcache_all(void);

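/* Return 1 if start and stop are cache-line aligned, 0 otherwise */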
static int check_cache_range(unsigned long start, unsigned long stop)
{
	int ok = 1;

	if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (!ok)
		debug("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
		      start, stop);

	return ok;
}

static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read the current Cache Size ID Register (CCSIDR) */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

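/* Clean & invalidate each D-cache line in the range [start, stop) */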
static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start address down to a cache-line boundary */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - clean & invalidate data cache line by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}

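/* Invalidate, without cleaning, each D-cache line in [start, stop) */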
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/*
	 * If the start address is not aligned to a cache line, do not
	 * invalidate the first (partial) cache line.
	 */
	if (start & (line_len - 1)) {
		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
		       __func__, start);
		/* move to the next cache line */
		start = (start + line_len - 1) & ~(line_len - 1);
	}

	/*
	 * If the stop address is not aligned to a cache line, do not
	 * invalidate the last (partial) cache line.
	 */
	if (stop & (line_len - 1)) {
		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
		       __func__, stop);
		/* align back to the start of this cache line */
		stop &= ~(line_len - 1);
	}

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - invalidate data cache line by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}

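/* Derive the cache-line length from CCSIDR and run the requested range op */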
static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* Converting from log2(line_len) to line_len */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DSB to make sure the maintenance operations have completed */
	DSB;
}

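/* Invalidate the entire unified, data and instruction TLBs */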
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure the invalidation is complete */
	DSB;
	/* Full system ISB - make sure the new state is visible */
	ISB;
}

void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}

/*
 * Perform a clean & invalidate of the entire data cache
 * at all levels, including any outer cache.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}

/*
 * Invalidate the given range in all levels of D-cache/unified cache.
 * Affects the range [start, stop - 1].
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flush (clean & invalidate) the given range in all levels of
 * D-cache/unified cache.
 * Affects the range [start, stop - 1].
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}

void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
#else	/* #ifndef CONFIG_SYS_DCACHE_OFF */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif	/* #ifndef CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF
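/* Invalidate entire I-cache and branch predictor array */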
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure the invalidation is complete */
	DSB;

	/* ISB - make sure the fetched instruction stream sees it */
	ISB;
}
#else
void invalidate_icache_all(void)
{
}
#endif

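/* Weak stubs, overridden by platforms with an outer (e.g. L2) cache */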
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}