#include <common.h>
#include <asm/bitops.h>
#include <asm/global_data.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#ifdef CONFIG_ADDR_MAP
#include <addr_map.h>
#endif

#include <linux/log2.h>

DECLARE_GLOBAL_DATA_PTR;

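/*
 * Flash-invalidate one of the two L2 MMU arrays via MMUCSR0:
 * bit 0x4 (TLB0_FI) clears the set-associative TLB0, bit 0x2 (TLB1_FI)
 * clears the TLB1/CAM array.
 */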
void invalidate_tlb(u8 tlb)
{
	if (tlb == 0)
		mtspr(MMUCSR0, 0x4);
	if (tlb == 1)
		mtspr(MMUCSR0, 0x2);
}

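/*
 * Program the boot-time TLB entries from the board's tlb_table[] (defined
 * per board together with num_tlb_entries).  __weak so a board or SoC can
 * provide its own variant.
 */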
__weak void init_tlbs(void)
{
	int i;

	for (i = 0; i < num_tlb_entries; i++) {
		write_tlb(tlb_table[i].mas0,
			  tlb_table[i].mas1,
			  tlb_table[i].mas2,
			  tlb_table[i].mas3,
			  tlb_table[i].mas7);
	}
}

#if !defined(CONFIG_NAND_SPL) && \
	(!defined(CONFIG_SPL_BUILD) || !defined(CONFIG_SPL_INIT_MINIMAL))
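/*
 * Read TLB1 entry @idx back through MAS0/MAS1/MAS2/MAS3 (and MAS7 for the
 * upper physical-address bits when 36-bit physical addressing is enabled)
 * and decode the valid bit, page size, effective and real page numbers.
 */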
void read_tlbcam_entry(int idx, u32 *valid, u32 *tsize, unsigned long *epn,
		       phys_addr_t *rpn)
{
	u32 _mas1;

	mtspr(MAS0, FSL_BOOKE_MAS0(1, idx, 0));
	asm volatile("tlbre;isync");
	_mas1 = mfspr(MAS1);

	*valid = (_mas1 & MAS1_VALID);
	*tsize = (_mas1 >> 7) & 0x1f;
	*epn = mfspr(MAS2) & MAS2_EPN;
	*rpn = mfspr(MAS3) & MAS3_RPN;
#ifdef CONFIG_ENABLE_36BIT_PHYS
	*rpn |= ((u64)mfspr(MAS7)) << 32;
#endif
}

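/* Dump every TLB1 (TLBCAM) entry; the entry count comes from TLB1CFG. */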
void print_tlbcam(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	printf("TLBCAM entries\n");
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		printf("entry %02d: V: %d EPN 0x%08x RPN 0x%08llx size:",
		       i, (valid == 0) ? 0 : 1, (unsigned int)epn,
		       (unsigned long long)rpn);
		print_size(TSIZE_TO_BYTES(tsize), "\n");
	}
}

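/*
 * TLB1 entries are tracked in a bitmap in global data so that callers can
 * allocate and release CAM slots without clobbering entries set up earlier
 * in the boot flow.
 */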
static inline void use_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] |= (1 << bit);
}

static inline void free_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] &= ~(1 << bit);
}

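/*
 * Initialise the bitmap from the hardware state: any TLB1 entry that is
 * already valid (set up by earlier boot code) is marked as in use.
 */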
void init_used_tlb_cams(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++)
		gd->arch.used_tlb_cams[i] = 0;

	for (i = 0; i < num_cam; i++) {
		mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));
		asm volatile("tlbre;isync");
		if (mfspr(MAS1) & MAS1_VALID)
			use_tlb_cam(i);
	}
}

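/*
 * Return the index of the first unused TLB1 entry, or -1 if every CAM slot
 * tracked in the bitmap is already taken.
 */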
int find_free_tlbcam(void)
{
	int i;
	u32 idx;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++) {
		idx = ffz(gd->arch.used_tlb_cams[i]);

		if (idx != 32)
			break;
	}

	idx += i * 32;

	if (idx >= CONFIG_SYS_NUM_TLBCAMS)
		return -1;

	return idx;
}

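/*
 * Write one TLB entry: @tlb selects TLB0/TLB1, @esel the entry within it,
 * @epn/@rpn the effective and real addresses, @perms the MAS3 permission
 * bits, @wimge the MAS2 attribute bits, @ts the address space, @tsize the
 * page size as a power-of-two exponent in KB (MMU v1 parts only support
 * even exponents, i.e. power-of-four sizes), and @iprot protects the entry
 * from flash invalidation.
 *
 * Illustrative use only (the address, CAM slot and size macro are
 * assumptions, not values taken from this file): map 64 MiB of boot flash
 * at 0xfc000000, cache-inhibited and guarded, into CAM entry 2:
 *
 *	set_tlb(1, 0xfc000000, 0xfc000000,
 *		MAS3_SX | MAS3_SW | MAS3_SR, MAS2_I | MAS2_G,
 *		0, 2, BOOKE_PAGESZ_64M, 1);
 */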
void set_tlb(u8 tlb, u32 epn, u64 rpn,
	     u8 perms, u8 wimge,
	     u8 ts, u8 esel, u8 tsize, u8 iprot)
{
	u32 _mas0, _mas1, _mas2, _mas3, _mas7;

	if (tlb == 1)
		use_tlb_cam(esel);

	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1 &&
	    tsize & 1) {
		printf("%s: bad tsize %d on entry %d at 0x%08x\n",
		       __func__, tsize, tlb, epn);
		return;
	}

	_mas0 = FSL_BOOKE_MAS0(tlb, esel, 0);
	_mas1 = FSL_BOOKE_MAS1(1, iprot, 0, ts, tsize);
	_mas2 = FSL_BOOKE_MAS2(epn, wimge);
	_mas3 = FSL_BOOKE_MAS3(rpn, 0, perms);
	_mas7 = FSL_BOOKE_MAS7(rpn);

	write_tlb(_mas0, _mas1, _mas2, _mas3, _mas7);

#ifdef CONFIG_ADDR_MAP
	if ((tlb == 1) && (gd->flags & GD_FLG_RELOC))
		addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), esel);
#endif
}

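/*
 * Invalidate TLB1 entry @esel by writing it back with MAS1[V] cleared, and
 * release its slot in the usage bitmap.
 */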
void disable_tlb(u8 esel)
{
	u32 _mas0, _mas1, _mas2, _mas3;

	free_tlb_cam(esel);

	_mas0 = FSL_BOOKE_MAS0(1, esel, 0);
	_mas1 = 0;
	_mas2 = 0;
	_mas3 = 0;

	mtspr(MAS0, _mas0);
	mtspr(MAS1, _mas1);
	mtspr(MAS2, _mas2);
	mtspr(MAS3, _mas3);
#ifdef CONFIG_ENABLE_36BIT_PHYS
	mtspr(MAS7, 0);
#endif
	asm volatile("isync;msync;tlbwe;isync");

#ifdef CONFIG_ADDR_MAP
	if (gd->flags & GD_FLG_RELOC)
		addrmap_set_entry(0, 0, 0, esel);
#endif
}

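/*
 * tlbsx searches the TLBs for the given effective address and, on a hit,
 * loads the matching entry into the MAS registers.
 */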
static void tlbsx(const volatile unsigned *addr)
{
	__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
}

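/*
 * Find the entry index (ESEL) that maps @addr in TLB @tlbsel, or -1 if no
 * valid entry in that TLB covers the address.
 */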
int find_tlb_idx(void *addr, u8 tlbsel)
{
	u32 _mas0, _mas1;

	/* zero out Search PID, AS */
	mtspr(MAS6, 0);

	tlbsx(addr);

	_mas0 = mfspr(MAS0);
	_mas1 = mfspr(MAS1);

	/* we found something, and it's in the TLB we expect */
	if ((MAS1_VALID & _mas1) &&
	    (MAS0_TLBSEL(tlbsel) == (_mas0 & MAS0_TLBSEL_MSK))) {
		return ((_mas0 & MAS0_ESEL_MSK) >> 16);
	}

	return -1;
}

#ifdef CONFIG_ADDR_MAP
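/*
 * Seed the generic address map with whatever the current TLB1 entries
 * already map, so the virt/phys translation table matches the hardware.
 */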
void init_addr_map(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		if (valid & MAS1_VALID)
			addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), i);
	}
}
#endif

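/*
 * Map [v_addr, v_addr + size) to p_addr using up to eight TLB1 entries.
 * Each step picks the largest page that is a power of two (power of four on
 * MMU v1), no larger than the remaining size, no larger than the alignment
 * of v_addr, and no larger than the biggest page the core supports.
 * Returns the number of bytes left unmapped (0 on complete success).
 *
 * A minimal sketch of the return-value handling (address and size are
 * illustrative assumptions):
 *
 *	u64 left = tlb_map_range(CONFIG_SYS_DDR_SDRAM_BASE, 0, 512 << 20,
 *				 TLB_MAP_RAM);
 *	if (left)
 *		puts("DDR only partially mapped\n");
 */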
uint64_t tlb_map_range(ulong v_addr, phys_addr_t p_addr, uint64_t size,
		       enum tlb_map_type map_type)
{
	int i;
	unsigned int tlb_size;
	unsigned int wimge;
	unsigned int perm;
	unsigned int max_cam, tsize_mask;

	if (map_type == TLB_MAP_RAM) {
		perm = MAS3_SX|MAS3_SW|MAS3_SR;
		wimge = MAS2_M;
#ifdef CONFIG_SYS_PPC_DDR_WIMGE
		wimge = CONFIG_SYS_PPC_DDR_WIMGE;
#endif
	} else {
		perm = MAS3_SW|MAS3_SR;
		wimge = MAS2_I|MAS2_G;
	}

	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1) {
		/* Convert (4^max) kB to (2^max) bytes */
		max_cam = ((mfspr(SPRN_TLB1CFG) >> 16) & 0xf) * 2 + 10;
		tsize_mask = ~1U;
	} else {
		/* Convert (2^max) kB to (2^max) bytes */
		max_cam = __ilog2(mfspr(SPRN_TLB1PS)) + 10;
		tsize_mask = ~0U;
	}

	for (i = 0; size && i < 8; i++) {
		int tlb_index = find_free_tlbcam();
		u32 camsize = __ilog2_u64(size) & tsize_mask;
		u32 align = __ilog2(v_addr) & tsize_mask;

		if (tlb_index == -1)
			break;

		/* convert 0 to max */
		if (align == -2)
			align = max_cam;
		if (camsize > align)
			camsize = align;

		if (camsize > max_cam)
			camsize = max_cam;

		tlb_size = camsize - 10;

		set_tlb(1, v_addr, p_addr, perm, wimge,
			0, tlb_index, tlb_size, 1);

		size -= 1ULL << camsize;
		v_addr += 1UL << camsize;
		p_addr += 1UL << camsize;
	}

	return size;
}

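/*
 * Cover the DDR at CONFIG_SYS_DDR_SDRAM_BASE with TLB1 entries, mapping at
 * most CONFIG_MAX_MEM_MAPPED bytes; anything beyond that limit (or anything
 * tlb_map_range() could not place) is reported as left unmapped.
 */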
unsigned int setup_ddr_tlbs_phys(phys_addr_t p_addr,
				 unsigned int memsize_in_meg)
{
	unsigned int ram_tlb_address = (unsigned int)CONFIG_SYS_DDR_SDRAM_BASE;
	u64 memsize = (u64)memsize_in_meg << 20;
	u64 size;

	size = min(memsize, (u64)CONFIG_MAX_MEM_MAPPED);
	size = tlb_map_range(ram_tlb_address, p_addr, size, TLB_MAP_RAM);

	if (size || memsize > CONFIG_MAX_MEM_MAPPED) {
		print_size(memsize > CONFIG_MAX_MEM_MAPPED ?
			   memsize - CONFIG_MAX_MEM_MAPPED + size : size,
			   " left unmapped\n");
	}

	return memsize_in_meg;
}

unsigned int setup_ddr_tlbs(unsigned int memsize_in_meg)
{
	return setup_ddr_tlbs_phys(CONFIG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

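/*
 * Tear down the TLB1 entries covering the DDR window set up above so the
 * mapping can be rebuilt (for example after the DDR size changes).
 */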
void clear_ddr_tlbs_phys(phys_addr_t p_addr, unsigned int memsize_in_meg)
{
	u32 vstart = CONFIG_SYS_DDR_SDRAM_BASE;
	unsigned long epn;
	u32 tsize, valid, ptr;
	phys_addr_t rpn = 0;
	int ddr_esel;
	u64 memsize = (u64)memsize_in_meg << 20;

	ptr = vstart;

	while (ptr < (vstart + memsize)) {
		ddr_esel = find_tlb_idx((void *)ptr, 1);
		/* stop when nothing maps this address; tsize would be stale */
		if (ddr_esel == -1)
			break;

		read_tlbcam_entry(ddr_esel, &valid, &tsize, &epn, &rpn);
		disable_tlb(ddr_esel);

		ptr += TSIZE_TO_BYTES(tsize);
	}
}

void clear_ddr_tlbs(unsigned int memsize_in_meg)
{
	clear_ddr_tlbs_phys(CONFIG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

#endif