#include <common.h>
#include <display_options.h>
#include <init.h>
#include <asm/bitops.h>
#include <asm/global_data.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#ifdef CONFIG_ADDR_MAP
#include <addr_map.h>
#endif

#include <linux/log2.h>

DECLARE_GLOBAL_DATA_PTR;

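/*
 * Flash-invalidate a whole TLB array: MMUCSR0 bit 0x4 clears TLB0,
 * bit 0x2 clears TLB1.
 */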
void invalidate_tlb(u8 tlb)
{
	if (tlb == 0)
		mtspr(MMUCSR0, 0x4);
	if (tlb == 1)
		mtspr(MMUCSR0, 0x2);
}

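/*
 * Program every entry of the board-provided tlb_table[] into the MMU;
 * boards may override this weak default if they need a different setup.
 */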
__weak void init_tlbs(void)
{
	int i;

	for (i = 0; i < num_tlb_entries; i++) {
		write_tlb(tlb_table[i].mas0,
			  tlb_table[i].mas1,
			  tlb_table[i].mas2,
			  tlb_table[i].mas3,
			  tlb_table[i].mas7);
	}
}

#if !defined(CONFIG_NAND_SPL) && \
	(!defined(CONFIG_SPL_BUILD) || !CONFIG_IS_ENABLED(INIT_MINIMAL))
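/*
 * Read back TLB1 (TLBCAM) entry 'idx' and return its valid bit, page size
 * (tsize), effective page number and real page number.
 */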
void read_tlbcam_entry(int idx, u32 *valid, u32 *tsize, unsigned long *epn,
		       phys_addr_t *rpn)
{
	u32 _mas1;

	mtspr(MAS0, FSL_BOOKE_MAS0(1, idx, 0));
	asm volatile("tlbre;isync");
	_mas1 = mfspr(MAS1);

	*valid = (_mas1 & MAS1_VALID);
	*tsize = (_mas1 >> 7) & 0x1f;
	*epn = mfspr(MAS2) & MAS2_EPN;
	*rpn = mfspr(MAS3) & MAS3_RPN;
#ifdef CONFIG_ENABLE_36BIT_PHYS
	*rpn |= ((u64)mfspr(MAS7)) << 32;
#endif
}

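/* Dump every TLBCAM (TLB1) entry to the console */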
void print_tlbcam(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	/* walk all the entries */
	printf("TLBCAM entries\n");
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		printf("entry %02d: V: %d EPN 0x%08x RPN 0x%08llx size:",
		       i, (valid == 0) ? 0 : 1, (unsigned int)epn,
		       (unsigned long long)rpn);
		print_size(TSIZE_TO_BYTES(tsize), "\n");
	}
}

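/*
 * gd->arch.used_tlb_cams[] is a bitmap of TLBCAM entries currently in use;
 * use_tlb_cam() and free_tlb_cam() set and clear the bit for one entry.
 */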
static inline void use_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] |= (1 << bit);
}

static inline void free_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] &= ~(1 << bit);
}

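/*
 * Rebuild the used-TLBCAM bitmap: clear it, then read back every TLB1 entry
 * and mark the ones the hardware reports as valid.
 */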
void init_used_tlb_cams(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++)
		gd->arch.used_tlb_cams[i] = 0;

	/* walk all the entries */
	for (i = 0; i < num_cam; i++) {
		mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));
		asm volatile("tlbre;isync");
		if (mfspr(MAS1) & MAS1_VALID)
			use_tlb_cam(i);
	}
}

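/* Return the index of the first unused TLBCAM entry, or -1 if none is free */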
int find_free_tlbcam(void)
{
	int i;
	u32 idx;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++) {
		idx = ffz(gd->arch.used_tlb_cams[i]);

		if (idx != 32)
			break;
	}

	idx += i * 32;

	if (idx >= CONFIG_SYS_NUM_TLBCAMS)
		return -1;

	return idx;
}

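/*
 * Program one TLB entry.  'tlb' selects TLB0 or TLB1, 'esel' the entry
 * within it, 'epn'/'rpn' the effective and real page numbers, 'tsize' the
 * page size, 'perms' the MAS3 permission bits, 'wimge' the MAS2 attribute
 * bits, 'ts' the translation space and 'iprot' the invalidation-protect bit.
 */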
void set_tlb(u8 tlb, u32 epn, u64 rpn,
	     u8 perms, u8 wimge,
	     u8 ts, u8 esel, u8 tsize, u8 iprot)
{
	u32 _mas0, _mas1, _mas2, _mas3, _mas7;

	if (tlb == 1)
		use_tlb_cam(esel);

	/* MMU v1 (MAV 1.0) only supports power-of-4 page sizes (even tsize) */
	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1 &&
	    tsize & 1) {
		printf("%s: bad tsize %d on entry %d at 0x%08x\n",
		       __func__, tsize, tlb, epn);
		return;
	}

	_mas0 = FSL_BOOKE_MAS0(tlb, esel, 0);
	_mas1 = FSL_BOOKE_MAS1(1, iprot, 0, ts, tsize);
	_mas2 = FSL_BOOKE_MAS2(epn, wimge);
	_mas3 = FSL_BOOKE_MAS3(rpn, 0, perms);
	_mas7 = FSL_BOOKE_MAS7(rpn);

	write_tlb(_mas0, _mas1, _mas2, _mas3, _mas7);

#ifdef CONFIG_ADDR_MAP
	if ((tlb == 1) && (gd->flags & GD_FLG_RELOC))
		addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), esel);
#endif
}

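/* Invalidate TLB1 entry 'esel' and release it in the used-TLBCAM bitmap */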
void disable_tlb(u8 esel)
{
	u32 _mas0, _mas1, _mas2, _mas3;

	free_tlb_cam(esel);

	_mas0 = FSL_BOOKE_MAS0(1, esel, 0);
	_mas1 = 0;
	_mas2 = 0;
	_mas3 = 0;

	mtspr(MAS0, _mas0);
	mtspr(MAS1, _mas1);
	mtspr(MAS2, _mas2);
	mtspr(MAS3, _mas3);
#ifdef CONFIG_ENABLE_36BIT_PHYS
	mtspr(MAS7, 0);
#endif
	asm volatile("isync;msync;tlbwe;isync");

#ifdef CONFIG_ADDR_MAP
	if (gd->flags & GD_FLG_RELOC)
		addrmap_set_entry(0, 0, 0, esel);
#endif
}

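/* Search the TLBs for 'addr'; the result is returned in MAS0/MAS1 */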
static void tlbsx(const volatile unsigned *addr)
{
	__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
}

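/*
 * Look up 'addr' in TLB array 'tlbsel' and return its esel index,
 * or -1 if no valid entry matches.
 */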
int find_tlb_idx(void *addr, u8 tlbsel)
{
	u32 _mas0, _mas1;

	/* zero out the search PID and AS fields */
	mtspr(MAS6, 0);

	tlbsx(addr);

	_mas0 = mfspr(MAS0);
	_mas1 = mfspr(MAS1);

	/* we found a valid entry in the TLB we expected */
	if ((MAS1_VALID & _mas1) &&
	    (MAS0_TLBSEL(tlbsel) == (_mas0 & MAS0_TLBSEL_MSK))) {
		return ((_mas0 & MAS0_ESEL_MSK) >> 16);
	}

	return -1;
}

#ifdef CONFIG_ADDR_MAP
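/* Seed the address map with every TLBCAM entry that is currently valid */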
int init_addr_map(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	/* walk all the entries */
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		if (valid & MAS1_VALID)
			addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), i);
	}

	return 0;
}
#endif

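/*
 * Cover [v_addr, v_addr + size) with up to eight TLBCAM entries, picking the
 * largest page size permitted by the remaining size, the address alignment
 * and the MMU's maximum page size.  RAM mappings are cacheable and coherent
 * (or use CONFIG_SYS_PPC_DDR_WIMGE if defined); other mappings are
 * cache-inhibited and guarded.  Returns the number of bytes left unmapped.
 */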
uint64_t tlb_map_range(ulong v_addr, phys_addr_t p_addr, uint64_t size,
		       enum tlb_map_type map_type)
{
	int i;
	unsigned int tlb_size;
	unsigned int wimge;
	unsigned int perm;
	unsigned int max_cam, tsize_mask;

	if (map_type == TLB_MAP_RAM) {
		perm = MAS3_SX|MAS3_SW|MAS3_SR;
		wimge = MAS2_M;
#ifdef CONFIG_SYS_PPC_DDR_WIMGE
		wimge = CONFIG_SYS_PPC_DDR_WIMGE;
#endif
	} else {
		perm = MAS3_SW|MAS3_SR;
		wimge = MAS2_I|MAS2_G;
	}

	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1) {
		/* Convert (4^max) kB to (2^max) bytes */
		max_cam = ((mfspr(SPRN_TLB1CFG) >> 16) & 0xf) * 2 + 10;
		tsize_mask = ~1U;
	} else {
		/* Convert (2^max) kB to (2^max) bytes */
		max_cam = __ilog2(mfspr(SPRN_TLB1PS)) + 10;
		tsize_mask = ~0U;
	}

	for (i = 0; size && i < 8; i++) {
		int tlb_index = find_free_tlbcam();
		u32 camsize = __ilog2_u64(size) & tsize_mask;
		u32 align = __ilog2(v_addr) & tsize_mask;

		if (tlb_index == -1)
			break;

		if (align == -2)
			align = max_cam;
		if (camsize > align)
			camsize = align;

		if (camsize > max_cam)
			camsize = max_cam;

		tlb_size = camsize - 10;

		set_tlb(1, v_addr, p_addr, perm, wimge,
			0, tlb_index, tlb_size, 1);

		size -= 1ULL << camsize;
		v_addr += 1UL << camsize;
		p_addr += 1UL << camsize;
	}

	return size;
}

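/*
 * Map up to CFG_MAX_MEM_MAPPED bytes of DDR starting at
 * CFG_SYS_DDR_SDRAM_BASE and report any memory left unmapped in U-Boot.
 */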
unsigned int setup_ddr_tlbs_phys(phys_addr_t p_addr,
				 unsigned int memsize_in_meg)
{
	unsigned int ram_tlb_address = (unsigned int)CFG_SYS_DDR_SDRAM_BASE;
	u64 memsize = (u64)memsize_in_meg << 20;
	u64 size;

	size = min(memsize, (u64)CFG_MAX_MEM_MAPPED);
	size = tlb_map_range(ram_tlb_address, p_addr, size, TLB_MAP_RAM);

	if (size || memsize > CFG_MAX_MEM_MAPPED) {
		print_size(memsize > CFG_MAX_MEM_MAPPED ?
			   memsize - CFG_MAX_MEM_MAPPED + size : size,
			   " of DDR memory left unmapped in U-Boot\n");
#ifndef CONFIG_SPL_BUILD
		puts(" ");
#endif
	}

	return memsize_in_meg;
}

unsigned int setup_ddr_tlbs(unsigned int memsize_in_meg)
{
	return setup_ddr_tlbs_phys(CFG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

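/*
 * Walk the DDR region at CFG_SYS_DDR_SDRAM_BASE and disable every TLBCAM
 * entry that covers part of it.
 */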
void clear_ddr_tlbs_phys(phys_addr_t p_addr, unsigned int memsize_in_meg)
{
	u32 vstart = CFG_SYS_DDR_SDRAM_BASE;
	unsigned long epn;
	u32 tsize, valid, ptr;
	phys_addr_t rpn = 0;
	int ddr_esel;
	u64 memsize = (u64)memsize_in_meg << 20;

	ptr = vstart;

	while (ptr < (vstart + memsize)) {
		ddr_esel = find_tlb_idx((void *)ptr, 1);
		if (ddr_esel != -1) {
			read_tlbcam_entry(ddr_esel, &valid, &tsize, &epn, &rpn);
			disable_tlb(ddr_esel);
		}
		ptr += TSIZE_TO_BYTES(tsize);
	}
}

void clear_ddr_tlbs(unsigned int memsize_in_meg)
{
	clear_ddr_tlbs_phys(CFG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

#endif