#include <common.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#ifdef CONFIG_ADDR_MAP
#include <addr_map.h>
#endif

DECLARE_GLOBAL_DATA_PTR;

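/*
 * Flash-invalidate an entire TLB array.  Writing MMUCSR0 with 0x4 clears
 * every entry in TLB0; writing 0x2 clears every entry in TLB1.
 */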
void invalidate_tlb(u8 tlb)
{
	if (tlb == 0)
		mtspr(MMUCSR0, 0x4);
	if (tlb == 1)
		mtspr(MMUCSR0, 0x2);
}

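/*
 * Program the static TLB entries.  tlb_table[] and num_tlb_entries are
 * defined per board and hold the raw MAS0-MAS3/MAS7 values for each entry.
 */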
void init_tlbs(void)
{
	int i;

	for (i = 0; i < num_tlb_entries; i++) {
		write_tlb(tlb_table[i].mas0,
			  tlb_table[i].mas1,
			  tlb_table[i].mas2,
			  tlb_table[i].mas3,
			  tlb_table[i].mas7);
	}
}

#ifndef CONFIG_NAND_SPL
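/*
 * gd->used_tlb_cams[] is a bitmap with one bit per TLB1 (CAM) entry,
 * 32 entries per word; these helpers mark an entry as used or free.
 */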
static inline void use_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->used_tlb_cams[i] |= (1 << bit);
}

static inline void free_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->used_tlb_cams[i] &= ~(1 << bit);
}

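/*
 * Rebuild the used-entry bitmap from the hardware: read back every TLB1
 * entry and mark the ones that are currently valid.
 */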
void init_used_tlb_cams(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++)
		gd->used_tlb_cams[i] = 0;

	/* walk all the entries */
	for (i = 0; i < num_cam; i++) {
		u32 _mas1;

		mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));

		asm volatile("tlbre;isync");
		_mas1 = mfspr(MAS1);

		/* only entries with MAS1[V] set are in use */
		if ((_mas1 & MAS1_VALID))
			use_tlb_cam(i);
	}
}

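/*
 * Return the index of the first unused TLB1 (CAM) entry, or -1 if all
 * CONFIG_SYS_NUM_TLBCAMS entries are in use.
 */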
int find_free_tlbcam(void)
{
	int i;
	u32 idx;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++) {
		idx = ffz(gd->used_tlb_cams[i]);

		/* ffz() returns 32 when the word has no zero bit */
		if (idx != 32)
			break;
	}

	idx += i * 32;

	if (idx >= CONFIG_SYS_NUM_TLBCAMS)
		return -1;

	return idx;
}

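/*
 * Write a single TLB entry.  For TLB1 the entry is marked as used and,
 * once relocated (with CONFIG_ADDR_MAP), mirrored into the address map.
 *
 * Illustrative call (entry number and macros are examples only): map 1 MB
 * at CCSR, cache-inhibited and guarded, into TLB1 entry 3 with IPROT set:
 *
 *	set_tlb(1, CONFIG_SYS_CCSRBAR, CONFIG_SYS_CCSRBAR_PHYS,
 *		MAS3_SX | MAS3_SW | MAS3_SR, MAS2_I | MAS2_G,
 *		0, 3, BOOKE_PAGESZ_1M, 1);
 */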
void set_tlb(u8 tlb, u32 epn, u64 rpn,
	     u8 perms, u8 wimge,
	     u8 ts, u8 esel, u8 tsize, u8 iprot)
{
	u32 _mas0, _mas1, _mas2, _mas3, _mas7;

	if (tlb == 1)
		use_tlb_cam(esel);

	_mas0 = FSL_BOOKE_MAS0(tlb, esel, 0);
	_mas1 = FSL_BOOKE_MAS1(1, iprot, 0, ts, tsize);
	_mas2 = FSL_BOOKE_MAS2(epn, wimge);
	_mas3 = FSL_BOOKE_MAS3(rpn, 0, perms);
	_mas7 = FSL_BOOKE_MAS7(rpn);

	write_tlb(_mas0, _mas1, _mas2, _mas3, _mas7);

#ifdef CONFIG_ADDR_MAP
	if ((tlb == 1) && (gd->flags & GD_FLG_RELOC))
		addrmap_set_entry(epn, rpn, (1UL << ((tsize * 2) + 10)), esel);
#endif
}

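/*
 * Invalidate TLB1 entry 'esel' by writing it back with zeroed MAS values
 * (so MAS1[V] is clear), and release it in the used-entry bitmap.
 */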
void disable_tlb(u8 esel)
{
	u32 _mas0, _mas1, _mas2, _mas3, _mas7;

	free_tlb_cam(esel);

	_mas0 = FSL_BOOKE_MAS0(1, esel, 0);
	_mas1 = 0;
	_mas2 = 0;
	_mas3 = 0;
	_mas7 = 0;

	mtspr(MAS0, _mas0);
	mtspr(MAS1, _mas1);
	mtspr(MAS2, _mas2);
	mtspr(MAS3, _mas3);
#ifdef CONFIG_ENABLE_36BIT_PHYS
	mtspr(MAS7, _mas7);
#endif
	asm volatile("isync;msync;tlbwe;isync");

#ifdef CONFIG_ADDR_MAP
	if (gd->flags & GD_FLG_RELOC)
		addrmap_set_entry(0, 0, 0, esel);
#endif
}

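/* Search the TLBs for 'addr'; the result is left in the MAS registers. */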
static void tlbsx(const volatile unsigned *addr)
{
	__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
}

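/*
 * Look up 'addr' with PID 0/AS 0 and return the ESEL of the matching
 * entry if it lives in TLB array 'tlbsel'; return -1 otherwise.
 */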
int find_tlb_idx(void *addr, u8 tlbsel)
{
	u32 _mas0, _mas1;

	/* search with PID 0, AS 0 */
	mtspr(MAS6, 0);

	tlbsx(addr);

	_mas0 = mfspr(MAS0);
	_mas1 = mfspr(MAS1);

	/* the search hit a valid entry in the TLB array we expect */
	if ((MAS1_VALID & _mas1) &&
	    (MAS0_TLBSEL(tlbsel) == (_mas0 & MAS0_TLBSEL_MSK))) {
		return ((_mas0 & MAS0_ESEL_MSK) >> 16);
	}

	return -1;
}

#ifdef CONFIG_ADDR_MAP
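/*
 * Seed the generic address map from the TLB1 entries that are already
 * valid, so addrmap-based virt/phys conversion reflects the live mappings.
 */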
void init_addr_map(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	/* walk every TLB1 entry */
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, _mas1;
		phys_addr_t rpn;

		mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));

		asm volatile("tlbre;isync");
		_mas1 = mfspr(MAS1);

		/* skip entries that are not valid */
		if (!(_mas1 & MAS1_VALID))
			continue;

		tsize = (_mas1 >> 8) & 0xf;
		epn = mfspr(MAS2) & MAS2_EPN;
		rpn = mfspr(MAS3) & MAS3_RPN;
#ifdef CONFIG_ENABLE_36BIT_PHYS
		rpn |= ((phys_addr_t)mfspr(MAS7)) << 32;
#endif

		addrmap_set_entry(epn, rpn, (1UL << ((tsize * 2) + 10)), i);
	}
}
#endif

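/*
 * Cover DDR with 1:1 TLB1 entries, starting at CONFIG_SYS_DDR_SDRAM_BASE
 * and using at most 8 free CAM entries, up to CONFIG_MAX_MEM_MAPPED bytes.
 * Any remainder is reported as "left unmapped"; memsize_in_meg is returned
 * unchanged.
 *
 * Illustrative call (size is an example only): setup_ddr_tlbs(512)
 * attempts to map the first 512 MB of DDR.
 */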
unsigned int setup_ddr_tlbs(unsigned int memsize_in_meg)
{
	int i;
	unsigned int tlb_size;
	unsigned int ram_tlb_address = (unsigned int)CONFIG_SYS_DDR_SDRAM_BASE;
	unsigned int max_cam = (mfspr(SPRN_TLB1CFG) >> 16) & 0xf;
	u64 size, memsize = (u64)memsize_in_meg << 20;

	size = min(memsize, CONFIG_MAX_MEM_MAPPED);

	/* convert TLB1CFG[MAXSIZE] (a 4^n KB page size) into log2 of bytes */
	max_cam = max_cam * 2 + 10;

	for (i = 0; size && i < 8; i++) {
		int ram_tlb_index = find_free_tlbcam();
		u32 camsize = __ilog2_u64(size) & ~1U;
		u32 align = __ilog2(ram_tlb_address) & ~1U;

		if (ram_tlb_index == -1)
			break;

		/* address 0 places no alignment limit on the entry size */
		if (align == -2)
			align = max_cam;
		if (camsize > align)
			camsize = align;

		if (camsize > max_cam)
			camsize = max_cam;

		tlb_size = (camsize - 10) / 2;

		set_tlb(1, ram_tlb_address, ram_tlb_address,
			MAS3_SX|MAS3_SW|MAS3_SR, 0,
			0, ram_tlb_index, tlb_size, 1);

		size -= 1ULL << camsize;
		memsize -= 1ULL << camsize;
		ram_tlb_address += 1UL << camsize;
	}

	if (memsize)
		print_size(memsize, " left unmapped\n");

	/* report the size that was requested; any shortfall was printed above */
	return memsize_in_meg;
}
#endif
278