1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23#ifndef _XTENSA_INITIALIZE_MMU_H
24#define _XTENSA_INITIALIZE_MMU_H
25
26#include <linux/init.h>
27#include <linux/pgtable.h>
28#include <asm/vectors.h>
29
30#if XCHAL_HAVE_PTP_MMU
31#define CA_BYPASS (_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
32#define CA_WRITEBACK (_PAGE_CA_WB | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
33#else
34#define CA_WRITEBACK (0x4)
35#endif
36
37#ifdef __ASSEMBLY__
38
39#define XTENSA_HWVERSION_RC_2009_0 230000
40
41 .macro initialize_mmu
42
43#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
44
45
46
47
48#if XCHAL_DCACHE_IS_COHERENT
49 movi a3, 0x25
50
51
52#else
53 movi a3, 0x29
54
55
56#endif
57 wsr a3, atomctl
58#endif
59
60
61
62#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
63
64
65
66
67#if !XCHAL_HAVE_VECBASE
68# error "MMU v3 requires reloc vectors"
69#endif
70
71 movi a1, 0
72 _call0 1f
73 _j 2f
74
75 .align 4
761:
77
78#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
79#define TEMP_MAPPING_VADDR 0x40000000
80#else
81#define TEMP_MAPPING_VADDR 0x00000000
82#endif
83
84
85
86 movi a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
87 idtlb a2
88 iitlb a2
89 isync
90
91
92
93
94
95 srli a3, a0, 27
96 slli a3, a3, 27
97 addi a3, a3, CA_BYPASS
98 addi a7, a2, 5 - XCHAL_SPANNING_WAY
99 wdtlb a3, a7
100 witlb a3, a7
101 isync
102
103 slli a4, a0, 5
104 srli a4, a4, 5
105 addi a5, a2, -XCHAL_SPANNING_WAY
106 add a4, a4, a5
107 jx a4
108
109
110
111
1122: movi a4, 0x20000000
113 add a5, a2, a4
1143: idtlb a5
115 iitlb a5
116 add a5, a5, a4
117 bne a5, a2, 3b
118
119
120
121 movi a6, 0x01000000
122 wsr a6, ITLBCFG
123 wsr a6, DTLBCFG
124 isync
125
126 movi a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
127 movi a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
128 wdtlb a4, a5
129 witlb a4, a5
130
131 movi a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
132 movi a4, XCHAL_KSEG_PADDR + CA_BYPASS
133 wdtlb a4, a5
134 witlb a4, a5
135
136#ifdef CONFIG_XTENSA_KSEG_512M
137 movi a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
138 movi a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
139 wdtlb a4, a5
140 witlb a4, a5
141
142 movi a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
143 movi a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
144 wdtlb a4, a5
145 witlb a4, a5
146#endif
147
148 movi a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
149 movi a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
150 wdtlb a4, a5
151 witlb a4, a5
152
153 movi a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
154 movi a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
155 wdtlb a4, a5
156 witlb a4, a5
157
158 isync
159
160
161 movi a4, 1f
162 jx a4
163
1641:
165
166 idtlb a7
167 iitlb a7
168 isync
169
170 movi a0, 0
171 wsr a0, ptevaddr
172 rsync
173
174#endif
175
176
177 .endm
178
179 .macro initialize_cacheattr
180
181#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
182#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
183#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
184#endif
185
186#if XCHAL_HAVE_MPU
187 __REFCONST
188 .align 4
189.Lattribute_table:
190 .long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
191 .long 0x006600, 0x000000, 0x000000, 0x000000
192 .long 0x000000, 0x000000, 0x000000, 0x000000
193 .long 0x000000, 0x000000, 0x000000, 0x000000
194 .previous
195
196 movi a3, .Lattribute_table
197 movi a4, CONFIG_MEMMAP_CACHEATTR
198 movi a5, 1
199 movi a6, XCHAL_MPU_ENTRIES
200 movi a10, 0x20000000
201 movi a11, -1
2021:
203 sub a5, a5, a10
204 extui a8, a4, 28, 4
205 beq a8, a11, 2f
206 addi a6, a6, -1
207 mov a11, a8
2082:
209 addx4 a9, a8, a3
210 l32i a9, a9, 0
211 or a9, a9, a6
212 wptlb a9, a5
213 slli a4, a4, 4
214 bgeu a5, a10, 1b
215
216#else
217 movi a5, XCHAL_SPANNING_WAY
218 movi a6, ~_PAGE_ATTRIB_MASK
219 movi a4, CONFIG_MEMMAP_CACHEATTR
220 movi a8, 0x20000000
2211:
222 rdtlb1 a3, a5
223 xor a3, a3, a4
224 and a3, a3, a6
225 xor a3, a3, a4
226 wdtlb a3, a5
227 ritlb1 a3, a5
228 xor a3, a3, a4
229 and a3, a3, a6
230 xor a3, a3, a4
231 witlb a3, a5
232
233 add a5, a5, a8
234 srli a4, a4, 4
235 bgeu a5, a8, 1b
236
237 isync
238#endif
239#endif
240
241 .endm
242
243#endif
244
245#endif
246