/*
 * Based on arch/arm/mm/proc.S
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable.h>
#include <asm/pgtable-hwdef.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>

#ifdef CONFIG_ARM64_64K_PAGES
#define TCR_TG_FLAGS	TCR_TG0_64K | TCR_TG1_64K
#elif defined(CONFIG_ARM64_16K_PAGES)
#define TCR_TG_FLAGS	TCR_TG0_16K | TCR_TG1_16K
#else
#define TCR_TG_FLAGS	TCR_TG0_4K | TCR_TG1_4K
#endif

#ifdef CONFIG_RANDOMIZE_BASE
#define TCR_KASLR_FLAGS	TCR_NFD1
#else
#define TCR_KASLR_FLAGS	0
#endif

#define TCR_SMP_FLAGS	TCR_SHARED

/* PTWs cacheable, inner/outer WBWA */
#define TCR_CACHE_FLAGS	TCR_IRGN_WBWA | TCR_ORGN_WBWA

#ifdef CONFIG_KASAN_SW_TAGS
#define TCR_KASAN_FLAGS TCR_TBI1
#else
#define TCR_KASAN_FLAGS 0
#endif

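/* Shift an 8-bit memory attribute encoding into the MAIR_EL1 byte for memory type index 'mt'. */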
#define MAIR(attr, mt)	((attr) << ((mt) * 8))

#ifdef CONFIG_CPU_PM
/*
 * cpu_do_suspend - save CPU registers context
 *
 * x0: virtual address of context pointer
 */
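/* The save layout below must be kept in sync with struct cpu_suspend_ctx in <asm/suspend.h>. */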
ENTRY(cpu_do_suspend)
	mrs	x2, tpidr_el0
	mrs	x3, tpidrro_el0
	mrs	x4, contextidr_el1
	mrs	x5, cpacr_el1
	mrs	x6, tcr_el1
	mrs	x7, vbar_el1
	mrs	x8, mdscr_el1
	mrs	x9, oslsr_el1
	mrs	x10, sctlr_el1
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	x11, tpidr_el1
alternative_else
	mrs	x11, tpidr_el2
alternative_endif
	mrs	x12, sp_el0
	stp	x2, x3, [x0]
	stp	x4, xzr, [x0, #16]
	stp	x5, x6, [x0, #32]
	stp	x7, x8, [x0, #48]
	stp	x9, x10, [x0, #64]
	stp	x11, x12, [x0, #80]
	ret
ENDPROC(cpu_do_suspend)

/*
 * cpu_do_resume - restore CPU register context
 *
 * x0: Address of context pointer
 */
	.pushsection ".idmap.text", "awx"
ENTRY(cpu_do_resume)
	ldp	x2, x3, [x0]
	ldp	x4, x5, [x0, #16]
	ldp	x6, x8, [x0, #32]
	ldp	x9, x10, [x0, #48]
	ldp	x11, x12, [x0, #64]
	ldp	x13, x14, [x0, #80]
	msr	tpidr_el0, x2
	msr	tpidrro_el0, x3
	msr	contextidr_el1, x4
	msr	cpacr_el1, x6

	/* Don't change t0sz here, mask those bits when restoring */
	mrs	x5, tcr_el1
	bfi	x8, x5, TCR_T0SZ_OFFSET, TCR_TxSZ_WIDTH

	msr	tcr_el1, x8
	msr	vbar_el1, x9

	/*
	 * __cpu_setup() cleared MDSCR_EL1.MDE and friends, before unmasking
	 * debug exceptions. By restoring MDSCR_EL1 here, we may take a debug
	 * exception. Mask them until local_daif_restore() in cpu_suspend()
	 * resets them.
	 */
	disable_daif
	msr	mdscr_el1, x10

	msr	sctlr_el1, x12
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	msr	tpidr_el1, x13
alternative_else
	msr	tpidr_el2, x13
alternative_endif
	msr	sp_el0, x14
	/*
	 * Restore oslsr_el1 by writing oslar_el1
	 */
	ubfx	x11, x11, #1, #1
	msr	oslar_el1, x11
	reset_pmuserenr_el0 x0			// Disable PMU access from EL0

alternative_if ARM64_HAS_RAS_EXTN
	msr_s	SYS_DISR_EL1, xzr
alternative_else_nop_endif

	isb
	ret
ENDPROC(cpu_do_resume)
	.popsection
#endif

/*
 *	cpu_do_switch_mm(pgd_phys, tsk)
 *
 *	Set the translation table base pointer to be pgd_phys.
 *
 *	- pgd_phys - physical address of new TTB
 */
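/*
 * The ASID lives in TTBR1 (TCR_EL1.A1 is set), so it is written there
 * first; TTBR0 is only switched once the new ASID is in place.
 */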
ENTRY(cpu_do_switch_mm)
	mrs	x2, ttbr1_el1
	mmid	x1, x1				// get mm->context.id
	phys_to_ttbr x3, x0

alternative_if ARM64_HAS_CNP
	cbz	x1, 1f				// skip CNP for reserved ASID
	orr	x3, x3, #TTBR_CNP_BIT
1:
alternative_else_nop_endif
#ifdef CONFIG_ARM64_SW_TTBR0_PAN
	bfi	x3, x1, #48, #16		// set the ASID field in TTBR0
#endif
	bfi	x2, x1, #48, #16		// set the ASID
	msr	ttbr1_el1, x2			// in TTBR1 (since TCR.A1 is set)
	isb
	msr	ttbr0_el1, x3			// now update TTBR0
	isb
	b	post_ttbr_update_workaround	// Back to C code...
ENDPROC(cpu_do_switch_mm)

	.pushsection ".idmap.text", "awx"

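/*
 * Point TTBR1 at the empty_zero_page and invalidate the TLBs, so that the
 * live TTBR1 tables can be replaced or rewritten safely.
 */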
.macro	__idmap_cpu_set_reserved_ttbr1, tmp1, tmp2
	adrp	\tmp1, empty_zero_page
	phys_to_ttbr \tmp2, \tmp1
	offset_ttbr1 \tmp2
	msr	ttbr1_el1, \tmp2
	isb
	tlbi	vmalle1
	dsb	nsh
	isb
.endm

/*
 * void idmap_cpu_replace_ttbr1(phys_addr_t ttbr1)
 *
 * This is the low-level counterpart to cpu_replace_ttbr1, and should not be
 * called by anything else. It can only be executed from a TTBR0 mapping.
 */
ENTRY(idmap_cpu_replace_ttbr1)
	save_and_disable_daif flags=x2

	__idmap_cpu_set_reserved_ttbr1 x1, x3

	offset_ttbr1 x0
	msr	ttbr1_el1, x0
	isb

	restore_daif x2

	ret
ENDPROC(idmap_cpu_replace_ttbr1)
	.popsection

#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
	.pushsection ".idmap.text", "awx"

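/*
 * Load a page-table entry while the MMU is off: clean the cache line that
 * holds it so the load observes the in-memory copy, then skip entries that
 * are invalid or already non-global.
 */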
	.macro	__idmap_kpti_get_pgtable_ent, type
	dc	cvac, cur_\()\type\()p		// Ensure any existing dirty
	dmb	sy				// lines are written back before
	ldr	\type, [cur_\()\type\()p]	// loading the entry
	tbz	\type, #0, skip_\()\type	// Skip invalid and
	tbnz	\type, #11, skip_\()\type	// non-global entries
	.endm

	.macro __idmap_kpti_put_pgtable_ent_ng, type
	orr	\type, \type, #PTE_NG		// Same bit for blocks and pages
	str	\type, [cur_\()\type\()p]	// Update the entry and ensure
	dmb	sy				// that it is visible to all
	dc	civac, cur_\()\type\()p		// CPUs.
	.endm

/*
 * void __kpti_install_ng_mappings(int cpu, int num_cpus, phys_addr_t swapper)
 *
 * Called exactly once from stop_machine context by each CPU found during boot.
 */
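/*
 * __idmap_kpti_flag acts as a rendezvous: each secondary increments it once
 * it is running on the reserved TTBR1, the boot CPU waits for it to reach
 * num_cpus, rewrites swapper with nG set, then clears it to release them.
 */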
__idmap_kpti_flag:
	.long	1
ENTRY(idmap_kpti_install_ng_mappings)
	cpu		.req	w0
	num_cpus	.req	w1
	swapper_pa	.req	x2
	swapper_ttb	.req	x3
	flag_ptr	.req	x4
	cur_pgdp	.req	x5
	end_pgdp	.req	x6
	pgd		.req	x7
	cur_pudp	.req	x8
	end_pudp	.req	x9
	pud		.req	x10
	cur_pmdp	.req	x11
	end_pmdp	.req	x12
	pmd		.req	x13
	cur_ptep	.req	x14
	end_ptep	.req	x15
	pte		.req	x16

	mrs	swapper_ttb, ttbr1_el1
	restore_ttbr1	swapper_ttb
	adr	flag_ptr, __idmap_kpti_flag

	cbnz	cpu, __idmap_kpti_secondary

	/* We're the boot CPU. Wait for the others to catch up */
	sevl
1:	wfe
	ldaxr	w18, [flag_ptr]
	eor	w18, w18, num_cpus
	cbnz	w18, 1b

	/* We need to walk swapper, so turn off the MMU. */
	pre_disable_mmu_workaround
	mrs	x18, sctlr_el1
	bic	x18, x18, #SCTLR_ELx_M
	msr	sctlr_el1, x18
	isb

	/* Everybody is enjoying the idmap, so we can rewrite swapper. */
	/* PGD */
	mov	cur_pgdp, swapper_pa
	add	end_pgdp, cur_pgdp, #(PTRS_PER_PGD * 8)
do_pgd:	__idmap_kpti_get_pgtable_ent	pgd
	tbnz	pgd, #1, walk_puds
next_pgd:
	__idmap_kpti_put_pgtable_ent_ng	pgd
skip_pgd:
	add	cur_pgdp, cur_pgdp, #8
	cmp	cur_pgdp, end_pgdp
	b.ne	do_pgd

	/* Publish the updated tables and nuke all the TLBs */
	dsb	sy
	tlbi	vmalle1is
	dsb	ish
	isb

	/* We're done: fire up the MMU again */
	mrs	x18, sctlr_el1
	orr	x18, x18, #SCTLR_ELx_M
	msr	sctlr_el1, x18
	isb

	/* Set the flag to zero to indicate that we're all done */
	str	wzr, [flag_ptr]
	ret

	/* PUD */
walk_puds:
	.if CONFIG_PGTABLE_LEVELS > 3
	pte_to_phys	cur_pudp, pgd
	add	end_pudp, cur_pudp, #(PTRS_PER_PUD * 8)
do_pud:	__idmap_kpti_get_pgtable_ent	pud
	tbnz	pud, #1, walk_pmds
next_pud:
	__idmap_kpti_put_pgtable_ent_ng	pud
skip_pud:
	add	cur_pudp, cur_pudp, 8
	cmp	cur_pudp, end_pudp
	b.ne	do_pud
	b	next_pgd
	.else /* CONFIG_PGTABLE_LEVELS <= 3 */
	mov	pud, pgd
	b	walk_pmds
next_pud:
	b	next_pgd
	.endif
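	/*
	 * When a translation level is folded (CONFIG_PGTABLE_LEVELS too small),
	 * the walk above and the one below simply reuse the upper-level entry
	 * as the next-level descriptor and fall through.
	 */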

	/* PMD */
walk_pmds:
	.if CONFIG_PGTABLE_LEVELS > 2
	pte_to_phys	cur_pmdp, pud
	add	end_pmdp, cur_pmdp, #(PTRS_PER_PMD * 8)
do_pmd:	__idmap_kpti_get_pgtable_ent	pmd
	tbnz	pmd, #1, walk_ptes
next_pmd:
	__idmap_kpti_put_pgtable_ent_ng	pmd
skip_pmd:
	add	cur_pmdp, cur_pmdp, #8
	cmp	cur_pmdp, end_pmdp
	b.ne	do_pmd
	b	next_pud
	.else /* CONFIG_PGTABLE_LEVELS <= 2 */
	mov	pmd, pud
	b	walk_ptes
next_pmd:
	b	next_pud
	.endif

	/* PTE */
walk_ptes:
	pte_to_phys	cur_ptep, pmd
	add	end_ptep, cur_ptep, #(PTRS_PER_PTE * 8)
do_pte:	__idmap_kpti_get_pgtable_ent	pte
	__idmap_kpti_put_pgtable_ent_ng	pte
skip_pte:
	add	cur_ptep, cur_ptep, #8
	cmp	cur_ptep, end_ptep
	b.ne	do_pte
	b	next_pmd

	/* Secondary CPUs end up here */
__idmap_kpti_secondary:
	/* Uninstall swapper before surgery begins */
	__idmap_cpu_set_reserved_ttbr1 x18, x17

	/* Increment the flag to let the boot CPU know we're ready */
1:	ldxr	w18, [flag_ptr]
	add	w18, w18, #1
	stxr	w17, w18, [flag_ptr]
	cbnz	w17, 1b

	/* Wait for the boot CPU to finish messing around with swapper */
	sevl
1:	wfe
	ldxr	w18, [flag_ptr]
	cbnz	w18, 1b

	/* All done, act like nothing happened */
	offset_ttbr1 swapper_ttb
	msr	ttbr1_el1, swapper_ttb
	isb
	ret

	.unreq	cpu
	.unreq	num_cpus
	.unreq	swapper_pa
	.unreq	swapper_ttb
	.unreq	flag_ptr
	.unreq	cur_pgdp
	.unreq	end_pgdp
	.unreq	pgd
	.unreq	cur_pudp
	.unreq	end_pudp
	.unreq	pud
	.unreq	cur_pmdp
	.unreq	end_pmdp
	.unreq	pmd
	.unreq	cur_ptep
	.unreq	end_ptep
	.unreq	pte
ENDPROC(idmap_kpti_install_ng_mappings)
	.popsection
#endif

/*
 *	__cpu_setup
 *
 *	Initialise the processor for turning the MMU on.  Return in x0 the
 *	value of the SCTLR_EL1 register.
 */
	.pushsection ".idmap.text", "awx"
ENTRY(__cpu_setup)
	tlbi	vmalle1				// Invalidate local TLB
	dsb	nsh

	mov	x0, #3 << 20
	msr	cpacr_el1, x0			// Enable FP/ASIMD
	mov	x0, #1 << 12			// Reset mdscr_el1 and disable
	msr	mdscr_el1, x0			// access to the DCC from EL0
	isb					// Unmask debug exceptions now,
	enable_dbg				// since this is per-cpu
	reset_pmuserenr_el0 x0			// Disable PMU access from EL0
	/*
	 * Memory region attributes for LPAE:
	 *
	 *   n = AttrIndx[2:0]
	 *			n	MAIR
	 *   DEVICE_nGnRnE	000	00000000
	 *   DEVICE_nGnRE	001	00000100
	 *   DEVICE_GRE		010	00001100
	 *   NORMAL_NC		011	01000100
	 *   NORMAL		100	11111111
	 *   NORMAL_WT		101	10111011
	 */
	ldr	x5, =MAIR(0x00, MT_DEVICE_nGnRnE) | \
		     MAIR(0x04, MT_DEVICE_nGnRE) | \
		     MAIR(0x0c, MT_DEVICE_GRE) | \
		     MAIR(0x44, MT_NORMAL_NC) | \
		     MAIR(0xff, MT_NORMAL) | \
		     MAIR(0xbb, MT_NORMAL_WT)
	msr	mair_el1, x5
	/*
	 * Prepare SCTLR
	 */
	mov_q	x0, SCTLR_EL1_SET
	/*
	 * Set/prepare TCR and TTBR. We use a VA_BITS-sized address range for
	 * both user and kernel.
	 */
	ldr	x10, =TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
			TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_ASID16 | \
			TCR_TBI0 | TCR_A1 | TCR_KASAN_FLAGS
	tcr_clear_errata_bits x10, x9, x5
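
	/*
	 * With 52-bit user VAs, T0SZ is derived from the runtime vabits_user
	 * value (T0SZ = 64 - vabits_user); otherwise the boot-time idmap_t0sz
	 * is used.
	 */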
#ifdef CONFIG_ARM64_USER_VA_BITS_52
	ldr_l		x9, vabits_user
	sub		x9, xzr, x9
	add		x9, x9, #64
#else
	ldr_l		x9, idmap_t0sz
#endif
	tcr_set_t0sz	x10, x9

	/*
	 * Set the IPS bits in TCR_EL1.
	 */
	tcr_compute_pa_size x10, #TCR_IPS_SHIFT, x5, x6
#ifdef CONFIG_ARM64_HW_AFDBM
	/*
	 * Enable hardware update of the Access Flags bit.
	 * Hardware dirty bit management is enabled later,
	 * via capabilities.
	 */
	mrs	x9, ID_AA64MMFR1_EL1
	and	x9, x9, #0xf
	cbz	x9, 1f
	orr	x10, x10, #TCR_HA		// hardware Access flag update
1:
#endif	/* CONFIG_ARM64_HW_AFDBM */
	msr	tcr_el1, x10
	ret					// return to head.S
ENDPROC(__cpu_setup)