/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */
#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/el2_setup.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>
#include <asm/pgtable-hwdef.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

	.text
	.pushsection	.idmap.text, "ax"

	.align	11

/*
 * Hypervisor initialisation vector table.
 *
 * Installed by the host via the hyp stub; the only event we service is
 * an HVC issued from the 64-bit host at EL1 (__do_hyp_init). Any other
 * exception taken here is fatal and parks the CPU.
 */
SYM_CODE_START(__kvm_hyp_init)
	ventry	__invalid		// Synchronous EL2t
	ventry	__invalid		// IRQ EL2t
	ventry	__invalid		// FIQ EL2t
	ventry	__invalid		// Error EL2t

	ventry	__invalid		// Synchronous EL2h
	ventry	__invalid		// IRQ EL2h
	ventry	__invalid		// FIQ EL2h
	ventry	__invalid		// Error EL2h

	ventry	__do_hyp_init		// Synchronous 64-bit EL1
	ventry	__invalid		// IRQ 64-bit EL1
	ventry	__invalid		// FIQ 64-bit EL1
	ventry	__invalid		// Error 64-bit EL1

	ventry	__invalid		// Synchronous 32-bit EL1
	ventry	__invalid		// IRQ 32-bit EL1
	ventry	__invalid		// FIQ 32-bit EL1
	ventry	__invalid		// Error 32-bit EL1

__invalid:
	b	.

	/*
	 * Initialise the hypervisor from an HVC issued by the host.
	 *
	 * Only uses x0..x3 so as to not clobber callee-saved SMCCC registers.
	 *
	 * x0: SMCCC function ID
	 * x1: struct kvm_nvhe_init_params PA
	 */
__do_hyp_init:
	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.lo	__kvm_handle_stub_hvc

	mov	x3, #KVM_HOST_SMCCC_FUNC(__kvm_hyp_init)
	cmp	x0, x3
	b.eq	1f

	mov	x0, #SMCCC_RET_NOT_SUPPORTED
	eret

1:	mov	x0, x1
	mov	x3, lr
	bl	___kvm_hyp_init			// Clobbers x0..x2
	mov	lr, x3

	/* Hello, World! */
	mov	x0, #SMCCC_RET_SUCCESS
	eret
SYM_CODE_END(__kvm_hyp_init)
/*
 * Initialize the hypervisor in EL2: program per-CPU and stage-2 system
 * registers from the init params, turn the EL2 MMU on and install the
 * host vectors.
 *
 * Only uses x0..x2 so as to not clobber callee-saved SMCCC registers
 * and leave x3 for the caller.
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START_LOCAL(___kvm_hyp_init)
	ldr	x1, [x0, #NVHE_INIT_TPIDR_EL2]
	msr	tpidr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_STACK_HYP_VA]
	mov	sp, x1

	ldr	x1, [x0, #NVHE_INIT_MAIR_EL2]
	msr	mair_el2, x1

	ldr	x1, [x0, #NVHE_INIT_HCR_EL2]
	msr	hcr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_VTTBR]
	msr	vttbr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_VTCR]
	msr	vtcr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_PGD_PA]
	phys_to_ttbr x2, x1
alternative_if ARM64_HAS_CNP
	orr	x2, x2, #TTBR_CNP_BIT
alternative_else_nop_endif
	msr	ttbr0_el2, x2

	/*
	 * Set the PS bits in TCR_EL2.
	 */
	ldr	x0, [x0, #NVHE_INIT_TCR_EL2]
	tcr_compute_pa_size x0, #TCR_EL2_PS_SHIFT, x1, x2
	msr	tcr_el2, x0

	isb

	/* Invalidate the stale TLBs from Bootloader */
	tlbi	alle2
	tlbi	vmalls12e1
	dsb	sy

	mov_q	x0, INIT_SCTLR_EL2_MMU_ON
alternative_if ARM64_HAS_ADDRESS_AUTH
	mov_q	x1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
		     SCTLR_ELx_ENDA | SCTLR_ELx_ENDB)
	orr	x0, x0, x1
alternative_else_nop_endif
	msr	sctlr_el2, x0
	isb

	/* Set the host vector */
	ldr	x0, =__kvm_hyp_host_vector
	msr	vbar_el2, x0

	ret
SYM_CODE_END(___kvm_hyp_init)
/*
 * PSCI CPU_ON entry point
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_entry)
	mov	x1, #1				// is_cpu_on = true
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_entry)
/*
 * PSCI CPU_SUSPEND / SYSTEM_SUSPEND entry point
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_resume)
	mov	x1, #0				// is_cpu_on = false
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_resume)
/*
 * Common CPU entry path for the PSCI entry points above: sanitise EL2
 * state, initialise the hypervisor on this CPU, then branch to the C
 * handler.
 *
 * x0: struct kvm_nvhe_init_params PA
 * x1: bool is_cpu_on
 */
SYM_CODE_START_LOCAL(__kvm_hyp_init_cpu)
	mov	x28, x0				// Stash arguments
	mov	x29, x1

	/* Check that the core was booted in EL2. */
	mrs	x0, CurrentEL
	cmp	x0, #CurrentEL_EL2
	b.eq	2f

	/* The core booted in EL1. KVM cannot be initialized on it. */
1:	wfe
	wfi
	b	1b

2:	msr	SPsel, #1			// We want to use SP_EL{1,2}

	/* Initialize EL2 CPU state to sane values. */
	init_el2_state				// Clobbers x0..x2

	/* Enable MMU, set vectors and stack. */
	mov	x0, x28
	bl	___kvm_hyp_init			// Clobbers x0..x2

	/* Leave idmap. */
	mov	x0, x29
	ldr	x1, =kvm_host_psci_cpu_entry
	br	x1
SYM_CODE_END(__kvm_hyp_init_cpu)
/*
 * Handle hyp-stub hypercalls issued while KVM is installed:
 * tear the hypervisor down and return to the stub, optionally
 * soft-restarting the CPU at a caller-provided entry point.
 *
 * x0: stub function ID (HVC_SOFT_RESTART / HVC_RESET_VECTORS)
 */
SYM_CODE_START(__kvm_handle_stub_hvc)
	cmp	x0, #HVC_SOFT_RESTART
	b.ne	1f

	/* This stub must be preserved during reset: restart at x1 */
	msr	elr_el2, x1
	mov	x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT | PSR_MODE_EL1h)
	msr	spsr_el2, x0

	/* Shuffle the arguments, and don't come back */
	mov	x0, x2
	mov	x1, x3
	mov	x2, x4
	b	reset

1:	cmp	x0, #HVC_RESET_VECTORS
	b.ne	1f

	/*
	 * Set the HVC_RESET_VECTORS return code before entering the common
	 * path so that we do not clobber x0-x2 in case we are coming via
	 * HVC_SOFT_RESTART.
	 */
	mov	x0, xzr
reset:
	/* Reset kvm back to the hyp stub. Turn the EL2 MMU off first. */
	mov_q	x5, INIT_SCTLR_EL2_MMU_OFF
	pre_disable_mmu_workaround
	msr	sctlr_el2, x5
	isb

alternative_if ARM64_KVM_PROTECTED_MODE
	mov_q	x5, HCR_HOST_NVHE_FLAGS
	msr	hcr_el2, x5
alternative_else_nop_endif

	/* Install stub vectors */
	adr_l	x5, __hyp_stub_vectors
	msr	vbar_el2, x5
	eret

1:	/* Bad stub call */
	mov_q	x0, HVC_STUB_ERR
	eret

SYM_CODE_END(__kvm_handle_stub_hvc)
/*
 * Switch to the pKVM-private page-tables and stack, then "return" to
 * the continuation function with the MMU back on.
 *
 * x0: struct kvm_nvhe_init_params PA
 * x1: continuation function address
 */
SYM_FUNC_START(__pkvm_init_switch_pgd)
	/* Turn the MMU off */
	pre_disable_mmu_workaround
	mrs	x2, sctlr_el2
	bic	x3, x2, #SCTLR_ELx_M
	msr	sctlr_el2, x3
	isb

	tlbi	alle2

	/* Install the new pgtables */
	ldr	x3, [x0, #NVHE_INIT_PGD_PA]
	phys_to_ttbr x4, x3
alternative_if ARM64_HAS_CNP
	orr	x4, x4, #TTBR_CNP_BIT
alternative_else_nop_endif
	msr	ttbr0_el2, x4

	/* Set the new stack pointer */
	ldr	x0, [x0, #NVHE_INIT_STACK_HYP_VA]
	mov	sp, x0

	/* And turn the MMU back on! */
	set_sctlr_el2	x2
	ret	x1
SYM_FUNC_END(__pkvm_init_switch_pgd)

	.popsection