1
2
3
4
5
6
7#include <linux/arm-smccc.h>
8#include <linux/linkage.h>
9
10#include <asm/alternative.h>
11#include <asm/assembler.h>
12#include <asm/el2_setup.h>
13#include <asm/kvm_arm.h>
14#include <asm/kvm_asm.h>
15#include <asm/kvm_mmu.h>
16#include <asm/pgtable-hwdef.h>
17#include <asm/sysreg.h>
18#include <asm/virt.h>
19
20 .text
21 .pushsection .hyp.idmap.text, "ax"
22
23 .align 11
24
SYM_CODE_START(__kvm_hyp_init)
	ventry	__invalid		// Synchronous EL2t
	ventry	__invalid		// IRQ EL2t
	ventry	__invalid		// FIQ EL2t
	ventry	__invalid		// Error EL2t

	ventry	__invalid		// Synchronous EL2h
	ventry	__invalid		// IRQ EL2h
	ventry	__invalid		// FIQ EL2h
	ventry	__invalid		// Error EL2h

	ventry	__do_hyp_init		// Synchronous 64-bit EL1
	ventry	__invalid		// IRQ 64-bit EL1
	ventry	__invalid		// FIQ 64-bit EL1
	ventry	__invalid		// Error 64-bit EL1

	ventry	__invalid		// Synchronous 32-bit EL1
	ventry	__invalid		// IRQ 32-bit EL1
	ventry	__invalid		// FIQ 32-bit EL1
	ventry	__invalid		// Error 32-bit EL1

__invalid:
	b	.

	/*
	 * Only uses x0..x3 so as to not clobber callee-saved SMCCC registers.
	 *
	 * x0: SMCCC function ID
	 * x1: struct kvm_nvhe_init_params PA
	 */
__do_hyp_init:
	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.lo	__kvm_handle_stub_hvc

	// We only actively check bits [24:31], and everything
	// else has to be zero, which we check at build time.
#if (KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) & 0xFFFFFFFF00FFFFFF)
#error Unexpected __KVM_HOST_SMCCC_FUNC___kvm_hyp_init value
#endif

	ror	x0, x0, #24
	eor	x0, x0, #((KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) >> 24) & 0xF)
	ror	x0, x0, #4
	eor	x0, x0, #((KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) >> 28) & 0xF)
	cbz	x0, 1f
	mov	x0, #SMCCC_RET_NOT_SUPPORTED
	eret

1:	mov	x0, x1
	mov	x3, lr
	bl	___kvm_hyp_init			// Clobbers x0..x2
	mov	lr, x3

	/* Hello, World! */
	mov	x0, #SMCCC_RET_SUCCESS
	eret
SYM_CODE_END(__kvm_hyp_init)
83

/*
 * Initialize the hypervisor in EL2.
 *
 * Only uses x0..x2 so as to not clobber callee-saved SMCCC registers
 * and leave x3 for the caller.
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START_LOCAL(___kvm_hyp_init)
alternative_if ARM64_KVM_PROTECTED_MODE
	mov_q	x1, HCR_HOST_NVHE_PROTECTED_FLAGS
	msr	hcr_el2, x1
alternative_else_nop_endif

	ldr	x1, [x0, #NVHE_INIT_TPIDR_EL2]
	msr	tpidr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_STACK_HYP_VA]
	mov	sp, x1

	ldr	x1, [x0, #NVHE_INIT_MAIR_EL2]
	msr	mair_el2, x1

	ldr	x1, [x0, #NVHE_INIT_PGD_PA]
	phys_to_ttbr x2, x1
alternative_if ARM64_HAS_CNP
	orr	x2, x2, #TTBR_CNP_BIT
alternative_else_nop_endif
	msr	ttbr0_el2, x2

	/*
	 * Set the PS bits in TCR_EL2.
	 */
	ldr	x0, [x0, #NVHE_INIT_TCR_EL2]
	tcr_compute_pa_size x0, #TCR_EL2_PS_SHIFT, x1, x2
	msr	tcr_el2, x0

	isb

	/* Invalidate the stale TLBs from Bootloader */
	tlbi	alle2
	dsb	sy

	/*
	 * Preserve all the RES1 bits while setting the default flags,
	 * as well as the EE bit on BE. Drop the A flag since the compiler
	 * is allowed to generate unaligned accesses.
	 */
	mov_q	x0, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
CPU_BE(	orr	x0, x0, #SCTLR_ELx_EE)
alternative_if ARM64_HAS_ADDRESS_AUTH
	mov_q	x1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
		     SCTLR_ELx_ENDA | SCTLR_ELx_ENDB)
	orr	x0, x0, x1
alternative_else_nop_endif
	msr	sctlr_el2, x0
	isb

	/* Set the host vector */
	ldr	x0, =__kvm_hyp_host_vector
	kimg_hyp_va x0, x1
	msr	vbar_el2, x0

	ret
SYM_CODE_END(___kvm_hyp_init)
149

/*
 * PSCI CPU_ON entry point
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_entry)
	mov	x1, #1				// is_cpu_on = true
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_entry)
159

/*
 * PSCI CPU_SUSPEND / SYSTEM_SUSPEND entry point
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_resume)
	mov	x1, #0				// is_cpu_on = false
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_resume)
169

/*
 * Common code for CPU entry points. Initializes EL2 CPU state and
 * hypervisor configuration, then jumps back into the kernel's host
 * PSCI entry point at EL2 (outside the idmap).
 *
 * x0: struct kvm_nvhe_init_params PA
 * x1: bool is_cpu_on
 */
SYM_CODE_START_LOCAL(__kvm_hyp_init_cpu)
	mov	x28, x0				// Stash arguments
	mov	x29, x1

	/* Check that the core was booted in EL2. */
	mrs	x0, CurrentEL
	cmp	x0, #CurrentEL_EL2
	b.eq	2f

	/* The core booted in EL1. KVM cannot be initialized on it. */
1:	wfe
	wfi
	b	1b

2:	msr	SPsel, #1			// We want to use SP_EL{1,2}

	/* Initialize EL2 CPU state to sane values. */
	init_el2_state nvhe			// Clobbers x0..x2

	/* Enable MMU, set vectors and stack. */
	mov	x0, x28
	bl	___kvm_hyp_init			// Clobbers x0..x2

	/* Leave idmap. */
	mov	x0, x29
	ldr	x1, =kvm_host_psci_cpu_entry
	kimg_hyp_va x1, x2
	br	x1
SYM_CODE_END(__kvm_hyp_init_cpu)
206
SYM_CODE_START(__kvm_handle_stub_hvc)
	/* HVC_SOFT_RESTART: jump to x1 at EL2 with MMU off, args in x2..x4 */
	cmp	x0, #HVC_SOFT_RESTART
	b.ne	1f

	/* This is where we're about to jump, staying at EL2 */
	msr	elr_el2, x1
	mov	x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT | PSR_MODE_EL2h)
	msr	spsr_el2, x0

	/* Shuffle the arguments, and don't come back */
	mov	x0, x2
	mov	x1, x3
	mov	x2, x4
	b	reset

1:	cmp	x0, #HVC_RESET_VECTORS
	b.ne	1f

	/*
	 * Set the HVC_RESET_VECTORS return code before entering the common
	 * path so that we do not clobber x0-x2 in case we are coming via
	 * HVC_SOFT_RESTART.
	 */
	mov	x0, xzr
reset:
	/* Reset kvm back to the hyp stub. */
	mrs	x5, sctlr_el2
	mov_q	x6, SCTLR_ELx_FLAGS
	bic	x5, x5, x6		// Clear SCTLR_ELx_M
	pre_disable_mmu_workaround
	msr	sctlr_el2, x5
	isb

alternative_if ARM64_KVM_PROTECTED_MODE
	mov_q	x5, HCR_HOST_NVHE_FLAGS
	msr	hcr_el2, x5
alternative_else_nop_endif

	/* Install stub vectors */
	adr_l	x5, __hyp_stub_vectors
	msr	vbar_el2, x5
	eret

1:	/* Bad stub call */
	mov_q	x0, HVC_STUB_ERR
	eret

SYM_CODE_END(__kvm_handle_stub_hvc)
255
256 .popsection
257