/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

7#include <linux/arm-smccc.h>
8#include <linux/linkage.h>
9
10#include <asm/alternative.h>
11#include <asm/assembler.h>
12#include <asm/el2_setup.h>
13#include <asm/kvm_arm.h>
14#include <asm/kvm_asm.h>
15#include <asm/kvm_mmu.h>
16#include <asm/pgtable-hwdef.h>
17#include <asm/sysreg.h>
18#include <asm/virt.h>
19
20 .text
21 .pushsection .idmap.text, "ax"
22
23 .align 11
24
25SYM_CODE_START(__kvm_hyp_init)
26 ventry __invalid
27 ventry __invalid
28 ventry __invalid
29 ventry __invalid
30
31 ventry __invalid
32 ventry __invalid
33 ventry __invalid
34 ventry __invalid
35
36 ventry __do_hyp_init
37 ventry __invalid
38 ventry __invalid
39 ventry __invalid
40
41 ventry __invalid
42 ventry __invalid
43 ventry __invalid
44 ventry __invalid
45
46__invalid:
47 b .
48
49
50
51
52
53
54
55__do_hyp_init:
56
57 cmp x0,
58 b.lo __kvm_handle_stub_hvc
59
60 mov x3,
61 cmp x0, x3
62 b.eq 1f
63
64 mov x0,
65 eret
66
671: mov x0, x1
68 mov x3, lr
69 bl ___kvm_hyp_init
70 mov lr, x3
71
72
73 mov x0,
74 eret
75SYM_CODE_END(__kvm_hyp_init)
76
77
78
79
80
81
82
83
84
/*
 * Initialise the hypervisor: load EL2 system register state from the
 * kvm_nvhe_init_params block and enable the EL2 MMU.
 *
 * Only uses x0..x2 so as to not clobber callee-saved SMCCC registers,
 * leaving x3 (the caller's saved lr) untouched.
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START_LOCAL(___kvm_hyp_init)
alternative_if ARM64_KVM_PROTECTED_MODE
	mov_q	x1, HCR_HOST_NVHE_PROTECTED_FLAGS
	msr	hcr_el2, x1
alternative_else_nop_endif

	ldr	x1, [x0, #NVHE_INIT_TPIDR_EL2]
	msr	tpidr_el2, x1

	ldr	x1, [x0, #NVHE_INIT_STACK_HYP_VA]
	mov	sp, x1

	ldr	x1, [x0, #NVHE_INIT_MAIR_EL2]
	msr	mair_el2, x1

	ldr	x1, [x0, #NVHE_INIT_PGD_PA]
	phys_to_ttbr x2, x1
alternative_if ARM64_HAS_CNP
	orr	x2, x2, #TTBR_CNP_BIT
alternative_else_nop_endif
	msr	ttbr0_el2, x2

	/*
	 * Set the PS bits in TCR_EL2.
	 */
	ldr	x0, [x0, #NVHE_INIT_TCR_EL2]
	tcr_compute_pa_size x0, #TCR_EL2_PS_SHIFT, x1, x2
	msr	tcr_el2, x0

	isb

	/* Invalidate the stale TLBs from Bootloader */
	tlbi	alle2
	dsb	sy

	/*
	 * Preserve all the RES1 bits while setting the default flags,
	 * as well as the EE bit on BE. Drop the A flag since the compiler
	 * is allowed to generate unaligned accesses.
	 */
	mov_q	x0, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
CPU_BE(	orr	x0, x0, #SCTLR_ELx_EE)
alternative_if ARM64_HAS_ADDRESS_AUTH
	mov_q	x1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
		     SCTLR_ELx_ENDA | SCTLR_ELx_ENDB)
	orr	x0, x0, x1
alternative_else_nop_endif
	msr	sctlr_el2, x0
	isb

	/* Set the host vector */
	ldr	x0, =__kvm_hyp_host_vector
	msr	vbar_el2, x0

	ret
SYM_CODE_END(___kvm_hyp_init)
141
142
143
144
145
146
/*
 * PSCI CPU_ON entry point.
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_entry)
	mov	x1, #1			// is_cpu_on = true
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_entry)
151
152
153
154
155
156
/*
 * PSCI CPU_SUSPEND / SYSTEM_SUSPEND entry point.
 *
 * x0: struct kvm_nvhe_init_params PA
 */
SYM_CODE_START(kvm_hyp_cpu_resume)
	mov	x1, #0			// is_cpu_on = false
	b	__kvm_hyp_init_cpu
SYM_CODE_END(kvm_hyp_cpu_resume)
161
162
163
164
165
166
167
168
/*
 * Common code for the CPU entry points: sanitise EL2 state, install the
 * hypervisor via ___kvm_hyp_init, then branch to the C entry point.
 *
 * x0: struct kvm_nvhe_init_params PA
 * x1: bool is_cpu_on
 */
SYM_CODE_START_LOCAL(__kvm_hyp_init_cpu)
	mov	x28, x0				// Stash arguments
	mov	x29, x1

	/* Check that the core was booted in EL2. */
	mrs	x0, CurrentEL
	cmp	x0, #CurrentEL_EL2
	b.eq	2f

	/* The core booted in EL1. KVM cannot be initialized on it. */
1:	wfe
	wfi
	b	1b

2:	msr	SPsel, #1			// We want to use SP_EL{1,2}

	/* Initialize EL2 CPU state to sane values. */
	init_el2_state				// NOTE(review): assumed to clobber only x0..x2 so x28/x29 stay live — confirm against asm/el2_setup.h

	/* Enable MMU, set vectors and stack. */
	mov	x0, x28
	bl	___kvm_hyp_init			// Clobbers x0..x2

	/* Hand over to the C handler, passing is_cpu_on. */
	mov	x0, x29
	ldr	x1, =kvm_host_psci_cpu_entry
	br	x1
SYM_CODE_END(__kvm_hyp_init_cpu)
197
/*
 * Stub hypercall handling (function IDs below HVC_STUB_HCALL_NR).
 *
 * x0: stub function ID (HVC_SOFT_RESTART / HVC_RESET_VECTORS)
 */
SYM_CODE_START(__kvm_handle_stub_hvc)
	cmp	x0, #HVC_SOFT_RESTART
	b.ne	1f

	/* This is where we're about to jump, staying at EL2 */
	msr	elr_el2, x1
	mov	x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT | PSR_MODE_EL2h)
	msr	spsr_el2, x0

	/* Shuffle the arguments, and don't come back */
	mov	x0, x2
	mov	x1, x3
	mov	x2, x4
	b	reset

1:	cmp	x0, #HVC_RESET_VECTORS
	b.ne	1f

	/*
	 * Set the HVC_RESET_VECTORS return code (0) before entering the
	 * common path so that we do not clobber x0-x2 in case we are
	 * coming via HVC_SOFT_RESTART.
	 */
	mov	x0, xzr
reset:
	/* Reset kvm back to the hyp stub: turn the EL2 MMU off first. */
	mrs	x5, sctlr_el2
	mov_q	x6, SCTLR_ELx_FLAGS
	bic	x5, x5, x6			// Clear SCTLR_ELx_M and friends
	pre_disable_mmu_workaround
	msr	sctlr_el2, x5
	isb

alternative_if ARM64_KVM_PROTECTED_MODE
	mov_q	x5, HCR_HOST_NVHE_FLAGS
	msr	hcr_el2, x5
alternative_else_nop_endif

	/* Install stub vectors */
	adr_l	x5, __hyp_stub_vectors
	msr	vbar_el2, x5
	eret

1:	/* Bad stub call */
	mov_q	x0, HVC_STUB_ERR
	eret

SYM_CODE_END(__kvm_handle_stub_hvc)
246
247 .popsection
248