1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include <asm/ppc_asm.h>
21#include <asm/kvm_asm.h>
22#include <asm/reg.h>
23#include <asm/page.h>
24#include <asm/asm-offsets.h>
25#include <asm/exception-64s.h>
26
27
/*
 * Per-architecture helper macros.
 *
 * FUNC(name)           — how to reference a C function from asm: on 64-bit
 *                        ELFv1 calls go through the dot-symbol (GLUE(.,name));
 *                        on ELFv2 and on 32-bit the plain symbol is used.
 * GET_SHADOW_VCPU(reg) — load the address of the shadow vcpu: on 64-bit it
 *                        lives in the PACA (r13); on 32-bit it is found via
 *                        the thread struct hanging off r2.
 *
 * The original guards were lost here: without them FUNC/GET_SHADOW_VCPU are
 * defined twice (redefinition error) and the final #endif is unmatched.
 */
#if defined(CONFIG_PPC_BOOK3S_64)
#ifdef PPC64_ELF_ABI_v2
#define FUNC(name)		name
#else
#define FUNC(name)		GLUE(.,name)
#endif
#define GET_SHADOW_VCPU(reg)	addi	reg, r13, PACA_SVCPU

#elif defined(CONFIG_PPC_BOOK3S_32)
#define FUNC(name)		name
#define GET_SHADOW_VCPU(reg)	lwz	reg, (THREAD + THREAD_KVM_SVCPU)(r2)

#endif /* CONFIG_PPC_BOOK3S_64 / CONFIG_PPC_BOOK3S_32 */
40
/*
 * Load the guest's non-volatile GPRs (r14..r31) from the vcpu struct.
 * PPC_LL expands to the natural-width load (lwz/ld) for the build.
 * NOTE(review): the final line keeps a trailing backslash, continuing the
 * macro into the following blank line — harmless under cpp, as upstream.
 */
#define VCPU_LOAD_NVGPRS(vcpu) \
	PPC_LL	r14, VCPU_GPR(R14)(vcpu); \
	PPC_LL	r15, VCPU_GPR(R15)(vcpu); \
	PPC_LL	r16, VCPU_GPR(R16)(vcpu); \
	PPC_LL	r17, VCPU_GPR(R17)(vcpu); \
	PPC_LL	r18, VCPU_GPR(R18)(vcpu); \
	PPC_LL	r19, VCPU_GPR(R19)(vcpu); \
	PPC_LL	r20, VCPU_GPR(R20)(vcpu); \
	PPC_LL	r21, VCPU_GPR(R21)(vcpu); \
	PPC_LL	r22, VCPU_GPR(R22)(vcpu); \
	PPC_LL	r23, VCPU_GPR(R23)(vcpu); \
	PPC_LL	r24, VCPU_GPR(R24)(vcpu); \
	PPC_LL	r25, VCPU_GPR(R25)(vcpu); \
	PPC_LL	r26, VCPU_GPR(R26)(vcpu); \
	PPC_LL	r27, VCPU_GPR(R27)(vcpu); \
	PPC_LL	r28, VCPU_GPR(R28)(vcpu); \
	PPC_LL	r29, VCPU_GPR(R29)(vcpu); \
	PPC_LL	r30, VCPU_GPR(R30)(vcpu); \
	PPC_LL	r31, VCPU_GPR(R31)(vcpu); \
60
61
62
63
64
65
66
67
68
69
70
/*
 * Host -> guest entry point.
 *
 * In:  r3 = presumably struct kvm_run *  — TODO confirm against caller
 *      r4 = struct kvm_vcpu *  (evidenced by VCPU_LOAD_NVGPRS(r4) below
 *           and the later reloads from the saved GPR4 slot)
 *
 * Saves all host state that the guest run may clobber into a switch
 * frame on the host stack, then falls through to the lightweight path.
 */
_GLOBAL(__kvmppc_vcpu_run)

kvm_start_entry:
	/* Save the caller's return address in its frame */
	mflr	r0
	PPC_STL	r0,PPC_LR_STKOFF(r1)

	/* Allocate our switch frame on the host stack */
	PPC_STLU r1, -SWITCH_FRAME_SIZE(r1)

	/* Save the two arguments (r3, r4) into the frame */
	SAVE_2GPRS(3, r1)

	/* Save host non-volatile GPRs r14..r31 */
	SAVE_NVGPRS(r1)

	/* Save host CR */
	mfcr	r14
	stw	r14, _CCR(r1)

	/* Stash host LR at _LINK as well (r0 still holds it from mflr above);
	 * the exit/loop paths below reload it from there */
	PPC_STL	r0, _LINK(r1)

	/* Load the guest's non-volatile GPRs from the vcpu (r4) */
	VCPU_LOAD_NVGPRS(r4)
96
/*
 * Lightweight guest (re-)entry: copy state into the shadow vcpu and
 * jump into the guest via the entry trampoline.  Expects r4 = vcpu.
 */
kvm_start_lightweight:

	mr	r3, r4			/* arg: vcpu */
	bl	FUNC(kvmppc_copy_to_svcpu)
	nop
	REST_GPR(4, r1)			/* reload vcpu (volatile, clobbered by call) */

#ifdef CONFIG_PPC_BOOK3S_64
	/* Record whether HID5 needs restoring on exit: low bit of hflags
	 * — NOTE(review): assumed to be the HID5/DCBZ32 flag, confirm */
	PPC_LL	r3, VCPU_HFLAGS(r4)
	rldicl	r3, r3, 0, 63		/* keep only bit 0 */
	stb	r3, HSTATE_RESTORE_HID5(r13)

	/* Load guest SPRG3 from the shared page, honouring its byte order:
	 * shared_big_endian != 0 selects the big-endian path */
	lwz	r3, VCPU_SHAREDBE(r4)
	cmpwi	r3, 0
	ld	r5, VCPU_SHARED(r4)
	beq	sprg3_little_endian
sprg3_big_endian:
#ifdef __BIG_ENDIAN__
	ld	r3, VCPU_SHARED_SPRG3(r5)	/* host order matches: plain load */
#else
	addi	r5, r5, VCPU_SHARED_SPRG3
	ldbrx	r3, 0, r5			/* byte-reversed load */
#endif
	b	after_sprg3_load
sprg3_little_endian:
#ifdef __LITTLE_ENDIAN__
	ld	r3, VCPU_SHARED_SPRG3(r5)	/* host order matches: plain load */
#else
	addi	r5, r5, VCPU_SHARED_SPRG3
	ldbrx	r3, 0, r5			/* byte-reversed load */
#endif

after_sprg3_load:
	mtspr	SPRN_SPRG3, r3
#endif /* CONFIG_PPC_BOOK3S_64 */

	PPC_LL	r4, VCPU_SHADOW_MSR(r4)	/* trampoline argument: shadow MSR */

	/* Enter the guest; control returns here on guest exit */
	bl	FUNC(kvmppc_entry_trampoline)
	nop
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
	/*
	 * Guest exit path.  On return from the trampoline, r12 holds the
	 * exit handler id (stored to VCPU_TRAP below) and r1 is the host
	 * stack frame set up at entry.
	 */
	PPC_LL	r3, GPR4(r1)		/* r3 = vcpu (saved r4 slot) */

	/*
	 * The following call can clobber volatile registers, so park the
	 * exit id in the vcpu and reload it from there afterwards.
	 */
	stw	r12, VCPU_TRAP(r3)

	/* Copy register state from the shadow vcpu back into the vcpu */
	bl	FUNC(kvmppc_copy_from_svcpu)
	nop

#ifdef CONFIG_PPC_BOOK3S_64
	/*
	 * Restore the host kernel's SPRG3 (VDSO) value from the PACA.
	 * NOTE(review): no guest value is saved here — presumably the
	 * guest cannot have modified it; confirm against the trampoline.
	 */
	ld	r3, PACA_SPRG_VDSO(r13)
	mtspr	SPRN_SPRG_VDSO_WRITE, r3
#endif /* CONFIG_PPC_BOOK3S_64 */

	/* r7 = vcpu; save the guest's non-volatile GPRs r14..r31 */
	PPC_LL	r7, GPR4(r1)

	PPC_STL	r14, VCPU_GPR(R14)(r7)
	PPC_STL	r15, VCPU_GPR(R15)(r7)
	PPC_STL	r16, VCPU_GPR(R16)(r7)
	PPC_STL	r17, VCPU_GPR(R17)(r7)
	PPC_STL	r18, VCPU_GPR(R18)(r7)
	PPC_STL	r19, VCPU_GPR(R19)(r7)
	PPC_STL	r20, VCPU_GPR(R20)(r7)
	PPC_STL	r21, VCPU_GPR(R21)(r7)
	PPC_STL	r22, VCPU_GPR(R22)(r7)
	PPC_STL	r23, VCPU_GPR(R23)(r7)
	PPC_STL	r24, VCPU_GPR(R24)(r7)
	PPC_STL	r25, VCPU_GPR(R25)(r7)
	PPC_STL	r26, VCPU_GPR(R26)(r7)
	PPC_STL	r27, VCPU_GPR(R27)(r7)
	PPC_STL	r28, VCPU_GPR(R28)(r7)
	PPC_STL	r29, VCPU_GPR(R29)(r7)
	PPC_STL	r30, VCPU_GPR(R30)(r7)
	PPC_STL	r31, VCPU_GPR(R31)(r7)

	/* Third argument to the exit handler: the exit id saved above */
	lwz	r5, VCPU_TRAP(r7)

	/* Restore r3 and r4 (the original entry arguments) and dispatch */
	REST_2GPRS(3, r1)
	bl	FUNC(kvmppc_handle_exit_pr)

	/* r3 = resume code: re-enter the guest or unwind to the host */
	cmpwi	r3, RESUME_GUEST
	beq	kvm_loop_lightweight

	cmpwi	r3, RESUME_GUEST_NV
	beq	kvm_loop_heavyweight
216
/*
 * Final exit back to the host caller: undo everything kvm_start_entry
 * saved, pop the switch frame and return.
 */
kvm_exit_loop:

	/* Reload the caller's LR stashed at _LINK on entry */
	PPC_LL	r4, _LINK(r1)
	mtlr	r4

	/* Restore host CR */
	lwz	r14, _CCR(r1)
	mtcr	r14

	/* Restore host non-volatile GPRs r14..r31 */
	REST_NVGPRS(r1)

	/* Pop the switch frame and return to the caller */
	addi	r1, r1, SWITCH_FRAME_SIZE
	blr
230
/*
 * Heavyweight re-entry (RESUME_GUEST_NV): the exit handler may have
 * changed the guest's non-volatile registers, so reload them from the
 * vcpu before going back in.
 */
kvm_loop_heavyweight:

	/* Re-stash the host return address above our frame
	 * — NOTE(review): stored into the caller's LR save slot; confirm
	 * this matches how kvm_start_entry finds it next time around */
	PPC_LL	r4, _LINK(r1)
	PPC_STL	r4, (PPC_LR_STKOFF + SWITCH_FRAME_SIZE)(r1)

	/* Reload r3/r4 (r4 = vcpu) from the frame */
	REST_2GPRS(3, r1)

	/* Reload guest non-volatile GPRs from the vcpu */
	VCPU_LOAD_NVGPRS(r4)

	/* Back to the lightweight entry path */
	b	kvm_start_lightweight
244
/*
 * Lightweight re-entry (RESUME_GUEST): non-volatile guest registers are
 * still live in r14..r31, so only the vcpu pointer needs reloading.
 */
kvm_loop_lightweight:

	/* Reload r4 = vcpu from the frame */
	REST_GPR(4, r1)

	/* Back to the lightweight entry path */
	b	kvm_start_lightweight
252