1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22#ifndef __KERNEL__
23#include <stdio.h>
24#include <stdint.h>
25#include <public/xen.h>
26#define DPRINTF(_f, _a ...) printf(_f , ## _a)
27#else
28#include <linux/kvm_host.h>
29#include "kvm_cache_regs.h"
30#define DPRINTF(x...) do {} while (0)
31#endif
32#include <linux/module.h>
33#include <asm/kvm_emulate.h>
34
35#include "mmu.h"
36
37
38
39
40
41
42
43
44
45
46
/*
 * Opcode effect (decode) flags, stored per opcode in opcode_table[],
 * twobyte_table[] and the group tables, and kept in decode_cache.d.
 */
/* Operand sizes: 8-bit operands, or the specified/overridden size. */
#define ByteOp (1<<0)
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* implicit in opcode; no generic decode */
#define DstReg (2<<1)		/* register operand */
#define DstMem (3<<1)		/* memory operand */
#define DstAcc (4<<1)		/* accumulator (AL/AX/EAX/RAX) */
#define DstMask (7<<1)
/* Source operand type. */
#define SrcNone (0<<4)		/* no source, or implicit in opcode */
#define SrcImplicit (0<<4)	/* alias of SrcNone for readability */
#define SrcReg (1<<4)		/* register operand */
#define SrcMem (2<<4)		/* memory operand */
#define SrcMem16 (3<<4)		/* 16-bit memory operand */
#define SrcMem32 (4<<4)		/* 32-bit memory operand */
#define SrcImm (5<<4)		/* immediate, operand-sized */
#define SrcImmByte (6<<4)	/* 8-bit sign-extended immediate */
#define SrcOne (7<<4)		/* implied '1' (shifts/rotates) */
#define SrcImmUByte (8<<4)	/* 8-bit zero-extended immediate */
#define SrcImmU (9<<4)		/* zero-extended immediate */
#define SrcMask (0xf<<4)
/* Generic ModRM decode. */
#define ModRM (1<<8)
/* Destination is only written; never read. */
#define Mov (1<<9)
#define BitOp (1<<10)		/* bit-test family (bt/bts/btr/btc) */
#define MemAbs (1<<11)		/* memory operand is an absolute moffs */
#define String (1<<12)		/* string instruction (rep capable) */
#define Stack (1<<13)		/* stack instruction (push/pop) */
#define Group (1<<14)		/* ModRM.reg bits extend the opcode */
#define GroupDual (1<<15)	/* alternate decode when ModRM.mod == 3 */
#define GroupMask 0xff		/* group index lives in the low flag bits */
/* Second source operand type. */
#define Src2None (0<<29)
#define Src2CL (1<<29)		/* count in CL (shld/shrd) */
#define Src2ImmByte (2<<29)
#define Src2One (3<<29)
#define Src2Imm16 (4<<29)
#define Src2Mask (7<<29)

/* Indices into group_table[]/group2_table[]; 8 entries per group. */
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
90
/*
 * Decode flags for each one-byte opcode, indexed by the opcode itself.
 * A zero entry means the opcode is not emulated.
 */
static u32 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0x70 - 0x77 */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x78 - 0x7F */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, SrcImm | Src2Imm16, 0,
	ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, ImplicitOps | Stack,
	ImplicitOps, SrcImmByte, ImplicitOps, ImplicitOps,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	ByteOp | SrcImmUByte, SrcImmUByte,
	ByteOp | SrcImmUByte, SrcImmUByte,
	/* 0xE8 - 0xEF */
	SrcImm | Stack, SrcImm | ImplicitOps,
	SrcImmU | Src2Imm16, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
211
/*
 * Decode flags for two-byte (0f-prefixed) opcodes, indexed by the second
 * opcode byte.  A zero entry means the opcode is not emulated.
 */
static u32 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, ImplicitOps, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	/* 0x88 - 0x8F */
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
274
/*
 * Decode flags for "group" opcodes whose operation is selected by the
 * ModRM.reg field.  Indexed by group_index * 8 + ModRM.reg; a zero entry
 * rejects that encoding.
 */
static u32 group_table[] = {
	[Group1_80*8] =		/* opcode 0x80: ALU r/m8, imm8 */
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =		/* opcode 0x81: ALU r/m, imm */
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =		/* opcode 0x82: ALU r/m8, imm8 (alias) */
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =		/* opcode 0x83: ALU r/m, imm8 (sign-ext) */
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =		/* opcode 0x8f: only /0 (pop r/m) is valid */
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =	/* opcode 0xf6: test/not/neg; mul..idiv rejected */
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =		/* opcode 0xf7: test/not/neg; mul..idiv rejected */
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =		/* opcode 0xfe: inc/dec r/m8 */
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =		/* opcode 0xff: far call/jmp (/3, /5) rejected */
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =		/* opcode 0f 01 (memory forms) */
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
318
/*
 * Alternate decode flags used instead of group_table[] when a GroupDual
 * opcode has ModRM.mod == 3 (register form).  Only group 7 (0f 01) has
 * dual entries.
 */
static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
325
326
/* EFLAGS bit definitions (architectural bit positions). */
#define EFLG_VM (1<<17)		/* virtual-8086 mode */
#define EFLG_RF (1<<16)		/* resume flag */
#define EFLG_OF (1<<11)		/* overflow */
#define EFLG_DF (1<<10)		/* direction */
#define EFLG_IF (1<<9)		/* interrupt enable */
#define EFLG_SF (1<<7)		/* sign */
#define EFLG_ZF (1<<6)		/* zero */
#define EFLG_AF (1<<4)		/* auxiliary carry */
#define EFLG_PF (1<<2)		/* parity */
#define EFLG_CF (1<<0)		/* carry */

/*
 * Host-specific notation for the inline-assembly arithmetic helpers
 * below: _LO32 forces a 32-bit register reference and _STK names the
 * host stack pointer.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force a 32-bit ("k") register operand */
#define _STK "%%rsp"		/* host stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* 32-bit operands are the default */
#define _STK "%%esp"		/* host stack pointer */
#endif

/*
 * These EFLAGS bits are restored from the saved guest value before an
 * emulated arithmetic instruction runs, and written back afterwards.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
358
359
/*
 * Before executing the instruction: load the guest's arithmetic flags
 * (restricted to the _msk bits) into the host EFLAGS via the stack and
 * popf.  _sav, _msk and _tmp are inline-asm operand numbers, passed as
 * strings.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop %"_sav"; "

/*
 * After executing the instruction: capture the resulting host flags
 * (restricted to the _msk bits) back into the saved value.
 */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "

/* Emit x only on 64-bit hosts (the "q"-suffixed helper variants). */
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

/*
 * Run "_op<suffix> src, dst" on the host, threading the guest flags in
 * and out through the host EFLAGS.  Requires a local `unsigned long
 * _tmp' in scope (declared by the wrappers below).
 */
#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)

/* Dispatch a two-operand instruction on word/long/quad operand sizes. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
									     \
		switch ((_dst).bytes) {					     \
		case 2:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w");  \
			break;						     \
		case 4:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l");  \
			break;						     \
		case 8:							     \
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						     \
		}							     \
	} while (0)

/* Dispatch a two-operand instruction on byte/word/long/quad operand sizes. */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b");  \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
433
434
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)               \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
448
449
/*
 * Three-operand helper for instructions that take a count in %cl (the
 * "c" constraint binds _clv to RCX), e.g. shld/shrd.  Operands are
 * copied through temporaries of the exact operand-size type.
 */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type) 	\
	do {									\
		unsigned long _tmp;						\
		_type _clv = (_cl).val;						\
		_type _srcv = (_src).val;					\
		_type _dstv = (_dst).val;					\
										\
		__asm__ __volatile__ (						\
			_PRE_EFLAGS("0", "5", "2")				\
			_op _suffix " %4,%1 \n"					\
			_POST_EFLAGS("0", "5", "2")				\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)		\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)		\
			); 							\
										\
		(_cl).val  = (unsigned long) _clv;				\
		(_src).val = (unsigned long) _srcv;				\
		(_dst).val = (unsigned long) _dstv;				\
	} while (0)

/* Size dispatch for the %cl-count helper (no byte-sized forms exist). */
#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)				\
	do {									\
		switch ((_dst).bytes) {						\
		case 2:								\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,  	\
					 "w", unsigned short);         	\
			break;							\
		case 4: 							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,  	\
					 "l", unsigned int);           	\
			break;							\
		case 8:								\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					      "q", unsigned long));  		\
			break;							\
		}								\
	} while (0)

/*
 * Single-operand instruction (operates in place on (_dst).val), with the
 * guest flags threaded through the host EFLAGS.
 */
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)

/* Size dispatch for single-operand instructions. */
#define emulate_1op(_op, _dst, _eflags)                                    \
	do {								    \
		switch ((_dst).bytes) {				            \
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	    \
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	    \
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	    \
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							    \
	} while (0)
511
512
/*
 * Fetch _size bytes of the instruction stream at _eip, advancing _eip,
 * and evaluate to the fetched value cast to _type.  Relies on locals
 * `rc', `ctxt' and `ops' plus a `done' error label being in scope in
 * the calling function; on fetch failure it jumps to `done'.
 */
#define insn_fetch(_type, _size, _eip)                                  \
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
521
/*
 * Mask covering the current address width: 0xffff for 2-byte addressing,
 * 0xffffffff for 4-byte.  NOTE: with ad_bytes == 8 the shift would equal
 * the word width (undefined behaviour); callers (address_mask,
 * register_address_increment) check for full-width addressing first.
 */
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
526
527
528static inline unsigned long
529address_mask(struct decode_cache *c, unsigned long reg)
530{
531 if (c->ad_bytes == sizeof(unsigned long))
532 return reg;
533 else
534 return reg & ad_mask(c);
535}
536
/*
 * Form a linear address from a segment @base and a register value @reg,
 * truncating the register part to the current address size.
 */
static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	unsigned long offset = address_mask(c, reg);

	return base + offset;
}
542
543static inline void
544register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
545{
546 if (c->ad_bytes == sizeof(unsigned long))
547 *reg += inc;
548 else
549 *reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
550}
551
/* Apply a signed relative displacement to the decoded instruction pointer. */
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
556
/* Record that segment @seg overrides the instruction's default segment. */
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
562
563static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
564{
565 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
566 return 0;
567
568 return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
569}
570
571static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
572 struct decode_cache *c)
573{
574 if (!c->has_seg_override)
575 return 0;
576
577 return seg_base(ctxt, c->seg_override);
578}
579
/* Base linear address of the ES segment. */
static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

/* Base linear address of the SS segment (used for stack accesses). */
static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
589
/*
 * Fetch one instruction byte at guest linear address @linear into @dest.
 * Bytes come from a small per-instruction fetch cache; on a miss the
 * cache is refilled with up to 15 bytes (the maximum x86 instruction
 * length), never crossing a page boundary.  Returns 0 on success or the
 * error from ops->read_std().
 */
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		/* Cache miss: refill, clipped to the end of the page. */
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
609
610static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
611 struct x86_emulate_ops *ops,
612 unsigned long eip, void *dest, unsigned size)
613{
614 int rc = 0;
615
616 eip += ctxt->cs_base;
617 while (size--) {
618 rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
619 if (rc)
620 return rc;
621 }
622 return 0;
623}
624
625
626
627
628
629
630static void *decode_register(u8 modrm_reg, unsigned long *regs,
631 int highbyte_regs)
632{
633 void *p;
634
635 p = ®s[modrm_reg];
636 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
637 p = (unsigned char *)®s[modrm_reg & 3] + 1;
638 return p;
639}
640
/*
 * Read a descriptor-table operand -- a 16-bit limit followed by a base
 * address -- from guest memory at @ptr into @size and @address.
 * NOTE(review): for 16-bit operand size only 3 base bytes (24 bits) are
 * read, which looks like the legacy lgdt/lidt format -- confirm against
 * the callers.
 */
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
659
660static int test_cc(unsigned int condition, unsigned int flags)
661{
662 int rc = 0;
663
664 switch ((condition & 15) >> 1) {
665 case 0:
666 rc |= (flags & EFLG_OF);
667 break;
668 case 1:
669 rc |= (flags & EFLG_CF);
670 break;
671 case 2:
672 rc |= (flags & EFLG_ZF);
673 break;
674 case 3:
675 rc |= (flags & (EFLG_CF|EFLG_ZF));
676 break;
677 case 4:
678 rc |= (flags & EFLG_SF);
679 break;
680 case 5:
681 rc |= (flags & EFLG_PF);
682 break;
683 case 7:
684 rc |= (flags & EFLG_ZF);
685
686 case 6:
687 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
688 break;
689 }
690
691
692 return (!!rc ^ (condition & 1));
693}
694
/*
 * Decode a register operand into @op.  The register number comes from
 * the ModRM reg field, or from the low three opcode bits plus REX.B when
 * the instruction has no ModRM byte.  @inhibit_bytereg forces the
 * full-width register even for ByteOp instructions (x86_decode_insn sets
 * it for two-byte opcodes 0xb6/0xb7).  High-byte registers (AH..BH) are
 * only reachable without a REX prefix.
 */
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		/* Read the current register value at the operand size. */
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
726
/*
 * Decode the ModRM byte (plus SIB byte and displacement, if present) of
 * the current instruction into the decode cache:
 *   - modrm_mod/modrm_reg/modrm_rm: raw fields, extended by REX bits;
 *   - register operands (mod == 3): modrm_ptr/modrm_val;
 *   - memory operands: modrm_ea (effective address, before the segment
 *     base is added by the caller).
 * Returns 0 on success, non-zero if instruction bytes could not be
 * fetched (via the insn_fetch `done' path).
 */
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		/* mod == 3: the operand is a register, not memory. */
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode: displacement first ... */
		switch (c->modrm_mod) {
		case 0:
			/* mod 0, rm 6 is a bare disp16, no base register. */
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		/* ... then the base/index register combination. */
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		/* BP-based addressing defaults to the stack segment. */
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			/* rm 4 means a SIB byte follows. */
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			/* Base 5 with mod 0 is disp32 instead of a base. */
			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			/* Index 4 (without REX.X) encodes "no index". */
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			/* In 64-bit mode this disp32 is RIP-relative. */
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
841
842static int decode_abs(struct x86_emulate_ctxt *ctxt,
843 struct x86_emulate_ops *ops)
844{
845 struct decode_cache *c = &ctxt->decode;
846 int rc = 0;
847
848 switch (c->ad_bytes) {
849 case 2:
850 c->modrm_ea = insn_fetch(u16, 2, c->eip);
851 break;
852 case 4:
853 c->modrm_ea = insn_fetch(u32, 4, c->eip);
854 break;
855 case 8:
856 c->modrm_ea = insn_fetch(u64, 8, c->eip);
857 break;
858 }
859done:
860 return rc;
861}
862
863int
864x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
865{
866 struct decode_cache *c = &ctxt->decode;
867 int rc = 0;
868 int mode = ctxt->mode;
869 int def_op_bytes, def_ad_bytes, group;
870
871
872
873 memset(c, 0, sizeof(struct decode_cache));
874 c->eip = kvm_rip_read(ctxt->vcpu);
875 ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
876 memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
877
878 switch (mode) {
879 case X86EMUL_MODE_REAL:
880 case X86EMUL_MODE_PROT16:
881 def_op_bytes = def_ad_bytes = 2;
882 break;
883 case X86EMUL_MODE_PROT32:
884 def_op_bytes = def_ad_bytes = 4;
885 break;
886#ifdef CONFIG_X86_64
887 case X86EMUL_MODE_PROT64:
888 def_op_bytes = 4;
889 def_ad_bytes = 8;
890 break;
891#endif
892 default:
893 return -1;
894 }
895
896 c->op_bytes = def_op_bytes;
897 c->ad_bytes = def_ad_bytes;
898
899
900 for (;;) {
901 switch (c->b = insn_fetch(u8, 1, c->eip)) {
902 case 0x66:
903
904 c->op_bytes = def_op_bytes ^ 6;
905 break;
906 case 0x67:
907 if (mode == X86EMUL_MODE_PROT64)
908
909 c->ad_bytes = def_ad_bytes ^ 12;
910 else
911
912 c->ad_bytes = def_ad_bytes ^ 6;
913 break;
914 case 0x26:
915 case 0x2e:
916 case 0x36:
917 case 0x3e:
918 set_seg_override(c, (c->b >> 3) & 3);
919 break;
920 case 0x64:
921 case 0x65:
922 set_seg_override(c, c->b & 7);
923 break;
924 case 0x40 ... 0x4f:
925 if (mode != X86EMUL_MODE_PROT64)
926 goto done_prefixes;
927 c->rex_prefix = c->b;
928 continue;
929 case 0xf0:
930 c->lock_prefix = 1;
931 break;
932 case 0xf2:
933 c->rep_prefix = REPNE_PREFIX;
934 break;
935 case 0xf3:
936 c->rep_prefix = REPE_PREFIX;
937 break;
938 default:
939 goto done_prefixes;
940 }
941
942
943
944 c->rex_prefix = 0;
945 }
946
947done_prefixes:
948
949
950 if (c->rex_prefix)
951 if (c->rex_prefix & 8)
952 c->op_bytes = 8;
953
954
955 c->d = opcode_table[c->b];
956 if (c->d == 0) {
957
958 if (c->b == 0x0f) {
959 c->twobyte = 1;
960 c->b = insn_fetch(u8, 1, c->eip);
961 c->d = twobyte_table[c->b];
962 }
963 }
964
965 if (c->d & Group) {
966 group = c->d & GroupMask;
967 c->modrm = insn_fetch(u8, 1, c->eip);
968 --c->eip;
969
970 group = (group << 3) + ((c->modrm >> 3) & 7);
971 if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
972 c->d = group2_table[group];
973 else
974 c->d = group_table[group];
975 }
976
977
978 if (c->d == 0) {
979 DPRINTF("Cannot emulate %02x\n", c->b);
980 return -1;
981 }
982
983 if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
984 c->op_bytes = 8;
985
986
987 if (c->d & ModRM)
988 rc = decode_modrm(ctxt, ops);
989 else if (c->d & MemAbs)
990 rc = decode_abs(ctxt, ops);
991 if (rc)
992 goto done;
993
994 if (!c->has_seg_override)
995 set_seg_override(c, VCPU_SREG_DS);
996
997 if (!(!c->twobyte && c->b == 0x8d))
998 c->modrm_ea += seg_override_base(ctxt, c);
999
1000 if (c->ad_bytes != 8)
1001 c->modrm_ea = (u32)c->modrm_ea;
1002
1003
1004
1005
1006 switch (c->d & SrcMask) {
1007 case SrcNone:
1008 break;
1009 case SrcReg:
1010 decode_register_operand(&c->src, c, 0);
1011 break;
1012 case SrcMem16:
1013 c->src.bytes = 2;
1014 goto srcmem_common;
1015 case SrcMem32:
1016 c->src.bytes = 4;
1017 goto srcmem_common;
1018 case SrcMem:
1019 c->src.bytes = (c->d & ByteOp) ? 1 :
1020 c->op_bytes;
1021
1022 if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
1023 break;
1024 srcmem_common:
1025
1026
1027
1028
1029 if ((c->d & ModRM) && c->modrm_mod == 3) {
1030 c->src.type = OP_REG;
1031 c->src.val = c->modrm_val;
1032 c->src.ptr = c->modrm_ptr;
1033 break;
1034 }
1035 c->src.type = OP_MEM;
1036 break;
1037 case SrcImm:
1038 case SrcImmU:
1039 c->src.type = OP_IMM;
1040 c->src.ptr = (unsigned long *)c->eip;
1041 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1042 if (c->src.bytes == 8)
1043 c->src.bytes = 4;
1044
1045 switch (c->src.bytes) {
1046 case 1:
1047 c->src.val = insn_fetch(s8, 1, c->eip);
1048 break;
1049 case 2:
1050 c->src.val = insn_fetch(s16, 2, c->eip);
1051 break;
1052 case 4:
1053 c->src.val = insn_fetch(s32, 4, c->eip);
1054 break;
1055 }
1056 if ((c->d & SrcMask) == SrcImmU) {
1057 switch (c->src.bytes) {
1058 case 1:
1059 c->src.val &= 0xff;
1060 break;
1061 case 2:
1062 c->src.val &= 0xffff;
1063 break;
1064 case 4:
1065 c->src.val &= 0xffffffff;
1066 break;
1067 }
1068 }
1069 break;
1070 case SrcImmByte:
1071 case SrcImmUByte:
1072 c->src.type = OP_IMM;
1073 c->src.ptr = (unsigned long *)c->eip;
1074 c->src.bytes = 1;
1075 if ((c->d & SrcMask) == SrcImmByte)
1076 c->src.val = insn_fetch(s8, 1, c->eip);
1077 else
1078 c->src.val = insn_fetch(u8, 1, c->eip);
1079 break;
1080 case SrcOne:
1081 c->src.bytes = 1;
1082 c->src.val = 1;
1083 break;
1084 }
1085
1086
1087
1088
1089
1090 switch (c->d & Src2Mask) {
1091 case Src2None:
1092 break;
1093 case Src2CL:
1094 c->src2.bytes = 1;
1095 c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
1096 break;
1097 case Src2ImmByte:
1098 c->src2.type = OP_IMM;
1099 c->src2.ptr = (unsigned long *)c->eip;
1100 c->src2.bytes = 1;
1101 c->src2.val = insn_fetch(u8, 1, c->eip);
1102 break;
1103 case Src2Imm16:
1104 c->src2.type = OP_IMM;
1105 c->src2.ptr = (unsigned long *)c->eip;
1106 c->src2.bytes = 2;
1107 c->src2.val = insn_fetch(u16, 2, c->eip);
1108 break;
1109 case Src2One:
1110 c->src2.bytes = 1;
1111 c->src2.val = 1;
1112 break;
1113 }
1114
1115
1116 switch (c->d & DstMask) {
1117 case ImplicitOps:
1118
1119 return 0;
1120 case DstReg:
1121 decode_register_operand(&c->dst, c,
1122 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
1123 break;
1124 case DstMem:
1125 if ((c->d & ModRM) && c->modrm_mod == 3) {
1126 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1127 c->dst.type = OP_REG;
1128 c->dst.val = c->dst.orig_val = c->modrm_val;
1129 c->dst.ptr = c->modrm_ptr;
1130 break;
1131 }
1132 c->dst.type = OP_MEM;
1133 break;
1134 case DstAcc:
1135 c->dst.type = OP_REG;
1136 c->dst.bytes = c->op_bytes;
1137 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1138 switch (c->op_bytes) {
1139 case 1:
1140 c->dst.val = *(u8 *)c->dst.ptr;
1141 break;
1142 case 2:
1143 c->dst.val = *(u16 *)c->dst.ptr;
1144 break;
1145 case 4:
1146 c->dst.val = *(u32 *)c->dst.ptr;
1147 break;
1148 }
1149 c->dst.orig_val = c->dst.val;
1150 break;
1151 }
1152
1153 if (c->rip_relative)
1154 c->modrm_ea += c->eip;
1155
1156done:
1157 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1158}
1159
/*
 * Prepare a push of c->src.val: decrement RSP by the operand size and
 * describe the store as the instruction's (memory) destination.  This
 * only sets up c->dst; the guest-memory write itself is performed by
 * the common write-back code (not shown here).
 */
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
1171
/*
 * Pop @len bytes off the guest stack into @dest, advancing RSP only on
 * a successful read.  Returns 0 or the error from ops->read_emulated().
 */
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops,
		       void *dest, int len)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				dest, len, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
	return rc;
}
1188
1189static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
1190 struct x86_emulate_ops *ops)
1191{
1192 struct decode_cache *c = &ctxt->decode;
1193 int rc;
1194
1195 rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
1196 if (rc != 0)
1197 return rc;
1198 return 0;
1199}
1200
/*
 * Group 2 (shift/rotate family): apply the operation selected by
 * ModRM.reg to the decoded destination, with the count in c->src.
 * reg values 4 and 6 are both shl/sal (identical encodings).
 */
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
1229
/*
 * Group 3 (opcodes f6/f7): test/not/neg on the decoded operand.  The
 * remaining group members (reg 4-7: mul/imul/div/idiv) are not
 * implemented and fail with X86EMUL_UNHANDLEABLE.
 */
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
1253
/*
 * Groups 4 and 5 (opcodes 0xfe/0xff): inc/dec and the near call/jmp
 * and push forms, selected by the ModRM /reg field.  Encodings not
 * listed (e.g. far call /3 and far jmp /5) fall through with no
 * effect and still return 0 — NOTE(review): that silently ignores
 * them rather than flagging them unhandleable.
 */
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: { /* call near (absolute indirect) */
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		/* push the old eip as the return address */
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp near (absolute indirect) */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
1283
/*
 * Group 9 (0x0f 0xc7 /1): cmpxchg8b m64.
 * Compares EDX:EAX with the 64-bit memory operand at 'memop'.  On
 * mismatch the operand is loaded into EDX:EAX and ZF is cleared; on
 * match ECX:EBX is written to memory via the cmpxchg callback and ZF
 * is set.  Returns 0 on success or the error from the memory ops.
 */
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
		/* Mismatch: return the memory value in EDX:EAX. */
		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		/* Match: store ECX:EBX to memory. */
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		      (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
1314
/*
 * Far return (opcode 0xcb): pop the return EIP, then the CS selector,
 * off the stack and reload CS through the descriptor loader.  With a
 * 32-bit operand size the popped EIP is truncated to 32 bits.
 * Returns 0 on success or the error from the pop / segment load.
 */
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}
1333
/*
 * Commit the destination operand of the just-emulated instruction.
 * Register destinations are stored directly through c->dst.ptr;
 * memory destinations go through the emulated write, or through the
 * emulated cmpxchg against the original value when a LOCK prefix was
 * present (presumably to keep locked updates atomic — the exact
 * semantics live in the ops callback).
 */
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/*
		 * The 4-byte case stores the zero-extended 32-bit value
		 * through the full-width pointer, mirroring the way
		 * 32-bit sub-register writes clear the upper half.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* No writeback requested. */
		break;
	default:
		break;
	}
	return 0;
}
1385
1386static void toggle_interruptibility(struct x86_emulate_ctxt *ctxt, u32 mask)
1387{
1388 u32 int_shadow = kvm_x86_ops->get_interrupt_shadow(ctxt->vcpu, mask);
1389
1390
1391
1392
1393
1394
1395
1396 if (!(int_shadow & mask))
1397 ctxt->interruptibility = mask;
1398}
1399
1400static inline void
1401setup_syscalls_segments(struct x86_emulate_ctxt *ctxt,
1402 struct kvm_segment *cs, struct kvm_segment *ss)
1403{
1404 memset(cs, 0, sizeof(struct kvm_segment));
1405 kvm_x86_ops->get_segment(ctxt->vcpu, cs, VCPU_SREG_CS);
1406 memset(ss, 0, sizeof(struct kvm_segment));
1407
1408 cs->l = 0;
1409 cs->base = 0;
1410 cs->g = 1;
1411 cs->limit = 0xffffffff;
1412 cs->type = 0x0b;
1413 cs->s = 1;
1414 cs->dpl = 0;
1415 cs->present = 1;
1416 cs->db = 1;
1417
1418 ss->unusable = 0;
1419 ss->base = 0;
1420 ss->limit = 0xffffffff;
1421 ss->g = 1;
1422 ss->s = 1;
1423 ss->type = 0x03;
1424 ss->db = 1;
1425 ss->dpl = 0;
1426 ss->present = 1;
1427}
1428
/*
 * Emulate the SYSCALL instruction (0x0f 0x05).
 * Returns 0 on success, -1 when the instruction cannot be emulated
 * (LOCK prefix, real mode, or CR0.PE clear).
 */
static int
emulate_syscall(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	struct kvm_segment cs, ss;
	u64 msr_data;

	/* syscall is not available in real mode */
	if (c->lock_prefix || ctxt->mode == X86EMUL_MODE_REAL
	    || !(ctxt->vcpu->arch.cr0 & X86_CR0_PE))
		return -1;

	setup_syscalls_segments(ctxt, &cs, &ss);

	/* CS/SS selectors come from the upper 32 bits of MSR_STAR. */
	kvm_x86_ops->get_msr(ctxt->vcpu, MSR_STAR, &msr_data);
	msr_data >>= 32;
	cs.selector = (u16)(msr_data & 0xfffc);
	ss.selector = (u16)(msr_data + 8);

	if (is_long_mode(ctxt->vcpu)) {
		/* 64-bit code segment. */
		cs.db = 0;
		cs.l = 1;
	}
	kvm_x86_ops->set_segment(ctxt->vcpu, &cs, VCPU_SREG_CS);
	kvm_x86_ops->set_segment(ctxt->vcpu, &ss, VCPU_SREG_SS);

	/* Return address is saved in RCX. */
	c->regs[VCPU_REGS_RCX] = c->eip;
	if (is_long_mode(ctxt->vcpu)) {
#ifdef CONFIG_X86_64
		/* RFLAGS (without RF) is preserved in R11. */
		c->regs[VCPU_REGS_R11] = ctxt->eflags & ~EFLG_RF;

		/* Entry point from LSTAR, or CSTAR for compatibility mode. */
		kvm_x86_ops->get_msr(ctxt->vcpu,
			ctxt->mode == X86EMUL_MODE_PROT64 ?
			MSR_LSTAR : MSR_CSTAR, &msr_data);
		c->eip = msr_data;

		/* Clear the RFLAGS bits selected by SYSCALL_MASK (SFMASK). */
		kvm_x86_ops->get_msr(ctxt->vcpu, MSR_SYSCALL_MASK, &msr_data);
		ctxt->eflags &= ~(msr_data | EFLG_RF);
#endif
	} else {
		/* Legacy mode: entry point from the low half of MSR_STAR. */
		kvm_x86_ops->get_msr(ctxt->vcpu, MSR_STAR, &msr_data);
		c->eip = (u32)msr_data;

		ctxt->eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
	}

	return 0;
}
1478
/*
 * Emulate the SYSENTER instruction (0x0f 0x34).
 * Returns 0 on success, -1 when emulation fails; #GP is injected
 * directly for real-mode / non-protected execution and for a null
 * SYSENTER_CS selector.
 */
static int
emulate_sysenter(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	struct kvm_segment cs, ss;
	u64 msr_data;

	/* A LOCK prefix is invalid. */
	if (c->lock_prefix)
		return -1;

	/* #GP in real mode or with CR0.PE clear. */
	if (ctxt->mode == X86EMUL_MODE_REAL ||
	    !(ctxt->vcpu->arch.cr0 & X86_CR0_PE)) {
		kvm_inject_gp(ctxt->vcpu, 0);
		return -1;
	}

	/*
	 * Not handled in 64-bit mode: bail out so the caller treats
	 * the instruction as unemulatable.
	 */
	if (ctxt->mode == X86EMUL_MODE_PROT64)
		return -1;

	setup_syscalls_segments(ctxt, &cs, &ss);

	/* A null SYSENTER_CS selector raises #GP. */
	kvm_x86_ops->get_msr(ctxt->vcpu, MSR_IA32_SYSENTER_CS, &msr_data);
	switch (ctxt->mode) {
	case X86EMUL_MODE_PROT32:
		if ((msr_data & 0xfffc) == 0x0) {
			kvm_inject_gp(ctxt->vcpu, 0);
			return -1;
		}
		break;
	case X86EMUL_MODE_PROT64:
		if (msr_data == 0x0) {
			kvm_inject_gp(ctxt->vcpu, 0);
			return -1;
		}
		break;
	}

	ctxt->eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
	/* Target CS from SYSENTER_CS, SS is the next descriptor (+8). */
	cs.selector = (u16)msr_data;
	cs.selector &= ~SELECTOR_RPL_MASK;
	ss.selector = cs.selector + 8;
	ss.selector &= ~SELECTOR_RPL_MASK;
	if (ctxt->mode == X86EMUL_MODE_PROT64
		|| is_long_mode(ctxt->vcpu)) {
		cs.db = 0;
		cs.l = 1;
	}

	kvm_x86_ops->set_segment(ctxt->vcpu, &cs, VCPU_SREG_CS);
	kvm_x86_ops->set_segment(ctxt->vcpu, &ss, VCPU_SREG_SS);

	/* Entry EIP and ESP come from the SYSENTER MSRs. */
	kvm_x86_ops->get_msr(ctxt->vcpu, MSR_IA32_SYSENTER_EIP, &msr_data);
	c->eip = msr_data;

	kvm_x86_ops->get_msr(ctxt->vcpu, MSR_IA32_SYSENTER_ESP, &msr_data);
	c->regs[VCPU_REGS_RSP] = msr_data;

	return 0;
}
1543
/*
 * Emulate the SYSEXIT instruction (0x0f 0x35).
 * Must be executed at CPL 0 in protected mode.  Returns to user mode
 * with CS/SS selectors derived from MSR_IA32_SYSENTER_CS and the
 * return EIP/ESP taken from the guest's RDX/RCX.  Returns 0 on
 * success, -1 on failure (with #GP injected where applicable).
 */
static int
emulate_sysexit(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	struct kvm_segment cs, ss;
	u64 msr_data;
	int usermode;

	/* A LOCK prefix is invalid. */
	if (c->lock_prefix)
		return -1;

	/* #GP in real mode or with CR0.PE clear. */
	if (ctxt->mode == X86EMUL_MODE_REAL
	    || !(ctxt->vcpu->arch.cr0 & X86_CR0_PE)) {
		kvm_inject_gp(ctxt->vcpu, 0);
		return -1;
	}

	/* sysexit must be executed at CPL 0. */
	if (kvm_x86_ops->get_cpl(ctxt->vcpu) != 0) {
		kvm_inject_gp(ctxt->vcpu, 0);
		return -1;
	}

	setup_syscalls_segments(ctxt, &cs, &ss);

	/* REX.W selects a 64-bit return; otherwise return to 32-bit mode. */
	if ((c->rex_prefix & 0x8) != 0x0)
		usermode = X86EMUL_MODE_PROT64;
	else
		usermode = X86EMUL_MODE_PROT32;

	cs.dpl = 3;
	ss.dpl = 3;
	kvm_x86_ops->get_msr(ctxt->vcpu, MSR_IA32_SYSENTER_CS, &msr_data);
	switch (usermode) {
	case X86EMUL_MODE_PROT32:
		/* User CS/SS are at SYSENTER_CS + 16 / + 24. */
		cs.selector = (u16)(msr_data + 16);
		if ((msr_data & 0xfffc) == 0x0) {
			kvm_inject_gp(ctxt->vcpu, 0);
			return -1;
		}
		ss.selector = (u16)(msr_data + 24);
		break;
	case X86EMUL_MODE_PROT64:
		/* User CS/SS are at SYSENTER_CS + 32 / + 40. */
		cs.selector = (u16)(msr_data + 32);
		if (msr_data == 0x0) {
			kvm_inject_gp(ctxt->vcpu, 0);
			return -1;
		}
		ss.selector = cs.selector + 8;
		cs.db = 0;
		cs.l = 1;
		break;
	}
	/* Returning to user mode: force RPL 3 on both selectors. */
	cs.selector |= SELECTOR_RPL_MASK;
	ss.selector |= SELECTOR_RPL_MASK;

	kvm_x86_ops->set_segment(ctxt->vcpu, &cs, VCPU_SREG_CS);
	kvm_x86_ops->set_segment(ctxt->vcpu, &ss, VCPU_SREG_SS);

	c->eip = ctxt->vcpu->arch.regs[VCPU_REGS_RDX];
	c->regs[VCPU_REGS_RSP] = ctxt->vcpu->arch.regs[VCPU_REGS_RCX];

	return 0;
}
1610
/*
 * Main emulation entry point: execute the instruction previously
 * decoded into ctxt->decode.  Works on a shadow copy of the guest
 * registers, commits them (and RIP) only after a successful
 * writeback, and rewinds c->eip to saved_eip on failure.
 * Returns 0 on success, -1 when the instruction cannot be emulated.
 */
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	ctxt->interruptibility = 0;

	/*
	 * Shadow copy of register state: committed back to
	 * vcpu->arch.regs only after writeback succeeds.  saved_eip
	 * lets failed emulation restore the original instruction
	 * pointer.
	 */
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	/* Effective address of the decoded memory operand, if any. */
	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes first terminate on RCX == 0. */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/*
		 * CMPS (0xa6/0xa7) and SCAS (0xae/0xaf) additionally
		 * terminate on ZF: REPE stops when ZF is clear, REPNE
		 * stops when ZF is set.
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		/* One iteration per pass; re-fetch this rip next time. */
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	/* Fetch a memory source operand. */
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	/* Set up and (unless this is a Mov) fetch the destination operand. */
	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			/* Bit ops address the word containing the bit offset. */
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation: pure moves need not read the old value */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					   &c->dst.val,
					  c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		rc = emulate_pop(ctxt, ops, &c->dst.val, c->op_bytes);
		if (rc != 0)
			goto done;
		break;
	case 0x63:		/* movsxd: 64-bit mode only */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,	/* direction: in */
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),	/* down */
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,	/* direction: out */
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),	/* down */
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short displacement) */
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		break;
	case 0x80 ... 0x83:	/* Grp1: dispatch on ModRM /reg */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85: /* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register (source) operand. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory side via the normal writeback
		 * path; force a LOCK prefix so the store is done with
		 * cmpxchg (xchg is implicitly locked).
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		/* A move to SS sets up an interrupt shadow. */
		if (c->modrm_reg == VCPU_SREG_SS)
			toggle_interruptibility(ctxt, X86_SHADOW_INT_MOV_SS);

		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		/* Disable writeback. */
		c->dst.type = OP_NONE;
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* plain nop */
			c->dst.type = OP_NONE;
			break;
		}
		/* fallthrough: with REX.B, 0x90 is xchg r8, rax */
	case 0x91 ... 0x97: /* xchg reg, rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov rax, moffs */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov moffs, rax */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		/* Advance RSI/RDI by one element, direction per DF. */
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						   seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		/* Advance RSI/RDI by one element, direction per DF. */
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas: not implemented */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1: /* Grp2: shift/rotate r/m, imm8 */
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret near */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov r/m, imm (Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xcb: /* ret far */
		rc = emulate_ret_far(ctxt, ops);
		if (rc)
			goto done;
		break;
	case 0xd0 ... 0xd1:	/* Grp2, count = 1 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2, count = CL */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4: 	/* inb imm8 */
	case 0xe5: 	/* in imm8 */
		port = c->src.val;
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb imm8 */
	case 0xe7: /* out imm8 */
		port = c->src.val;
		io_dir_in = 0;
		goto do_io;
	case 0xe8: { /* call (near) */
		long int rel = c->src.val;
		/* Push the return address, then jump to the target. */
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far: load CS, then the absolute target */
		if (kvm_load_segment_descriptor(ctxt->vcpu, c->src2.val, 9,
					VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = c->src.val;
		break;
	case 0xeb: /* jmp rel short */
	      jmp:
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				   (c->d & ByteOp) ? 1 : c->op_bytes,
				   port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc: complement carry flag */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti: also arms the STI interrupt shadow */
		toggle_interruptibility(ctxt, X86_SHADOW_INT_STI);
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state and advance the guest RIP. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw, smsw, invlpg, vmcall */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall. */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x05: 	/* syscall */
		if (emulate_syscall(ctxt) == -1)
			goto cannot_emulate;
		else
			goto writeback;
		break;
	case 0x06: /* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) — all treated as no-ops */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr: value is EDX:EAX, MSR index in ECX */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			/* Failed MSR write injects #GP. */
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr: MSR index in ECX, result in EDX:EAX */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			/* Failed MSR read injects #GP. */
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x34:		/* sysenter */
		if (emulate_sysenter(ctxt) == -1)
			goto cannot_emulate;
		else
			goto writeback;
		break;
	case 0x35:		/* sysexit */
		if (emulate_sysexit(ctxt) == -1)
			goto cannot_emulate;
		else
			goto writeback;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* condition false: no write */
		break;
	case 0x80 ... 0x8f: /* jcc (near, full displacement) */
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE;
		break;
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* Only the sub-word bit offset is needed here. */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* Only the sub-word bit offset is needed here. */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush: treated as a no-op */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save the real source value, then compare EAX against
		 * the destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* Only the sub-word bit offset is needed here. */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8: bt/bts/btr/btc with imm8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* Only the sub-word bit offset is needed here. */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}
2393