1 /******************************************************************************
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
6 * Copyright (c) 2005 Keir Fraser
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
11 * Copyright (C) 2006 Qumranet
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf(_f , ## _a)
30 #define DPRINTF(x...) do {} while (0)
32 #include "x86_emulate.h"
33 #include <linux/module.h>
36 * Opcode effective-address decode tables.
37 * Note that we only emulate instructions that have at least one memory
38 * operand (excluding implicit stack references). We assume that stack
39 * references and instruction fetches will never occur in special memory
40 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
/*
 * Per-opcode decode attribute bits.  Each entry of opcode_table[] /
 * twobyte_table[] below is an OR of these flags, telling the generic
 * decoder how to fetch the instruction's source and destination operands.
 */
44 /* Operand sizes: 8-bit operands or specified/overridden size. */
45 #define ByteOp (1<<0) /* 8-bit operands. */
46 /* Destination operand type. */
47 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
48 #define DstReg (2<<1) /* Register operand. */
49 #define DstMem (3<<1) /* Memory operand. */
50 #define DstMask (3<<1)
51 /* Source operand type. */
52 #define SrcNone (0<<3) /* No source operand. */
53 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
54 #define SrcReg (1<<3) /* Register operand. */
55 #define SrcMem (2<<3) /* Memory operand. */
56 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
57 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
58 #define SrcImm (5<<3) /* Immediate operand. */
59 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
60 #define SrcMask (7<<3)
61 /* Generic ModRM decode. */
/* NOTE(review): the #define lines for ModRM, Mov and BitOp appear to have
 * been lost in this extraction -- only their descriptive comments remain,
 * yet all three flags are referenced by the tables and decoder below.
 * Confirm their values against the original file before relying on them. */
63 /* Destination is only written; never read. */
66 #define MemAbs (1<<9) /* Memory operand is absolute displacement */
68 static u16 opcode_table
[256] = {
70 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
71 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
74 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
75 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
78 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
79 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
82 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
83 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
86 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
87 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
88 SrcImmByte
, SrcImm
, 0, 0,
90 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
91 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
94 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
95 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
98 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
99 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
102 DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
,
104 DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
,
106 SrcReg
, SrcReg
, SrcReg
, SrcReg
, SrcReg
, SrcReg
, SrcReg
, SrcReg
,
108 DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
, DstReg
,
110 0, 0, 0, DstReg
| SrcMem32
| ModRM
| Mov
/* movsxd (x86/64) */ ,
113 0, 0, ImplicitOps
|Mov
, 0,
114 SrcNone
| ByteOp
| ImplicitOps
, SrcNone
| ImplicitOps
, /* insb, insw/insd */
115 SrcNone
| ByteOp
| ImplicitOps
, SrcNone
| ImplicitOps
, /* outsb, outsw/outsd */
117 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
118 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
120 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
121 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
123 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImm
| ModRM
,
124 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImmByte
| ModRM
,
125 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
126 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
128 ByteOp
| DstMem
| SrcReg
| ModRM
| Mov
, DstMem
| SrcReg
| ModRM
| Mov
,
129 ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
130 0, ModRM
| DstReg
, 0, DstMem
| SrcNone
| ModRM
| Mov
,
132 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps
, ImplicitOps
, 0, 0,
134 ByteOp
| DstReg
| SrcMem
| Mov
| MemAbs
, DstReg
| SrcMem
| Mov
| MemAbs
,
135 ByteOp
| DstMem
| SrcReg
| Mov
| MemAbs
, DstMem
| SrcReg
| Mov
| MemAbs
,
136 ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
137 ByteOp
| ImplicitOps
, ImplicitOps
,
139 0, 0, ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
140 ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
141 ByteOp
| ImplicitOps
, ImplicitOps
,
143 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
145 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImmByte
| ModRM
,
146 0, ImplicitOps
, 0, 0,
147 ByteOp
| DstMem
| SrcImm
| ModRM
| Mov
, DstMem
| SrcImm
| ModRM
| Mov
,
149 0, 0, 0, 0, 0, 0, 0, 0,
151 ByteOp
| DstMem
| SrcImplicit
| ModRM
, DstMem
| SrcImplicit
| ModRM
,
152 ByteOp
| DstMem
| SrcImplicit
| ModRM
, DstMem
| SrcImplicit
| ModRM
,
155 0, 0, 0, 0, 0, 0, 0, 0,
157 0, 0, 0, 0, 0, 0, 0, 0,
159 ImplicitOps
, SrcImm
|ImplicitOps
, 0, SrcImmByte
|ImplicitOps
, 0, 0, 0, 0,
162 ImplicitOps
, ImplicitOps
,
163 ByteOp
| DstMem
| SrcNone
| ModRM
, DstMem
| SrcNone
| ModRM
,
165 ImplicitOps
, 0, ImplicitOps
, ImplicitOps
,
166 0, 0, ByteOp
| DstMem
| SrcNone
| ModRM
, DstMem
| SrcNone
| ModRM
169 static u16 twobyte_table
[256] = {
171 0, SrcMem
| ModRM
| DstReg
, 0, 0, 0, 0, ImplicitOps
, 0,
172 ImplicitOps
, ImplicitOps
, 0, 0, 0, ImplicitOps
| ModRM
, 0, 0,
174 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps
| ModRM
, 0, 0, 0, 0, 0, 0, 0,
176 ModRM
| ImplicitOps
, ModRM
, ModRM
| ImplicitOps
, ModRM
, 0, 0, 0, 0,
177 0, 0, 0, 0, 0, 0, 0, 0,
179 ImplicitOps
, 0, ImplicitOps
, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
181 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
182 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
183 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
184 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
186 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
187 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
188 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
189 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
191 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
193 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
195 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
197 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
198 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
199 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
200 ImplicitOps
, ImplicitOps
, ImplicitOps
, ImplicitOps
,
202 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
204 0, 0, 0, DstMem
| SrcReg
| ModRM
| BitOp
, 0, 0, 0, 0,
206 0, 0, 0, DstMem
| SrcReg
| ModRM
| BitOp
, 0, 0, 0, 0,
208 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
, 0,
209 DstMem
| SrcReg
| ModRM
| BitOp
,
210 0, 0, ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
,
211 DstReg
| SrcMem16
| ModRM
| Mov
,
213 0, 0, DstMem
| SrcImmByte
| ModRM
, DstMem
| SrcReg
| ModRM
| BitOp
,
214 0, 0, ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
,
215 DstReg
| SrcMem16
| ModRM
| Mov
,
217 0, 0, 0, DstMem
| SrcReg
| ModRM
| Mov
, 0, 0, 0, ImplicitOps
| ModRM
,
218 0, 0, 0, 0, 0, 0, 0, 0,
220 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
222 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
224 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
227 /* EFLAGS bit definitions. */
228 #define EFLG_OF (1<<11) /* overflow flag */
229 #define EFLG_DF (1<<10) /* direction flag (string ops) */
230 #define EFLG_SF (1<<7) /* sign flag */
231 #define EFLG_ZF (1<<6) /* zero flag */
232 #define EFLG_AF (1<<4) /* auxiliary carry flag */
233 #define EFLG_PF (1<<2) /* parity flag */
234 #define EFLG_CF (1<<0) /* carry flag */
237 * Instruction emulation:
238 * Most instructions are emulated directly via a fragment of inline assembly
239 * code. This allows us to save/restore EFLAGS and thus very easily pick up
240 * any modified flags.
/*
 * The inline-asm fragments below need a 32-bit operand modifier and the
 * host stack-pointer name; both differ between x86-64 and i386 hosts.
 */
243 #if defined(CONFIG_X86_64)
244 #define _LO32 "k" /* force 32-bit operand */
245 #define _STK "%%rsp" /* stack pointer */
246 #elif defined(__i386__)
247 #define _LO32 "" /* force 32-bit operand */
248 #define _STK "%%esp" /* stack pointer */
/* NOTE(review): the closing #endif of this conditional, and the open/close
 * delimiters of the comment just below, appear dropped in this extraction. */
252 * These EFLAGS bits are restored from saved value during emulation, and
253 * any changes are written back to the saved value after emulation.
255 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/*
 * _PRE_EFLAGS / _POST_EFLAGS: asm string fragments spliced around an
 * emulated ALU instruction so that guest EFLAGS (in _sav) are loaded into
 * the host flags before the op and harvested from them afterwards, masked
 * by _msk (EFLAGS_MASK); _tmp is a scratch register operand number.
 * NOTE(review): several continuation lines of both macros (the pushf/popf
 * sequences, per the surrounding logic) appear dropped in this extraction
 * -- the fragments below are incomplete; verify against the original file.
 */
257 /* Before executing instruction: restore necessary bits in EFLAGS. */
258 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
259 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
261 "movl %"_msk",%"_LO32 _tmp"; " \
262 "andl %"_LO32 _tmp",("_STK"); " \
264 "notl %"_LO32 _tmp"; " \
265 "andl %"_LO32 _tmp",("_STK"); " \
267 "orl %"_LO32 _tmp",("_STK"); " \
269 /* _sav &= ~msk; */ \
270 "movl %"_msk",%"_LO32 _tmp"; " \
271 "notl %"_LO32 _tmp"; " \
272 "andl %"_LO32 _tmp",%"_sav"; "
274 /* After executing instruction: write-back necessary bits in EFLAGS. */
275 #define _POST_EFLAGS(_sav, _msk, _tmp) \
276 /* _sav |= EFLAGS & _msk; */ \
279 "andl %"_msk",%"_LO32 _tmp"; " \
280 "orl %"_LO32 _tmp",%"_sav"; "
282 /* Raw emulation: instruction has two explicit operands. */
283 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
285 unsigned long _tmp; \
287 switch ((_dst).bytes) { \
289 __asm__ __volatile__ ( \
290 _PRE_EFLAGS("0", "4", "2") \
291 _op"w %"_wx"3,%1; " \
292 _POST_EFLAGS("0", "4", "2") \
293 : "=m" (_eflags), "=m" ((_dst).val), \
295 : _wy ((_src).val), "i" (EFLAGS_MASK)); \
298 __asm__ __volatile__ ( \
299 _PRE_EFLAGS("0", "4", "2") \
300 _op"l %"_lx"3,%1; " \
301 _POST_EFLAGS("0", "4", "2") \
302 : "=m" (_eflags), "=m" ((_dst).val), \
304 : _ly ((_src).val), "i" (EFLAGS_MASK)); \
307 __emulate_2op_8byte(_op, _src, _dst, \
308 _eflags, _qx, _qy); \
313 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
315 unsigned long _tmp; \
316 switch ((_dst).bytes) { \
318 __asm__ __volatile__ ( \
319 _PRE_EFLAGS("0", "4", "2") \
320 _op"b %"_bx"3,%1; " \
321 _POST_EFLAGS("0", "4", "2") \
322 : "=m" (_eflags), "=m" ((_dst).val), \
324 : _by ((_src).val), "i" (EFLAGS_MASK)); \
327 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
328 _wx, _wy, _lx, _ly, _qx, _qy); \
333 /* Source operand is byte-sized and may be restricted to just %cl. */
334 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
335 __emulate_2op(_op, _src, _dst, _eflags, \
336 "b", "c", "b", "c", "b", "c", "b", "c")
338 /* Source operand is byte, word, long or quad sized. */
339 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
340 __emulate_2op(_op, _src, _dst, _eflags, \
341 "b", "q", "w", "r", _LO32, "r", "", "r")
343 /* Source operand is word, long or quad sized. */
344 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
345 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
346 "w", "r", _LO32, "r", "", "r")
348 /* Instruction has only one explicit operand (no source operand). */
349 #define emulate_1op(_op, _dst, _eflags) \
351 unsigned long _tmp; \
353 switch ((_dst).bytes) { \
355 __asm__ __volatile__ ( \
356 _PRE_EFLAGS("0", "3", "2") \
358 _POST_EFLAGS("0", "3", "2") \
359 : "=m" (_eflags), "=m" ((_dst).val), \
361 : "i" (EFLAGS_MASK)); \
364 __asm__ __volatile__ ( \
365 _PRE_EFLAGS("0", "3", "2") \
367 _POST_EFLAGS("0", "3", "2") \
368 : "=m" (_eflags), "=m" ((_dst).val), \
370 : "i" (EFLAGS_MASK)); \
373 __asm__ __volatile__ ( \
374 _PRE_EFLAGS("0", "3", "2") \
376 _POST_EFLAGS("0", "3", "2") \
377 : "=m" (_eflags), "=m" ((_dst).val), \
379 : "i" (EFLAGS_MASK)); \
382 __emulate_1op_8byte(_op, _dst, _eflags); \
387 /* Emulate an instruction with quadword operands (x86/64 only). */
388 #if defined(CONFIG_X86_64)
389 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
391 __asm__ __volatile__ ( \
392 _PRE_EFLAGS("0", "4", "2") \
393 _op"q %"_qx"3,%1; " \
394 _POST_EFLAGS("0", "4", "2") \
395 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
396 : _qy ((_src).val), "i" (EFLAGS_MASK)); \
399 #define __emulate_1op_8byte(_op, _dst, _eflags) \
401 __asm__ __volatile__ ( \
402 _PRE_EFLAGS("0", "3", "2") \
404 _POST_EFLAGS("0", "3", "2") \
405 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
406 : "i" (EFLAGS_MASK)); \
409 #elif defined(__i386__)
410 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
411 #define __emulate_1op_8byte(_op, _dst, _eflags)
412 #endif /* __i386__ */
/*
 * insn_fetch(): read the next _size bytes of the instruction stream at
 * cs_base + _eip via ops->read_std into a local, updating 'rc'.
 * NOTE(review): the tail of this statement-expression (cast/return value,
 * _eip advance, error bail-out) appears dropped in this extraction.
 */
414 /* Fetch next part of the instruction being emulated. */
415 #define insn_fetch(_type, _size, _eip) \
416 ({ unsigned long _x; \
417 rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
418 (_size), ctxt->vcpu); \
/*
 * address_mask(): truncate a register value to the effective address size
 * (c->ad_bytes); a no-op when ad_bytes matches the host long size.
 * register_address(): segment base + masked register.
 * register_address_increment(): add 'inc' to 'reg' preserving bits above
 * the address size.  NOTE(review): several continuation lines of this macro
 * (the do{...}while(0) wrapper and the else branch head) appear dropped.
 */
425 /* Access/update address held in a register, based on addressing mode. */
426 #define address_mask(reg) \
427 ((c->ad_bytes == sizeof(unsigned long)) ? \
428 (reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
429 #define register_address(base, reg) \
430 ((base) + address_mask(reg))
431 #define register_address_increment(reg, inc) \
433 /* signed type ensures sign extension to long */ \
435 if (c->ad_bytes == sizeof(unsigned long)) \
439 ~((1UL << (c->ad_bytes << 3)) - 1)) | \
441 ((1UL << (c->ad_bytes << 3)) - 1)); \
/* JMP_REL(): relative jump = advance decode eip by 'rel'. */
444 #define JMP_REL(rel) \
446 register_address_increment(c->eip, rel); \
/*
 * decode_register(): map a 3/4-bit ModRM register number to a pointer into
 * the shadow register file.  With highbyte_regs set (legacy byte ops, no
 * REX), numbers 4-7 select AH/CH/DH/BH, i.e. byte 1 of regs[0..3].
 * NOTE(review): extraction damage in this fragment -- the declaration of
 * 'p', the third parameter line ('int highbyte_regs'), braces and the
 * 'return p;' are dropped, and '®s' below is an HTML-entity mojibake
 * of '&regs' (&reg; mis-decoded).  Restore from the original file.
 */
450 * Given the 'reg' portion of a ModRM byte, and a register block, return a
451 * pointer into the block that addresses the relevant register.
452 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
454 static void *decode_register(u8 modrm_reg
, unsigned long *regs
,
459 p
= &regs
[modrm_reg
];
460 if (highbyte_regs
&& modrm_reg
>= 4 && modrm_reg
< 8)
461 p
= (unsigned char *)&regs
[modrm_reg
& 3] + 1;
465 static int read_descriptor(struct x86_emulate_ctxt
*ctxt
,
466 struct x86_emulate_ops
*ops
,
468 u16
*size
, unsigned long *address
, int op_bytes
)
475 rc
= ops
->read_std((unsigned long)ptr
, (unsigned long *)size
, 2,
479 rc
= ops
->read_std((unsigned long)ptr
+ 2, address
, op_bytes
,
/*
 * test_cc(): evaluate an x86 condition code (low nibble of a Jcc/SETcc
 * opcode) against an EFLAGS value.  Conditions are tested in pairs --
 * bits [3:1] select the base predicate (O, C, Z, C|Z, S, P, Z again for
 * the le/g group combined with SF!=OF, SF!=OF) -- and the final line
 * inverts the result when the low bit of the condition is set.
 * Returns nonzero iff the condition holds.
 * NOTE(review): the 'int rc = 0;' declaration, most 'case N:' labels and
 * all 'break;' lines were dropped by the extraction; only the rc |= ...
 * bodies survive below.  Restore from the original file.
 */
484 static int test_cc(unsigned int condition
, unsigned int flags
)
488 switch ((condition
& 15) >> 1) {
/* case 0: o -- overflow */
490 rc
|= (flags
& EFLG_OF
);
492 case 1: /* b/c/nae */
493 rc
|= (flags
& EFLG_CF
);
/* case 2: z/e -- zero */
496 rc
|= (flags
& EFLG_ZF
);
/* case 3: be/na -- below or equal */
499 rc
|= (flags
& (EFLG_CF
|EFLG_ZF
));
/* case 4: s -- sign */
502 rc
|= (flags
& EFLG_SF
);
/* case 5: p/pe -- parity */
505 rc
|= (flags
& EFLG_PF
);
/* case 6/7 group: uses ZF and SF!=OF -- see note above */
508 rc
|= (flags
& EFLG_ZF
);
511 rc
|= (!(flags
& EFLG_SF
) != !(flags
& EFLG_OF
));
515 /* Odd condition identifiers (lsb == 1) have inverted sense. */
516 return (!!rc
^ (condition
& 1));
519 static void decode_register_operand(struct operand
*op
,
520 struct decode_cache
*c
,
523 unsigned reg
= c
->modrm_reg
;
524 int highbyte_regs
= c
->rex_prefix
== 0;
527 reg
= (c
->b
& 7) | ((c
->rex_prefix
& 1) << 3);
529 if ((c
->d
& ByteOp
) && !inhibit_bytereg
) {
530 op
->ptr
= decode_register(reg
, c
->regs
, highbyte_regs
);
531 op
->val
= *(u8
*)op
->ptr
;
534 op
->ptr
= decode_register(reg
, c
->regs
, 0);
535 op
->bytes
= c
->op_bytes
;
538 op
->val
= *(u16
*)op
->ptr
;
541 op
->val
= *(u32
*)op
->ptr
;
544 op
->val
= *(u64
*) op
->ptr
;
548 op
->orig_val
= op
->val
;
552 x86_decode_insn(struct x86_emulate_ctxt
*ctxt
, struct x86_emulate_ops
*ops
)
554 struct decode_cache
*c
= &ctxt
->decode
;
557 int mode
= ctxt
->mode
;
558 int index_reg
= 0, base_reg
= 0, scale
, rip_relative
= 0;
560 /* Shadow copy of register state. Committed on successful emulation. */
562 memset(c
, 0, sizeof(struct decode_cache
));
563 c
->eip
= ctxt
->vcpu
->rip
;
564 memcpy(c
->regs
, ctxt
->vcpu
->regs
, sizeof c
->regs
);
567 case X86EMUL_MODE_REAL
:
568 case X86EMUL_MODE_PROT16
:
569 c
->op_bytes
= c
->ad_bytes
= 2;
571 case X86EMUL_MODE_PROT32
:
572 c
->op_bytes
= c
->ad_bytes
= 4;
575 case X86EMUL_MODE_PROT64
:
584 /* Legacy prefixes. */
586 switch (c
->b
= insn_fetch(u8
, 1, c
->eip
)) {
587 case 0x66: /* operand-size override */
588 c
->op_bytes
^= 6; /* switch between 2/4 bytes */
590 case 0x67: /* address-size override */
591 if (mode
== X86EMUL_MODE_PROT64
)
592 /* switch between 4/8 bytes */
595 /* switch between 2/4 bytes */
598 case 0x2e: /* CS override */
599 c
->override_base
= &ctxt
->cs_base
;
601 case 0x3e: /* DS override */
602 c
->override_base
= &ctxt
->ds_base
;
604 case 0x26: /* ES override */
605 c
->override_base
= &ctxt
->es_base
;
607 case 0x64: /* FS override */
608 c
->override_base
= &ctxt
->fs_base
;
610 case 0x65: /* GS override */
611 c
->override_base
= &ctxt
->gs_base
;
613 case 0x36: /* SS override */
614 c
->override_base
= &ctxt
->ss_base
;
616 case 0x40 ... 0x4f: /* REX */
617 if (mode
!= X86EMUL_MODE_PROT64
)
619 c
->rex_prefix
= c
->b
;
621 case 0xf0: /* LOCK */
624 case 0xf2: /* REPNE/REPNZ */
625 case 0xf3: /* REP/REPE/REPZ */
632 /* Any legacy prefix after a REX prefix nullifies its effect. */
641 if (c
->rex_prefix
& 8)
642 c
->op_bytes
= 8; /* REX.W */
643 c
->modrm_reg
= (c
->rex_prefix
& 4) << 1; /* REX.R */
644 index_reg
= (c
->rex_prefix
& 2) << 2; /* REX.X */
645 c
->modrm_rm
= base_reg
= (c
->rex_prefix
& 1) << 3; /* REG.B */
648 /* Opcode byte(s). */
649 c
->d
= opcode_table
[c
->b
];
651 /* Two-byte opcode? */
654 c
->b
= insn_fetch(u8
, 1, c
->eip
);
655 c
->d
= twobyte_table
[c
->b
];
660 DPRINTF("Cannot emulate %02x\n", c
->b
);
665 /* ModRM and SIB bytes. */
667 c
->modrm
= insn_fetch(u8
, 1, c
->eip
);
668 c
->modrm_mod
|= (c
->modrm
& 0xc0) >> 6;
669 c
->modrm_reg
|= (c
->modrm
& 0x38) >> 3;
670 c
->modrm_rm
|= (c
->modrm
& 0x07);
674 if (c
->modrm_mod
== 3) {
675 c
->modrm_val
= *(unsigned long *)
676 decode_register(c
->modrm_rm
, c
->regs
, c
->d
& ByteOp
);
680 if (c
->ad_bytes
== 2) {
681 unsigned bx
= c
->regs
[VCPU_REGS_RBX
];
682 unsigned bp
= c
->regs
[VCPU_REGS_RBP
];
683 unsigned si
= c
->regs
[VCPU_REGS_RSI
];
684 unsigned di
= c
->regs
[VCPU_REGS_RDI
];
686 /* 16-bit ModR/M decode. */
687 switch (c
->modrm_mod
) {
689 if (c
->modrm_rm
== 6)
691 insn_fetch(u16
, 2, c
->eip
);
694 c
->modrm_ea
+= insn_fetch(s8
, 1, c
->eip
);
697 c
->modrm_ea
+= insn_fetch(u16
, 2, c
->eip
);
700 switch (c
->modrm_rm
) {
702 c
->modrm_ea
+= bx
+ si
;
705 c
->modrm_ea
+= bx
+ di
;
708 c
->modrm_ea
+= bp
+ si
;
711 c
->modrm_ea
+= bp
+ di
;
720 if (c
->modrm_mod
!= 0)
727 if (c
->modrm_rm
== 2 || c
->modrm_rm
== 3 ||
728 (c
->modrm_rm
== 6 && c
->modrm_mod
!= 0))
729 if (!c
->override_base
)
730 c
->override_base
= &ctxt
->ss_base
;
731 c
->modrm_ea
= (u16
)c
->modrm_ea
;
733 /* 32/64-bit ModR/M decode. */
734 switch (c
->modrm_rm
) {
737 sib
= insn_fetch(u8
, 1, c
->eip
);
738 index_reg
|= (sib
>> 3) & 7;
744 if (c
->modrm_mod
!= 0)
749 insn_fetch(s32
, 4, c
->eip
);
752 c
->modrm_ea
+= c
->regs
[base_reg
];
759 c
->regs
[index_reg
] << scale
;
764 if (c
->modrm_mod
!= 0)
765 c
->modrm_ea
+= c
->regs
[c
->modrm_rm
];
766 else if (mode
== X86EMUL_MODE_PROT64
)
770 c
->modrm_ea
+= c
->regs
[c
->modrm_rm
];
773 switch (c
->modrm_mod
) {
775 if (c
->modrm_rm
== 5)
777 insn_fetch(s32
, 4, c
->eip
);
780 c
->modrm_ea
+= insn_fetch(s8
, 1, c
->eip
);
783 c
->modrm_ea
+= insn_fetch(s32
, 4, c
->eip
);
788 c
->modrm_ea
+= c
->eip
;
789 switch (c
->d
& SrcMask
) {
797 if (c
->op_bytes
== 8)
800 c
->modrm_ea
+= c
->op_bytes
;
805 } else if (c
->d
& MemAbs
) {
806 switch (c
->ad_bytes
) {
808 c
->modrm_ea
= insn_fetch(u16
, 2, c
->eip
);
811 c
->modrm_ea
= insn_fetch(u32
, 4, c
->eip
);
814 c
->modrm_ea
= insn_fetch(u64
, 8, c
->eip
);
820 if (!c
->override_base
)
821 c
->override_base
= &ctxt
->ds_base
;
822 if (mode
== X86EMUL_MODE_PROT64
&&
823 c
->override_base
!= &ctxt
->fs_base
&&
824 c
->override_base
!= &ctxt
->gs_base
)
825 c
->override_base
= NULL
;
827 if (c
->override_base
)
828 c
->modrm_ea
+= *c
->override_base
;
830 if (c
->ad_bytes
!= 8)
831 c
->modrm_ea
= (u32
)c
->modrm_ea
;
833 * Decode and fetch the source operand: register, memory
836 switch (c
->d
& SrcMask
) {
840 decode_register_operand(&c
->src
, c
, 0);
849 c
->src
.bytes
= (c
->d
& ByteOp
) ? 1 :
851 /* Don't fetch the address for invlpg: it could be unmapped. */
852 if (c
->twobyte
&& c
->b
== 0x01 && c
->modrm_reg
== 7)
856 * For instructions with a ModR/M byte, switch to register
859 if ((c
->d
& ModRM
) && c
->modrm_mod
== 3) {
860 c
->src
.type
= OP_REG
;
863 c
->src
.type
= OP_MEM
;
866 c
->src
.type
= OP_IMM
;
867 c
->src
.ptr
= (unsigned long *)c
->eip
;
868 c
->src
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
869 if (c
->src
.bytes
== 8)
871 /* NB. Immediates are sign-extended as necessary. */
872 switch (c
->src
.bytes
) {
874 c
->src
.val
= insn_fetch(s8
, 1, c
->eip
);
877 c
->src
.val
= insn_fetch(s16
, 2, c
->eip
);
880 c
->src
.val
= insn_fetch(s32
, 4, c
->eip
);
885 c
->src
.type
= OP_IMM
;
886 c
->src
.ptr
= (unsigned long *)c
->eip
;
888 c
->src
.val
= insn_fetch(s8
, 1, c
->eip
);
892 /* Decode and fetch the destination operand: register or memory. */
893 switch (c
->d
& DstMask
) {
895 /* Special instructions do their own operand decoding. */
898 decode_register_operand(&c
->dst
, c
,
899 c
->twobyte
&& (c
->b
== 0xb6 || c
->b
== 0xb7));
902 if ((c
->d
& ModRM
) && c
->modrm_mod
== 3) {
903 c
->dst
.type
= OP_REG
;
906 c
->dst
.type
= OP_MEM
;
911 return (rc
== X86EMUL_UNHANDLEABLE
) ? -1 : 0;
/*
 * emulate_push(): stage a PUSH of c->src.val.  Pre-decrements the shadow
 * RSP by the operand size, then points the decode cache's destination at
 * the new stack top (OP_MEM, ss_base-relative) so the generic writeback()
 * path performs the actual memory store.
 * NOTE(review): the function's braces were dropped by the extraction.
 */
914 static inline void emulate_push(struct x86_emulate_ctxt
*ctxt
)
916 struct decode_cache
*c
= &ctxt
->decode
;
/* Destination is a memory write of op_bytes at the new stack top. */
918 c
->dst
.type
= OP_MEM
;
919 c
->dst
.bytes
= c
->op_bytes
;
920 c
->dst
.val
= c
->src
.val
;
/* Decrement RSP first (push grows the stack downwards). */
921 register_address_increment(c
->regs
[VCPU_REGS_RSP
], -c
->op_bytes
);
922 c
->dst
.ptr
= (void *) register_address(ctxt
->ss_base
,
923 c
->regs
[VCPU_REGS_RSP
]);
926 static inline int emulate_grp1a(struct x86_emulate_ctxt
*ctxt
,
927 struct x86_emulate_ops
*ops
)
929 struct decode_cache
*c
= &ctxt
->decode
;
932 /* 64-bit mode: POP always pops a 64-bit operand. */
934 if (ctxt
->mode
== X86EMUL_MODE_PROT64
)
937 rc
= ops
->read_std(register_address(ctxt
->ss_base
,
938 c
->regs
[VCPU_REGS_RSP
]),
939 &c
->dst
.val
, c
->dst
.bytes
, ctxt
->vcpu
);
943 register_address_increment(c
->regs
[VCPU_REGS_RSP
], c
->dst
.bytes
);
/*
 * emulate_grp2(): dispatch the Group-2 rotate/shift family (opcodes
 * 0xC0/0xC1, 0xD0-0xD3) on the ModRM reg field: rol, ror, rcl, rcr,
 * sal/shl (reg 4 and its alias 6), shr, sar.  Each arm delegates to the
 * emulate_2op_SrcB() asm helper, which applies c->src to c->dst and
 * updates ctxt->eflags.
 * NOTE(review): most 'case N:' labels, the 'break;' lines and the
 * function's braces were dropped by the extraction; only the
 * case-4/case-6 labels survive below.  Restore from the original file.
 */
948 static inline void emulate_grp2(struct x86_emulate_ctxt
*ctxt
)
950 struct decode_cache
*c
= &ctxt
->decode
;
951 switch (c
->modrm_reg
) {
/* reg 0: rotate left */
953 emulate_2op_SrcB("rol", c
->src
, c
->dst
, ctxt
->eflags
);
/* reg 1: rotate right */
956 emulate_2op_SrcB("ror", c
->src
, c
->dst
, ctxt
->eflags
);
/* reg 2: rotate left through carry */
959 emulate_2op_SrcB("rcl", c
->src
, c
->dst
, ctxt
->eflags
);
/* reg 3: rotate right through carry */
962 emulate_2op_SrcB("rcr", c
->src
, c
->dst
, ctxt
->eflags
);
964 case 4: /* sal/shl */
965 case 6: /* sal/shl */
966 emulate_2op_SrcB("sal", c
->src
, c
->dst
, ctxt
->eflags
);
/* reg 5: logical shift right */
969 emulate_2op_SrcB("shr", c
->src
, c
->dst
, ctxt
->eflags
);
/* reg 7: arithmetic shift right */
972 emulate_2op_SrcB("sar", c
->src
, c
->dst
, ctxt
->eflags
);
977 static inline int emulate_grp3(struct x86_emulate_ctxt
*ctxt
,
978 struct x86_emulate_ops
*ops
)
980 struct decode_cache
*c
= &ctxt
->decode
;
983 switch (c
->modrm_reg
) {
984 case 0 ... 1: /* test */
986 * Special case in Grp3: test has an immediate
989 c
->src
.type
= OP_IMM
;
990 c
->src
.ptr
= (unsigned long *)c
->eip
;
991 c
->src
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
992 if (c
->src
.bytes
== 8)
994 switch (c
->src
.bytes
) {
996 c
->src
.val
= insn_fetch(s8
, 1, c
->eip
);
999 c
->src
.val
= insn_fetch(s16
, 2, c
->eip
);
1002 c
->src
.val
= insn_fetch(s32
, 4, c
->eip
);
1005 emulate_2op_SrcV("test", c
->src
, c
->dst
, ctxt
->eflags
);
1008 c
->dst
.val
= ~c
->dst
.val
;
1011 emulate_1op("neg", c
->dst
, ctxt
->eflags
);
1014 DPRINTF("Cannot emulate %02x\n", c
->b
);
1015 rc
= X86EMUL_UNHANDLEABLE
;
1022 static inline int emulate_grp45(struct x86_emulate_ctxt
*ctxt
,
1023 struct x86_emulate_ops
*ops
)
1025 struct decode_cache
*c
= &ctxt
->decode
;
1028 switch (c
->modrm_reg
) {
1030 emulate_1op("inc", c
->dst
, ctxt
->eflags
);
1033 emulate_1op("dec", c
->dst
, ctxt
->eflags
);
1035 case 4: /* jmp abs */
1037 c
->eip
= c
->dst
.val
;
1039 DPRINTF("Cannot emulate %02x\n", c
->b
);
1040 return X86EMUL_UNHANDLEABLE
;
1045 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1047 if (ctxt
->mode
== X86EMUL_MODE_PROT64
) {
1049 rc
= ops
->read_std((unsigned long)c
->dst
.ptr
,
1050 &c
->dst
.val
, 8, ctxt
->vcpu
);
1054 register_address_increment(c
->regs
[VCPU_REGS_RSP
],
1056 rc
= ops
->write_emulated(register_address(ctxt
->ss_base
,
1057 c
->regs
[VCPU_REGS_RSP
]), &c
->dst
.val
,
1058 c
->dst
.bytes
, ctxt
->vcpu
);
1061 c
->dst
.type
= OP_NONE
;
1064 DPRINTF("Cannot emulate %02x\n", c
->b
);
1065 return X86EMUL_UNHANDLEABLE
;
1070 static inline int emulate_grp9(struct x86_emulate_ctxt
*ctxt
,
1071 struct x86_emulate_ops
*ops
,
1074 struct decode_cache
*c
= &ctxt
->decode
;
1078 rc
= ops
->read_emulated(cr2
, &old
, 8, ctxt
->vcpu
);
1082 if (((u32
) (old
>> 0) != (u32
) c
->regs
[VCPU_REGS_RAX
]) ||
1083 ((u32
) (old
>> 32) != (u32
) c
->regs
[VCPU_REGS_RDX
])) {
1085 c
->regs
[VCPU_REGS_RAX
] = (u32
) (old
>> 0);
1086 c
->regs
[VCPU_REGS_RDX
] = (u32
) (old
>> 32);
1087 ctxt
->eflags
&= ~EFLG_ZF
;
1090 new = ((u64
)c
->regs
[VCPU_REGS_RCX
] << 32) |
1091 (u32
) c
->regs
[VCPU_REGS_RBX
];
1093 rc
= ops
->cmpxchg_emulated(cr2
, &old
, &new, 8, ctxt
->vcpu
);
1096 ctxt
->eflags
|= EFLG_ZF
;
/*
 * writeback(): commit the decoded destination operand after emulation.
 * For a register destination (OP_REG) it stores c->dst.val through
 * c->dst.ptr at the operand width -- note the deliberate plain 4-byte
 * store, which zero-extends to 64 bits on x86-64.  For a memory
 * destination it goes through the emulator ops: cmpxchg_emulated on one
 * path and write_emulated on the other (per the surrounding code, the
 * cmpxchg path covers locked read-modify-write destinations).
 * Returns 0 on success or the rc from the memory op.
 * NOTE(review): the extraction dropped the case labels (OP_REG/OP_MEM/
 * OP_NONE), the argument tails of both emulator-op calls, the braces and
 * the final 'return'; restore them from the original file.
 */
1101 static inline int writeback(struct x86_emulate_ctxt
*ctxt
,
1102 struct x86_emulate_ops
*ops
)
1105 struct decode_cache
*c
= &ctxt
->decode
;
1107 switch (c
->dst
.type
) {
1109 /* The 4-byte case *is* correct:
1110 * in 64-bit mode we zero-extend.
1112 switch (c
->dst
.bytes
) {
/* 1-byte register store */
1114 *(u8
*)c
->dst
.ptr
= (u8
)c
->dst
.val
;
/* 2-byte register store */
1117 *(u16
*)c
->dst
.ptr
= (u16
)c
->dst
.val
;
/* 4-byte register store -- zero-extends on 64-bit, see comment above */
1120 *c
->dst
.ptr
= (u32
)c
->dst
.val
;
1121 break; /* 64b: zero-ext */
/* 8-byte register store */
1123 *c
->dst
.ptr
= c
->dst
.val
;
/* memory destination, locked path: atomic compare-and-exchange */
1129 rc
= ops
->cmpxchg_emulated(
1130 (unsigned long)c
->dst
.ptr
,
/* memory destination, plain path: emulated write */
1136 rc
= ops
->write_emulated(
1137 (unsigned long)c
->dst
.ptr
,
1154 x86_emulate_insn(struct x86_emulate_ctxt
*ctxt
, struct x86_emulate_ops
*ops
)
1156 unsigned long cr2
= ctxt
->cr2
;
1158 unsigned long saved_eip
= 0;
1159 struct decode_cache
*c
= &ctxt
->decode
;
1162 /* Shadow copy of register state. Committed on successful emulation.
1163 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
1167 memcpy(c
->regs
, ctxt
->vcpu
->regs
, sizeof c
->regs
);
1170 if (((c
->d
& ModRM
) && (c
->modrm_mod
!= 3)) || (c
->d
& MemAbs
))
1173 if (c
->src
.type
== OP_MEM
) {
1174 c
->src
.ptr
= (unsigned long *)cr2
;
1176 rc
= ops
->read_emulated((unsigned long)c
->src
.ptr
,
1182 c
->src
.orig_val
= c
->src
.val
;
1185 if ((c
->d
& DstMask
) == ImplicitOps
)
1189 if (c
->dst
.type
== OP_MEM
) {
1190 c
->dst
.ptr
= (unsigned long *)cr2
;
1191 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1194 unsigned long mask
= ~(c
->dst
.bytes
* 8 - 1);
1196 c
->dst
.ptr
= (void *)c
->dst
.ptr
+
1197 (c
->src
.val
& mask
) / 8;
1199 if (!(c
->d
& Mov
) &&
1200 /* optimisation - avoid slow emulated read */
1201 ((rc
= ops
->read_emulated((unsigned long)c
->dst
.ptr
,
1203 c
->dst
.bytes
, ctxt
->vcpu
)) != 0))
1206 c
->dst
.orig_val
= c
->dst
.val
;
1214 emulate_2op_SrcV("add", c
->src
, c
->dst
, ctxt
->eflags
);
1218 emulate_2op_SrcV("or", c
->src
, c
->dst
, ctxt
->eflags
);
1222 emulate_2op_SrcV("adc", c
->src
, c
->dst
, ctxt
->eflags
);
1226 emulate_2op_SrcV("sbb", c
->src
, c
->dst
, ctxt
->eflags
);
1230 emulate_2op_SrcV("and", c
->src
, c
->dst
, ctxt
->eflags
);
1232 case 0x24: /* and al imm8 */
1233 c
->dst
.type
= OP_REG
;
1234 c
->dst
.ptr
= &c
->regs
[VCPU_REGS_RAX
];
1235 c
->dst
.val
= *(u8
*)c
->dst
.ptr
;
1237 c
->dst
.orig_val
= c
->dst
.val
;
1239 case 0x25: /* and ax imm16, or eax imm32 */
1240 c
->dst
.type
= OP_REG
;
1241 c
->dst
.bytes
= c
->op_bytes
;
1242 c
->dst
.ptr
= &c
->regs
[VCPU_REGS_RAX
];
1243 if (c
->op_bytes
== 2)
1244 c
->dst
.val
= *(u16
*)c
->dst
.ptr
;
1246 c
->dst
.val
= *(u32
*)c
->dst
.ptr
;
1247 c
->dst
.orig_val
= c
->dst
.val
;
1251 emulate_2op_SrcV("sub", c
->src
, c
->dst
, ctxt
->eflags
);
1255 emulate_2op_SrcV("xor", c
->src
, c
->dst
, ctxt
->eflags
);
1259 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1261 case 0x40 ... 0x47: /* inc r16/r32 */
1262 emulate_1op("inc", c
->dst
, ctxt
->eflags
);
1264 case 0x48 ... 0x4f: /* dec r16/r32 */
1265 emulate_1op("dec", c
->dst
, ctxt
->eflags
);
1267 case 0x50 ... 0x57: /* push reg */
1268 c
->dst
.type
= OP_MEM
;
1269 c
->dst
.bytes
= c
->op_bytes
;
1270 c
->dst
.val
= c
->src
.val
;
1271 register_address_increment(c
->regs
[VCPU_REGS_RSP
],
1273 c
->dst
.ptr
= (void *) register_address(
1274 ctxt
->ss_base
, c
->regs
[VCPU_REGS_RSP
]);
1276 case 0x58 ... 0x5f: /* pop reg */
1278 if ((rc
= ops
->read_std(register_address(ctxt
->ss_base
,
1279 c
->regs
[VCPU_REGS_RSP
]), c
->dst
.ptr
,
1280 c
->op_bytes
, ctxt
->vcpu
)) != 0)
1283 register_address_increment(c
->regs
[VCPU_REGS_RSP
],
1285 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1287 case 0x63: /* movsxd */
1288 if (ctxt
->mode
!= X86EMUL_MODE_PROT64
)
1289 goto cannot_emulate
;
1290 c
->dst
.val
= (s32
) c
->src
.val
;
1292 case 0x80 ... 0x83: /* Grp1 */
1293 switch (c
->modrm_reg
) {
1313 emulate_2op_SrcV("test", c
->src
, c
->dst
, ctxt
->eflags
);
1315 case 0x86 ... 0x87: /* xchg */
1316 /* Write back the register source. */
1317 switch (c
->dst
.bytes
) {
1319 *(u8
*) c
->src
.ptr
= (u8
) c
->dst
.val
;
1322 *(u16
*) c
->src
.ptr
= (u16
) c
->dst
.val
;
1325 *c
->src
.ptr
= (u32
) c
->dst
.val
;
1326 break; /* 64b reg: zero-extend */
1328 *c
->src
.ptr
= c
->dst
.val
;
1332 * Write back the memory destination with implicit LOCK
1335 c
->dst
.val
= c
->src
.val
;
1338 case 0x88 ... 0x8b: /* mov */
1340 case 0x8d: /* lea r16/r32, m */
1341 c
->dst
.val
= c
->modrm_val
;
1343 case 0x8f: /* pop (sole member of Grp1a) */
1344 rc
= emulate_grp1a(ctxt
, ops
);
1348 case 0xa0 ... 0xa1: /* mov */
1349 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1350 c
->dst
.val
= c
->src
.val
;
1352 case 0xa2 ... 0xa3: /* mov */
1353 c
->dst
.val
= (unsigned long)c
->regs
[VCPU_REGS_RAX
];
1358 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1360 c
->dst
.val
= c
->src
.val
;
1362 case 0xd0 ... 0xd1: /* Grp2 */
1366 case 0xd2 ... 0xd3: /* Grp2 */
1367 c
->src
.val
= c
->regs
[VCPU_REGS_RCX
];
1370 case 0xf6 ... 0xf7: /* Grp3 */
1371 rc
= emulate_grp3(ctxt
, ops
);
1375 case 0xfe ... 0xff: /* Grp4/Grp5 */
1376 rc
= emulate_grp45(ctxt
, ops
);
1383 rc
= writeback(ctxt
, ops
);
1387 /* Commit shadow register state. */
1388 memcpy(ctxt
->vcpu
->regs
, c
->regs
, sizeof c
->regs
);
1389 ctxt
->vcpu
->rip
= c
->eip
;
1392 if (rc
== X86EMUL_UNHANDLEABLE
) {
1400 goto twobyte_special_insn
;
1402 case 0x6a: /* push imm8 */
1404 c
->src
.val
= insn_fetch(s8
, 1, c
->eip
);
1407 case 0x6c: /* insb */
1408 case 0x6d: /* insw/insd */
1409 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1411 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1413 address_mask(c
->regs
[VCPU_REGS_RCX
]) : 1,
1414 (ctxt
->eflags
& EFLG_DF
),
1415 register_address(ctxt
->es_base
,
1416 c
->regs
[VCPU_REGS_RDI
]),
1418 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1423 case 0x6e: /* outsb */
1424 case 0x6f: /* outsw/outsd */
1425 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1427 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1429 address_mask(c
->regs
[VCPU_REGS_RCX
]) : 1,
1430 (ctxt
->eflags
& EFLG_DF
),
1431 register_address(c
->override_base
?
1434 c
->regs
[VCPU_REGS_RSI
]),
1436 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1441 case 0x70 ... 0x7f: /* jcc (short) */ {
1442 int rel
= insn_fetch(s8
, 1, c
->eip
);
1444 if (test_cc(c
->b
, ctxt
->eflags
))
1448 case 0x9c: /* pushf */
1449 c
->src
.val
= (unsigned long) ctxt
->eflags
;
1452 case 0x9d: /* popf */
1453 c
->dst
.ptr
= (unsigned long *) &ctxt
->eflags
;
1454 goto pop_instruction
;
1455 case 0xc3: /* ret */
1456 c
->dst
.ptr
= &c
->eip
;
1457 goto pop_instruction
;
1458 case 0xf4: /* hlt */
1459 ctxt
->vcpu
->halt_request
= 1;
1461 case 0xf5: /* cmc */
1462 /* complement carry flag from eflags reg */
1463 ctxt
->eflags
^= EFLG_CF
;
1464 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1466 case 0xf8: /* clc */
1467 ctxt
->eflags
&= ~EFLG_CF
;
1468 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1470 case 0xfa: /* cli */
1471 ctxt
->eflags
&= ~X86_EFLAGS_IF
;
1472 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1474 case 0xfb: /* sti */
1475 ctxt
->eflags
|= X86_EFLAGS_IF
;
1476 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1479 if (c
->rep_prefix
) {
1480 if (c
->regs
[VCPU_REGS_RCX
] == 0) {
1481 ctxt
->vcpu
->rip
= c
->eip
;
1484 c
->regs
[VCPU_REGS_RCX
]--;
1485 c
->eip
= ctxt
->vcpu
->rip
;
1488 case 0xa4 ... 0xa5: /* movs */
1489 c
->dst
.type
= OP_MEM
;
1490 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1491 c
->dst
.ptr
= (unsigned long *)register_address(
1493 c
->regs
[VCPU_REGS_RDI
]);
1494 if ((rc
= ops
->read_emulated(register_address(
1495 c
->override_base
? *c
->override_base
:
1497 c
->regs
[VCPU_REGS_RSI
]),
1499 c
->dst
.bytes
, ctxt
->vcpu
)) != 0)
1501 register_address_increment(c
->regs
[VCPU_REGS_RSI
],
1502 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1504 register_address_increment(c
->regs
[VCPU_REGS_RDI
],
1505 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1508 case 0xa6 ... 0xa7: /* cmps */
1509 DPRINTF("Urk! I don't handle CMPS.\n");
1510 goto cannot_emulate
;
1511 case 0xaa ... 0xab: /* stos */
1512 c
->dst
.type
= OP_MEM
;
1513 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1514 c
->dst
.ptr
= (unsigned long *)cr2
;
1515 c
->dst
.val
= c
->regs
[VCPU_REGS_RAX
];
1516 register_address_increment(c
->regs
[VCPU_REGS_RDI
],
1517 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1520 case 0xac ... 0xad: /* lods */
1521 c
->dst
.type
= OP_REG
;
1522 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1523 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1524 if ((rc
= ops
->read_emulated(cr2
, &c
->dst
.val
,
1528 register_address_increment(c
->regs
[VCPU_REGS_RSI
],
1529 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1532 case 0xae ... 0xaf: /* scas */
1533 DPRINTF("Urk! I don't handle SCAS.\n");
1534 goto cannot_emulate
;
1535 case 0xe8: /* call (near) */ {
1537 switch (c
->op_bytes
) {
1539 rel
= insn_fetch(s16
, 2, c
->eip
);
1542 rel
= insn_fetch(s32
, 4, c
->eip
);
1545 rel
= insn_fetch(s64
, 8, c
->eip
);
1548 DPRINTF("Call: Invalid op_bytes\n");
1549 goto cannot_emulate
;
1551 c
->src
.val
= (unsigned long) c
->eip
;
1553 c
->op_bytes
= c
->ad_bytes
;
1557 case 0xe9: /* jmp rel */
1558 case 0xeb: /* jmp rel short */
1559 JMP_REL(c
->src
.val
);
1560 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1569 case 0x01: /* lgdt, lidt, lmsw */
1570 switch (c
->modrm_reg
) {
1572 unsigned long address
;
1574 case 0: /* vmcall */
1575 if (c
->modrm_mod
!= 3 || c
->modrm_rm
!= 1)
1576 goto cannot_emulate
;
1578 rc
= kvm_fix_hypercall(ctxt
->vcpu
);
1582 kvm_emulate_hypercall(ctxt
->vcpu
);
1585 rc
= read_descriptor(ctxt
, ops
, c
->src
.ptr
,
1586 &size
, &address
, c
->op_bytes
);
1589 realmode_lgdt(ctxt
->vcpu
, size
, address
);
1591 case 3: /* lidt/vmmcall */
1592 if (c
->modrm_mod
== 3 && c
->modrm_rm
== 1) {
1593 rc
= kvm_fix_hypercall(ctxt
->vcpu
);
1596 kvm_emulate_hypercall(ctxt
->vcpu
);
1598 rc
= read_descriptor(ctxt
, ops
, c
->src
.ptr
,
1603 realmode_lidt(ctxt
->vcpu
, size
, address
);
1607 if (c
->modrm_mod
!= 3)
1608 goto cannot_emulate
;
1609 *(u16
*)&c
->regs
[c
->modrm_rm
]
1610 = realmode_get_cr(ctxt
->vcpu
, 0);
1613 if (c
->modrm_mod
!= 3)
1614 goto cannot_emulate
;
1615 realmode_lmsw(ctxt
->vcpu
, (u16
)c
->modrm_val
,
1619 emulate_invlpg(ctxt
->vcpu
, cr2
);
1622 goto cannot_emulate
;
1624 /* Disable writeback. */
1625 c
->dst
.type
= OP_NONE
;
1627 case 0x21: /* mov from dr to reg */
1628 if (c
->modrm_mod
!= 3)
1629 goto cannot_emulate
;
1630 rc
= emulator_get_dr(ctxt
, c
->modrm_reg
, &c
->regs
[c
->modrm_rm
]);
1632 goto cannot_emulate
;
1633 c
->dst
.type
= OP_NONE
; /* no writeback */
1635 case 0x23: /* mov from reg to dr */
1636 if (c
->modrm_mod
!= 3)
1637 goto cannot_emulate
;
1638 rc
= emulator_set_dr(ctxt
, c
->modrm_reg
,
1639 c
->regs
[c
->modrm_rm
]);
1641 goto cannot_emulate
;
1642 c
->dst
.type
= OP_NONE
; /* no writeback */
1644 case 0x40 ... 0x4f: /* cmov */
1645 c
->dst
.val
= c
->dst
.orig_val
= c
->src
.val
;
1646 if (!test_cc(c
->b
, ctxt
->eflags
))
1647 c
->dst
.type
= OP_NONE
; /* no writeback */
1651 c
->dst
.type
= OP_NONE
;
1652 /* only subword offset */
1653 c
->src
.val
&= (c
->dst
.bytes
<< 3) - 1;
1654 emulate_2op_SrcV_nobyte("bt", c
->src
, c
->dst
, ctxt
->eflags
);
1658 /* only subword offset */
1659 c
->src
.val
&= (c
->dst
.bytes
<< 3) - 1;
1660 emulate_2op_SrcV_nobyte("bts", c
->src
, c
->dst
, ctxt
->eflags
);
1662 case 0xb0 ... 0xb1: /* cmpxchg */
1664 * Save real source value, then compare EAX against
1667 c
->src
.orig_val
= c
->src
.val
;
1668 c
->src
.val
= c
->regs
[VCPU_REGS_RAX
];
1669 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1670 if (ctxt
->eflags
& EFLG_ZF
) {
1671 /* Success: write back to memory. */
1672 c
->dst
.val
= c
->src
.orig_val
;
1674 /* Failure: write the value we saw to EAX. */
1675 c
->dst
.type
= OP_REG
;
1676 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1681 /* only subword offset */
1682 c
->src
.val
&= (c
->dst
.bytes
<< 3) - 1;
1683 emulate_2op_SrcV_nobyte("btr", c
->src
, c
->dst
, ctxt
->eflags
);
1685 case 0xb6 ... 0xb7: /* movzx */
1686 c
->dst
.bytes
= c
->op_bytes
;
1687 c
->dst
.val
= (c
->d
& ByteOp
) ? (u8
) c
->src
.val
1690 case 0xba: /* Grp8 */
1691 switch (c
->modrm_reg
& 3) {
1704 /* only subword offset */
1705 c
->src
.val
&= (c
->dst
.bytes
<< 3) - 1;
1706 emulate_2op_SrcV_nobyte("btc", c
->src
, c
->dst
, ctxt
->eflags
);
1708 case 0xbe ... 0xbf: /* movsx */
1709 c
->dst
.bytes
= c
->op_bytes
;
1710 c
->dst
.val
= (c
->d
& ByteOp
) ? (s8
) c
->src
.val
:
1713 case 0xc3: /* movnti */
1714 c
->dst
.bytes
= c
->op_bytes
;
1715 c
->dst
.val
= (c
->op_bytes
== 4) ? (u32
) c
->src
.val
:
1721 twobyte_special_insn
:
1724 emulate_clts(ctxt
->vcpu
);
1726 case 0x08: /* invd */
1728 case 0x09: /* wbinvd */
1730 case 0x0d: /* GrpP (prefetch) */
1731 case 0x18: /* Grp16 (prefetch/nop) */
1733 case 0x20: /* mov cr, reg */
1734 if (c
->modrm_mod
!= 3)
1735 goto cannot_emulate
;
1736 c
->regs
[c
->modrm_rm
] =
1737 realmode_get_cr(ctxt
->vcpu
, c
->modrm_reg
);
1739 case 0x22: /* mov reg, cr */
1740 if (c
->modrm_mod
!= 3)
1741 goto cannot_emulate
;
1742 realmode_set_cr(ctxt
->vcpu
,
1743 c
->modrm_reg
, c
->modrm_val
, &ctxt
->eflags
);
1747 msr_data
= (u32
)c
->regs
[VCPU_REGS_RAX
]
1748 | ((u64
)c
->regs
[VCPU_REGS_RDX
] << 32);
1749 rc
= kvm_set_msr(ctxt
->vcpu
, c
->regs
[VCPU_REGS_RCX
], msr_data
);
1751 kvm_x86_ops
->inject_gp(ctxt
->vcpu
, 0);
1752 c
->eip
= ctxt
->vcpu
->rip
;
1754 rc
= X86EMUL_CONTINUE
;
1758 rc
= kvm_get_msr(ctxt
->vcpu
, c
->regs
[VCPU_REGS_RCX
], &msr_data
);
1760 kvm_x86_ops
->inject_gp(ctxt
->vcpu
, 0);
1761 c
->eip
= ctxt
->vcpu
->rip
;
1763 c
->regs
[VCPU_REGS_RAX
] = (u32
)msr_data
;
1764 c
->regs
[VCPU_REGS_RDX
] = msr_data
>> 32;
1766 rc
= X86EMUL_CONTINUE
;
1768 case 0x80 ... 0x8f: /* jnz rel, etc*/ {
1771 switch (c
->op_bytes
) {
1773 rel
= insn_fetch(s16
, 2, c
->eip
);
1776 rel
= insn_fetch(s32
, 4, c
->eip
);
1779 rel
= insn_fetch(s64
, 8, c
->eip
);
1782 DPRINTF("jnz: Invalid op_bytes\n");
1783 goto cannot_emulate
;
1785 if (test_cc(c
->b
, ctxt
->eflags
))
1789 case 0xc7: /* Grp9 (cmpxchg8b) */
1790 rc
= emulate_grp9(ctxt
, ops
, cr2
);
1795 /* Disable writeback. */
1796 c
->dst
.type
= OP_NONE
;
1800 DPRINTF("Cannot emulate %02x\n", c
->b
);