/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<10)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Imm16   (4<<29)
#define Src2Mask    (7<<29)
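/*
 * Illustrative note (editor's sketch, not in the original source): when the
 * Group bit is set, the low byte of a table entry (GroupMask) holds a group
 * number instead of decode bits; bits 3..5 of the ModRM byte then select the
 * real decode bits from group_table[] (or group2_table[] when mod == 3 and
 * GroupDual is set).  E.g. "Group | Group1_83" for opcode 0x83 picks one of
 * the eight Group1_83 entries according to /reg.
 */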
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u32 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x78 - 0x7F */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, SrcImm | Src2Imm16, 0,
	ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, ImplicitOps | Stack, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xE8 - 0xEF */
	SrcImm | Stack, SrcImm | ImplicitOps,
	SrcImm | Src2Imm16, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u32 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)		\
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop %"_LO32 _tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break;						\
		case 4:							\
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break;						\
		case 8:							\
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						\
		}							\
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
		switch ((_dst).bytes) {					\
		case 1:							\
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b"); \
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type) \
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (_cl).val;				\
		_type _srcv = (_src).val;				\
		_type _dstv = (_dst).val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)	\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(_cl).val  = (unsigned long) _clv;			\
		(_src).val = (unsigned long) _srcv;			\
		(_dst).val = (unsigned long) _dstv;			\
	} while (0)

#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)			\
	do {								\
		switch ((_dst).bytes) {					\
		case 2:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "w", unsigned short);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "l", unsigned int);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long));	\
			break;						\
		}							\
	} while (0)
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)(_x);							\
})
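/*
 * Usage note (illustrative): insn_fetch() may only be used in functions that
 * declare "int rc" and provide a "done:" label, since it jumps there when
 * the underlying fetch fails.  A typical call site reads one sign-extended
 * immediate byte and advances eip:
 *
 *	c->src.val = insn_fetch(s8, 1, c->eip);
 */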
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}
static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	/* In 64-bit mode only FS and GS bases matter. */
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->type = OP_REG;
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix & 8)
		c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
						 c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	case SrcOne:
		c->src.bytes = 1;
		c->src.val = 1;
		break;
	}

	/*
	 * Decode and fetch the second source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & Src2Mask) {
	case Src2None:
		break;
	case Src2CL:
		c->src2.bytes = 1;
		c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
		break;
	case Src2ImmByte:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 1;
		c->src2.val = insn_fetch(u8, 1, c->eip);
		break;
	case Src2Imm16:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 2;
		c->src2.val = insn_fetch(u16, 2, c->eip);
		break;
	case Src2One:
		c->src2.bytes = 1;
		c->src2.val = 1;
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops,
		       void *dest, int len)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				dest, len, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
	return rc;
}
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
	if (rc != 0)
		return rc;
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes,
					      ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
1453 emulate_2op_SrcV("add", c
->src
, c
->dst
, ctxt
->eflags
);
1457 emulate_2op_SrcV("or", c
->src
, c
->dst
, ctxt
->eflags
);
1461 emulate_2op_SrcV("adc", c
->src
, c
->dst
, ctxt
->eflags
);
1465 emulate_2op_SrcV("sbb", c
->src
, c
->dst
, ctxt
->eflags
);
1469 emulate_2op_SrcV("and", c
->src
, c
->dst
, ctxt
->eflags
);
1473 emulate_2op_SrcV("sub", c
->src
, c
->dst
, ctxt
->eflags
);
1477 emulate_2op_SrcV("xor", c
->src
, c
->dst
, ctxt
->eflags
);
1481 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1483 case 0x40 ... 0x47: /* inc r16/r32 */
1484 emulate_1op("inc", c
->dst
, ctxt
->eflags
);
1486 case 0x48 ... 0x4f: /* dec r16/r32 */
1487 emulate_1op("dec", c
->dst
, ctxt
->eflags
);
1489 case 0x50 ... 0x57: /* push reg */
1492 case 0x58 ... 0x5f: /* pop reg */
1494 rc
= emulate_pop(ctxt
, ops
, &c
->dst
.val
, c
->op_bytes
);
1498 case 0x63: /* movsxd */
1499 if (ctxt
->mode
!= X86EMUL_MODE_PROT64
)
1500 goto cannot_emulate
;
1501 c
->dst
.val
= (s32
) c
->src
.val
;
1503 case 0x68: /* push imm */
1504 case 0x6a: /* push imm8 */
1507 case 0x6c: /* insb */
1508 case 0x6d: /* insw/insd */
1509 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1511 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1513 address_mask(c
, c
->regs
[VCPU_REGS_RCX
]) : 1,
1514 (ctxt
->eflags
& EFLG_DF
),
1515 register_address(c
, es_base(ctxt
),
1516 c
->regs
[VCPU_REGS_RDI
]),
1518 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1523 case 0x6e: /* outsb */
1524 case 0x6f: /* outsw/outsd */
1525 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1527 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1529 address_mask(c
, c
->regs
[VCPU_REGS_RCX
]) : 1,
1530 (ctxt
->eflags
& EFLG_DF
),
1532 seg_override_base(ctxt
, c
),
1533 c
->regs
[VCPU_REGS_RSI
]),
1535 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1540 case 0x70 ... 0x7f: /* jcc (short) */
1541 if (test_cc(c
->b
, ctxt
->eflags
))
1542 jmp_rel(c
, c
->src
.val
);
1544 case 0x80 ... 0x83: /* Grp1 */
1545 switch (c
->modrm_reg
) {
1565 emulate_2op_SrcV("test", c
->src
, c
->dst
, ctxt
->eflags
);
1567 case 0x86 ... 0x87: /* xchg */
1569 /* Write back the register source. */
1570 switch (c
->dst
.bytes
) {
1572 *(u8
*) c
->src
.ptr
= (u8
) c
->dst
.val
;
1575 *(u16
*) c
->src
.ptr
= (u16
) c
->dst
.val
;
1578 *c
->src
.ptr
= (u32
) c
->dst
.val
;
1579 break; /* 64b reg: zero-extend */
1581 *c
->src
.ptr
= c
->dst
.val
;
1585 * Write back the memory destination with implicit LOCK
1588 c
->dst
.val
= c
->src
.val
;
1591 case 0x88 ... 0x8b: /* mov */
1593 case 0x8c: { /* mov r/m, sreg */
1594 struct kvm_segment segreg
;
1596 if (c
->modrm_reg
<= 5)
1597 kvm_get_segment(ctxt
->vcpu
, &segreg
, c
->modrm_reg
);
1599 printk(KERN_INFO
"0x8c: Invalid segreg in modrm byte 0x%02x\n",
1601 goto cannot_emulate
;
1603 c
->dst
.val
= segreg
.selector
;
1606 case 0x8d: /* lea r16/r32, m */
1607 c
->dst
.val
= c
->modrm_ea
;
1609 case 0x8e: { /* mov seg, r/m16 */
1615 if (c
->modrm_reg
<= 5) {
1616 type_bits
= (c
->modrm_reg
== 1) ? 9 : 1;
1617 err
= kvm_load_segment_descriptor(ctxt
->vcpu
, sel
,
1618 type_bits
, c
->modrm_reg
);
1620 printk(KERN_INFO
"Invalid segreg in modrm byte 0x%02x\n",
1622 goto cannot_emulate
;
1626 goto cannot_emulate
;
1628 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1631 case 0x8f: /* pop (sole member of Grp1a) */
1632 rc
= emulate_grp1a(ctxt
, ops
);
1636 case 0x90: /* nop / xchg r8,rax */
1637 if (!(c
->rex_prefix
& 1)) { /* nop */
1638 c
->dst
.type
= OP_NONE
;
1641 case 0x91 ... 0x97: /* xchg reg,rax */
1642 c
->src
.type
= c
->dst
.type
= OP_REG
;
1643 c
->src
.bytes
= c
->dst
.bytes
= c
->op_bytes
;
1644 c
->src
.ptr
= (unsigned long *) &c
->regs
[VCPU_REGS_RAX
];
1645 c
->src
.val
= *(c
->src
.ptr
);
1647 case 0x9c: /* pushf */
1648 c
->src
.val
= (unsigned long) ctxt
->eflags
;
1651 case 0x9d: /* popf */
1652 c
->dst
.type
= OP_REG
;
1653 c
->dst
.ptr
= (unsigned long *) &ctxt
->eflags
;
1654 c
->dst
.bytes
= c
->op_bytes
;
1655 goto pop_instruction
;
1656 case 0xa0 ... 0xa1: /* mov */
1657 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1658 c
->dst
.val
= c
->src
.val
;
1660 case 0xa2 ... 0xa3: /* mov */
1661 c
->dst
.val
= (unsigned long)c
->regs
[VCPU_REGS_RAX
];
1663 case 0xa4 ... 0xa5: /* movs */
1664 c
->dst
.type
= OP_MEM
;
1665 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1666 c
->dst
.ptr
= (unsigned long *)register_address(c
,
1668 c
->regs
[VCPU_REGS_RDI
]);
1669 if ((rc
= ops
->read_emulated(register_address(c
,
1670 seg_override_base(ctxt
, c
),
1671 c
->regs
[VCPU_REGS_RSI
]),
1673 c
->dst
.bytes
, ctxt
->vcpu
)) != 0)
1675 register_address_increment(c
, &c
->regs
[VCPU_REGS_RSI
],
1676 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1678 register_address_increment(c
, &c
->regs
[VCPU_REGS_RDI
],
1679 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1682 case 0xa6 ... 0xa7: /* cmps */
1683 c
->src
.type
= OP_NONE
; /* Disable writeback. */
1684 c
->src
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1685 c
->src
.ptr
= (unsigned long *)register_address(c
,
1686 seg_override_base(ctxt
, c
),
1687 c
->regs
[VCPU_REGS_RSI
]);
1688 if ((rc
= ops
->read_emulated((unsigned long)c
->src
.ptr
,
1694 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1695 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1696 c
->dst
.ptr
= (unsigned long *)register_address(c
,
1698 c
->regs
[VCPU_REGS_RDI
]);
1699 if ((rc
= ops
->read_emulated((unsigned long)c
->dst
.ptr
,
1705 DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c
->src
.ptr
, c
->dst
.ptr
);
1707 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1709 register_address_increment(c
, &c
->regs
[VCPU_REGS_RSI
],
1710 (ctxt
->eflags
& EFLG_DF
) ? -c
->src
.bytes
1712 register_address_increment(c
, &c
->regs
[VCPU_REGS_RDI
],
1713 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1717 case 0xaa ... 0xab: /* stos */
1718 c
->dst
.type
= OP_MEM
;
1719 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1720 c
->dst
.ptr
= (unsigned long *)register_address(c
,
1722 c
->regs
[VCPU_REGS_RDI
]);
1723 c
->dst
.val
= c
->regs
[VCPU_REGS_RAX
];
1724 register_address_increment(c
, &c
->regs
[VCPU_REGS_RDI
],
1725 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1728 case 0xac ... 0xad: /* lods */
1729 c
->dst
.type
= OP_REG
;
1730 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1731 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1732 if ((rc
= ops
->read_emulated(register_address(c
,
1733 seg_override_base(ctxt
, c
),
1734 c
->regs
[VCPU_REGS_RSI
]),
1739 register_address_increment(c
, &c
->regs
[VCPU_REGS_RSI
],
1740 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1743 case 0xae ... 0xaf: /* scas */
1744 DPRINTF("Urk! I don't handle SCAS.\n");
1745 goto cannot_emulate
;
1746 case 0xb0 ... 0xbf: /* mov r, imm */
1751 case 0xc3: /* ret */
1752 c
->dst
.type
= OP_REG
;
1753 c
->dst
.ptr
= &c
->eip
;
1754 c
->dst
.bytes
= c
->op_bytes
;
1755 goto pop_instruction
;
1756 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1758 c
->dst
.val
= c
->src
.val
;
1760 case 0xcb: /* ret far */
1761 rc
= emulate_ret_far(ctxt
, ops
);
1765 case 0xd0 ... 0xd1: /* Grp2 */
1769 case 0xd2 ... 0xd3: /* Grp2 */
1770 c
->src
.val
= c
->regs
[VCPU_REGS_RCX
];
1773 case 0xe4: /* inb */
1775 port
= insn_fetch(u8
, 1, c
->eip
);
1778 case 0xe6: /* outb */
1779 case 0xe7: /* out */
1780 port
= insn_fetch(u8
, 1, c
->eip
);
1783 case 0xe8: /* call (near) */ {
1784 long int rel
= c
->src
.val
;
1785 c
->src
.val
= (unsigned long) c
->eip
;
1790 case 0xe9: /* jmp rel */
1792 case 0xea: /* jmp far */
1793 if (kvm_load_segment_descriptor(ctxt
->vcpu
, c
->src2
.val
, 9,
1794 VCPU_SREG_CS
) < 0) {
1795 DPRINTF("jmp far: Failed to load CS descriptor\n");
1796 goto cannot_emulate
;
1799 c
->eip
= c
->src
.val
;
1802 jmp
: /* jmp rel short */
1803 jmp_rel(c
, c
->src
.val
);
1804 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1806 case 0xec: /* in al,dx */
1807 case 0xed: /* in (e/r)ax,dx */
1808 port
= c
->regs
[VCPU_REGS_RDX
];
1811 case 0xee: /* out al,dx */
1812 case 0xef: /* out (e/r)ax,dx */
1813 port
= c
->regs
[VCPU_REGS_RDX
];
1815 do_io
: if (kvm_emulate_pio(ctxt
->vcpu
, NULL
, io_dir_in
,
1816 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1819 goto cannot_emulate
;
1822 case 0xf4: /* hlt */
1823 ctxt
->vcpu
->arch
.halt_request
= 1;
1825 case 0xf5: /* cmc */
1826 /* complement carry flag from eflags reg */
1827 ctxt
->eflags
^= EFLG_CF
;
1828 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1830 case 0xf6 ... 0xf7: /* Grp3 */
1831 rc
= emulate_grp3(ctxt
, ops
);
1835 case 0xf8: /* clc */
1836 ctxt
->eflags
&= ~EFLG_CF
;
1837 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1839 case 0xfa: /* cli */
1840 ctxt
->eflags
&= ~X86_EFLAGS_IF
;
1841 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1843 case 0xfb: /* sti */
1844 ctxt
->eflags
|= X86_EFLAGS_IF
;
1845 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1847 case 0xfc: /* cld */
1848 ctxt
->eflags
&= ~EFLG_DF
;
1849 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1851 case 0xfd: /* std */
1852 ctxt
->eflags
|= EFLG_DF
;
1853 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1855 case 0xfe ... 0xff: /* Grp4/Grp5 */
1856 rc
= emulate_grp45(ctxt
, ops
);
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;
twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE;
		break;
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}