KVM: x86: accessors for guest registers
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
21
22 #ifndef __KERNEL__
23 #include <stdio.h>
24 #include <stdint.h>
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf(_f , ## _a)
27 #else
28 #include <linux/kvm_host.h>
29 #include "kvm_cache_regs.h"
30 #define DPRINTF(x...) do {} while (0)
31 #endif
32 #include <linux/module.h>
33 #include <asm/kvm_x86_emulate.h>
34
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */

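/*
 * Worked example (illustrative, not part of the original source): the
 * opcode_table[0x00] entry below is ByteOp | DstMem | SrcReg | ModRM,
 * which describes 'add r/m8, r8' -- 8-bit operands, a ModRM byte follows,
 * the destination is the r/m operand (register or memory) and the source
 * is the register named by the reg field of the ModRM byte.
 */
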
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};

static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcImm | Mov, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};

static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)		\
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"w %"_wx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _wy ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"l %"_lx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _ly ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 8:							\
			__emulate_2op_8byte(_op, _src, _dst,		\
					    _eflags, _qx, _qy);		\
			break;						\
		}							\
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long __tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&r" (__tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")

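/*
 * Usage sketch (illustrative): the 'add' handler in x86_emulate_insn()
 * below invokes
 *	emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
 * which, for a 4-byte destination, expands to inline asm that restores the
 * arithmetic bits of the guest's saved EFLAGS, executes 'addl src,dst' on
 * the host, and writes the resulting flag bits back into ctxt->eflags.
 */
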
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		  \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})

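/*
 * Usage sketch (illustrative): callers rely on 'rc', 'ctxt', 'ops' and a
 * 'done' label being in scope, e.g. decode_modrm() below fetches the
 * ModRM byte with:
 *	c->modrm = insn_fetch(u8, 1, c->eip);
 */
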
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

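/*
 * Worked example (illustrative): with c->ad_bytes == 2, ad_mask() is
 * 0xffff, so address_mask(c, 0x12345678) yields 0x5678 -- only the low
 * 16 bits participate in 16-bit effective-address arithmetic, and
 * register_address_increment() likewise wraps within those bits while
 * preserving the upper bits of the register.
 */
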
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}

static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}

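/*
 * Worked example (illustrative): with highbyte_regs set, modrm_reg == 7
 * selects BH, i.e. byte 1 of regs[7 & 3] (the RBX slot); without
 * highbyte_regs the same encoding selects DI/EDI/RDI via &regs[7].
 */
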
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;	/* 16-bit operand size loads a 24-bit base */
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}

static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}

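/*
 * Worked example (illustrative): for 'jnz' (opcode 0x75),
 * (0x75 & 15) >> 1 == 2 selects the ZF test and the odd low bit inverts
 * it, so test_cc(0x75, flags) is true exactly when ZF is clear.
 */
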
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *)op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}

static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}

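/*
 * Worked example (illustrative): the 16-bit ModRM byte 0x46 decodes as
 * mod == 1, reg == 0, rm == 6, so decode_modrm() above computes
 * modrm_ea = BP + disp8 and, absent an explicit override, selects the SS
 * segment for the access.
 */
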
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}

int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	/* lea (one-byte opcode 0x8d) must not add the segment base. */
	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}

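/*
 * Worked example (illustrative): for 'add %al,(%bx,%si)' (bytes 0x00 0x00
 * in 16-bit mode), x86_decode_insn() above picks up
 * ByteOp | DstMem | SrcReg | ModRM from opcode_table, decode_modrm()
 * yields modrm_ea = BX + SI, the source becomes the AL register, and the
 * destination becomes a 1-byte memory operand at DS:modrm_ea.
 */
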
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}

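/*
 * Semantics sketch (illustrative): emulate_push() above only queues the
 * stack store -- it pre-decrements RSP by op_bytes (wrapping within the
 * address mask for 16/32-bit stacks) and points dst at SS:RSP; the actual
 * memory write happens later in writeback().
 */
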
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ss_base(ctxt),
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);

	return 0;
}

static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}

static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}

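/*
 * Semantics sketch (illustrative): emulate_grp9() above implements
 * cmpxchg8b -- if the 64-bit memory operand equals EDX:EAX the emulator
 * stores ECX:EBX there via ops->cmpxchg_emulated() and sets ZF; otherwise
 * it loads the memory value into EDX:EAX and clears ZF.
 */
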
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}

int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						(c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x24:		/* and al imm8 */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		c->dst.val = *(u8 *)c->dst.ptr;
		c->dst.bytes = 1;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		if (c->op_bytes == 2)
			c->dst.val = *(u16 *)c->dst.ptr;
		else
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
			break;
		}
		/* fall through: with REX.B set this is xchg r8,rax */
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								: c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								: c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								: c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								: c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								: c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								: c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb8: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:		/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val
					     : (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val
						: (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}