/******************************************************************************
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
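/*
 * Example (reading the tables below): opcode 0x88 (mov r/m8,reg8) is
 * declared as ByteOp | DstMem | SrcReg | ModRM | Mov, i.e. a byte-sized
 * operation whose destination is decoded from the ModRM r/m field, whose
 * source is the ModRM reg field, and whose destination is written but
 * never read.
 */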
enum {
	Group1A, Group3_Byte, Group3, Group4, Group5,
};
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov, /* movsxd (x86/64) */
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, Group | Group1A,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps, 0, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
};
static u16 group2_table[] = {
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"pushf; "							\
	"pop  %"_LO32 _tmp"; "						\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"w %"_wx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _wy ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"l %"_lx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _ly ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 8:							\
			__emulate_2op_8byte(_op, _src, _dst,		\
					    _eflags, _qx, _qy);		\
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"b %"_bx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _by ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op"q %"_qx"3,%1; "				\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op"q %1; "					\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				\
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type) _x;							\
})
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg)						\
	((c->ad_bytes == sizeof(unsigned long)) ?			\
		(reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
#define register_address(base, reg)					\
	((base) + address_mask(reg))
#define register_address_increment(reg, inc)				\
	do {								\
		/* signed type ensures sign extension to long */	\
		int _inc = (inc);					\
		if (c->ad_bytes == sizeof(unsigned long))		\
			(reg) += _inc;					\
		else							\
			(reg) = ((reg) &				\
				 ~((1UL << (c->ad_bytes << 3)) - 1)) |	\
				(((reg) + _inc) &			\
				 ((1UL << (c->ad_bytes << 3)) - 1));	\
	} while (0)

#define JMP_REL(rel)							\
	do {								\
		register_address_increment(c->eip, rel);		\
	} while (0)
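/*
 * Example: with c->ad_bytes == 2, register_address_increment() updates
 * only the low 16 bits of the register, so JMP_REL(-2) from eip 0x0001
 * wraps to 0xffff instead of producing a negative address.
 */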
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
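/*
 * Example: opcode 0x75 (jnz rel8) gives (0x75 & 15) >> 1 == 2, selecting
 * the ZF test above; the set low bit then inverts the sense, so the
 * condition holds when ZF is clear.
 */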
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);

	if (c->modrm_mod == 3) {
		c->modrm_val = *(unsigned long *)
			decode_register(c->modrm_rm, c->regs, c->d & ByteOp);
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->override_base)
				c->override_base = &ctxt->ss_base;
		c->modrm_ea = (u16)c->modrm_ea;
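		/*
		 * Example: mod=01 rm=010 above decodes to [bp+si+disp8];
		 * being bp-based, it defaults to the SS segment unless an
		 * explicit segment override prefix was decoded earlier.
		 */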
	} else {
		/* 32/64-bit ModR/M decode. */
		switch (c->modrm_rm) {
		case 4:
		case 12:
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			switch (base_reg) {
			case 5:
				if (c->modrm_mod != 0)
					c->modrm_ea += c->regs[base_reg];
				else
					c->modrm_ea +=
						insn_fetch(s32, 4, c->eip);
				break;
			default:
				c->modrm_ea += c->regs[base_reg];
			}
			switch (index_reg) {
			case 4:
				break;
			default:
				c->modrm_ea += c->regs[index_reg] << scale;
			}
			break;
		case 5:
			if (c->modrm_mod != 0)
				c->modrm_ea += c->regs[c->modrm_rm];
			else if (ctxt->mode == X86EMUL_MODE_PROT64)
				rip_relative = 1;
			break;
		default:
			c->modrm_ea += c->regs[c->modrm_rm];
			break;
		}
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
	if (rip_relative) {
		c->modrm_ea += c->eip;
		switch (c->d & SrcMask) {
		case SrcImmByte:
			c->modrm_ea += 1;
			break;
		case SrcImm:
			if (c->d & ByteOp)
				c->modrm_ea += 1;
			else
				if (c->op_bytes == 8)
					c->modrm_ea += 4;
				else
					c->modrm_ea += c->op_bytes;
		}
	}
done:
	return rc;
}
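/*
 * Note on the rip-relative adjustment above: in 64-bit mode, mod=00
 * rm=101 means disp32 relative to the *next* instruction, but any
 * trailing immediate has not been fetched yet when the displacement is
 * decoded, so decode_modrm() compensates by also adding the immediate's
 * size to the effective address.
 */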
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x2e:	/* CS override */
			c->override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			c->override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			c->override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			c->override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			c->override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			c->override_base = &ctxt->ss_base;
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
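		/*
		 * Example: opcode 0xff with ModRM reg field 6 (push r/m)
		 * selects Group5, so group_table[Group5*8 + 6] yields
		 * SrcMem | ModRM | Stack.
		 */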
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->override_base)
		c->override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    c->override_base != &ctxt->fs_base &&
	    c->override_base != &ctxt->gs_base)
		c->override_base = NULL;

	if (c->override_base)
		c->modrm_ea += *c->override_base;

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if modrm_mod == 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(ctxt->ss_base,
					       c->regs[VCPU_REGS_RSP]);
}
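/*
 * Note the ordering in emulate_push(): RSP is decremented first and the
 * decremented value then forms the destination address, e.g. a push with
 * c->op_bytes == 8 stores to the 8 bytes just below the old RSP. The
 * actual memory write is deferred to the common writeback path.
 */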
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(ctxt->ss_base,
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return rc;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct: in 64-bit mode we
		 * zero-extend, e.g. a 32-bit mov to a register also
		 * clears bits 63:32 of that register.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;
	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies to REPE
		 * and REPNE. If the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ, test the corresponding
		 * termination condition:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
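		/*
		 * Example: 'rep movsb' with RCX == 0 therefore retires
		 * immediately without touching memory, while 'repe cmpsb'
		 * additionally stops as soon as a comparison clears ZF.
		 */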
	}

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes,
					      ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;
special_insn:
	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x24:		/* and al imm8 */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		c->dst.val = *(u8 *)c->dst.ptr;
		c->dst.bytes = 1;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		if (c->op_bytes == 2)
			c->dst.val = *(u16 *)c->dst.ptr;
		else
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x6a: /* push imm8 */
		c->src.val = insn_fetch(s8, 1, c->eip);
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(ctxt->es_base,
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c->override_base ?
						 *c->override_base :
						 ctxt->ds_base,
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			JMP_REL(rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				c->override_base ? *c->override_base :
						ctxt->ds_base,
						c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		register_address_increment(c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(
				c->override_base ? *c->override_base :
						ctxt->ds_base,
						c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->src.bytes
							 : c->src.bytes);
		register_address_increment(c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						ctxt->es_base,
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(
				c->override_base ? *c->override_base :
						ctxt->ds_base,
						c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		JMP_REL(rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
	case 0xeb: /* jmp rel short */
		JMP_REL(c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:	/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		goto done;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			kvm_emulate_hypercall(ctxt->vcpu);
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			break;
		case 4: /* smsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&c->regs[c->modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val,
				      &ctxt->eflags);
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			break;
		default:
			goto cannot_emulate;
		}
		/* Disable writeback. */
		c->dst.type = OP_NONE;
		break;
	case 0x06: /* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30: /* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32: /* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
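	/*
	 * e.g. 0x44 (cmove): the source has already been fetched, so when
	 * ZF is clear the move is undone simply by suppressing writeback,
	 * leaving the destination untouched.
	 */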
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			JMP_REL(rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
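	/*
	 * e.g. 'cmpxchg %ecx,(mem)': if EAX equals the memory operand, the
	 * compare above set ZF and ECX is written back to memory; otherwise
	 * the value read from memory lands in EAX.
	 */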
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
						(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	return -1;
}