/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
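/*
 * Each opcode_table/twobyte_table entry below is an OR of the decode flags
 * above. For example, the entry used for opcode 0x88 (mov r/m8, r8) is
 * ByteOp | DstMem | SrcReg | ModRM | Mov: an 8-bit operation whose
 * destination comes from the ModRM r/m field, whose source comes from the
 * ModRM reg field, which is followed by a ModRM byte, and whose destination
 * is only written, so the emulator can skip the initial read of it.
 */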
static u8 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0, 0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
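/*
 * Conceptually, emulate_2op_SrcV("add", src, dst, _eflags) runs something
 * like the following (a rough sketch, not the literal expansion):
 *
 *	load the guest's arithmetic flags (the EFLAGS_MASK bits) from
 *	_eflags into the host EFLAGS;
 *	execute "add %src, %dst" at the width given by dst.bytes;
 *	copy the EFLAGS_MASK bits of the resulting host EFLAGS back into
 *	_eflags, leaving the arithmetic result in dst.val.
 *
 * The _PRE_EFLAGS and _POST_EFLAGS fragments below do that flag shuffling
 * through the host stack ("_STK") so that only the bits in EFLAGS_MASK ever
 * move between the guest's saved flags and the host's real flags.
 */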
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
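/*
 * Note: "k" is the gcc inline-asm operand modifier that prints the 32-bit
 * form of a register (e.g. %eax rather than %rax), which is why _LO32 is
 * only non-empty on 64-bit builds.
 */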
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"orl %"_LO32 _tmp",("_STK"); " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	unsigned long _tmp; \
	switch ((_dst).bytes) { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"w %"_wx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"l %"_lx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
		__emulate_2op_8byte(_op, _src, _dst, \
				    _eflags, _qx, _qy); \

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	unsigned long _tmp; \
	switch ( (_dst).bytes ) \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"b %"_bx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: _by ((_src).val), "i" (EFLAGS_MASK) ); \
		__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
				     _wx, _wy, _lx, _ly, _qx, _qy); \

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")
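/*
 * Each pair of arguments above is an asm operand modifier and a register
 * constraint for one operand width.  Using "c" as the source constraint
 * forces the source into %cl, matching the hardware requirement that
 * variable shift/rotate counts live in %cl; the SrcB variant is what the
 * Grp2 shift/rotate emulation further down uses.
 */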
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	unsigned long _tmp; \
	switch ( (_dst).bytes ) \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: "i" (EFLAGS_MASK) ); \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: "i" (EFLAGS_MASK) ); \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			: "i" (EFLAGS_MASK) ); \
		__emulate_1op_8byte(_op, _dst, _eflags); \
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	__asm__ __volatile__ ( \
		_PRE_EFLAGS("0","4","2") \
		_op"q %"_qx"3,%1; " \
		_POST_EFLAGS("0","4","2") \
		: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
		: _qy ((_src).val), "i" (EFLAGS_MASK) ); \

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	__asm__ __volatile__ ( \
		_PRE_EFLAGS("0","3","2") \
		_POST_EFLAGS("0","3","2") \
		: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
		: "i" (EFLAGS_MASK) ); \

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt->vcpu); \
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg) \
	((ad_bytes == sizeof(unsigned long)) ? \
	 (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
#define register_address(base, reg) \
	((base) + address_mask(reg))
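/*
 * Worked example: with 16-bit addressing (ad_bytes == 2) a register value
 * of 0x12345678 is masked with (1UL << 16) - 1, so address_mask() yields
 * 0x5678 and register_address(base, reg) computes base + 0x5678.  When
 * ad_bytes equals sizeof(unsigned long) the register is used unmasked.
 */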
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;
	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
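/*
 * Example: with highbyte_regs set, modrm_reg == 4 does not select RSP; it
 * selects AH, i.e. byte 1 of the RAX slot (regs[0]), matching the legacy
 * 8-bit register encoding AL,CL,DL,BL,AH,CH,DH,BH.
 */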
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   u16 *size, unsigned long *address, int op_bytes)
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);

	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
	case X86EMUL_MODE_PROT64:

	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
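			/*
			 * The XORs above toggle between the two legal sizes:
			 * 2 ^ 6 == 4 and 4 ^ 6 == 2 for the operand size,
			 * while 8 ^ 12 == 4 and 4 ^ 12 == 8 for the 64-bit
			 * address size, so a repeated prefix simply toggles
			 * back.
			 */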
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
		case 0xf0:	/* LOCK */
		case 0xf3:	/* REP/REPE/REPZ */
		case 0xf2:	/* REPNE/REPNZ */

	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(u8, 1, _eip);
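		/*
		 * The REX bits become bit 3 of the register numbers decoded
		 * below: e.g. REX byte 0x44 (REX.R set) turns a ModRM reg
		 * field of 1 (rCX) into 9 (r9), since modrm_reg above starts
		 * at (b & 4) << 1 == 8 before the ModRM bits are ORed in.
		 */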
	/* Opcode byte(s). */
		/* Two-byte opcode? */
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];

	/* ModRM and SIB bytes. */
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
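		/*
		 * ModRM layout is mod[7:6] reg[5:3] rm[2:0]; e.g. a ModRM
		 * byte of 0x45 gives mod = 1 (a disp8 follows), reg = 0 and
		 * rm = 5, ORed on top of any REX.R/REX.B contribution that
		 * was already placed in bit 3 of modrm_reg/modrm_rm.
		 */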
	if (modrm_mod == 3) {
		modrm_val = *(unsigned long *)
			decode_register(modrm_rm, _regs, d & ByteOp);

		unsigned bx = _regs[VCPU_REGS_RBX];
		unsigned bp = _regs[VCPU_REGS_RBP];
		unsigned si = _regs[VCPU_REGS_RSI];
		unsigned di = _regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
			modrm_ea += insn_fetch(u16, 2, _eip);
			modrm_ea += insn_fetch(s8, 1, _eip);
			modrm_ea += insn_fetch(u16, 2, _eip);
		if (modrm_rm == 2 || modrm_rm == 3 ||
		    (modrm_rm == 6 && modrm_mod != 0))
				override_base = &ctxt->ss_base;
		modrm_ea = (u16)modrm_ea;

		/* 32/64-bit ModR/M decode. */
			sib = insn_fetch(u8, 1, _eip);
			index_reg |= (sib >> 3) & 7;
				modrm_ea += _regs[base_reg];
				modrm_ea += insn_fetch(s32, 4, _eip);
				modrm_ea += _regs[base_reg];
				modrm_ea += _regs[index_reg] << scale;
			modrm_ea += _regs[modrm_rm];
		else if (mode == X86EMUL_MODE_PROT64)
			modrm_ea += _regs[modrm_rm];
			modrm_ea += insn_fetch(s32, 4, _eip);
			modrm_ea += insn_fetch(s8, 1, _eip);
			modrm_ea += insn_fetch(s32, 4, _eip);
		override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    override_base != &ctxt->fs_base &&
	    override_base != &ctxt->gs_base)
		override_base = NULL;
		modrm_ea += *override_base;
	switch (d & SrcMask) {
			modrm_ea += op_bytes;
		modrm_ea = (u32)modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
		src.ptr = decode_register(modrm_reg, _regs,
		src.val = src.orig_val = *(u8 *) src.ptr;
		src.ptr = decode_register(modrm_reg, _regs, 0);
		switch ((src.bytes = op_bytes)) {
			src.val = src.orig_val = *(u16 *) src.ptr;
			src.val = src.orig_val = *(u32 *) src.ptr;
			src.val = src.orig_val = *(u64 *) src.ptr;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt->vcpu)) != 0)
		src.orig_val = src.val;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		/* NB. Immediates are sign-extended as necessary. */
			src.val = insn_fetch(s8, 1, _eip);
			src.val = insn_fetch(s16, 2, _eip);
			src.val = insn_fetch(s32, 4, _eip);
		src.ptr = (unsigned long *)_eip;
		src.val = insn_fetch(s8, 1, _eip);

	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
		/* Special instructions do their own operand decoding. */
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
			dst.val = *(u8 *) dst.ptr;
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
				dst.val = *(u16 *)dst.ptr;
				dst.val = *(u32 *)dst.ptr;
				dst.val = *(u64 *)dst.ptr;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
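			/*
			 * Worked example for the BitOp adjustment above:
			 * with dst.bytes == 4 and src.val == 37, mask is ~31,
			 * so dst.ptr advances by (37 & ~31) / 8 == 4 bytes
			 * and the bt/bts/btr/btc emulation then operates on
			 * bit 37 % 32 == 5 of that dword.
			 */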
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt->vcpu)) != 0))
		dst.orig_val = dst.val;

		emulate_2op_SrcV("add", src, dst, _eflags);
		emulate_2op_SrcV("or", src, dst, _eflags);
		emulate_2op_SrcV("adc", src, dst, _eflags);
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		emulate_2op_SrcV("and", src, dst, _eflags);
		emulate_2op_SrcV("sub", src, dst, _eflags);
		emulate_2op_SrcV("xor", src, dst, _eflags);
		emulate_2op_SrcV("cmp", src, dst, _eflags);
	case 0x63:	/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
		dst.val = (s32) src.val;
	case 0x80 ... 0x83:	/* Grp1 */
		emulate_2op_SrcV("test", src, dst, _eflags);
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
			*(u8 *) src.ptr = (u8) dst.val;
			*(u16 *) src.ptr = (u16) dst.val;
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip src displacement */
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	case 0x8f:	/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt->vcpu)) != 0)
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
			emulate_2op_SrcB("rol", src, dst, _eflags);
			emulate_2op_SrcB("ror", src, dst, _eflags);
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			emulate_2op_SrcB("rcr", src, dst, _eflags);
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			emulate_2op_SrcB("shr", src, dst, _eflags);
			emulate_2op_SrcB("sar", src, dst, _eflags);
	case 0xd0 ... 0xd1:	/* Grp2 */
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			switch (src.bytes) {
				src.val = insn_fetch(s8, 1, _eip);
				src.val = insn_fetch(s16, 2, _eip);
				src.val = insn_fetch(s32, 4, _eip);
			emulate_1op("neg", dst, _eflags);
			goto cannot_emulate;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
			emulate_1op("inc", dst, _eflags);
			emulate_1op("dec", dst, _eflags);
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
			if ((rc = ops->read_std((unsigned long)dst.ptr,
			register_address_increment(_regs[VCPU_REGS_RSP],
			if ((rc = ops->write_std(
				register_address(ctxt->ss_base,
						 _regs[VCPU_REGS_RSP]),
				&dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto cannot_emulate;

		/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
		switch (dst.bytes) {
			*(u8 *)dst.ptr = (u8)dst.val;
			*(u16 *)dst.ptr = (u16)dst.val;
			*dst.ptr = (u32)dst.val;
			break;	/* 64b: zero-ext */
			rc = ops->cmpxchg_emulated((unsigned long)dst.
						   &dst.val, dst.bytes,
			rc = ops->write_emulated((unsigned long)dst.ptr,
						 &dst.val, dst.bytes,

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
		goto twobyte_special_insn;
	case 0x6c:	/* insb */
	case 0x6d:	/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				(d & ByteOp) ? 1 : op_bytes,	/* size */
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),	/* down */
				register_address(ctxt->es_base,
						 _regs[VCPU_REGS_RDI]),	/* address */
				_regs[VCPU_REGS_RDX]	/* port */
	case 0x6e:	/* outsb */
	case 0x6f:	/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				(d & ByteOp) ? 1 : op_bytes,	/* size */
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),	/* down */
				register_address(override_base ?
						 *override_base : ctxt->ds_base,
						 _regs[VCPU_REGS_RSI]),	/* address */
				_regs[VCPU_REGS_RDX]	/* port */
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	case 0xa4 ... 0xa5:	/* movs */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				override_base ? *override_base : ctxt->ds_base,
				_regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt->vcpu)) != 0)
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
	case 0xac ... 0xad:	/* lods */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xf4:	/* hlt */
		ctxt->vcpu->halt_request = 1;
	case 0xc3:	/* ret */
		goto pop_instruction;
	case 0x58 ... 0x5f:	/* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			_regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1;	/* Disable writeback. */
	case 0x01:	/* lgdt, lidt, lmsw */
		/* Disable writeback. */
		switch (modrm_reg) {
			unsigned long address;

				rc = read_descriptor(ctxt, ops, src.ptr,
						     &size, &address, op_bytes);
				realmode_lgdt(ctxt->vcpu, size, address);
				rc = read_descriptor(ctxt, ops, src.ptr,
						     &size, &address, op_bytes);
				realmode_lidt(ctxt->vcpu, size, address);
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			emulate_invlpg(ctxt->vcpu, cr2);
			goto cannot_emulate;
	case 0x21:	/* mov from dr to reg */
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
	case 0x23:	/* mov from reg to dr */
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
			no_wb = (_eflags & EFLG_OF) ? 0 : 1;
		case 1:	/* cmovb/cmovc/cmovnae */
			no_wb = (_eflags & EFLG_CF) ? 0 : 1;
		case 2:	/* cmovz/cmove */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
		case 3:	/* cmovbe/cmovna */
			no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
			no_wb = (_eflags & EFLG_SF) ? 0 : 1;
		case 5:	/* cmovp/cmovpe */
			no_wb = (_eflags & EFLG_PF) ? 0 : 1;
		case 7:	/* cmovle/cmovng */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
		case 6:	/* cmovl/cmovnge */
			no_wb &= (!(_eflags & EFLG_SF) !=
				  !(_eflags & EFLG_OF)) ? 0 : 1;
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
			/* Failure: write the value we saw to EAX. */
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
	case 0xba:	/* Grp8 */
		switch (modrm_reg & 3) {
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
twobyte_special_insn:
	/* Disable writeback. */
	case 0x09:	/* wbinvd */
	case 0x0d:	/* GrpP (prefetch) */
	case 0x18:	/* Grp16 (prefetch/nop) */
		emulate_clts(ctxt->vcpu);
	case 0x20:	/* mov cr, reg */
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
	case 0x22:	/* mov reg, cr */
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		rc = X86EMUL_CONTINUE;
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		rc = X86EMUL_CONTINUE;
	case 0xc7:	/* Grp9 (cmpxchg8b) */
		if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
		if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
		    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
			_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
			_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
			_eflags &= ~EFLG_ZF;
			new = ((u64)_regs[VCPU_REGS_RCX] << 32)
				| (u32) _regs[VCPU_REGS_RBX];
			if ((rc = ops->cmpxchg_emulated(cr2, &old,
							&new, 8, ctxt->vcpu)) != 0)

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
#include <asm/uaccess.h>

x86_emulate_read_std(unsigned long addr,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	return X86EMUL_CONTINUE;

x86_emulate_write_std(unsigned long addr,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	return X86EMUL_CONTINUE;