1 /******************************************************************************
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
6 * Copyright (c) 2005 Keir Fraser
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
11 * Copyright (C) 2006 Qumranet
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf( _f , ## _a )
/*
 * NOTE(review): two alternate DPRINTF definitions appear back-to-back; the
 * original presumably wraps them in an #ifdef DEBUG / #else / #endif that is
 * elided in this chunk -- confirm against the upstream file. The second
 * (no-op) form is the non-debug build variant.
 */
29 #define DPRINTF(x...) do {} while (0)
31 #include "x86_emulate.h"
32 #include <linux/module.h>
35 * Opcode effective-address decode tables.
36 * Note that we only emulate instructions that have at least one memory
37 * operand (excluding implicit stack references). We assume that stack
38 * references and instruction fetches will never occur in special memory
39 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
43 /* Operand sizes: 8-bit operands or specified/overridden size. */
/*
 * Decode-flag bit layout, used by opcode_table/twobyte_table below:
 *   bit 0      = ByteOp (operand size is one byte)
 *   bits 1-2   = destination operand type (DstMask)
 *   bits 3-5   = source operand type (SrcMask)
 * NOTE(review): the ModRM, Mov and BitOp flag definitions referenced by the
 * tables below are not visible in this chunk (lines elided after the two
 * trailing comments of this group) -- confirm their bit positions upstream.
 */
44 #define ByteOp (1<<0) /* 8-bit operands. */
45 /* Destination operand type. */
46 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
47 #define DstReg (2<<1) /* Register operand. */
48 #define DstMem (3<<1) /* Memory operand. */
49 #define DstMask (3<<1)
50 /* Source operand type. */
51 #define SrcNone (0<<3) /* No source operand. */
52 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
53 #define SrcReg (1<<3) /* Register operand. */
54 #define SrcMem (2<<3) /* Memory operand. */
55 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
56 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
57 #define SrcImm (5<<3) /* Immediate operand. */
58 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
59 #define SrcMask (7<<3)
60 /* Generic ModRM decode. */
62 /* Destination is only written; never read. */
/*
 * One-byte opcode decode table, indexed by the primary opcode byte.
 * Each entry is a combination of the decode flags defined above; a zero
 * entry means the opcode is not emulated by this decoder.
 * NOTE(review): many entries and the per-range group comments (e.g.
 * "0x00 - 0x07") are elided in this chunk, so entry positions cannot be
 * matched to specific opcodes from here -- consult the upstream table.
 */
66 static u8 opcode_table[256] = {
68 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
69 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
72 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
73 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
76 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
77 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
80 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
81 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
84 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
85 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
86 SrcImmByte, SrcImm, 0, 0,
88 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
89 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
92 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
93 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
96 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
97 ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
100 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
102 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
103 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
105 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
106 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
108 0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
109 0, 0, 0, 0, 0, 0, 0, 0,
111 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
112 SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
114 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
116 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
117 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
118 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
119 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
121 ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
122 ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
123 0, 0, 0, DstMem | SrcNone | ModRM | Mov,
125 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
127 ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
128 ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
129 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
130 ByteOp | ImplicitOps, ImplicitOps,
132 0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
133 ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
134 ByteOp | ImplicitOps, ImplicitOps,
136 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
138 ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
139 0, ImplicitOps, 0, 0,
140 ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
142 0, 0, 0, 0, 0, 0, 0, 0,
144 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
145 ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
148 0, 0, 0, 0, 0, 0, 0, 0,
150 0, 0, 0, 0, 0, 0, 0, 0,
152 0, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps, 0, 0, 0, 0,
156 ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
159 0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
/*
 * Two-byte (0x0f-prefixed) opcode decode table, indexed by the second
 * opcode byte. Same flag encoding as opcode_table; u16 because the
 * two-byte flag set needs more bits than fit in a u8.
 * NOTE(review): entries and group comments are elided here as well --
 * do not infer opcode positions from the visible row order alone.
 */
162 static u16 twobyte_table[256] = {
164 0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
165 0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
167 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
169 ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
170 0, 0, 0, 0, 0, 0, 0, 0,
172 ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
174 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
175 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
176 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
177 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
179 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
180 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
181 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
182 DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
184 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
186 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
188 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
190 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
192 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
194 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
196 0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
198 ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
199 DstMem | SrcReg | ModRM | BitOp,
200 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
201 DstReg | SrcMem16 | ModRM | Mov,
203 0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
204 0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
205 DstReg | SrcMem16 | ModRM | Mov,
207 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
209 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
211 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
213 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
216 /* Type, address-of, and value of an instruction's operand. */
/*
 * NOTE(review): the enclosing "struct operand {" opener, its closing "};",
 * and at least one more field (a byte-width member named 'bytes', used
 * throughout x86_emulate_memop below) are elided in this chunk.
 */
/* Where the operand lives: a shadow register, guest memory, or an immediate. */
218 enum { OP_REG, OP_MEM, OP_IMM } type;
/* val: current operand value; orig_val: value before emulation (used to
 * decide whether a write-back is needed); ptr: address of the operand's
 * backing storage (shadow register slot, or guest address cast to a pointer). */
220 unsigned long val, orig_val, *ptr;
223 /* EFLAGS bit definitions. */
224 #define EFLG_OF (1<<11) /* overflow flag */
225 #define EFLG_DF (1<<10) /* direction flag (string ops increment/decrement) */
226 #define EFLG_SF (1<<7) /* sign flag */
227 #define EFLG_ZF (1<<6) /* zero flag */
228 #define EFLG_AF (1<<4) /* auxiliary carry (BCD) flag */
229 #define EFLG_PF (1<<2) /* parity flag */
230 #define EFLG_CF (1<<0) /* carry flag */
233 * Instruction emulation:
234 * Most instructions are emulated directly via a fragment of inline assembly
235 * code. This allows us to save/restore EFLAGS and thus very easily pick up
236 * any modified flags.
/* Per-architecture asm spellings used by the emulation macros below. */
239 #if defined(CONFIG_X86_64)
240 #define _LO32 "k" /* force 32-bit operand */
241 #define _STK "%%rsp" /* stack pointer */
242 #elif defined(__i386__)
243 #define _LO32 "" /* force 32-bit operand */
244 #define _STK "%%esp" /* stack pointer */
/* NOTE(review): the closing #endif for this #if/#elif is elided here. */
248 * These EFLAGS bits are restored from saved value during emulation, and
249 * any changes are written back to the saved value after emulation.
251 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
253 /* Before executing instruction: restore necessary bits in EFLAGS. */
/*
 * Arguments are asm operand numbers as strings: _sav = saved-flags operand,
 * _msk = mask operand (EFLAGS_MASK), _tmp = scratch register operand
 * (see the "0","4","2" / "0","3","2" call sites below). Flags are staged
 * through the hardware stack at ("_STK").
 * NOTE(review): several continuation lines of both macros are elided.
 */
254 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
255 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
257 "movl %"_msk",%"_LO32 _tmp"; " \
258 "andl %"_LO32 _tmp",("_STK"); " \
260 "notl %"_LO32 _tmp"; " \
261 "andl %"_LO32 _tmp",("_STK"); " \
263 "orl %"_LO32 _tmp",("_STK"); " \
265 /* _sav &= ~msk; */ \
266 "movl %"_msk",%"_LO32 _tmp"; " \
267 "notl %"_LO32 _tmp"; " \
268 "andl %"_LO32 _tmp",%"_sav"; "
/* After the instruction, fold the masked EFLAGS back into the saved copy. */
270 /* After executing instruction: write-back necessary bits in EFLAGS. */
271 #define _POST_EFLAGS(_sav, _msk, _tmp) \
272 /* _sav |= EFLAGS & _msk; */ \
275 "andl %"_msk",%"_LO32 _tmp"; " \
276 "orl %"_LO32 _tmp",%"_sav"; "
/*
 * Two-operand emulation macros: run the real instruction (_op) on the
 * decoded operands inside inline asm, switching on the destination width,
 * with EFLAGS bracketed by _PRE_EFLAGS/_POST_EFLAGS.
 * NOTE(review): the case labels, break statements and surrounding braces of
 * the switch bodies are elided in this chunk -- only the asm fragments for
 * the word/long (and byte) cases are visible.
 */
278 /* Raw emulation: instruction has two explicit operands. */
279 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
281 unsigned long _tmp; \
283 switch ((_dst).bytes) { \
285 __asm__ __volatile__ ( \
286 _PRE_EFLAGS("0","4","2") \
287 _op"w %"_wx"3,%1; " \
288 _POST_EFLAGS("0","4","2") \
289 : "=m" (_eflags), "=m" ((_dst).val), \
291 : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
294 __asm__ __volatile__ ( \
295 _PRE_EFLAGS("0","4","2") \
296 _op"l %"_lx"3,%1; " \
297 _POST_EFLAGS("0","4","2") \
298 : "=m" (_eflags), "=m" ((_dst).val), \
300 : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
303 __emulate_2op_8byte(_op, _src, _dst, \
304 _eflags, _qx, _qy); \
309 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
311 unsigned long _tmp; \
312 switch ( (_dst).bytes ) \
315 __asm__ __volatile__ ( \
316 _PRE_EFLAGS("0","4","2") \
317 _op"b %"_bx"3,%1; " \
318 _POST_EFLAGS("0","4","2") \
319 : "=m" (_eflags), "=m" ((_dst).val), \
321 : _by ((_src).val), "i" (EFLAGS_MASK) ); \
324 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
325 _wx, _wy, _lx, _ly, _qx, _qy); \
330 /* Source operand is byte-sized and may be restricted to just %cl. */
331 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
332 __emulate_2op(_op, _src, _dst, _eflags, \
333 "b", "c", "b", "c", "b", "c", "b", "c")
/* The paired strings are per-width asm operand modifier / constraint. */
335 /* Source operand is byte, word, long or quad sized. */
336 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
337 __emulate_2op(_op, _src, _dst, _eflags, \
338 "b", "q", "w", "r", _LO32, "r", "", "r")
340 /* Source operand is word, long or quad sized. */
341 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
342 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
343 "w", "r", _LO32, "r", "", "r")
345 /* Instruction has only one explicit operand (no source operand). */
/*
 * Same pattern as __emulate_2op but with no source: switch on destination
 * width, run _op on the destination inside inline asm with EFLAGS bracketed.
 * NOTE(review): the case labels, the asm mnemonic lines between _PRE/_POST,
 * and the closing braces are elided in this chunk.
 */
346 #define emulate_1op(_op, _dst, _eflags) \
348 unsigned long _tmp; \
350 switch ( (_dst).bytes ) \
353 __asm__ __volatile__ ( \
354 _PRE_EFLAGS("0","3","2") \
356 _POST_EFLAGS("0","3","2") \
357 : "=m" (_eflags), "=m" ((_dst).val), \
359 : "i" (EFLAGS_MASK) ); \
362 __asm__ __volatile__ ( \
363 _PRE_EFLAGS("0","3","2") \
365 _POST_EFLAGS("0","3","2") \
366 : "=m" (_eflags), "=m" ((_dst).val), \
368 : "i" (EFLAGS_MASK) ); \
371 __asm__ __volatile__ ( \
372 _PRE_EFLAGS("0","3","2") \
374 _POST_EFLAGS("0","3","2") \
375 : "=m" (_eflags), "=m" ((_dst).val), \
377 : "i" (EFLAGS_MASK) ); \
380 __emulate_1op_8byte(_op, _dst, _eflags); \
385 /* Emulate an instruction with quadword operands (x86/64 only). */
386 #if defined(CONFIG_X86_64)
387 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
389 __asm__ __volatile__ ( \
390 _PRE_EFLAGS("0","4","2") \
391 _op"q %"_qx"3,%1; " \
392 _POST_EFLAGS("0","4","2") \
393 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
394 : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
397 #define __emulate_1op_8byte(_op, _dst, _eflags) \
399 __asm__ __volatile__ ( \
400 _PRE_EFLAGS("0","3","2") \
402 _POST_EFLAGS("0","3","2") \
403 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
404 : "i" (EFLAGS_MASK) ); \
407 #elif defined(__i386__)
/* On 32-bit builds the 8-byte helpers expand to nothing: quadword
 * operands cannot occur outside long mode. */
408 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
409 #define __emulate_1op_8byte(_op, _dst, _eflags)
410 #endif /* __i386__ */
412 /* Fetch next part of the instruction being emulated. */
/*
 * Reads _size bytes of the instruction stream at cs_base + _eip via
 * ops->read_std. NOTE(review): the error check on 'rc', the advance of
 * _eip, and the cast of _x to _type are elided from this chunk.
 */
413 #define insn_fetch(_type, _size, _eip) \
414 ({ unsigned long _x; \
415 rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
416 (_size), ctxt->vcpu); \
423 /* Access/update address held in a register, based on addressing mode. */
424 #define address_mask(reg) \
425 ((ad_bytes == sizeof(unsigned long)) ? \
426 (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
/* Truncates (reg) to the current address size (ad_bytes), unless the
 * address size already equals the native word size. */
427 #define register_address(base, reg) \
428 ((base) + address_mask(reg))
/* Effective address: segment base plus the address-size-masked register. */
429 #define register_address_increment(reg, inc) \
431 /* signed type ensures sign extension to long */ \
433 if ( ad_bytes == sizeof(unsigned long) ) \
436 (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
437 (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
440 #define JMP_REL(rel) \
442 _eip += (int)(rel); \
443 _eip = ((op_bytes == 2) ? (uint16_t)_eip : (uint32_t)_eip); \
447 * Given the 'reg' portion of a ModRM byte, and a register block, return a
448 * pointer into the block that addresses the relevant register.
449 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
451 static void *decode_register(u8 modrm_reg, unsigned long *regs,
456 p = ®s[modrm_reg];
457 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
458 p = (unsigned char *)®s[modrm_reg & 3] + 1;
/*
 * Read a descriptor-table register image (limit:base pair, as used by
 * LGDT/LIDT) from guest memory at 'ptr': 2 bytes of limit into *size,
 * then op_bytes of base into *address.
 * NOTE(review): the 'void *ptr' parameter, the 'int rc' declaration, the
 * zeroing of *address, and the error-return checks are elided here.
 */
462 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
463 struct x86_emulate_ops *ops,
465 u16 *size, unsigned long *address, int op_bytes)
472 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
476 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
/*
 * Emulate a single memory-touching guest instruction: decode prefixes,
 * opcode, ModRM/SIB and operands, execute the operation on shadow register
 * state, write back the result, and commit the shadow state on success.
 * Returns 0 on success, -1 if the instruction cannot be emulated.
 * NOTE(review): this chunk is heavily elided -- switch openers, case
 * labels, break statements and many bodies are missing. Comments below are
 * anchored only to the visible lines.
 */
482 x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
485 u8 b, sib, twobyte = 0, rex_prefix = 0;
486 u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
487 unsigned long *override_base = NULL;
488 unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
490 struct operand src, dst;
491 unsigned long cr2 = ctxt->cr2;
492 int mode = ctxt->mode;
493 unsigned long modrm_ea;
494 int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
498 /* Shadow copy of register state. Committed on successful emulation. */
499 unsigned long _regs[NR_VCPU_REGS];
500 unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
501 unsigned long modrm_val = 0;
503 memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
/* Default operand/address size per CPU mode (switch opener elided). */
506 case X86EMUL_MODE_REAL:
507 case X86EMUL_MODE_PROT16:
508 op_bytes = ad_bytes = 2;
510 case X86EMUL_MODE_PROT32:
511 op_bytes = ad_bytes = 4;
514 case X86EMUL_MODE_PROT64:
/* Consume up to 8 legacy prefix bytes before the opcode. */
523 /* Legacy prefixes. */
524 for (i = 0; i < 8; i++) {
525 switch (b = insn_fetch(u8, 1, _eip)) {
526 case 0x66: /* operand-size override */
527 op_bytes ^= 6; /* switch between 2/4 bytes */
529 case 0x67: /* address-size override */
530 if (mode == X86EMUL_MODE_PROT64)
531 ad_bytes ^= 12; /* switch between 4/8 bytes */
533 ad_bytes ^= 6; /* switch between 2/4 bytes */
535 case 0x2e: /* CS override */
536 override_base = &ctxt->cs_base;
538 case 0x3e: /* DS override */
539 override_base = &ctxt->ds_base;
541 case 0x26: /* ES override */
542 override_base = &ctxt->es_base;
544 case 0x64: /* FS override */
545 override_base = &ctxt->fs_base;
547 case 0x65: /* GS override */
548 override_base = &ctxt->gs_base;
550 case 0x36: /* SS override */
551 override_base = &ctxt->ss_base;
553 case 0xf0: /* LOCK */
556 case 0xf3: /* REP/REPE/REPZ */
559 case 0xf2: /* REPNE/REPNZ */
/* REX prefix (0x40-0x4f) is only meaningful in 64-bit mode; its low
 * nibble widens the operand size and extends reg/index/base numbers. */
569 if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
572 op_bytes = 8; /* REX.W */
573 modrm_reg = (b & 4) << 1; /* REX.R */
574 index_reg = (b & 2) << 2; /* REX.X */
575 modrm_rm = base_reg = (b & 1) << 3; /* REG.B */
576 b = insn_fetch(u8, 1, _eip);
579 /* Opcode byte(s). */
582 /* Two-byte opcode? */
585 b = insn_fetch(u8, 1, _eip);
586 d = twobyte_table[b];
/* ModRM byte: 2-bit mod, 3-bit reg/opcode-extension, 3-bit r/m;
 * OR'ed into the REX-extended values computed above. */
594 /* ModRM and SIB bytes. */
596 modrm = insn_fetch(u8, 1, _eip);
597 modrm_mod |= (modrm & 0xc0) >> 6;
598 modrm_reg |= (modrm & 0x38) >> 3;
599 modrm_rm |= (modrm & 0x07);
/* mod == 3: register operand, no effective address. */
603 if (modrm_mod == 3) {
604 modrm_val = *(unsigned long *)
605 decode_register(modrm_rm, _regs, d & ByteOp);
/* 16-bit addressing uses fixed BX/BP/SI/DI combinations. */
610 unsigned bx = _regs[VCPU_REGS_RBX];
611 unsigned bp = _regs[VCPU_REGS_RBP];
612 unsigned si = _regs[VCPU_REGS_RSI];
613 unsigned di = _regs[VCPU_REGS_RDI];
615 /* 16-bit ModR/M decode. */
619 modrm_ea += insn_fetch(u16, 2, _eip);
622 modrm_ea += insn_fetch(s8, 1, _eip);
625 modrm_ea += insn_fetch(u16, 2, _eip);
/* BP-based 16-bit forms default to the SS segment. */
655 if (modrm_rm == 2 || modrm_rm == 3 ||
656 (modrm_rm == 6 && modrm_mod != 0))
658 override_base = &ctxt->ss_base;
659 modrm_ea = (u16)modrm_ea;
656 /* fallthrough handled in elided lines */
661 /* 32/64-bit ModR/M decode. */
665 sib = insn_fetch(u8, 1, _eip);
666 index_reg |= (sib >> 3) & 7;
673 modrm_ea += _regs[base_reg];
675 modrm_ea += insn_fetch(s32, 4, _eip);
678 modrm_ea += _regs[base_reg];
684 modrm_ea += _regs[index_reg] << scale;
690 modrm_ea += _regs[modrm_rm];
691 else if (mode == X86EMUL_MODE_PROT64)
695 modrm_ea += _regs[modrm_rm];
701 modrm_ea += insn_fetch(s32, 4, _eip);
704 modrm_ea += insn_fetch(s8, 1, _eip);
707 modrm_ea += insn_fetch(s32, 4, _eip);
/* Apply segment base; in 64-bit mode only FS/GS overrides matter. */
712 override_base = &ctxt->ds_base;
713 if (mode == X86EMUL_MODE_PROT64 &&
714 override_base != &ctxt->fs_base &&
715 override_base != &ctxt->gs_base)
716 override_base = NULL;
719 modrm_ea += *override_base;
723 switch (d & SrcMask) {
734 modrm_ea += op_bytes;
738 modrm_ea = (u32)modrm_ea;
744 /*
745 * Decode and fetch the source operand: register, memory
747 */
748 switch (d & SrcMask) {
754 src.ptr = decode_register(modrm_reg, _regs,
756 src.val = src.orig_val = *(u8 *) src.ptr;
759 src.ptr = decode_register(modrm_reg, _regs, 0);
760 switch ((src.bytes = op_bytes)) {
762 src.val = src.orig_val = *(u16 *) src.ptr;
765 src.val = src.orig_val = *(u32 *) src.ptr;
768 src.val = src.orig_val = *(u64 *) src.ptr;
780 src.bytes = (d & ByteOp) ? 1 : op_bytes;
781 /* Don't fetch the address for invlpg: it could be unmapped. */
782 if (twobyte && b == 0x01 && modrm_reg == 7)
786 src.ptr = (unsigned long *)cr2;
787 if ((rc = ops->read_emulated((unsigned long)src.ptr,
788 &src.val, src.bytes, ctxt->vcpu)) != 0)
790 src.orig_val = src.val;
794 src.ptr = (unsigned long *)_eip;
795 src.bytes = (d & ByteOp) ? 1 : op_bytes;
798 /* NB. Immediates are sign-extended as necessary. */
801 src.val = insn_fetch(s8, 1, _eip);
804 src.val = insn_fetch(s16, 2, _eip);
807 src.val = insn_fetch(s32, 4, _eip);
813 src.ptr = (unsigned long *)_eip;
815 src.val = insn_fetch(s8, 1, _eip);
819 /* Decode and fetch the destination operand: register or memory. */
820 switch (d & DstMask) {
822 /* Special instructions do their own operand decoding. */
827 && !(twobyte && (b == 0xb6 || b == 0xb7))) {
828 dst.ptr = decode_register(modrm_reg, _regs,
830 dst.val = *(u8 *) dst.ptr;
833 dst.ptr = decode_register(modrm_reg, _regs, 0);
834 switch ((dst.bytes = op_bytes)) {
836 dst.val = *(u16 *)dst.ptr;
839 dst.val = *(u32 *)dst.ptr;
842 dst.val = *(u64 *)dst.ptr;
849 dst.ptr = (unsigned long *)cr2;
850 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
/* BitOp: fold the word-aligned bit offset into the memory address. */
852 unsigned long mask = ~(dst.bytes * 8 - 1);
854 dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
856 if (!(d & Mov) && /* optimisation - avoid slow emulated read */
857 ((rc = ops->read_emulated((unsigned long)dst.ptr,
858 &dst.val, dst.bytes, ctxt->vcpu)) != 0))
862 dst.orig_val = dst.val;
/* One-byte opcode execution (switch (b) opener elided). */
870 emulate_2op_SrcV("add", src, dst, _eflags);
874 emulate_2op_SrcV("or", src, dst, _eflags);
878 emulate_2op_SrcV("adc", src, dst, _eflags);
882 emulate_2op_SrcV("sbb", src, dst, _eflags);
886 emulate_2op_SrcV("and", src, dst, _eflags);
888 case 0x24: /* and al imm8 */
890 dst.ptr = &_regs[VCPU_REGS_RAX];
891 dst.val = *(u8 *)dst.ptr;
893 dst.orig_val = dst.val;
895 case 0x25: /* and ax imm16, or eax imm32 */
897 dst.bytes = op_bytes;
898 dst.ptr = &_regs[VCPU_REGS_RAX];
900 dst.val = *(u16 *)dst.ptr;
902 dst.val = *(u32 *)dst.ptr;
903 dst.orig_val = dst.val;
907 emulate_2op_SrcV("sub", src, dst, _eflags);
911 emulate_2op_SrcV("xor", src, dst, _eflags);
915 emulate_2op_SrcV("cmp", src, dst, _eflags);
917 case 0x63: /* movsxd */
918 if (mode != X86EMUL_MODE_PROT64)
920 dst.val = (s32) src.val;
922 case 0x80 ... 0x83: /* Grp1 */
944 emulate_2op_SrcV("test", src, dst, _eflags);
946 case 0x86 ... 0x87: /* xchg */
947 /* Write back the register source. */
950 *(u8 *) src.ptr = (u8) dst.val;
953 *(u16 *) src.ptr = (u16) dst.val;
956 *src.ptr = (u32) dst.val;
957 break; /* 64b reg: zero-extend */
963 * Write back the memory destination with implicit LOCK
969 case 0xa0 ... 0xa1: /* mov */
970 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
972 _eip += ad_bytes; /* skip src displacement */
974 case 0xa2 ... 0xa3: /* mov */
975 dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
976 _eip += ad_bytes; /* skip dst displacement */
978 case 0x88 ... 0x8b: /* mov */
979 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
982 case 0x8f: /* pop (sole member of Grp1a) */
983 /* 64-bit mode: POP always pops a 64-bit operand. */
984 if (mode == X86EMUL_MODE_PROT64)
986 if ((rc = ops->read_std(register_address(ctxt->ss_base,
987 _regs[VCPU_REGS_RSP]),
988 &dst.val, dst.bytes, ctxt->vcpu)) != 0)
990 register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
/* Grp2 rotate/shift family, selected by modrm_reg. */
996 emulate_2op_SrcB("rol", src, dst, _eflags);
999 emulate_2op_SrcB("ror", src, dst, _eflags);
1002 emulate_2op_SrcB("rcl", src, dst, _eflags);
1005 emulate_2op_SrcB("rcr", src, dst, _eflags);
1007 case 4: /* sal/shl */
1008 case 6: /* sal/shl */
1009 emulate_2op_SrcB("sal", src, dst, _eflags);
1012 emulate_2op_SrcB("shr", src, dst, _eflags);
1015 emulate_2op_SrcB("sar", src, dst, _eflags);
1019 case 0xd0 ... 0xd1: /* Grp2 */
1022 case 0xd2 ... 0xd3: /* Grp2 */
1023 src.val = _regs[VCPU_REGS_RCX];
1025 case 0xe9: /* jmp rel */
1026 case 0xeb: /* jmp rel short */
1028 no_wb = 1; /* Disable writeback. */
1030 case 0xf6 ... 0xf7: /* Grp3 */
1031 switch (modrm_reg) {
1032 case 0 ... 1: /* test */
1034 * Special case in Grp3: test has an immediate
1038 src.ptr = (unsigned long *)_eip;
1039 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1042 switch (src.bytes) {
1044 src.val = insn_fetch(s8, 1, _eip);
1047 src.val = insn_fetch(s16, 2, _eip);
1050 src.val = insn_fetch(s32, 4, _eip);
1058 emulate_1op("neg", dst, _eflags);
1061 goto cannot_emulate;
1064 case 0xfe ... 0xff: /* Grp4/Grp5 */
1065 switch (modrm_reg) {
1067 emulate_1op("inc", dst, _eflags);
1070 emulate_1op("dec", dst, _eflags);
1073 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1074 if (mode == X86EMUL_MODE_PROT64) {
1076 if ((rc = ops->read_std((unsigned long)dst.ptr,
1081 register_address_increment(_regs[VCPU_REGS_RSP],
1083 if ((rc = ops->write_std(
1084 register_address(ctxt->ss_base,
1085 _regs[VCPU_REGS_RSP]),
1086 &dst.val, dst.bytes, ctxt->vcpu)) != 0)
1091 goto cannot_emulate;
/* Writeback: store the computed destination, sized by dst.bytes. */
1100 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1101 switch (dst.bytes) {
1103 *(u8 *)dst.ptr = (u8)dst.val;
1106 *(u16 *)dst.ptr = (u16)dst.val;
1109 *dst.ptr = (u32)dst.val;
1110 break; /* 64b: zero-ext */
/* LOCKed memory writebacks go via cmpxchg; plain ones via write. */
1118 rc = ops->cmpxchg_emulated((unsigned long)dst.
1120 &dst.val, dst.bytes,
1123 rc = ops->write_emulated((unsigned long)dst.ptr,
1124 &dst.val, dst.bytes,
1133 /* Commit shadow register state. */
1134 memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
1135 ctxt->eflags = _eflags;
1136 ctxt->vcpu->rip = _eip;
1139 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
/* Special one-byte instructions that bypass generic operand decode. */
1143 goto twobyte_special_insn;
1145 case 0x50 ... 0x57: /* push reg */
1147 src.val = (u16) _regs[b & 0x7];
1149 src.val = (u32) _regs[b & 0x7];
1151 dst.bytes = op_bytes;
1153 register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
1154 dst.ptr = (void *) register_address(
1155 ctxt->ss_base, _regs[VCPU_REGS_RSP]);
1157 case 0x6c: /* insb */
1158 case 0x6d: /* insw/insd */
1159 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1161 (d & ByteOp) ? 1 : op_bytes, /* size */
1163 address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
1164 (_eflags & EFLG_DF), /* down */
1165 register_address(ctxt->es_base,
1166 _regs[VCPU_REGS_RDI]), /* address */
1168 _regs[VCPU_REGS_RDX] /* port */
1172 case 0x6e: /* outsb */
1173 case 0x6f: /* outsw/outsd */
1174 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1176 (d & ByteOp) ? 1 : op_bytes, /* size */
1178 address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
1179 (_eflags & EFLG_DF), /* down */
1180 register_address(override_base ?
1181 *override_base : ctxt->ds_base,
1182 _regs[VCPU_REGS_RSI]), /* address */
1184 _regs[VCPU_REGS_RDX] /* port */
/* REP handling: stop when RCX reaches zero, else count down. */
1190 if (_regs[VCPU_REGS_RCX] == 0) {
1191 ctxt->vcpu->rip = _eip;
1194 _regs[VCPU_REGS_RCX]--;
1195 _eip = ctxt->vcpu->rip;
1198 case 0xa4 ... 0xa5: /* movs */
1200 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1201 dst.ptr = (unsigned long *)register_address(ctxt->es_base,
1202 _regs[VCPU_REGS_RDI]);
1203 if ((rc = ops->read_emulated(register_address(
1204 override_base ? *override_base : ctxt->ds_base,
1205 _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt->vcpu)) != 0)
1207 register_address_increment(_regs[VCPU_REGS_RSI],
1208 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1209 register_address_increment(_regs[VCPU_REGS_RDI],
1210 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1212 case 0xa6 ... 0xa7: /* cmps */
1213 DPRINTF("Urk! I don't handle CMPS.\n");
1214 goto cannot_emulate;
1215 case 0xaa ... 0xab: /* stos */
1217 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1218 dst.ptr = (unsigned long *)cr2;
1219 dst.val = _regs[VCPU_REGS_RAX];
1220 register_address_increment(_regs[VCPU_REGS_RDI],
1221 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1223 case 0xac ... 0xad: /* lods */
1225 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1226 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1227 if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
1230 register_address_increment(_regs[VCPU_REGS_RSI],
1231 (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1233 case 0xae ... 0xaf: /* scas */
1234 DPRINTF("Urk! I don't handle SCAS.\n");
1235 goto cannot_emulate;
1236 case 0xf4: /* hlt */
1237 ctxt->vcpu->halt_request = 1;
1239 case 0xc3: /* ret */
1241 goto pop_instruction;
1242 case 0x58 ... 0x5f: /* pop reg */
1243 dst.ptr = (unsigned long *)&_regs[b & 0x7];
/* pop_instruction label (elided): shared stack-pop sequence. */
1246 if ((rc = ops->read_std(register_address(ctxt->ss_base,
1247 _regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
1251 register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
1252 no_wb = 1; /* Disable writeback. */
/* Two-byte (0x0f) opcode execution (dispatch switch opener elided). */
1259 case 0x01: /* lgdt, lidt, lmsw */
1260 /* Disable writeback. */
1262 switch (modrm_reg) {
1264 unsigned long address;
1267 rc = read_descriptor(ctxt, ops, src.ptr,
1268 &size, &address, op_bytes);
1271 realmode_lgdt(ctxt->vcpu, size, address);
1274 rc = read_descriptor(ctxt, ops, src.ptr,
1275 &size, &address, op_bytes);
1278 realmode_lidt(ctxt->vcpu, size, address);
1282 goto cannot_emulate;
1283 *(u16 *)&_regs[modrm_rm]
1284 = realmode_get_cr(ctxt->vcpu, 0);
1288 goto cannot_emulate;
1289 realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
1292 emulate_invlpg(ctxt->vcpu, cr2);
1295 goto cannot_emulate;
1298 case 0x21: /* mov from dr to reg */
1301 goto cannot_emulate;
1302 rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
1304 case 0x23: /* mov from reg to dr */
1307 goto cannot_emulate;
1308 rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
1310 case 0x40 ... 0x4f: /* cmov */
1311 dst.val = dst.orig_val = src.val;
1314 * First, assume we're decoding an even cmov opcode
1317 switch ((b & 15) >> 1) {
1319 no_wb = (_eflags & EFLG_OF) ? 0 : 1;
1321 case 1: /* cmovb/cmovc/cmovnae */
1322 no_wb = (_eflags & EFLG_CF) ? 0 : 1;
1324 case 2: /* cmovz/cmove */
1325 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1327 case 3: /* cmovbe/cmovna */
1328 no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
1331 no_wb = (_eflags & EFLG_SF) ? 0 : 1;
1333 case 5: /* cmovp/cmovpe */
1334 no_wb = (_eflags & EFLG_PF) ? 0 : 1;
1336 case 7: /* cmovle/cmovng */
1337 no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
1339 case 6: /* cmovl/cmovnge */
1340 no_wb &= (!(_eflags & EFLG_SF) !=
1341 !(_eflags & EFLG_OF)) ? 0 : 1;
1344 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1347 case 0xb0 ... 0xb1: /* cmpxchg */
1349 * Save real source value, then compare EAX against
1352 src.orig_val = src.val;
1353 src.val = _regs[VCPU_REGS_RAX];
1354 emulate_2op_SrcV("cmp", src, dst, _eflags);
1355 if (_eflags & EFLG_ZF) {
1356 /* Success: write back to memory. */
1357 dst.val = src.orig_val;
1359 /* Failure: write the value we saw to EAX. */
1361 dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
1366 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1367 emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
1371 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1372 emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
1376 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1377 emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
1379 case 0xb6 ... 0xb7: /* movzx */
1380 dst.bytes = op_bytes;
1381 dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
1385 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
1386 emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
1388 case 0xba: /* Grp8 */
1389 switch (modrm_reg & 3) {
1400 case 0xbe ... 0xbf: /* movsx */
1401 dst.bytes = op_bytes;
1402 dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
/* Two-byte instructions with no generic writeback. */
1407 twobyte_special_insn:
1408 /* Disable writeback. */
1411 case 0x09: /* wbinvd */
1413 case 0x0d: /* GrpP (prefetch) */
1414 case 0x18: /* Grp16 (prefetch/nop) */
1417 emulate_clts(ctxt->vcpu);
1419 case 0x20: /* mov cr, reg */
1421 goto cannot_emulate;
1422 _regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
1424 case 0x22: /* mov reg, cr */
1426 goto cannot_emulate;
1427 realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
/* wrmsr: EDX:EAX -> MSR[ECX]; inject #GP on failure. */
1431 msr_data = (u32)_regs[VCPU_REGS_RAX]
1432 | ((u64)_regs[VCPU_REGS_RDX] << 32);
1433 rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
1435 kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
1436 _eip = ctxt->vcpu->rip;
1438 rc = X86EMUL_CONTINUE;
/* rdmsr: MSR[ECX] -> EDX:EAX; inject #GP on failure. */
1442 rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
1444 kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
1445 _eip = ctxt->vcpu->rip;
1447 _regs[VCPU_REGS_RAX] = (u32)msr_data;
1448 _regs[VCPU_REGS_RDX] = msr_data >> 32;
1450 rc = X86EMUL_CONTINUE;
1452 case 0xc7: /* Grp9 (cmpxchg8b) */
1455 if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
/* Mismatch with EDX:EAX: return the observed value, clear ZF. */
1458 if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
1459 ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
1460 _regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1461 _regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1462 _eflags &= ~EFLG_ZF;
/* Match: attempt the 8-byte exchange with ECX:EBX. */
1464 new = ((u64)_regs[VCPU_REGS_RCX] << 32)
1465 | (u32) _regs[VCPU_REGS_RBX];
1466 if ((rc = ops->cmpxchg_emulated(cr2, &old,
1467 &new, 8, ctxt->vcpu)) != 0)
/* cannot_emulate label (elided): log the opcode and bail out. */
1477 DPRINTF("Cannot emulate %02x\n", b);
1484 #include <asm/uaccess.h>
/*
 * Default read_std implementation: copy 'bytes' from guest-virtual 'addr'
 * into the caller's buffer via copy_from_user; on a partial copy, inject a
 * page fault at the first unread byte and report X86EMUL_PROPAGATE_FAULT.
 * NOTE(review): the return type, the 'void *val' destination parameter and
 * the 'int rc' declaration are elided in this chunk.
 */
1487 x86_emulate_read_std(unsigned long addr,
1489 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1495 if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
/* copy_from_user returns the number of bytes NOT copied, so the
 * faulting address is addr + bytes - rc. */
1496 propagate_page_fault(addr + bytes - rc, 0); /* read fault */
1497 return X86EMUL_PROPAGATE_FAULT;
1500 return X86EMUL_CONTINUE;
/*
 * Default write_std implementation: copy 'bytes' of the value to
 * guest-virtual 'addr' via copy_to_user; on a partial copy, inject a
 * write page fault at the first unwritten byte.
 * NOTE(review): the return type, the by-value 'val' parameter (source of
 * the &val below) and the 'int rc' declaration are elided in this chunk.
 */
1504 x86_emulate_write_std(unsigned long addr,
1506 unsigned int bytes, struct x86_emulate_ctxt *ctxt)
1510 if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
1511 propagate_page_fault(addr + bytes - rc, PGERR_write_access);
1512 return X86EMUL_PROPAGATE_FAULT;
1515 return X86EMUL_CONTINUE;