/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>
#include <assert.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
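/* x86_64_hregs is set while decoding an instruction that carries any REX
   prefix: with REX, byte-register encodings 4-7 select SPL/BPL/SIL/DIL
   rather than AH/CH/DH/BH, which is why the byte-sized register moves
   below test it. */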
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* I386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
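/* These offsets address sub-fields of a target_ulong slot in
   CPUState.regs[]: REG_B/W/L pick the architecturally low 8/16/32 bits,
   REG_H the AH-style high byte, and REG_LH the upper half of a 64-bit
   slot. On big-endian hosts the offsets count down from the top of the
   slot so the same loads and stores still hit the low-order bits. */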
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
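/* In the OT_BYTE case above, encodings 0-3 are AL/CL/DL/BL, and 4-7
   normally mean AH/CH/DH/BH (hence the reg - 4 store at REG_H_OFFSET);
   in 64-bit mode with a REX prefix they instead address SPL/BPL/SIL/DIL
   as ordinary low bytes. */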
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

#define DEF_ARITHC(SUFFIX)\
  {\
    gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
  },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
  {\
    gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_roll ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
    gen_op_shll ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
    gen_op_shll ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
  },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
  {\
    NULL,\
    NULL,\
  },\
  {\
    gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
    gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
  },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
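/* For these accessors, idx is ot + s->mem_index: the low two bits encode
   the operand size (OT_BYTE..OT_QUAD) while the upper bits carry
   s->mem_index, so (idx >> 2) - 1 recovers the memory index that selects
   the right softmmu access functions. */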
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* the dummy store below makes the op restartable if the real store
       faults: the page is touched before the port is read */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}

#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}
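/* These macros are expanded once per string operation, giving
   gen_repz_movs(), gen_repz_stos(), gen_repz_lods(), gen_repz_ins() and
   gen_repz_outs(), plus (with the extra nz flag selecting REPZ/REPNZ
   termination) gen_repz_scas() and gen_repz_cmps(); the main decoder
   calls them for REP-prefixed string instructions. */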
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};

static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
/* skip the modrm memory operand bytes without generating any code */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;
    if (s->aflag) {
        base = rm;
        if (base == 4) {
            /* SIB byte */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        if (mod == 1)
            s->pc++;
        else if (mod == 2 || (mod == 0 && base == 5))
            s->pc += 4;
    } else {
        if (mod == 1)
            s->pc++;
        else if (mod == 2 || (mod == 0 && rm == 6))
            s->pc += 2;
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}

static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
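/* Direct block chaining: tcg_gen_goto_tb() emits a patchable jump slot,
   and tcg_gen_exit_tb((long)tb + tb_num) returns the TB pointer with the
   slot index in its low bits, so the execution loop can later patch this
   TB to jump straight to its successor. The same-page test above keeps
   chaining safe: a target on another page could be unmapped or
   retranslated independently of this TB. */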
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)

static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now */
    }
#endif
    return 0;
}

static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
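/* The 8 returned here is the REP bit (bit 3) of the SVM IOIO intercept
   information word, so the value can be OR-ed directly into the exit
   info passed to the intercept helpers. */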
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
            break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
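/* Example (32-bit code, flat SS, addseg clear): A0 = ESP - 4, T0 is
   stored at SS:A0, then ESP is reloaded from A0 - the classic push
   sequence. The copy through T1 exists for the addseg/16-bit paths,
   where A0 has a segment base added and can no longer be written back
   to ESP directly. */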
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T1_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1, 0);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
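/* sse_op_table1 is indexed by the opcode byte following 0x0f and by b1,
   the mandatory-prefix selector computed in gen_sse(): 0 = no prefix
   (ps/MMX), 1 = 0x66 (pd), 2 = 0xf3 (ss), 3 = 0xf2 (sd). */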
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
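/* sse_op_table2 is indexed as [((b - 1) & 3) * 8 + reg][b1] by the
   shift-by-immediate decoder below: opcode 0x71/0x72/0x73 picks the
   w/d/q row block and the modrm reg field (2/4/6, plus 3/7 for the
   xmm-only byte shifts) picks the operation. */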
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};

static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
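/* 3DNow! encodings are 0x0f 0x0f <modrm> <op>: the trailing opcode byte
   is fetched after the operands are decoded and used to index this
   table. */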
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
2875 case 0x7e: /* movd ea, mm */
2876 #ifdef TARGET_X86_64
2877 if (s->dflag == 2) {
2878 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2879 offsetof(CPUX86State,fpregs[reg].mmx));
2880 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2884 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2885 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2886 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2889 case 0x17e: /* movd ea, xmm */
2890 #ifdef TARGET_X86_64
2891 if (s->dflag == 2) {
2892 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2893 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2894 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2898 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2899 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2900 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2903 case 0x27e: /* movq xmm, ea */
2905 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2906 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2908 rm = (modrm & 7) | REX_B(s);
2909 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2910 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2912 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2914 case 0x7f: /* movq ea, mm */
2916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2917 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2920 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2921 offsetof(CPUX86State,fpregs[reg].mmx));
2924 case 0x011: /* movups */
2925 case 0x111: /* movupd */
2926 case 0x029: /* movaps */
2927 case 0x129: /* movapd */
2928 case 0x17f: /* movdqa ea, xmm */
2929 case 0x27f: /* movdqu ea, xmm */
2931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2932 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2934 rm = (modrm & 7) | REX_B(s);
2935 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2936 offsetof(CPUX86State,xmm_regs[reg]));
2939 case 0x211: /* movss ea, xmm */
2941 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2942 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2943 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2945 rm = (modrm & 7) | REX_B(s);
2946 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2947 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2950 case 0x311: /* movsd ea, xmm */
2952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2953 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2955 rm = (modrm & 7) | REX_B(s);
2956 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2957 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2960 case 0x013: /* movlps */
2961 case 0x113: /* movlpd */
2963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2964 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2969 case 0x017: /* movhps */
2970 case 0x117: /* movhpd */
2972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2973 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2978 case 0x71: /* shift mm, im */
2981 case 0x171: /* shift xmm, im */
2984 val = ldub_code(s->pc++);
2986 gen_op_movl_T0_im(val);
2987 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2989 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2990 op1_offset = offsetof(CPUX86State,xmm_t0);
2992 gen_op_movl_T0_im(val);
2993 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2995 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2996 op1_offset = offsetof(CPUX86State,mmx_t0);
2998 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
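/* The row is picked by the shift group opcode (0x71 -> 0, 0x72 -> 8,
   0x73 -> 16 via ((b - 1) & 3) * 8) and the column by the /reg field:
   e.g. 0f 71 /2 indexes entry 2, the psrlw variants, with b1 choosing
   between the MMX and SSE helper. */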
3002 rm = (modrm & 7) | REX_B(s);
3003 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3006 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3009 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3010 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3012 case 0x050: /* movmskps */
3013 rm = (modrm & 7) | REX_B(s);
3014 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3015 offsetof(CPUX86State,xmm_regs[rm]));
3016 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3017 tcg_gen_extu_i32_i64(cpu_T[0], cpu_tmp2);
3018 gen_op_mov_reg_T0(OT_LONG, reg);
3020 case 0x150: /* movmskpd */
3021 rm = (modrm & 7) | REX_B(s);
3022 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3023 offsetof(CPUX86State,xmm_regs[rm]));
3024 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3025 tcg_gen_extu_i32_i64(cpu_T[0], cpu_tmp2);
3026 gen_op_mov_reg_T0(OT_LONG, reg);
3028 case 0x02a: /* cvtpi2ps */
3029 case 0x12a: /* cvtpi2pd */
3030 tcg_gen_helper_0_0(helper_enter_mmx);
3032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3033 op2_offset = offsetof(CPUX86State,mmx_t0);
3034 gen_ldq_env_A0(s->mem_index, op2_offset);
3037 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3039 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3040 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3041 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3044 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3048 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3052 case 0x22a: /* cvtsi2ss */
3053 case 0x32a: /* cvtsi2sd */
3054 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3055 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3056 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3057 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3058 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
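/* Index sketch: (b >> 8) - 2 is 0 for 0x22a (cvtsi2ss) and 1 for
   0x32a (cvtsi2sd); a 64-bit operand size (dflag == 2) adds 2 to
   select the 64-bit integer source variants. */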
3059 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3060 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3062 case 0x02c: /* cvttps2pi */
3063 case 0x12c: /* cvttpd2pi */
3064 case 0x02d: /* cvtps2pi */
3065 case 0x12d: /* cvtpd2pi */
3066 tcg_gen_helper_0_0(helper_enter_mmx);
3068 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3069 op2_offset = offsetof(CPUX86State,xmm_t0);
3070 gen_ldo_env_A0(s->mem_index, op2_offset);
3072 rm = (modrm & 7) | REX_B(s);
3073 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3075 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3076 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3077 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3080 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3083 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3086 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3089 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3093 case 0x22c: /* cvttss2si */
3094 case 0x32c: /* cvttsd2si */
3095 case 0x22d: /* cvtss2si */
3096 case 0x32d: /* cvtsd2si */
3097 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3099 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3101 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3103 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3104 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3106 op2_offset = offsetof(CPUX86State,xmm_t0);
3108 rm = (modrm & 7) | REX_B(s);
3109 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3111 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3113 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3114 if (ot == OT_LONG) {
3115 tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3116 tcg_gen_extu_i32_i64(cpu_T[0], cpu_tmp2);
3118 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3120 gen_op_mov_reg_T0(ot, reg);
3122 case 0xc4: /* pinsrw */
3125 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3126 val = ldub_code(s->pc++);
3129 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3130 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3133 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3134 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3137 case 0xc5: /* pextrw */
3141 val = ldub_code(s->pc++);
3144 rm = (modrm & 7) | REX_B(s);
3145 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3146 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3150 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3151 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3153 reg = ((modrm >> 3) & 7) | rex_r;
3154 gen_op_mov_reg_T0(OT_LONG, reg);
3156 case 0x1d6: /* movq ea, xmm */
3158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3159 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3161 rm = (modrm & 7) | REX_B(s);
3162 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3163 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3164 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3167 case 0x2d6: /* movq2dq */
3168 tcg_gen_helper_0_0(helper_enter_mmx);
3170 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3171 offsetof(CPUX86State,fpregs[rm].mmx));
3172 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3174 case 0x3d6: /* movdq2q */
3175 tcg_gen_helper_0_0(helper_enter_mmx);
3176 rm = (modrm & 7) | REX_B(s);
3177 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3178 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3180 case 0xd7: /* pmovmskb */
3185 rm = (modrm & 7) | REX_B(s);
3186 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3187 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3190 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3191 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3193 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3194 reg = ((modrm >> 3) & 7) | rex_r;
3195 gen_op_mov_reg_T0(OT_LONG, reg);
3201 /* generic MMX or SSE operation */
3204 /* maskmov: we must prepare A0 */
3207 #ifdef TARGET_X86_64
3208 if (s->aflag == 2) {
3209 gen_op_movq_A0_reg(R_EDI);
3213 gen_op_movl_A0_reg(R_EDI);
3215 gen_op_andl_A0_ffff();
3217 gen_add_A0_ds_seg(s);
3219 case 0x70: /* pshufx insn */
3220 case 0xc6: /* pshufx insn */
3221 case 0xc2: /* compare insns */
3228 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3231 op2_offset = offsetof(CPUX86State,xmm_t0);
3232 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3234 /* special case for scalar SSE instructions */
3237 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3238 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3241 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3244 gen_ldo_env_A0(s->mem_index, op2_offset);
3247 rm = (modrm & 7) | REX_B(s);
3248 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3251 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3253 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3254 op2_offset = offsetof(CPUX86State,mmx_t0);
3255 gen_ldq_env_A0(s->mem_index, op2_offset);
3258 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3262 case 0x0f: /* 3DNow! data insns */
3263 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3265 val = ldub_code(s->pc++);
3266 sse_op2 = sse_op_table5[val];
3269 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3270 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3271 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3273 case 0x70: /* pshufx insn */
3274 case 0xc6: /* pshufx insn */
3275 val = ldub_code(s->pc++);
3276 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3277 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3278 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3282 val = ldub_code(s->pc++);
3285 sse_op2 = sse_op_table4[val][b1];
3286 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3287 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3291 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3292 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3293 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3296 if (b == 0x2e || b == 0x2f) {
3297 /* just to keep the EFLAGS optimization correct */
3299 s->cc_op = CC_OP_EFLAGS;
3305 /* convert one instruction. s->is_jmp is set if the translation must
3306 be stopped. Return the next pc value */
3307 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3309 int b, prefixes, aflag, dflag;
3311 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3312 target_ulong next_eip, tval;
3322 #ifdef TARGET_X86_64
3327 s->rip_offset = 0; /* for relative ip address */
3329 b = ldub_code(s->pc);
3331 /* check prefixes */
3332 #ifdef TARGET_X86_64
3336 prefixes |= PREFIX_REPZ;
3339 prefixes |= PREFIX_REPNZ;
3342 prefixes |= PREFIX_LOCK;
3363 prefixes |= PREFIX_DATA;
3366 prefixes |= PREFIX_ADR;
3370 rex_w = (b >> 3) & 1;
3371 rex_r = (b & 0x4) << 1;
3372 s->rex_x = (b & 0x2) << 2;
3373 REX_B(s) = (b & 0x1) << 3;
3374 x86_64_hregs = 1; /* select uniform byte register addressing */
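/* REX decode, worked example: a 0x4d prefix (0100 W=1 R=1 X=0 B=1)
   gives rex_w = 1, rex_r = 8, rex_x = 0 and REX_B = 8; R/X/B are
   pre-shifted so they can simply be OR-ed into reg/index/rm. */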
3378 /* 0x66 is ignored if rex.w is set */
3381 if (prefixes & PREFIX_DATA)
3384 if (!(prefixes & PREFIX_ADR))
3391 prefixes |= PREFIX_REPZ;
3394 prefixes |= PREFIX_REPNZ;
3397 prefixes |= PREFIX_LOCK;
3418 prefixes |= PREFIX_DATA;
3421 prefixes |= PREFIX_ADR;
3424 if (prefixes & PREFIX_DATA)
3426 if (prefixes & PREFIX_ADR)
3430 s->prefix = prefixes;
3434 /* lock generation */
3435 if (prefixes & PREFIX_LOCK)
3438 /* now check op code */
3442 /**************************/
3443 /* extended op code */
3444 b = ldub_code(s->pc++) | 0x100;
3447 /**************************/
3465 ot = dflag + OT_WORD;
3468 case 0: /* OP Ev, Gv */
3469 modrm = ldub_code(s->pc++);
3470 reg = ((modrm >> 3) & 7) | rex_r;
3471 mod = (modrm >> 6) & 3;
3472 rm = (modrm & 7) | REX_B(s);
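/* ModRM layout: mod = bits 7-6, reg = bits 5-3, rm = bits 2-0;
   e.g. 0xd8 decodes to mod = 3, reg = 3, rm = 0, i.e. a
   register-direct form with EAX/RAX as the rm operand. */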
3474 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3476 } else if (op == OP_XORL && rm == reg) {
3478 /* xor reg, reg optimisation */
3480 s->cc_op = CC_OP_LOGICB + ot;
3481 gen_op_mov_reg_T0(ot, reg);
3482 gen_op_update1_cc();
3487 gen_op_mov_TN_reg(ot, 1, reg);
3488 gen_op(s, op, ot, opreg);
3490 case 1: /* OP Gv, Ev */
3491 modrm = ldub_code(s->pc++);
3492 mod = (modrm >> 6) & 3;
3493 reg = ((modrm >> 3) & 7) | rex_r;
3494 rm = (modrm & 7) | REX_B(s);
3496 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3497 gen_op_ld_T1_A0(ot + s->mem_index);
3498 } else if (op == OP_XORL && rm == reg) {
3501 gen_op_mov_TN_reg(ot, 1, rm);
3503 gen_op(s, op, ot, reg);
3505 case 2: /* OP A, Iv */
3506 val = insn_get(s, ot);
3507 gen_op_movl_T1_im(val);
3508 gen_op(s, op, ot, OR_EAX);
3514 case 0x80: /* GRP1 */
3524 ot = dflag + OT_WORD;
3526 modrm = ldub_code(s->pc++);
3527 mod = (modrm >> 6) & 3;
3528 rm = (modrm & 7) | REX_B(s);
3529 op = (modrm >> 3) & 7;
3535 s->rip_offset = insn_const_size(ot);
3536 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3547 val = insn_get(s, ot);
3550 val = (int8_t)insn_get(s, OT_BYTE);
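/* The 0x83 forms carry a sign-extended imm8: the int8_t cast turns
   e.g. a fetched 0xff into -1 before it is widened into T1. */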
3553 gen_op_movl_T1_im(val);
3554 gen_op(s, op, ot, opreg);
3558 /**************************/
3559 /* inc, dec, and other misc arith */
3560 case 0x40 ... 0x47: /* inc Gv */
3561 ot = dflag ? OT_LONG : OT_WORD;
3562 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3564 case 0x48 ... 0x4f: /* dec Gv */
3565 ot = dflag ? OT_LONG : OT_WORD;
3566 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3568 case 0xf6: /* GRP3 */
3573 ot = dflag + OT_WORD;
3575 modrm = ldub_code(s->pc++);
3576 mod = (modrm >> 6) & 3;
3577 rm = (modrm & 7) | REX_B(s);
3578 op = (modrm >> 3) & 7;
3581 s->rip_offset = insn_const_size(ot);
3582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3583 gen_op_ld_T0_A0(ot + s->mem_index);
3585 gen_op_mov_TN_reg(ot, 0, rm);
3590 val = insn_get(s, ot);
3591 gen_op_movl_T1_im(val);
3592 gen_op_testl_T0_T1_cc();
3593 s->cc_op = CC_OP_LOGICB + ot;
3598 gen_op_st_T0_A0(ot + s->mem_index);
3600 gen_op_mov_reg_T0(ot, rm);
3606 gen_op_st_T0_A0(ot + s->mem_index);
3608 gen_op_mov_reg_T0(ot, rm);
3610 gen_op_update_neg_cc();
3611 s->cc_op = CC_OP_SUBB + ot;
3616 gen_op_mulb_AL_T0();
3617 s->cc_op = CC_OP_MULB;
3620 gen_op_mulw_AX_T0();
3621 s->cc_op = CC_OP_MULW;
3625 gen_op_mull_EAX_T0();
3626 s->cc_op = CC_OP_MULL;
3628 #ifdef TARGET_X86_64
3630 gen_op_mulq_EAX_T0();
3631 s->cc_op = CC_OP_MULQ;
3639 gen_op_imulb_AL_T0();
3640 s->cc_op = CC_OP_MULB;
3643 gen_op_imulw_AX_T0();
3644 s->cc_op = CC_OP_MULW;
3648 gen_op_imull_EAX_T0();
3649 s->cc_op = CC_OP_MULL;
3651 #ifdef TARGET_X86_64
3653 gen_op_imulq_EAX_T0();
3654 s->cc_op = CC_OP_MULQ;
3662 gen_jmp_im(pc_start - s->cs_base);
3663 gen_op_divb_AL_T0();
3666 gen_jmp_im(pc_start - s->cs_base);
3667 gen_op_divw_AX_T0();
3671 gen_jmp_im(pc_start - s->cs_base);
3673 /* XXX: this is just a test */
3674 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3676 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3679 #ifdef TARGET_X86_64
3681 gen_jmp_im(pc_start - s->cs_base);
3682 gen_op_divq_EAX_T0();
3690 gen_jmp_im(pc_start - s->cs_base);
3691 gen_op_idivb_AL_T0();
3694 gen_jmp_im(pc_start - s->cs_base);
3695 gen_op_idivw_AX_T0();
3699 gen_jmp_im(pc_start - s->cs_base);
3700 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3702 #ifdef TARGET_X86_64
3704 gen_jmp_im(pc_start - s->cs_base);
3705 gen_op_idivq_EAX_T0();
3715 case 0xfe: /* GRP4 */
3716 case 0xff: /* GRP5 */
3720 ot = dflag + OT_WORD;
3722 modrm = ldub_code(s->pc++);
3723 mod = (modrm >> 6) & 3;
3724 rm = (modrm & 7) | REX_B(s);
3725 op = (modrm >> 3) & 7;
3726 if (op >= 2 && b == 0xfe) {
3730 if (op == 2 || op == 4) {
3731 /* operand size for jumps is 64 bit */
3733 } else if (op == 3 || op == 5) {
3734 /* for far calls/jumps, the operand is 16 or 32 bit, even in long mode */
3736 ot = dflag ? OT_LONG : OT_WORD;
3737 } else if (op == 6) {
3738 /* default push size is 64 bit */
3739 ot = dflag ? OT_QUAD : OT_WORD;
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 if (op >= 2 && op != 3 && op != 5)
3745 gen_op_ld_T0_A0(ot + s->mem_index);
3747 gen_op_mov_TN_reg(ot, 0, rm);
3751 case 0: /* inc Ev */
3756 gen_inc(s, ot, opreg, 1);
3758 case 1: /* dec Ev */
3763 gen_inc(s, ot, opreg, -1);
3765 case 2: /* call Ev */
3766 /* XXX: optimize if memory (no 'and' is necessary) */
3768 gen_op_andl_T0_ffff();
3769 next_eip = s->pc - s->cs_base;
3770 gen_movtl_T1_im(next_eip);
3775 case 3: /* lcall Ev */
3776 gen_op_ld_T1_A0(ot + s->mem_index);
3777 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3778 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
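/* A far pointer in memory is the offset (2/4/8 bytes, i.e.
   1 << (ot - OT_WORD + 1)) followed by a 16-bit selector, so T1
   now holds the offset and T0 the selector. */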
3780 if (s->pe && !s->vm86) {
3781 if (s->cc_op != CC_OP_DYNAMIC)
3782 gen_op_set_cc_op(s->cc_op);
3783 gen_jmp_im(pc_start - s->cs_base);
3784 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3786 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3790 case 4: /* jmp Ev */
3792 gen_op_andl_T0_ffff();
3796 case 5: /* ljmp Ev */
3797 gen_op_ld_T1_A0(ot + s->mem_index);
3798 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3799 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3801 if (s->pe && !s->vm86) {
3802 if (s->cc_op != CC_OP_DYNAMIC)
3803 gen_op_set_cc_op(s->cc_op);
3804 gen_jmp_im(pc_start - s->cs_base);
3805 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3807 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3808 gen_op_movl_T0_T1();
3813 case 6: /* push Ev */
3821 case 0x84: /* test Ev, Gv */
3826 ot = dflag + OT_WORD;
3828 modrm = ldub_code(s->pc++);
3829 mod = (modrm >> 6) & 3;
3830 rm = (modrm & 7) | REX_B(s);
3831 reg = ((modrm >> 3) & 7) | rex_r;
3833 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3834 gen_op_mov_TN_reg(ot, 1, reg);
3835 gen_op_testl_T0_T1_cc();
3836 s->cc_op = CC_OP_LOGICB + ot;
3839 case 0xa8: /* test eAX, Iv */
3844 ot = dflag + OT_WORD;
3845 val = insn_get(s, ot);
3847 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3848 gen_op_movl_T1_im(val);
3849 gen_op_testl_T0_T1_cc();
3850 s->cc_op = CC_OP_LOGICB + ot;
3853 case 0x98: /* CWDE/CBW */
3854 #ifdef TARGET_X86_64
3856 gen_op_movslq_RAX_EAX();
3860 gen_op_movswl_EAX_AX();
3862 gen_op_movsbw_AX_AL();
3864 case 0x99: /* CDQ/CWD */
3865 #ifdef TARGET_X86_64
3867 gen_op_movsqo_RDX_RAX();
3871 gen_op_movslq_EDX_EAX();
3873 gen_op_movswl_DX_AX();
3875 case 0x1af: /* imul Gv, Ev */
3876 case 0x69: /* imul Gv, Ev, I */
3878 ot = dflag + OT_WORD;
3879 modrm = ldub_code(s->pc++);
3880 reg = ((modrm >> 3) & 7) | rex_r;
3882 s->rip_offset = insn_const_size(ot);
3885 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3887 val = insn_get(s, ot);
3888 gen_op_movl_T1_im(val);
3889 } else if (b == 0x6b) {
3890 val = (int8_t)insn_get(s, OT_BYTE);
3891 gen_op_movl_T1_im(val);
3893 gen_op_mov_TN_reg(ot, 1, reg);
3896 #ifdef TARGET_X86_64
3897 if (ot == OT_QUAD) {
3898 gen_op_imulq_T0_T1();
3901 if (ot == OT_LONG) {
3902 gen_op_imull_T0_T1();
3904 gen_op_imulw_T0_T1();
3906 gen_op_mov_reg_T0(ot, reg);
3907 s->cc_op = CC_OP_MULB + ot;
3910 case 0x1c1: /* xadd Ev, Gv */
3914 ot = dflag + OT_WORD;
3915 modrm = ldub_code(s->pc++);
3916 reg = ((modrm >> 3) & 7) | rex_r;
3917 mod = (modrm >> 6) & 3;
3919 rm = (modrm & 7) | REX_B(s);
3920 gen_op_mov_TN_reg(ot, 0, reg);
3921 gen_op_mov_TN_reg(ot, 1, rm);
3922 gen_op_addl_T0_T1();
3923 gen_op_mov_reg_T1(ot, reg);
3924 gen_op_mov_reg_T0(ot, rm);
3926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3927 gen_op_mov_TN_reg(ot, 0, reg);
3928 gen_op_ld_T1_A0(ot + s->mem_index);
3929 gen_op_addl_T0_T1();
3930 gen_op_st_T0_A0(ot + s->mem_index);
3931 gen_op_mov_reg_T1(ot, reg);
3933 gen_op_update2_cc();
3934 s->cc_op = CC_OP_ADDB + ot;
3937 case 0x1b1: /* cmpxchg Ev, Gv */
3941 ot = dflag + OT_WORD;
3942 modrm = ldub_code(s->pc++);
3943 reg = ((modrm >> 3) & 7) | rex_r;
3944 mod = (modrm >> 6) & 3;
3945 gen_op_mov_TN_reg(ot, 1, reg);
3947 rm = (modrm & 7) | REX_B(s);
3948 gen_op_mov_TN_reg(ot, 0, rm);
3949 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3950 gen_op_mov_reg_T0(ot, rm);
3952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3953 gen_op_ld_T0_A0(ot + s->mem_index);
3954 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3956 s->cc_op = CC_OP_SUBB + ot;
3958 case 0x1c7: /* cmpxchg8b */
3959 modrm = ldub_code(s->pc++);
3960 mod = (modrm >> 6) & 3;
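/* cmpxchg8b is 0f c7 /1 with a memory operand only: reject the
   register forms (mod == 3) and any other /reg encoding. */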
3961 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3963 gen_jmp_im(pc_start - s->cs_base);
3964 if (s->cc_op != CC_OP_DYNAMIC)
3965 gen_op_set_cc_op(s->cc_op);
3966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3968 s->cc_op = CC_OP_EFLAGS;
3971 /**************************/
3973 case 0x50 ... 0x57: /* push */
3974 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3977 case 0x58 ... 0x5f: /* pop */
3979 ot = dflag ? OT_QUAD : OT_WORD;
3981 ot = dflag + OT_WORD;
3984 /* NOTE: order is important for pop %sp */
3986 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3988 case 0x60: /* pusha */
3993 case 0x61: /* popa */
3998 case 0x68: /* push Iv */
4001 ot = dflag ? OT_QUAD : OT_WORD;
4003 ot = dflag + OT_WORD;
4006 val = insn_get(s, ot);
4008 val = (int8_t)insn_get(s, OT_BYTE);
4009 gen_op_movl_T0_im(val);
4012 case 0x8f: /* pop Ev */
4014 ot = dflag ? OT_QUAD : OT_WORD;
4016 ot = dflag + OT_WORD;
4018 modrm = ldub_code(s->pc++);
4019 mod = (modrm >> 6) & 3;
4022 /* NOTE: order is important for pop %sp */
4024 rm = (modrm & 7) | REX_B(s);
4025 gen_op_mov_reg_T0(ot, rm);
4027 /* NOTE: order is important too for MMU exceptions */
4028 s->popl_esp_hack = 1 << ot;
4029 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4030 s->popl_esp_hack = 0;
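/* NOTE: the hack above biases ESP-relative effective addresses by the
   popped size while the store address is computed, since a pop into
   memory uses the value of ESP *after* the increment; the effective
   address code is expected to consult popl_esp_hack for ESP bases. */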
4034 case 0xc8: /* enter */
4037 val = lduw_code(s->pc);
4039 level = ldub_code(s->pc++);
4040 gen_enter(s, val, level);
4043 case 0xc9: /* leave */
4044 /* XXX: exception not precise (ESP is updated before potential exception) */
4046 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4047 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4048 } else if (s->ss32) {
4049 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4050 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4052 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4053 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4057 ot = dflag ? OT_QUAD : OT_WORD;
4059 ot = dflag + OT_WORD;
4061 gen_op_mov_reg_T0(ot, R_EBP);
4064 case 0x06: /* push es */
4065 case 0x0e: /* push cs */
4066 case 0x16: /* push ss */
4067 case 0x1e: /* push ds */
4070 gen_op_movl_T0_seg(b >> 3);
4073 case 0x1a0: /* push fs */
4074 case 0x1a8: /* push gs */
4075 gen_op_movl_T0_seg((b >> 3) & 7);
4078 case 0x07: /* pop es */
4079 case 0x17: /* pop ss */
4080 case 0x1f: /* pop ds */
4085 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4088 /* if reg == SS, inhibit interrupts/trace. */
4089 /* If several instructions disable interrupts, only the first one takes effect */
4091 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4092 gen_op_set_inhibit_irq();
4096 gen_jmp_im(s->pc - s->cs_base);
4100 case 0x1a1: /* pop fs */
4101 case 0x1a9: /* pop gs */
4103 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4106 gen_jmp_im(s->pc - s->cs_base);
4111 /**************************/
4114 case 0x89: /* mov Gv, Ev */
4118 ot = dflag + OT_WORD;
4119 modrm = ldub_code(s->pc++);
4120 reg = ((modrm >> 3) & 7) | rex_r;
4122 /* generate a generic store */
4123 gen_ldst_modrm(s, modrm, ot, reg, 1);
4126 case 0xc7: /* mov Ev, Iv */
4130 ot = dflag + OT_WORD;
4131 modrm = ldub_code(s->pc++);
4132 mod = (modrm >> 6) & 3;
4134 s->rip_offset = insn_const_size(ot);
4135 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4137 val = insn_get(s, ot);
4138 gen_op_movl_T0_im(val);
4140 gen_op_st_T0_A0(ot + s->mem_index);
4142 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4145 case 0x8b: /* mov Ev, Gv */
4149 ot = OT_WORD + dflag;
4150 modrm = ldub_code(s->pc++);
4151 reg = ((modrm >> 3) & 7) | rex_r;
4153 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4154 gen_op_mov_reg_T0(ot, reg);
4156 case 0x8e: /* mov seg, Gv */
4157 modrm = ldub_code(s->pc++);
4158 reg = (modrm >> 3) & 7;
4159 if (reg >= 6 || reg == R_CS)
4161 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4162 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4164 /* if reg == SS, inhibit interrupts/trace */
4165 /* If several instructions disable interrupts, only the first one takes effect */
4167 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4168 gen_op_set_inhibit_irq();
4172 gen_jmp_im(s->pc - s->cs_base);
4176 case 0x8c: /* mov Gv, seg */
4177 modrm = ldub_code(s->pc++);
4178 reg = (modrm >> 3) & 7;
4179 mod = (modrm >> 6) & 3;
4182 gen_op_movl_T0_seg(reg);
4184 ot = OT_WORD + dflag;
4187 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4190 case 0x1b6: /* movzbS Gv, Eb */
4191 case 0x1b7: /* movzwS Gv, Eb */
4192 case 0x1be: /* movsbS Gv, Eb */
4193 case 0x1bf: /* movswS Gv, Eb */
4196 /* d_ot is the size of destination */
4197 d_ot = dflag + OT_WORD;
4198 /* ot is the size of source */
4199 ot = (b & 1) + OT_BYTE;
4200 modrm = ldub_code(s->pc++);
4201 reg = ((modrm >> 3) & 7) | rex_r;
4202 mod = (modrm >> 6) & 3;
4203 rm = (modrm & 7) | REX_B(s);
4206 gen_op_mov_TN_reg(ot, 0, rm);
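/* Dispatch on ot | (b & 8): bit 3 of the opcode separates movz
   (0x1b6/0x1b7) from movs (0x1be/0x1bf), while ot picks the byte
   or word source width. */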
4207 switch(ot | (b & 8)) {
4209 gen_op_movzbl_T0_T0();
4212 gen_op_movsbl_T0_T0();
4215 gen_op_movzwl_T0_T0();
4219 gen_op_movswl_T0_T0();
4222 gen_op_mov_reg_T0(d_ot, reg);
4224 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4226 gen_op_lds_T0_A0(ot + s->mem_index);
4228 gen_op_ldu_T0_A0(ot + s->mem_index);
4230 gen_op_mov_reg_T0(d_ot, reg);
4235 case 0x8d: /* lea */
4236 ot = dflag + OT_WORD;
4237 modrm = ldub_code(s->pc++);
4238 mod = (modrm >> 6) & 3;
4241 reg = ((modrm >> 3) & 7) | rex_r;
4242 /* we must ensure that no segment is added */
4246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4248 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4251 case 0xa0: /* mov EAX, Ov */
4253 case 0xa2: /* mov Ov, EAX */
4256 target_ulong offset_addr;
4261 ot = dflag + OT_WORD;
4262 #ifdef TARGET_X86_64
4263 if (s->aflag == 2) {
4264 offset_addr = ldq_code(s->pc);
4266 gen_op_movq_A0_im(offset_addr);
4271 offset_addr = insn_get(s, OT_LONG);
4273 offset_addr = insn_get(s, OT_WORD);
4275 gen_op_movl_A0_im(offset_addr);
4277 gen_add_A0_ds_seg(s);
4279 gen_op_ld_T0_A0(ot + s->mem_index);
4280 gen_op_mov_reg_T0(ot, R_EAX);
4282 gen_op_mov_TN_reg(ot, 0, R_EAX);
4283 gen_op_st_T0_A0(ot + s->mem_index);
4287 case 0xd7: /* xlat */
4288 #ifdef TARGET_X86_64
4289 if (s->aflag == 2) {
4290 gen_op_movq_A0_reg(R_EBX);
4291 gen_op_addq_A0_AL();
4295 gen_op_movl_A0_reg(R_EBX);
4296 gen_op_addl_A0_AL();
4298 gen_op_andl_A0_ffff();
4300 gen_add_A0_ds_seg(s);
4301 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4302 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4304 case 0xb0 ... 0xb7: /* mov R, Ib */
4305 val = insn_get(s, OT_BYTE);
4306 gen_op_movl_T0_im(val);
4307 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4309 case 0xb8 ... 0xbf: /* mov R, Iv */
4310 #ifdef TARGET_X86_64
4314 tmp = ldq_code(s->pc);
4316 reg = (b & 7) | REX_B(s);
4317 gen_movtl_T0_im(tmp);
4318 gen_op_mov_reg_T0(OT_QUAD, reg);
4322 ot = dflag ? OT_LONG : OT_WORD;
4323 val = insn_get(s, ot);
4324 reg = (b & 7) | REX_B(s);
4325 gen_op_movl_T0_im(val);
4326 gen_op_mov_reg_T0(ot, reg);
4330 case 0x91 ... 0x97: /* xchg R, EAX */
4331 ot = dflag + OT_WORD;
4332 reg = (b & 7) | REX_B(s);
4336 case 0x87: /* xchg Ev, Gv */
4340 ot = dflag + OT_WORD;
4341 modrm = ldub_code(s->pc++);
4342 reg = ((modrm >> 3) & 7) | rex_r;
4343 mod = (modrm >> 6) & 3;
4345 rm = (modrm & 7) | REX_B(s);
4347 gen_op_mov_TN_reg(ot, 0, reg);
4348 gen_op_mov_TN_reg(ot, 1, rm);
4349 gen_op_mov_reg_T0(ot, rm);
4350 gen_op_mov_reg_T1(ot, reg);
4352 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4353 gen_op_mov_TN_reg(ot, 0, reg);
4354 /* for xchg, lock is implicit */
4355 if (!(prefixes & PREFIX_LOCK))
4357 gen_op_ld_T1_A0(ot + s->mem_index);
4358 gen_op_st_T0_A0(ot + s->mem_index);
4359 if (!(prefixes & PREFIX_LOCK))
4361 gen_op_mov_reg_T1(ot, reg);
4364 case 0xc4: /* les Gv */
4369 case 0xc5: /* lds Gv */
4374 case 0x1b2: /* lss Gv */
4377 case 0x1b4: /* lfs Gv */
4380 case 0x1b5: /* lgs Gv */
4383 ot = dflag ? OT_LONG : OT_WORD;
4384 modrm = ldub_code(s->pc++);
4385 reg = ((modrm >> 3) & 7) | rex_r;
4386 mod = (modrm >> 6) & 3;
4389 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4390 gen_op_ld_T1_A0(ot + s->mem_index);
4391 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4392 /* load the segment first to handle exceptions properly */
4393 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4394 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4395 /* then put the data */
4396 gen_op_mov_reg_T1(ot, reg);
4398 gen_jmp_im(s->pc - s->cs_base);
4403 /************************/
4414 ot = dflag + OT_WORD;
4416 modrm = ldub_code(s->pc++);
4417 mod = (modrm >> 6) & 3;
4418 op = (modrm >> 3) & 7;
4424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4427 opreg = (modrm & 7) | REX_B(s);
4432 gen_shift(s, op, ot, opreg, OR_ECX);
4435 shift = ldub_code(s->pc++);
4437 gen_shifti(s, op, ot, opreg, shift);
4452 case 0x1a4: /* shld imm */
4456 case 0x1a5: /* shld cl */
4460 case 0x1ac: /* shrd imm */
4464 case 0x1ad: /* shrd cl */
4468 ot = dflag + OT_WORD;
4469 modrm = ldub_code(s->pc++);
4470 mod = (modrm >> 6) & 3;
4471 rm = (modrm & 7) | REX_B(s);
4472 reg = ((modrm >> 3) & 7) | rex_r;
4475 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4476 gen_op_ld_T0_A0(ot + s->mem_index);
4478 gen_op_mov_TN_reg(ot, 0, rm);
4480 gen_op_mov_TN_reg(ot, 1, reg);
4483 val = ldub_code(s->pc++);
4490 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4492 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4493 if (op == 0 && ot != OT_WORD)
4494 s->cc_op = CC_OP_SHLB + ot;
4496 s->cc_op = CC_OP_SARB + ot;
4499 if (s->cc_op != CC_OP_DYNAMIC)
4500 gen_op_set_cc_op(s->cc_op);
4502 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4504 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4505 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4508 gen_op_mov_reg_T0(ot, rm);
4512 /************************/
4515 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4516 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4517 /* XXX: what to do on an illegal op? */
4518 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4521 modrm = ldub_code(s->pc++);
4522 mod = (modrm >> 6) & 3;
4524 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
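/* 6-bit FPU op: low 3 bits of the escape opcode (0xd8-0xdf) in the
   high part, /reg in the low part; e.g. d9 /5 gives op 0x0d, the
   fldcw case below. */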
4527 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4529 case 0x00 ... 0x07: /* fxxxs */
4530 case 0x10 ... 0x17: /* fixxxl */
4531 case 0x20 ... 0x27: /* fxxxl */
4532 case 0x30 ... 0x37: /* fixxx */
4539 gen_op_flds_FT0_A0();
4542 gen_op_fildl_FT0_A0();
4545 gen_op_fldl_FT0_A0();
4549 gen_op_fild_FT0_A0();
4553 gen_op_fp_arith_ST0_FT0[op1]();
4555 /* fcomp needs pop */
4560 case 0x08: /* flds */
4561 case 0x0a: /* fsts */
4562 case 0x0b: /* fstps */
4563 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4564 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4565 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4570 gen_op_flds_ST0_A0();
4573 gen_op_fildl_ST0_A0();
4576 gen_op_fldl_ST0_A0();
4580 gen_op_fild_ST0_A0();
4587 gen_op_fisttl_ST0_A0();
4590 gen_op_fisttll_ST0_A0();
4594 gen_op_fistt_ST0_A0();
4601 gen_op_fsts_ST0_A0();
4604 gen_op_fistl_ST0_A0();
4607 gen_op_fstl_ST0_A0();
4611 gen_op_fist_ST0_A0();
4619 case 0x0c: /* fldenv mem */
4620 gen_op_fldenv_A0(s->dflag);
4622 case 0x0d: /* fldcw mem */
4625 case 0x0e: /* fnstenv mem */
4626 gen_op_fnstenv_A0(s->dflag);
4628 case 0x0f: /* fnstcw mem */
4631 case 0x1d: /* fldt mem */
4632 gen_op_fldt_ST0_A0();
4634 case 0x1f: /* fstpt mem */
4635 gen_op_fstt_ST0_A0();
4638 case 0x2c: /* frstor mem */
4639 gen_op_frstor_A0(s->dflag);
4641 case 0x2e: /* fnsave mem */
4642 gen_op_fnsave_A0(s->dflag);
4644 case 0x2f: /* fnstsw mem */
4647 case 0x3c: /* fbld */
4648 gen_op_fbld_ST0_A0();
4650 case 0x3e: /* fbstp */
4651 gen_op_fbst_ST0_A0();
4654 case 0x3d: /* fildll */
4655 gen_op_fildll_ST0_A0();
4657 case 0x3f: /* fistpll */
4658 gen_op_fistll_ST0_A0();
4665 /* register float ops */
4669 case 0x08: /* fld sti */
4671 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4673 case 0x09: /* fxchg sti */
4674 case 0x29: /* fxchg4 sti, undocumented op */
4675 case 0x39: /* fxchg7 sti, undocumented op */
4676 gen_op_fxchg_ST0_STN(opreg);
4678 case 0x0a: /* grp d9/2 */
4681 /* check exceptions (FreeBSD FPU probe) */
4682 if (s->cc_op != CC_OP_DYNAMIC)
4683 gen_op_set_cc_op(s->cc_op);
4684 gen_jmp_im(pc_start - s->cs_base);
4691 case 0x0c: /* grp d9/4 */
4701 gen_op_fcom_ST0_FT0();
4710 case 0x0d: /* grp d9/5 */
4719 gen_op_fldl2t_ST0();
4723 gen_op_fldl2e_ST0();
4731 gen_op_fldlg2_ST0();
4735 gen_op_fldln2_ST0();
4746 case 0x0e: /* grp d9/6 */
4757 case 3: /* fpatan */
4760 case 4: /* fxtract */
4763 case 5: /* fprem1 */
4766 case 6: /* fdecstp */
4770 case 7: /* fincstp */
4775 case 0x0f: /* grp d9/7 */
4780 case 1: /* fyl2xp1 */
4786 case 3: /* fsincos */
4789 case 5: /* fscale */
4792 case 4: /* frndint */
4804 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4805 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4806 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4812 gen_op_fp_arith_STN_ST0[op1](opreg);
4816 gen_op_fmov_FT0_STN(opreg);
4817 gen_op_fp_arith_ST0_FT0[op1]();
4821 case 0x02: /* fcom */
4822 case 0x22: /* fcom2, undocumented op */
4823 gen_op_fmov_FT0_STN(opreg);
4824 gen_op_fcom_ST0_FT0();
4826 case 0x03: /* fcomp */
4827 case 0x23: /* fcomp3, undocumented op */
4828 case 0x32: /* fcomp5, undocumented op */
4829 gen_op_fmov_FT0_STN(opreg);
4830 gen_op_fcom_ST0_FT0();
4833 case 0x15: /* da/5 */
4835 case 1: /* fucompp */
4836 gen_op_fmov_FT0_STN(1);
4837 gen_op_fucom_ST0_FT0();
4847 case 0: /* feni (287 only, just do nop here) */
4849 case 1: /* fdisi (287 only, just do nop here) */
4854 case 3: /* fninit */
4857 case 4: /* fsetpm (287 only, just do nop here) */
4863 case 0x1d: /* fucomi */
4864 if (s->cc_op != CC_OP_DYNAMIC)
4865 gen_op_set_cc_op(s->cc_op);
4866 gen_op_fmov_FT0_STN(opreg);
4867 gen_op_fucomi_ST0_FT0();
4868 s->cc_op = CC_OP_EFLAGS;
4870 case 0x1e: /* fcomi */
4871 if (s->cc_op != CC_OP_DYNAMIC)
4872 gen_op_set_cc_op(s->cc_op);
4873 gen_op_fmov_FT0_STN(opreg);
4874 gen_op_fcomi_ST0_FT0();
4875 s->cc_op = CC_OP_EFLAGS;
4877 case 0x28: /* ffree sti */
4878 gen_op_ffree_STN(opreg);
4880 case 0x2a: /* fst sti */
4881 gen_op_fmov_STN_ST0(opreg);
4883 case 0x2b: /* fstp sti */
4884 case 0x0b: /* fstp1 sti, undocumented op */
4885 case 0x3a: /* fstp8 sti, undocumented op */
4886 case 0x3b: /* fstp9 sti, undocumented op */
4887 gen_op_fmov_STN_ST0(opreg);
4890 case 0x2c: /* fucom st(i) */
4891 gen_op_fmov_FT0_STN(opreg);
4892 gen_op_fucom_ST0_FT0();
4894 case 0x2d: /* fucomp st(i) */
4895 gen_op_fmov_FT0_STN(opreg);
4896 gen_op_fucom_ST0_FT0();
4899 case 0x33: /* de/3 */
4901 case 1: /* fcompp */
4902 gen_op_fmov_FT0_STN(1);
4903 gen_op_fcom_ST0_FT0();
4911 case 0x38: /* ffreep sti, undocumented op */
4912 gen_op_ffree_STN(opreg);
4915 case 0x3c: /* df/4 */
4918 gen_op_fnstsw_EAX();
4924 case 0x3d: /* fucomip */
4925 if (s->cc_op != CC_OP_DYNAMIC)
4926 gen_op_set_cc_op(s->cc_op);
4927 gen_op_fmov_FT0_STN(opreg);
4928 gen_op_fucomi_ST0_FT0();
4930 s->cc_op = CC_OP_EFLAGS;
4932 case 0x3e: /* fcomip */
4933 if (s->cc_op != CC_OP_DYNAMIC)
4934 gen_op_set_cc_op(s->cc_op);
4935 gen_op_fmov_FT0_STN(opreg);
4936 gen_op_fcomi_ST0_FT0();
4938 s->cc_op = CC_OP_EFLAGS;
4940 case 0x10 ... 0x13: /* fcmovxx */
4944 static const uint8_t fcmov_cc[8] = {
4950 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
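/* op & 3 selects the base condition (b/z/be/p) through the table
   above, and (op >> 3) & 1 is the negation bit separating the
   da-escape fcmovcc forms from the db-escape fcmovncc ones. */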
4952 gen_op_fcmov_ST0_STN_T0(opreg);
4960 /************************/
4963 case 0xa4: /* movsS */
4968 ot = dflag + OT_WORD;
4970 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4971 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4977 case 0xaa: /* stosS */
4982 ot = dflag + OT_WORD;
4984 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4985 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4990 case 0xac: /* lodsS */
4995 ot = dflag + OT_WORD;
4996 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4997 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5002 case 0xae: /* scasS */
5007 ot = dflag + OT_WORD;
5008 if (prefixes & PREFIX_REPNZ) {
5009 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5010 } else if (prefixes & PREFIX_REPZ) {
5011 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5014 s->cc_op = CC_OP_SUBB + ot;
5018 case 0xa6: /* cmpsS */
5023 ot = dflag + OT_WORD;
5024 if (prefixes & PREFIX_REPNZ) {
5025 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5026 } else if (prefixes & PREFIX_REPZ) {
5027 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5030 s->cc_op = CC_OP_SUBB + ot;
5033 case 0x6c: /* insS */
5038 ot = dflag ? OT_LONG : OT_WORD;
5039 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5040 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5041 gen_op_andl_T0_ffff();
5042 if (gen_svm_check_io(s, pc_start,
5043 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
5044 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
5046 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5047 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5052 case 0x6e: /* outsS */
5057 ot = dflag ? OT_LONG : OT_WORD;
5058 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5059 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5060 gen_op_andl_T0_ffff();
5061 if (gen_svm_check_io(s, pc_start,
5062 (1 << (4+ot)) | svm_is_rep(prefixes) |
5063 4 | (1 << (7+s->aflag))))
5065 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5066 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5072 /************************/
5080 ot = dflag ? OT_LONG : OT_WORD;
5081 val = ldub_code(s->pc++);
5082 gen_op_movl_T0_im(val);
5083 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5084 if (gen_svm_check_io(s, pc_start,
5085 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5089 gen_op_mov_reg_T1(ot, R_EAX);
5096 ot = dflag ? OT_LONG : OT_WORD;
5097 val = ldub_code(s->pc++);
5098 gen_op_movl_T0_im(val);
5099 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5100 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5103 gen_op_mov_TN_reg(ot, 1, R_EAX);
5111 ot = dflag ? OT_LONG : OT_WORD;
5112 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5113 gen_op_andl_T0_ffff();
5114 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5115 if (gen_svm_check_io(s, pc_start,
5116 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5120 gen_op_mov_reg_T1(ot, R_EAX);
5127 ot = dflag ? OT_LONG : OT_WORD;
5128 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5129 gen_op_andl_T0_ffff();
5130 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5131 if (gen_svm_check_io(s, pc_start,
5132 svm_is_rep(prefixes) | (1 << (4+ot))))
5134 gen_op_mov_TN_reg(ot, 1, R_EAX);
5138 /************************/
5140 case 0xc2: /* ret im */
5141 val = ldsw_code(s->pc);
5144 if (CODE64(s) && s->dflag)
5146 gen_stack_update(s, val + (2 << s->dflag));
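/* 2 << s->dflag is the size of the return address (2, 4 or 8 bytes),
   so e.g. "ret 8" in 32-bit code releases 4 + 8 stack bytes once the
   return EIP has been reloaded. */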
5148 gen_op_andl_T0_ffff();
5152 case 0xc3: /* ret */
5156 gen_op_andl_T0_ffff();
5160 case 0xca: /* lret im */
5161 val = ldsw_code(s->pc);
5164 if (s->pe && !s->vm86) {
5165 if (s->cc_op != CC_OP_DYNAMIC)
5166 gen_op_set_cc_op(s->cc_op);
5167 gen_jmp_im(pc_start - s->cs_base);
5168 gen_op_lret_protected(s->dflag, val);
5172 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5174 gen_op_andl_T0_ffff();
5175 /* NOTE: keeping EIP updated is not a problem in case of exception */
5179 gen_op_addl_A0_im(2 << s->dflag);
5180 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5181 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5182 /* add stack offset */
5183 gen_stack_update(s, val + (4 << s->dflag));
5187 case 0xcb: /* lret */
5190 case 0xcf: /* iret */
5191 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5195 gen_op_iret_real(s->dflag);
5196 s->cc_op = CC_OP_EFLAGS;
5197 } else if (s->vm86) {
5199 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5201 gen_op_iret_real(s->dflag);
5202 s->cc_op = CC_OP_EFLAGS;
5205 if (s->cc_op != CC_OP_DYNAMIC)
5206 gen_op_set_cc_op(s->cc_op);
5207 gen_jmp_im(pc_start - s->cs_base);
5208 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5209 s->cc_op = CC_OP_EFLAGS;
5213 case 0xe8: /* call im */
5216 tval = (int32_t)insn_get(s, OT_LONG);
5218 tval = (int16_t)insn_get(s, OT_WORD);
5219 next_eip = s->pc - s->cs_base;
5223 gen_movtl_T0_im(next_eip);
5228 case 0x9a: /* lcall im */
5230 unsigned int selector, offset;
5234 ot = dflag ? OT_LONG : OT_WORD;
5235 offset = insn_get(s, ot);
5236 selector = insn_get(s, OT_WORD);
5238 gen_op_movl_T0_im(selector);
5239 gen_op_movl_T1_imu(offset);
5242 case 0xe9: /* jmp im */
5244 tval = (int32_t)insn_get(s, OT_LONG);
5246 tval = (int16_t)insn_get(s, OT_WORD);
5247 tval += s->pc - s->cs_base;
5252 case 0xea: /* ljmp im */
5254 unsigned int selector, offset;
5258 ot = dflag ? OT_LONG : OT_WORD;
5259 offset = insn_get(s, ot);
5260 selector = insn_get(s, OT_WORD);
5262 gen_op_movl_T0_im(selector);
5263 gen_op_movl_T1_imu(offset);
5266 case 0xeb: /* jmp Jb */
5267 tval = (int8_t)insn_get(s, OT_BYTE);
5268 tval += s->pc - s->cs_base;
5273 case 0x70 ... 0x7f: /* jcc Jb */
5274 tval = (int8_t)insn_get(s, OT_BYTE);
5276 case 0x180 ... 0x18f: /* jcc Jv */
5278 tval = (int32_t)insn_get(s, OT_LONG);
5280 tval = (int16_t)insn_get(s, OT_WORD);
5283 next_eip = s->pc - s->cs_base;
5287 gen_jcc(s, b, tval, next_eip);
5290 case 0x190 ... 0x19f: /* setcc Gv */
5291 modrm = ldub_code(s->pc++);
5293 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5295 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5296 ot = dflag + OT_WORD;
5297 modrm = ldub_code(s->pc++);
5298 reg = ((modrm >> 3) & 7) | rex_r;
5299 mod = (modrm >> 6) & 3;
5302 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5303 gen_op_ld_T1_A0(ot + s->mem_index);
5305 rm = (modrm & 7) | REX_B(s);
5306 gen_op_mov_TN_reg(ot, 1, rm);
5308 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5311 /************************/
5313 case 0x9c: /* pushf */
5314 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5316 if (s->vm86 && s->iopl != 3) {
5317 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5319 if (s->cc_op != CC_OP_DYNAMIC)
5320 gen_op_set_cc_op(s->cc_op);
5321 gen_op_movl_T0_eflags();
5325 case 0x9d: /* popf */
5326 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5328 if (s->vm86 && s->iopl != 3) {
5329 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5334 gen_op_movl_eflags_T0_cpl0();
5336 gen_op_movw_eflags_T0_cpl0();
5339 if (s->cpl <= s->iopl) {
5341 gen_op_movl_eflags_T0_io();
5343 gen_op_movw_eflags_T0_io();
5347 gen_op_movl_eflags_T0();
5349 gen_op_movw_eflags_T0();
5354 s->cc_op = CC_OP_EFLAGS;
5355 /* abort translation because TF flag may change */
5356 gen_jmp_im(s->pc - s->cs_base);
5360 case 0x9e: /* sahf */
5363 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5364 if (s->cc_op != CC_OP_DYNAMIC)
5365 gen_op_set_cc_op(s->cc_op);
5366 gen_op_movb_eflags_T0();
5367 s->cc_op = CC_OP_EFLAGS;
5369 case 0x9f: /* lahf */
5372 if (s->cc_op != CC_OP_DYNAMIC)
5373 gen_op_set_cc_op(s->cc_op);
5374 gen_op_movl_T0_eflags();
5375 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5377 case 0xf5: /* cmc */
5378 if (s->cc_op != CC_OP_DYNAMIC)
5379 gen_op_set_cc_op(s->cc_op);
5381 s->cc_op = CC_OP_EFLAGS;
5383 case 0xf8: /* clc */
5384 if (s->cc_op != CC_OP_DYNAMIC)
5385 gen_op_set_cc_op(s->cc_op);
5387 s->cc_op = CC_OP_EFLAGS;
5389 case 0xf9: /* stc */
5390 if (s->cc_op != CC_OP_DYNAMIC)
5391 gen_op_set_cc_op(s->cc_op);
5393 s->cc_op = CC_OP_EFLAGS;
5395 case 0xfc: /* cld */
5398 case 0xfd: /* std */
5402 /************************/
5403 /* bit operations */
5404 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5405 ot = dflag + OT_WORD;
5406 modrm = ldub_code(s->pc++);
5407 op = (modrm >> 3) & 7;
5408 mod = (modrm >> 6) & 3;
5409 rm = (modrm & 7) | REX_B(s);
5412 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5413 gen_op_ld_T0_A0(ot + s->mem_index);
5415 gen_op_mov_TN_reg(ot, 0, rm);
5418 val = ldub_code(s->pc++);
5419 gen_op_movl_T1_im(val);
5423 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5424 s->cc_op = CC_OP_SARB + ot;
5427 gen_op_st_T0_A0(ot + s->mem_index);
5429 gen_op_mov_reg_T0(ot, rm);
5430 gen_op_update_bt_cc();
5433 case 0x1a3: /* bt Gv, Ev */
5436 case 0x1ab: /* bts */
5439 case 0x1b3: /* btr */
5442 case 0x1bb: /* btc */
5445 ot = dflag + OT_WORD;
5446 modrm = ldub_code(s->pc++);
5447 reg = ((modrm >> 3) & 7) | rex_r;
5448 mod = (modrm >> 6) & 3;
5449 rm = (modrm & 7) | REX_B(s);
5450 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5452 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5453 /* special case: we need to add a displacement */
5454 gen_op_add_bit_A0_T1[ot - OT_WORD]();
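/* For memory forms the bit index is not reduced modulo the operand
   width: e.g. bt dword [mem], 35 accesses the dword at mem + 4,
   bit 3, which is what this displacement helper implements. */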
5455 gen_op_ld_T0_A0(ot + s->mem_index);
5457 gen_op_mov_TN_reg(ot, 0, rm);
5459 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5460 s->cc_op = CC_OP_SARB + ot;
5463 gen_op_st_T0_A0(ot + s->mem_index);
5465 gen_op_mov_reg_T0(ot, rm);
5466 gen_op_update_bt_cc();
5469 case 0x1bc: /* bsf */
5470 case 0x1bd: /* bsr */
5471 ot = dflag + OT_WORD;
5472 modrm = ldub_code(s->pc++);
5473 reg = ((modrm >> 3) & 7) | rex_r;
5474 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5475 /* NOTE: in order to handle the 0 case, we must load the
5476 result. It could be optimized with a generated jump */
5477 gen_op_mov_TN_reg(ot, 1, reg);
5478 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5479 gen_op_mov_reg_T1(ot, reg);
5480 s->cc_op = CC_OP_LOGICB + ot;
5482 /************************/
5484 case 0x27: /* daa */
5487 if (s->cc_op != CC_OP_DYNAMIC)
5488 gen_op_set_cc_op(s->cc_op);
5490 s->cc_op = CC_OP_EFLAGS;
5492 case 0x2f: /* das */
5495 if (s->cc_op != CC_OP_DYNAMIC)
5496 gen_op_set_cc_op(s->cc_op);
5498 s->cc_op = CC_OP_EFLAGS;
5500 case 0x37: /* aaa */
5503 if (s->cc_op != CC_OP_DYNAMIC)
5504 gen_op_set_cc_op(s->cc_op);
5506 s->cc_op = CC_OP_EFLAGS;
5508 case 0x3f: /* aas */
5511 if (s->cc_op != CC_OP_DYNAMIC)
5512 gen_op_set_cc_op(s->cc_op);
5514 s->cc_op = CC_OP_EFLAGS;
5516 case 0xd4: /* aam */
5519 val = ldub_code(s->pc++);
5521 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5524 s->cc_op = CC_OP_LOGICB;
5527 case 0xd5: /* aad */
5530 val = ldub_code(s->pc++);
5532 s->cc_op = CC_OP_LOGICB;
5534 /************************/
5536 case 0x90: /* nop */
5537 /* XXX: xchg + rex handling */
5538 /* XXX: correct lock test for all insn */
5539 if (prefixes & PREFIX_LOCK)
5541 if (prefixes & PREFIX_REPZ) {
5542 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5545 case 0x9b: /* fwait */
5546 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5547 (HF_MP_MASK | HF_TS_MASK)) {
5548 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5550 if (s->cc_op != CC_OP_DYNAMIC)
5551 gen_op_set_cc_op(s->cc_op);
5552 gen_jmp_im(pc_start - s->cs_base);
5556 case 0xcc: /* int3 */
5557 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5559 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5561 case 0xcd: /* int N */
5562 val = ldub_code(s->pc++);
5563 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5565 if (s->vm86 && s->iopl != 3) {
5566 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5568 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5571 case 0xce: /* into */
5574 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5576 if (s->cc_op != CC_OP_DYNAMIC)
5577 gen_op_set_cc_op(s->cc_op);
5578 gen_jmp_im(pc_start - s->cs_base);
5579 gen_op_into(s->pc - pc_start);
5581 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5582 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5585 gen_debug(s, pc_start - s->cs_base);
5588 tb_flush(cpu_single_env);
5589 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5592 case 0xfa: /* cli */
5594 if (s->cpl <= s->iopl) {
5597 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5603 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5607 case 0xfb: /* sti */
5609 if (s->cpl <= s->iopl) {
5612 /* interrupts are re-enabled only after the insn following sti */
5613 /* If several instructions disable interrupts, only the first one takes effect */
5615 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5616 gen_op_set_inhibit_irq();
5617 /* give a chance to handle pending irqs */
5618 gen_jmp_im(s->pc - s->cs_base);
5621 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5627 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5631 case 0x62: /* bound */
5634 ot = dflag ? OT_LONG : OT_WORD;
5635 modrm = ldub_code(s->pc++);
5636 reg = (modrm >> 3) & 7;
5637 mod = (modrm >> 6) & 3;
5640 gen_op_mov_TN_reg(ot, 0, reg);
5641 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5642 gen_jmp_im(pc_start - s->cs_base);
5648 case 0x1c8 ... 0x1cf: /* bswap reg */
5649 reg = (b & 7) | REX_B(s);
5650 #ifdef TARGET_X86_64
5652 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5653 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5654 gen_op_mov_reg_T0(OT_QUAD, reg);
5658 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5660 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5661 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5662 tcg_gen_bswap_i32(tmp0, tmp0);
5663 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5664 gen_op_mov_reg_T0(OT_LONG, reg);
5668 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5669 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5670 gen_op_mov_reg_T0(OT_LONG, reg);
5674 case 0xd6: /* salc */
5677 if (s->cc_op != CC_OP_DYNAMIC)
5678 gen_op_set_cc_op(s->cc_op);
5681 case 0xe0: /* loopnz */
5682 case 0xe1: /* loopz */
5683 if (s->cc_op != CC_OP_DYNAMIC)
5684 gen_op_set_cc_op(s->cc_op);
5686 case 0xe2: /* loop */
5687 case 0xe3: /* jecxz */
5691 tval = (int8_t)insn_get(s, OT_BYTE);
5692 next_eip = s->pc - s->cs_base;
5697 l1 = gen_new_label();
5698 l2 = gen_new_label();
5701 gen_op_jz_ecx[s->aflag](l1);
5703 gen_op_dec_ECX[s->aflag]();
5706 gen_op_loop[s->aflag][b](l1);
5709 gen_jmp_im(next_eip);
5710 gen_op_jmp_label(l2);
5717 case 0x130: /* wrmsr */
5718 case 0x132: /* rdmsr */
5720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5724 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5727 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5734 case 0x131: /* rdtsc */
5735 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5737 gen_jmp_im(pc_start - s->cs_base);
5740 case 0x133: /* rdpmc */
5741 gen_jmp_im(pc_start - s->cs_base);
5744 case 0x134: /* sysenter */
5748 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5750 if (s->cc_op != CC_OP_DYNAMIC) {
5751 gen_op_set_cc_op(s->cc_op);
5752 s->cc_op = CC_OP_DYNAMIC;
5754 gen_jmp_im(pc_start - s->cs_base);
5759 case 0x135: /* sysexit */
5763 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5765 if (s->cc_op != CC_OP_DYNAMIC) {
5766 gen_op_set_cc_op(s->cc_op);
5767 s->cc_op = CC_OP_DYNAMIC;
5769 gen_jmp_im(pc_start - s->cs_base);
5774 #ifdef TARGET_X86_64
5775 case 0x105: /* syscall */
5776 /* XXX: is it usable in real mode? */
5777 if (s->cc_op != CC_OP_DYNAMIC) {
5778 gen_op_set_cc_op(s->cc_op);
5779 s->cc_op = CC_OP_DYNAMIC;
5781 gen_jmp_im(pc_start - s->cs_base);
5782 gen_op_syscall(s->pc - pc_start);
5785 case 0x107: /* sysret */
5787 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5789 if (s->cc_op != CC_OP_DYNAMIC) {
5790 gen_op_set_cc_op(s->cc_op);
5791 s->cc_op = CC_OP_DYNAMIC;
5793 gen_jmp_im(pc_start - s->cs_base);
5794 gen_op_sysret(s->dflag);
5795 /* condition codes are modified only in long mode */
5797 s->cc_op = CC_OP_EFLAGS;
5802 case 0x1a2: /* cpuid */
5803 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5807 case 0xf4: /* hlt */
5809 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5811 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5813 if (s->cc_op != CC_OP_DYNAMIC)
5814 gen_op_set_cc_op(s->cc_op);
5815 gen_jmp_im(s->pc - s->cs_base);
5821 modrm = ldub_code(s->pc++);
5822 mod = (modrm >> 6) & 3;
5823 op = (modrm >> 3) & 7;
5826 if (!s->pe || s->vm86)
5828 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5830 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5834 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5837 if (!s->pe || s->vm86)
5840 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5842 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5844 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5845 gen_jmp_im(pc_start - s->cs_base);
5850 if (!s->pe || s->vm86)
5852 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5854 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5858 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5861 if (!s->pe || s->vm86)
5864 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5866 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5868 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5869 gen_jmp_im(pc_start - s->cs_base);
5875 if (!s->pe || s->vm86)
5877 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5878 if (s->cc_op != CC_OP_DYNAMIC)
5879 gen_op_set_cc_op(s->cc_op);
5884 s->cc_op = CC_OP_EFLAGS;
5891 modrm = ldub_code(s->pc++);
5892 mod = (modrm >> 6) & 3;
5893 op = (modrm >> 3) & 7;
5899 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5901 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5902 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5903 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5904 gen_add_A0_im(s, 2);
5905 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5907 gen_op_andl_T0_im(0xffffff);
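/* With a 16-bit operand size only 24 bits of the descriptor-table
   base are stored, hence the 0xffffff mask before the store. */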
5908 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5913 case 0: /* monitor */
5914 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5917 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5919 gen_jmp_im(pc_start - s->cs_base);
5920 #ifdef TARGET_X86_64
5921 if (s->aflag == 2) {
5922 gen_op_movq_A0_reg(R_EBX);
5923 gen_op_addq_A0_AL();
5927 gen_op_movl_A0_reg(R_EBX);
5928 gen_op_addl_A0_AL();
5930 gen_op_andl_A0_ffff();
5932 gen_add_A0_ds_seg(s);
5936 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5939 if (s->cc_op != CC_OP_DYNAMIC) {
5940 gen_op_set_cc_op(s->cc_op);
5941 s->cc_op = CC_OP_DYNAMIC;
5943 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5945 gen_jmp_im(s->pc - s->cs_base);
5953 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5956 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5957 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5958 gen_add_A0_im(s, 2);
5959 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5961 gen_op_andl_T0_im(0xffffff);
5962 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5970 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5972 if (s->cc_op != CC_OP_DYNAMIC)
5973 gen_op_set_cc_op(s->cc_op);
5974 gen_jmp_im(s->pc - s->cs_base);
5976 s->cc_op = CC_OP_EFLAGS;
5979 case 1: /* VMMCALL */
5980 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5982 /* FIXME: cause #UD if hflags & SVM */
5985 case 2: /* VMLOAD */
5986 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5990 case 3: /* VMSAVE */
5991 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5996 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6001 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6005 case 6: /* SKINIT */
6006 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6010 case 7: /* INVLPGA */
6011 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6018 } else if (s->cpl != 0) {
6019 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6021 if (gen_svm_check_intercept(s, pc_start,
6022 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6024 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6025 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6026 gen_add_A0_im(s, 2);
6027 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6029 gen_op_andl_T0_im(0xffffff);
6031 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6032 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6034 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6035 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6040 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6042 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6043 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6047 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6049 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6051 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6053 gen_jmp_im(s->pc - s->cs_base);
6057 case 7: /* invlpg */
6059 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6062 #ifdef TARGET_X86_64
6063 if (CODE64(s) && rm == 0) {
6065 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6066 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6067 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6068 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6075 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6077 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6079 gen_jmp_im(s->pc - s->cs_base);
6088 case 0x108: /* invd */
6089 case 0x109: /* wbinvd */
6091 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6093 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6098 case 0x63: /* arpl or movslS (x86_64) */
6099 #ifdef TARGET_X86_64
6102 /* d_ot is the size of destination */
6103 d_ot = dflag + OT_WORD;
6105 modrm = ldub_code(s->pc++);
6106 reg = ((modrm >> 3) & 7) | rex_r;
6107 mod = (modrm >> 6) & 3;
6108 rm = (modrm & 7) | REX_B(s);
6111 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6113 if (d_ot == OT_QUAD)
6114 gen_op_movslq_T0_T0();
6115 gen_op_mov_reg_T0(d_ot, reg);
6117 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6118 if (d_ot == OT_QUAD) {
6119 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6121 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6123 gen_op_mov_reg_T0(d_ot, reg);
if (!s->pe || s->vm86)
ot = dflag ? OT_LONG : OT_WORD;
modrm = ldub_code(s->pc++);
reg = (modrm >> 3) & 7;
mod = (modrm >> 6) & 3;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, rm);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_EFLAGS;
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, rm);
gen_op_arpl_update();
case 0x102: /* lar */
case 0x103: /* lsl */
if (!s->pe || s->vm86)
ot = dflag ? OT_LONG : OT_WORD;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
gen_op_mov_TN_reg(ot, 1, reg);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_EFLAGS;
gen_op_mov_reg_T1(ot, reg);
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
case 0: /* prefetchnta */
case 1: /* prefetcht0 */
case 2: /* prefetcht1 */
case 3: /* prefetcht2 */
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* nothing more to do */
default: /* nop (multi byte) */
gen_nop_modrm(s, modrm);
case 0x119 ... 0x11f: /* nop (multi byte) */
modrm = ldub_code(s->pc++);
gen_nop_modrm(s, modrm);
case 0x120: /* mov reg, crN */
case 0x122: /* mov crN, reg */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
modrm = ldub_code(s->pc++);
if ((modrm & 0xc0) != 0xc0)
rm = (modrm & 7) | REX_B(s);
reg = ((modrm >> 3) & 7) | rex_r;
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
gen_op_mov_TN_reg(ot, 0, rm);
gen_op_movl_crN_T0(reg);
gen_jmp_im(s->pc - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
gen_op_movtl_T0_cr8();
gen_op_movtl_T0_env(offsetof(CPUX86State, cr[reg]));
gen_op_mov_reg_T0(ot, rm);
case 0x121: /* mov reg, drN */
case 0x123: /* mov drN, reg */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
modrm = ldub_code(s->pc++);
if ((modrm & 0xc0) != 0xc0)
rm = (modrm & 7) | REX_B(s);
reg = ((modrm >> 3) & 7) | rex_r;
/* XXX: do it dynamically with CR4.DE bit */
if (reg == 4 || reg == 5 || reg >= 8)
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
gen_op_mov_TN_reg(ot, 0, rm);
gen_op_movl_drN_T0(reg);
gen_jmp_im(s->pc - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
gen_op_movtl_T0_env(offsetof(CPUX86State, dr[reg]));
gen_op_mov_reg_T0(ot, rm);
case 0x106: /* clts */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
/* abort block because static cpu state changed */
gen_jmp_im(s->pc - s->cs_base);
/* MMX/3DNow!/SSE/SSE2/SSE3 support */
case 0x1c3: /* MOVNTI reg, mem */
if (!(s->cpuid_features & CPUID_SSE2))
ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
reg = ((modrm >> 3) & 7) | rex_r;
/* generate a generic store */
gen_ldst_modrm(s, modrm, ot, reg, 1);
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
op = (modrm >> 3) & 7;
case 0: /* fxsave */
if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
(s->flags & HF_EM_MASK))
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_fxsave_A0((s->dflag == 2));
case 1: /* fxrstor */
if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
(s->flags & HF_EM_MASK))
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_fxrstor_A0((s->dflag == 2));
case 2: /* ldmxcsr */
case 3: /* stmxcsr */
if (s->flags & HF_TS_MASK) {
gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
gen_op_st_T0_A0(OT_LONG + s->mem_index);
case 5: /* lfence */
case 6: /* mfence */
if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
case 7: /* sfence / clflush */
if ((modrm & 0xc7) == 0xc0) {
/* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
if (!(s->cpuid_features & CPUID_SSE))
if (!(s->cpuid_features & CPUID_CLFLUSH))
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
case 0x10d: /* 3DNow! prefetch(w) */
modrm = ldub_code(s->pc++);
mod = (modrm >> 6) & 3;
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* ignore for now */
case 0x1aa: /* rsm */
if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
if (!(s->flags & HF_SMM_MASK))
if (s->cc_op != CC_OP_DYNAMIC) {
gen_op_set_cc_op(s->cc_op);
s->cc_op = CC_OP_DYNAMIC;
gen_jmp_im(s->pc - s->cs_base);
case 0x10e ... 0x10f:
/* 3DNow! instructions, ignore prefixes */
s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
case 0x110 ... 0x117:
case 0x128 ... 0x12f:
case 0x150 ... 0x177:
case 0x17c ... 0x17f:
case 0x1c4 ... 0x1c6:
case 0x1d0 ... 0x1fe:
gen_sse(s, b, pc_start, rex_r);
/* lock generation */
if (s->prefix & PREFIX_LOCK)
if (s->prefix & PREFIX_LOCK)
/* XXX: ensure that no lock was generated */
gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);

#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
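/* Note: these masks name the six arithmetic EFLAGS bits as a group.
   As an illustration of the convention, architecturally INC/DEC leave
   CF untouched (a CC_OSZAP-style effect) while ADD/SUB update all of
   CC_OSZAPC; the tables below, however, describe what each micro op
   *implementation* reads and writes, which can be wider (see the
   incl/decl note further down). */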
/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
[INDEX_op_aas] = CC_A,
[INDEX_op_aaa] = CC_A,
[INDEX_op_das] = CC_A | CC_C,
[INDEX_op_daa] = CC_A | CC_C,

/* subtle: due to the incl/decl implementation, C is used */
[INDEX_op_update_inc_cc] = CC_C,

[INDEX_op_into] = CC_O,

[INDEX_op_jb_subb] = CC_C,
[INDEX_op_jb_subw] = CC_C,
[INDEX_op_jb_subl] = CC_C,

[INDEX_op_jz_subb] = CC_Z,
[INDEX_op_jz_subw] = CC_Z,
[INDEX_op_jz_subl] = CC_Z,

[INDEX_op_jbe_subb] = CC_Z | CC_C,
[INDEX_op_jbe_subw] = CC_Z | CC_C,
[INDEX_op_jbe_subl] = CC_Z | CC_C,

[INDEX_op_js_subb] = CC_S,
[INDEX_op_js_subw] = CC_S,
[INDEX_op_js_subl] = CC_S,

[INDEX_op_jl_subb] = CC_O | CC_S,
[INDEX_op_jl_subw] = CC_O | CC_S,
[INDEX_op_jl_subl] = CC_O | CC_S,

[INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
[INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
[INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

[INDEX_op_loopnzw] = CC_Z,
[INDEX_op_loopnzl] = CC_Z,
[INDEX_op_loopzw] = CC_Z,
[INDEX_op_loopzl] = CC_Z,

[INDEX_op_seto_T0_cc] = CC_O,
[INDEX_op_setb_T0_cc] = CC_C,
[INDEX_op_setz_T0_cc] = CC_Z,
[INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
[INDEX_op_sets_T0_cc] = CC_S,
[INDEX_op_setp_T0_cc] = CC_P,
[INDEX_op_setl_T0_cc] = CC_O | CC_S,
[INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

[INDEX_op_setb_T0_subb] = CC_C,
[INDEX_op_setb_T0_subw] = CC_C,
[INDEX_op_setb_T0_subl] = CC_C,

[INDEX_op_setz_T0_subb] = CC_Z,
[INDEX_op_setz_T0_subw] = CC_Z,
[INDEX_op_setz_T0_subl] = CC_Z,

[INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
[INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
[INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

[INDEX_op_sets_T0_subb] = CC_S,
[INDEX_op_sets_T0_subw] = CC_S,
[INDEX_op_sets_T0_subl] = CC_S,

[INDEX_op_setl_T0_subb] = CC_O | CC_S,
[INDEX_op_setl_T0_subw] = CC_O | CC_S,
[INDEX_op_setl_T0_subl] = CC_O | CC_S,

[INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
[INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
[INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

[INDEX_op_movl_T0_eflags] = CC_OSZAPC,
[INDEX_op_cmc] = CC_C,
[INDEX_op_salc] = CC_C,

/* needed for correct flag optimisation before string ops */
[INDEX_op_jnz_ecxw] = CC_OSZAPC,
[INDEX_op_jnz_ecxl] = CC_OSZAPC,
[INDEX_op_jz_ecxw] = CC_OSZAPC,
[INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
[INDEX_op_jb_subq] = CC_C,
[INDEX_op_jz_subq] = CC_Z,
[INDEX_op_jbe_subq] = CC_Z | CC_C,
[INDEX_op_js_subq] = CC_S,
[INDEX_op_jl_subq] = CC_O | CC_S,
[INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

[INDEX_op_loopnzq] = CC_Z,
[INDEX_op_loopzq] = CC_Z,

[INDEX_op_setb_T0_subq] = CC_C,
[INDEX_op_setz_T0_subq] = CC_Z,
[INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
[INDEX_op_sets_T0_subq] = CC_S,
[INDEX_op_setl_T0_subq] = CC_O | CC_S,
[INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

[INDEX_op_jnz_ecxq] = CC_OSZAPC,
[INDEX_op_jz_ecxq] = CC_OSZAPC,
#define DEF_READF(SUFFIX)\
[INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
[INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
[INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
[INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
[INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
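/* Note: ADC/SBB consume the incoming carry (dst = a + b + CF), and
   RCL/RCR rotate through the carry bit, so every variant above must be
   marked as a reader of CC_C; plain ADD/SUB variants need no entry in
   the read table because they only produce flags. */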
#ifndef CONFIG_USER_ONLY

/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
[INDEX_op_update2_cc] = CC_OSZAPC,
[INDEX_op_update1_cc] = CC_OSZAPC,
[INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
[INDEX_op_update_neg_cc] = CC_OSZAPC,
/* subtle: due to the incl/decl implementation, C is used */
[INDEX_op_update_inc_cc] = CC_OSZAPC,
[INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

[INDEX_op_mulb_AL_T0] = CC_OSZAPC,
[INDEX_op_mulw_AX_T0] = CC_OSZAPC,
[INDEX_op_mull_EAX_T0] = CC_OSZAPC,
X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
[INDEX_op_imulb_AL_T0] = CC_OSZAPC,
[INDEX_op_imulw_AX_T0] = CC_OSZAPC,
[INDEX_op_imull_EAX_T0] = CC_OSZAPC,
X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
[INDEX_op_imulw_T0_T1] = CC_OSZAPC,
[INDEX_op_imull_T0_T1] = CC_OSZAPC,
X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
[INDEX_op_com_dummy] = CC_OSZAPC,
[INDEX_op_aam] = CC_OSZAPC,
[INDEX_op_aad] = CC_OSZAPC,
[INDEX_op_aas] = CC_OSZAPC,
[INDEX_op_aaa] = CC_OSZAPC,
[INDEX_op_das] = CC_OSZAPC,
[INDEX_op_daa] = CC_OSZAPC,

[INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
[INDEX_op_movw_eflags_T0] = CC_OSZAPC,
[INDEX_op_movl_eflags_T0] = CC_OSZAPC,
[INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
[INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
[INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
[INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
[INDEX_op_clc] = CC_C,
[INDEX_op_stc] = CC_C,
[INDEX_op_cmc] = CC_C,

[INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
[INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
[INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
[INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
[INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
[INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
[INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
[INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

[INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
[INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
[INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
[INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

[INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
[INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
[INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

[INDEX_op_cmpxchg8b] = CC_Z,
[INDEX_op_lar] = CC_Z,
[INDEX_op_lsl] = CC_Z,
[INDEX_op_verr] = CC_Z,
[INDEX_op_verw] = CC_Z,
[INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
[INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
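/* Note: the narrower masks just above are deliberate: CMPXCHG8B and
   the selector checks (LAR/LSL/VERR/VERW) report their result only
   through ZF, and FCOMI/FUCOMI map the FPU condition codes onto
   ZF/PF/CF, leaving the other flags untouched. */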
#define DEF_WRITEF(SUFFIX)\
[INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
[INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
[INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
[INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
[INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
[INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
[INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
[INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
[INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
[INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
[INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
[INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
[INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
[INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
[INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
[INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
[INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
[INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
[INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
[INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
[INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
[INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
#ifndef CONFIG_USER_ONLY

/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
[INDEX_op_update2_cc] = INDEX_op_nop,
[INDEX_op_update1_cc] = INDEX_op_nop,
[INDEX_op_update_neg_cc] = INDEX_op_nop,

/* broken: CC_OP logic must be rewritten */
[INDEX_op_update_inc_cc] = INDEX_op_nop,

[INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
[INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
[INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

[INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
[INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
[INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

[INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
[INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
[INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
[INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
[INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
[INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
[INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
[INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
[INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
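/* Note: a sketch of what this table buys us. In a sequence such as

       shll_T0_T1_cc    <- flags written here are clobbered ...
       cmpl_T0_T1_cc    <- ... by this op, before anything reads them
       jz_subl

   optimize_flags() below finds that the shift's flags are dead and
   rewrites shll_T0_T1_cc into the cheaper shll_T0_T1 through this
   table; ops without an entry default to themselves. */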
#ifndef CONFIG_USER_ONLY
DEF_SIMPLER(_kernel)

static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);

void optimize_flags_init(void)
/* put default values in arrays */
for(i = 0; i < NB_OPS; i++) {
if (opc_simpler[i] == 0)
tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                              TCG_AREG0, offsetof(CPUState, t0), "T0");
cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                              TCG_AREG0, offsetof(CPUState, t1), "T1");
cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                            TCG_AREG0, offsetof(CPUState, t2), "A0");
cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");

/* the helpers are only registered to print debug info */
TCG_HELPER(helper_divl_EAX_T0);
TCG_HELPER(helper_idivl_EAX_T0);
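/* Note: the two T0/T1/A0 setup paths above exist because, when the
   guest word is wider than a host register (TARGET_LONG_BITS >
   HOST_LONG_BITS), the globals must live in CPUState memory slots
   rather than in fixed host registers. */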
/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
int live_flags, write_flags, op;

opc_ptr = opc_buf + opc_buf_len;
/* live_flags contains the flags needed by the next instructions
   in the code. At the end of the block, we consider that all the
   flags must be computed. */
live_flags = CC_OSZAPC;
while (opc_ptr > opc_buf) {
/* if none of the flags written by the instruction is used,
   then we can try to find a simpler instruction */
write_flags = opc_write_flags[op];
if ((live_flags & write_flags) == 0) {
*opc_ptr = opc_simpler[op];
/* compute the live flags before the instruction */
live_flags &= ~write_flags;
live_flags |= opc_read_flags[op];
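/* Note: a compact illustration of the backward scan above. Given the
   op sequence  update2_cc ; cmpl_T0_T1_cc ; jz_subl , the pass starts
   with live_flags = CC_OSZAPC at the block end, keeps cmpl_T0_T1_cc
   because its flags are still live, then notices that everything
   update2_cc writes is clobbered by cmpl before being read, so
   update2_cc is replaced by INDEX_op_nop via opc_simpler[]. */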
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
DisasContext dc1, *dc = &dc1;
target_ulong pc_ptr;
uint16_t *gen_opc_end;
target_ulong pc_start;
target_ulong cs_base;

/* generate intermediate code */
cs_base = tb->cs_base;
cflags = tb->cflags;

dc->pe = (flags >> HF_PE_SHIFT) & 1;
dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
dc->vm86 = (flags >> VM_SHIFT) & 1;
dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
dc->iopl = (flags >> IOPL_SHIFT) & 3;
dc->tf = (flags >> TF_SHIFT) & 1;
dc->singlestep_enabled = env->singlestep_enabled;
dc->cc_op = CC_OP_DYNAMIC;
dc->cs_base = cs_base;
dc->popl_esp_hack = 0;
/* select memory access functions */
if (flags & HF_SOFTMMU_MASK) {
dc->mem_index = 2 * 4;
dc->mem_index = 1 * 4;
dc->cpuid_features = env->cpuid_features;
dc->cpuid_ext_features = env->cpuid_ext_features;
dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
dc->lma = (flags >> HF_LMA_SHIFT) & 1;
dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                || (flags & HF_SOFTMMU_MASK)
/* check addseg logic */
if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
printf("ERROR addseg\n");

cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if TARGET_LONG_BITS > HOST_LONG_BITS
cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

dc->is_jmp = DISAS_NEXT;

if (env->nb_breakpoints > 0) {
for(j = 0; j < env->nb_breakpoints; j++) {
if (env->breakpoints[j] == pc_ptr) {
gen_debug(dc, pc_ptr - dc->cs_base);
j = gen_opc_ptr - gen_opc_buf;
gen_opc_instr_start[lj++] = 0;
gen_opc_pc[lj] = pc_ptr;
gen_opc_cc_op[lj] = dc->cc_op;
gen_opc_instr_start[lj] = 1;
pc_ptr = disas_insn(dc, pc_ptr);
/* stop translation if indicated */
/* in single-step mode, we generate only one instruction and
   raise an exception */
/* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
   the flag and abort the translation to give the irqs a
   chance to happen */
if (dc->tf || dc->singlestep_enabled ||
    (flags & HF_INHIBIT_IRQ_MASK) ||
    (cflags & CF_SINGLE_INSN)) {
gen_jmp_im(pc_ptr - dc->cs_base);
/* if the translation grows too long, stop generation as well */
if (gen_opc_ptr >= gen_opc_end ||
    (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
gen_jmp_im(pc_ptr - dc->cs_base);
*gen_opc_ptr = INDEX_op_end;
/* don't forget to fill in the last values */
j = gen_opc_ptr - gen_opc_buf;
gen_opc_instr_start[lj++] = 0;

if (loglevel & CPU_LOG_TB_CPU) {
cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
if (loglevel & CPU_LOG_TB_IN_ASM) {
fprintf(logfile, "----------------\n");
fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
disas_flags = !dc->code32;
target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
fprintf(logfile, "\n");
if (loglevel & CPU_LOG_TB_OP_OPT) {
fprintf(logfile, "OP before opt:\n");
tcg_dump_ops(&tcg_ctx, logfile);
fprintf(logfile, "\n");

/* optimize flag computations */
optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

tb->size = pc_ptr - pc_start;

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
return gen_intermediate_code_internal(env, tb, 0);

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
return gen_intermediate_code_internal(env, tb, 1);

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
if (loglevel & CPU_LOG_TB_OP) {
fprintf(logfile, "RESTORE:\n");
for(i = 0; i <= pc_pos; i++) {
if (gen_opc_instr_start[i]) {
fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
        searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
        (uint32_t)tb->cs_base);

env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
cc_op = gen_opc_cc_op[pc_pos];
if (cc_op != CC_OP_DYNAMIC)