4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Instruction prefix bits collected while decoding. */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
/* On 32-bit targets the 64-bit-only helpers collapse to NULL/no-op. */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0;
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;
66 static int x86_64_hregs;
69 typedef struct DisasContext {
70 /* current insn context */
71 int override; /* -1 if no override */
74 target_ulong pc; /* pc = eip + cs_base */
75 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
76 static state change (stop translation) */
77 /* current block context */
78 target_ulong cs_base; /* base of CS segment */
79 int pe; /* protected mode */
80 int code32; /* 32 bit code segment */
82 int lma; /* long mode active */
83 int code64; /* 64 bit code segment */
86 int ss32; /* 32 bit stack segment */
87 int cc_op; /* current CC operation */
88 int addseg; /* non zero if either DS/ES/SS have a non zero base */
89 int f_st; /* currently unused */
90 int vm86; /* vm86 mode */
93 int tf; /* TF cpu flag */
94 int singlestep_enabled; /* "hardware" single step enabled */
95 int jmp_opt; /* use direct block chaining for direct jumps */
96 int mem_index; /* select memory access functions */
97 uint64_t flags; /* all execution flags */
98 struct TranslationBlock *tb;
99 int popl_esp_hack; /* for correct popl with esp base handling */
100 int rip_offset; /* only used in x86_64, but left for simplicity */
102 int cpuid_ext_features;
103 int cpuid_ext2_features;
106 static void gen_eob(DisasContext *s);
107 static void gen_jmp(DisasContext *s, target_ulong eip);
108 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
110 /* i386 arith/logic operations */
130 OP_SHL1, /* undocumented */
143 /* I386 int registers */
144 OR_EAX, /* MUST be even numbered */
153 OR_TMP0 = 16, /* temporary operand register */
155 OR_A0, /* temporary register used when doing address evaluation */
158 static inline void gen_op_movl_T0_0(void)
160 tcg_gen_movi_tl(cpu_T[0], 0);
163 static inline void gen_op_movl_T0_im(int32_t val)
165 tcg_gen_movi_tl(cpu_T[0], val);
168 static inline void gen_op_movl_T0_imu(uint32_t val)
170 tcg_gen_movi_tl(cpu_T[0], val);
173 static inline void gen_op_movl_T1_im(int32_t val)
175 tcg_gen_movi_tl(cpu_T[1], val);
178 static inline void gen_op_movl_T1_imu(uint32_t val)
180 tcg_gen_movi_tl(cpu_T[1], val);
183 static inline void gen_op_movl_A0_im(uint32_t val)
185 tcg_gen_movi_tl(cpu_A0, val);
189 static inline void gen_op_movq_A0_im(int64_t val)
191 tcg_gen_movi_tl(cpu_A0, val);
195 static inline void gen_movtl_T0_im(target_ulong val)
197 tcg_gen_movi_tl(cpu_T[0], val);
200 static inline void gen_movtl_T1_im(target_ulong val)
202 tcg_gen_movi_tl(cpu_T[1], val);
205 static inline void gen_op_andl_T0_ffff(void)
207 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
210 static inline void gen_op_andl_T0_im(uint32_t val)
212 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
215 static inline void gen_op_movl_T0_T1(void)
217 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
220 static inline void gen_op_andl_A0_ffff(void)
222 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

/* Expand one table entry per architectural integer register. */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   register slot, for both endiannesses. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
277 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
281 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
282 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
284 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
288 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
292 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
293 /* high part of register set to zero */
294 tcg_gen_movi_tl(cpu_tmp0, 0);
295 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
299 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
304 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
310 static inline void gen_op_mov_reg_T0(int ot, int reg)
312 gen_op_mov_reg_TN(ot, 0, reg);
315 static inline void gen_op_mov_reg_T1(int ot, int reg)
317 gen_op_mov_reg_TN(ot, 1, reg);
320 static inline void gen_op_mov_reg_A0(int size, int reg)
324 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
328 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
329 /* high part of register set to zero */
330 tcg_gen_movi_tl(cpu_tmp0, 0);
331 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
335 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
340 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
346 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
350 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
353 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
358 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
363 static inline void gen_op_movl_A0_reg(int reg)
365 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
368 static inline void gen_op_addl_A0_im(int32_t val)
370 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
372 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
377 static inline void gen_op_addq_A0_im(int64_t val)
379 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
383 static void gen_add_A0_im(DisasContext *s, int val)
387 gen_op_addq_A0_im(val);
390 gen_op_addl_A0_im(val);
393 static inline void gen_op_addl_T0_T1(void)
395 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
398 static inline void gen_op_jmp_T0(void)
400 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
403 static inline void gen_op_addw_ESP_im(int32_t val)
405 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
406 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
410 static inline void gen_op_addl_ESP_im(int32_t val)
412 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
413 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
415 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
417 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
421 static inline void gen_op_addq_ESP_im(int32_t val)
423 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
424 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
425 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
429 static inline void gen_op_set_cc_op(int32_t val)
431 tcg_gen_movi_tl(cpu_tmp0, val);
432 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
435 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
437 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
440 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
442 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
446 static inline void gen_op_movl_A0_seg(int reg)
448 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
451 static inline void gen_op_addl_A0_seg(int reg)
453 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
454 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
456 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
461 static inline void gen_op_movq_A0_seg(int reg)
463 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
466 static inline void gen_op_addq_A0_seg(int reg)
468 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
469 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
472 static inline void gen_op_movq_A0_reg(int reg)
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
477 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
481 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
482 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
486 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
488 DEF_REGS(gen_op_cmovw_, _T1_T0)
491 DEF_REGS(gen_op_cmovl_, _T1_T0)
495 DEF_REGS(gen_op_cmovq_, _T1_T0)
500 #define DEF_ARITHC(SUFFIX)\
502 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
503 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
506 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
507 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
510 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
511 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
514 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
515 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
518 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
522 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
524 #ifndef CONFIG_USER_ONLY
530 static const int cc_op_arithb[8] = {
541 #define DEF_CMPXCHG(SUFFIX)\
542 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
543 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
544 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
545 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
547 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
551 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
553 #ifndef CONFIG_USER_ONLY
559 #define DEF_SHIFT(SUFFIX)\
561 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
562 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
563 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
565 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
566 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
568 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
571 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
572 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
573 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
574 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
575 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
576 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
577 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
578 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
581 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
584 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
587 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
588 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
591 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
592 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
593 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
594 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
595 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
596 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
597 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
598 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
601 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
605 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
607 #ifndef CONFIG_USER_ONLY
613 #define DEF_SHIFTD(SUFFIX, op)\
619 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
623 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
627 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
628 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
631 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
635 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
639 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
641 #ifndef CONFIG_USER_ONLY
642 DEF_SHIFTD(_kernel, im)
643 DEF_SHIFTD(_user, im)
647 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
648 DEF_SHIFTD(_raw, ECX)
649 #ifndef CONFIG_USER_ONLY
650 DEF_SHIFTD(_kernel, ECX)
651 DEF_SHIFTD(_user, ECX)
655 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
658 gen_op_btsw_T0_T1_cc,
659 gen_op_btrw_T0_T1_cc,
660 gen_op_btcw_T0_T1_cc,
664 gen_op_btsl_T0_T1_cc,
665 gen_op_btrl_T0_T1_cc,
666 gen_op_btcl_T0_T1_cc,
671 gen_op_btsq_T0_T1_cc,
672 gen_op_btrq_T0_T1_cc,
673 gen_op_btcq_T0_T1_cc,
678 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
679 gen_op_add_bitw_A0_T1,
680 gen_op_add_bitl_A0_T1,
681 X86_64_ONLY(gen_op_add_bitq_A0_T1),
684 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
701 static inline void gen_op_lds_T0_A0(int idx)
703 int mem_index = (idx >> 2) - 1;
706 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
709 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
713 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static inline void gen_op_ld_T0_A0(int idx)
721 int mem_index = (idx >> 2) - 1;
724 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
727 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
730 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
734 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
739 static inline void gen_op_ldu_T0_A0(int idx)
741 gen_op_ld_T0_A0(idx);
744 static inline void gen_op_ld_T1_A0(int idx)
746 int mem_index = (idx >> 2) - 1;
749 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
752 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
755 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
759 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
764 static inline void gen_op_st_T0_A0(int idx)
766 int mem_index = (idx >> 2) - 1;
769 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
772 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
775 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
779 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
784 static inline void gen_op_st_T1_A0(int idx)
786 int mem_index = (idx >> 2) - 1;
789 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
792 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
795 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
799 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
804 static inline void gen_jmp_im(target_ulong pc)
806 tcg_gen_movi_tl(cpu_tmp0, pc);
807 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
814 override = s->override;
818 gen_op_movq_A0_seg(override);
819 gen_op_addq_A0_reg_sN(0, R_ESI);
821 gen_op_movq_A0_reg(R_ESI);
827 if (s->addseg && override < 0)
830 gen_op_movl_A0_seg(override);
831 gen_op_addl_A0_reg_sN(0, R_ESI);
833 gen_op_movl_A0_reg(R_ESI);
836 /* 16 address, always override */
839 gen_op_movl_A0_reg(R_ESI);
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(override);
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
849 gen_op_movq_A0_reg(R_EDI);
854 gen_op_movl_A0_seg(R_ES);
855 gen_op_addl_A0_reg_sN(0, R_EDI);
857 gen_op_movl_A0_reg(R_EDI);
860 gen_op_movl_A0_reg(R_EDI);
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(R_ES);
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
876 X86_64_ONLY(gen_op_jnz_ecxq),
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
882 X86_64_ONLY(gen_op_jz_ecxq),
885 static GenOpFunc *gen_op_dec_ECX[3] = {
888 X86_64_ONLY(gen_op_decq_ECX),
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
896 X86_64_ONLY(gen_op_jnz_subq),
902 X86_64_ONLY(gen_op_jz_subq),
906 static GenOpFunc *gen_op_in_DX_T0[3] = {
912 static GenOpFunc *gen_op_out_DX_T0[3] = {
918 static GenOpFunc *gen_op_in[3] = {
924 static GenOpFunc *gen_op_out[3] = {
930 static GenOpFunc *gen_check_io_T0[3] = {
936 static GenOpFunc *gen_check_io_DX[3] = {
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
949 gen_check_io_DX[ot]();
951 gen_check_io_T0[ot]();
955 static inline void gen_movs(DisasContext *s, int ot)
957 gen_string_movl_A0_ESI(s);
958 gen_op_ld_T0_A0(ot + s->mem_index);
959 gen_string_movl_A0_EDI(s);
960 gen_op_st_T0_A0(ot + s->mem_index);
961 gen_op_movl_T0_Dshift[ot]();
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
977 static inline void gen_update_cc_op(DisasContext *s)
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
985 /* XXX: does not work with gdbstub "ice" single step - not a
987 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
991 l1 = gen_new_label();
992 l2 = gen_new_label();
993 gen_op_jnz_ecx[s->aflag](l1);
995 gen_jmp_tb(s, next_eip, 1);
1000 static inline void gen_stos(DisasContext *s, int ot)
1002 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1003 gen_string_movl_A0_EDI(s);
1004 gen_op_st_T0_A0(ot + s->mem_index);
1005 gen_op_movl_T0_Dshift[ot]();
1006 #ifdef TARGET_X86_64
1007 if (s->aflag == 2) {
1008 gen_op_addq_EDI_T0();
1012 gen_op_addl_EDI_T0();
1014 gen_op_addw_EDI_T0();
1018 static inline void gen_lods(DisasContext *s, int ot)
1020 gen_string_movl_A0_ESI(s);
1021 gen_op_ld_T0_A0(ot + s->mem_index);
1022 gen_op_mov_reg_T0(ot, R_EAX);
1023 gen_op_movl_T0_Dshift[ot]();
1024 #ifdef TARGET_X86_64
1025 if (s->aflag == 2) {
1026 gen_op_addq_ESI_T0();
1030 gen_op_addl_ESI_T0();
1032 gen_op_addw_ESI_T0();
1036 static inline void gen_scas(DisasContext *s, int ot)
1038 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1039 gen_string_movl_A0_EDI(s);
1040 gen_op_ld_T1_A0(ot + s->mem_index);
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift[ot]();
1043 #ifdef TARGET_X86_64
1044 if (s->aflag == 2) {
1045 gen_op_addq_EDI_T0();
1049 gen_op_addl_EDI_T0();
1051 gen_op_addw_EDI_T0();
1055 static inline void gen_cmps(DisasContext *s, int ot)
1057 gen_string_movl_A0_ESI(s);
1058 gen_op_ld_T0_A0(ot + s->mem_index);
1059 gen_string_movl_A0_EDI(s);
1060 gen_op_ld_T1_A0(ot + s->mem_index);
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift[ot]();
1063 #ifdef TARGET_X86_64
1064 if (s->aflag == 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
1078 static inline void gen_ins(DisasContext *s, int ot)
1080 gen_string_movl_A0_EDI(s);
1082 gen_op_st_T0_A0(ot + s->mem_index);
1083 gen_op_in_DX_T0[ot]();
1084 gen_op_st_T0_A0(ot + s->mem_index);
1085 gen_op_movl_T0_Dshift[ot]();
1086 #ifdef TARGET_X86_64
1087 if (s->aflag == 2) {
1088 gen_op_addq_EDI_T0();
1092 gen_op_addl_EDI_T0();
1094 gen_op_addw_EDI_T0();
1098 static inline void gen_outs(DisasContext *s, int ot)
1100 gen_string_movl_A0_ESI(s);
1101 gen_op_ld_T0_A0(ot + s->mem_index);
1102 gen_op_out_DX_T0[ot]();
1103 gen_op_movl_T0_Dshift[ot]();
1104 #ifdef TARGET_X86_64
1105 if (s->aflag == 2) {
1106 gen_op_addq_ESI_T0();
1110 gen_op_addl_ESI_T0();
1112 gen_op_addw_ESI_T0();
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* Same, for SCAS/CMPS which additionally terminate on the ZF test
   selected by 'nz' (REPZ vs REPNZ). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1202 #ifdef TARGET_X86_64
1205 BUGGY_64(gen_op_jb_subq),
1207 BUGGY_64(gen_op_jbe_subq),
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1226 #ifdef TARGET_X86_64
1235 static GenOpFunc *gen_setcc_slow[8] = {
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1277 #ifdef TARGET_X86_64
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
1291 static void *helper_fp_arith_ST0_FT0[8] = {
1292 helper_fadd_ST0_FT0,
1293 helper_fmul_ST0_FT0,
1294 helper_fcom_ST0_FT0,
1295 helper_fcom_ST0_FT0,
1296 helper_fsub_ST0_FT0,
1297 helper_fsubr_ST0_FT0,
1298 helper_fdiv_ST0_FT0,
1299 helper_fdivr_ST0_FT0,
1302 /* NOTE the exception in "r" op ordering */
1303 static void *helper_fp_arith_STN_ST0[8] = {
1304 helper_fadd_STN_ST0,
1305 helper_fmul_STN_ST0,
1308 helper_fsubr_STN_ST0,
1309 helper_fsub_STN_ST0,
1310 helper_fdivr_STN_ST0,
1311 helper_fdiv_STN_ST0,
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
1315 static void gen_op(DisasContext *s1, int op, int ot, int d)
1317 GenOpFunc *gen_update_cc;
1320 gen_op_mov_TN_reg(ot, 0, d);
1322 gen_op_ld_T0_A0(ot + s1->mem_index);
1327 if (s1->cc_op != CC_OP_DYNAMIC)
1328 gen_op_set_cc_op(s1->cc_op);
1330 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331 gen_op_mov_reg_T0(ot, d);
1333 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1335 s1->cc_op = CC_OP_DYNAMIC;
1338 gen_op_addl_T0_T1();
1339 s1->cc_op = CC_OP_ADDB + ot;
1340 gen_update_cc = gen_op_update2_cc;
1343 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1344 s1->cc_op = CC_OP_SUBB + ot;
1345 gen_update_cc = gen_op_update2_cc;
1349 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1350 s1->cc_op = CC_OP_LOGICB + ot;
1351 gen_update_cc = gen_op_update1_cc;
1354 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355 s1->cc_op = CC_OP_LOGICB + ot;
1356 gen_update_cc = gen_op_update1_cc;
1359 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1360 s1->cc_op = CC_OP_LOGICB + ot;
1361 gen_update_cc = gen_op_update1_cc;
1364 gen_op_cmpl_T0_T1_cc();
1365 s1->cc_op = CC_OP_SUBB + ot;
1366 gen_update_cc = NULL;
1369 if (op != OP_CMPL) {
1371 gen_op_mov_reg_T0(ot, d);
1373 gen_op_st_T0_A0(ot + s1->mem_index);
1375 /* the flags update must happen after the memory write (precise
1376 exception support) */
1382 /* if d == OR_TMP0, it means memory operand (address in A0) */
1383 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1386 gen_op_mov_TN_reg(ot, 0, d);
1388 gen_op_ld_T0_A0(ot + s1->mem_index);
1389 if (s1->cc_op != CC_OP_DYNAMIC)
1390 gen_op_set_cc_op(s1->cc_op);
1393 s1->cc_op = CC_OP_INCB + ot;
1396 s1->cc_op = CC_OP_DECB + ot;
1399 gen_op_mov_reg_T0(ot, d);
1401 gen_op_st_T0_A0(ot + s1->mem_index);
1402 gen_op_update_inc_cc();
1405 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1408 gen_op_mov_TN_reg(ot, 0, d);
1410 gen_op_ld_T0_A0(ot + s1->mem_index);
1412 gen_op_mov_TN_reg(ot, 1, s);
1413 /* for zero counts, flags are not updated, so must do it dynamically */
1414 if (s1->cc_op != CC_OP_DYNAMIC)
1415 gen_op_set_cc_op(s1->cc_op);
1418 gen_op_shift_T0_T1_cc[ot][op]();
1420 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1422 gen_op_mov_reg_T0(ot, d);
1423 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1426 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1428 /* currently not optimized */
1429 gen_op_movl_T1_im(c);
1430 gen_shift(s1, op, ot, d, OR_TMP1);
/* NOTE(review): mangled extract of gen_lea_modrm() — decodes a ModR/M (and
   optional SIB + displacement) effective address into A0, honoring address
   size, segment overrides, RIP-relative addressing, and the popl/esp hack.
   Interior lines (braces, case labels, several #else/#endif) are missing
   from this extract; restore from upstream before compiling. */
1433 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1441 int mod, rm, code, override, must_add_seg;
1443 override = s->override;
1444 must_add_seg = s->addseg;
1447 mod = (modrm >> 6) & 3;
1459 code = ldub_code(s->pc++);
1460 scale = (code >> 6) & 3;
1461 index = ((code >> 3) & 7) | REX_X(s);
1468 if ((base & 7) == 5) {
1470 disp = (int32_t)ldl_code(s->pc);
/* RIP-relative addressing: disp is relative to the next instruction */
1472 if (CODE64(s) && !havesib) {
1473 disp += s->pc + s->rip_offset;
1480 disp = (int8_t)ldub_code(s->pc++);
1484 disp = ldl_code(s->pc);
1490 /* for correct popl handling with esp */
1491 if (base == 4 && s->popl_esp_hack)
1492 disp += s->popl_esp_hack;
1493 #ifdef TARGET_X86_64
1494 if (s->aflag == 2) {
1495 gen_op_movq_A0_reg(base);
1497 gen_op_addq_A0_im(disp);
1502 gen_op_movl_A0_reg(base);
1504 gen_op_addl_A0_im(disp);
1507 #ifdef TARGET_X86_64
1508 if (s->aflag == 2) {
1509 gen_op_movq_A0_im(disp);
1513 gen_op_movl_A0_im(disp);
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN(scale, index);
1524 gen_op_addl_A0_reg_sN(scale, index);
/* EBP/ESP bases default to the SS segment */
1529 if (base == R_EBP || base == R_ESP)
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(override);
1540 gen_op_addl_A0_seg(override);
/* 16-bit addressing modes below */
1547 disp = lduw_code(s->pc);
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1557 disp = (int8_t)ldub_code(s->pc++);
1561 disp = lduw_code(s->pc);
1567 gen_op_movl_A0_reg(R_EBX);
1568 gen_op_addl_A0_reg_sN(0, R_ESI);
1571 gen_op_movl_A0_reg(R_EBX);
1572 gen_op_addl_A0_reg_sN(0, R_EDI);
1575 gen_op_movl_A0_reg(R_EBP);
1576 gen_op_addl_A0_reg_sN(0, R_ESI);
1579 gen_op_movl_A0_reg(R_EBP);
1580 gen_op_addl_A0_reg_sN(0, R_EDI);
1583 gen_op_movl_A0_reg(R_ESI);
1586 gen_op_movl_A0_reg(R_EDI);
1589 gen_op_movl_A0_reg(R_EBP);
1593 gen_op_movl_A0_reg(R_EBX);
1597 gen_op_addl_A0_im(disp);
1598 gen_op_andl_A0_ffff();
/* rm 2/3/6 use BP and therefore default to SS */
1602 if (rm == 2 || rm == 3 || rm == 6)
1607 gen_op_addl_A0_seg(override);
/* NOTE(review): mangled extract of gen_nop_modrm() — consumes the ModR/M
   bytes (and SIB/displacement) of a multi-byte NOP without generating
   address code; the decode body is elided from this extract. */
1617 static void gen_nop_modrm(DisasContext *s, int modrm)
1619 int mod, rm, base, code;
1621 mod = (modrm >> 6) & 3;
1631 code = ldub_code(s->pc++);
1667 /* used for LEA and MOV AX, mem */
1668 static void gen_add_A0_ds_seg(DisasContext *s)
1670 int override, must_add_seg;
1671 must_add_seg = s->addseg;
1673 if (s->override >= 0) {
1674 override = s->override;
1680 #ifdef TARGET_X86_64
1682 gen_op_addq_A0_seg(override);
1686 gen_op_addl_A0_seg(override);
1691 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1693 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1695 int mod, rm, opreg, disp;
1697 mod = (modrm >> 6) & 3;
1698 rm = (modrm & 7) | REX_B(s);
1702 gen_op_mov_TN_reg(ot, 0, reg);
1703 gen_op_mov_reg_T0(ot, rm);
1705 gen_op_mov_TN_reg(ot, 0, rm);
1707 gen_op_mov_reg_T0(ot, reg);
1710 gen_lea_modrm(s, modrm, &opreg, &disp);
1713 gen_op_mov_TN_reg(ot, 0, reg);
1714 gen_op_st_T0_A0(ot + s->mem_index);
1716 gen_op_ld_T0_A0(ot + s->mem_index);
1718 gen_op_mov_reg_T0(ot, reg);
1723 static inline uint32_t insn_get(DisasContext *s, int ot)
1729 ret = ldub_code(s->pc);
1733 ret = lduw_code(s->pc);
1738 ret = ldl_code(s->pc);
1745 static inline int insn_const_size(unsigned int ot)
1753 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1755 TranslationBlock *tb;
1758 pc = s->cs_base + eip;
1760 /* NOTE: we handle the case where the TB spans two pages here */
1761 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1762 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1763 /* jump to same page: we can use a direct jump */
1764 tcg_gen_goto_tb(tb_num);
1766 tcg_gen_exit_tb((long)tb + tb_num);
1768 /* jump to another page: currently not optimized */
/* NOTE(review): mangled extract of gen_jcc()/gen_setcc() — emit a
   conditional jump / SETcc, using the fast sub-based micro-ops when the
   current cc_op allows it and the slow full-flags path otherwise. The
   switch scaffolding and several branches are elided from this extract;
   restore from upstream before compiling. */
1774 static inline void gen_jcc(DisasContext *s, int b,
1775 target_ulong val, target_ulong next_eip)
1777 TranslationBlock *tb;
1784 jcc_op = (b >> 1) & 7;
1788 /* we optimize the cmp/jcc case */
1793 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1796 /* some jumps are easy to compute */
1838 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1841 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1853 if (s->cc_op != CC_OP_DYNAMIC) {
1854 gen_op_set_cc_op(s->cc_op);
1855 s->cc_op = CC_OP_DYNAMIC;
1859 gen_setcc_slow[jcc_op]();
1860 func = gen_op_jnz_T0_label;
1870 l1 = gen_new_label();
1873 gen_goto_tb(s, 0, next_eip);
1876 gen_goto_tb(s, 1, val);
1881 if (s->cc_op != CC_OP_DYNAMIC) {
1882 gen_op_set_cc_op(s->cc_op);
1883 s->cc_op = CC_OP_DYNAMIC;
1885 gen_setcc_slow[jcc_op]();
1891 l1 = gen_new_label();
1892 l2 = gen_new_label();
1893 gen_op_jnz_T0_label(l1);
1894 gen_jmp_im(next_eip);
1895 gen_op_jmp_label(l2);
1903 static void gen_setcc(DisasContext *s, int b)
1909 jcc_op = (b >> 1) & 7;
1911 /* we optimize the cmp/jcc case */
1916 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1921 /* some jumps are easy to compute */
1948 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1951 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1959 if (s->cc_op != CC_OP_DYNAMIC)
1960 gen_op_set_cc_op(s->cc_op);
1961 func = gen_setcc_slow[jcc_op];
/* Load the selector held in T0 into segment register seg_reg.  In
   protected (non-vm86) mode the load goes through a helper that can
   fault, so cc state and EIP are committed first.
   (Interior lines elided in this excerpt.) */
1970 /* move T0 to seg_reg and compute if the CPU state may change. Never
1971 call this function with seg_reg == R_CS */
1972 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1974 if (s->pe && !s->vm86) {
1975 /* XXX: optimize by finding processor state dynamically */
1976 if (s->cc_op != CC_OP_DYNAMIC)
1977 gen_op_set_cc_op(s->cc_op);
/* commit EIP so a faulting selector load reports the right address */
1978 gen_jmp_im(cur_eip);
1979 gen_op_movl_seg_T0(seg_reg);
1980 /* abort translation because the addseg value may change or
1981 because ss32 may change. For R_SS, translation must always
1982 stop as a special handling must be done to disable hardware
1983 interrupts for the next instruction */
1984 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
/* real/vm86 mode: direct selector write, no permission checks */
1987 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1988 if (seg_reg == R_SS)
1993 #define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
/* Emit an SVM IOIO intercept check before an I/O instruction.
   'type' packs the 64-bit exit-info word checked by the helper.
   Only generated when the guest enabled the IOIO_PROT intercept;
   compiled out for user-only builds.
   (Interior lines elided in this excerpt.) */
1996 gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1998 #if !defined(CONFIG_USER_ONLY)
1999 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
2000 if (s->cc_op != CC_OP_DYNAMIC)
2001 gen_op_set_cc_op(s->cc_op);
/* T1 = address of the next instruction, saved for the vmexit path */
2002 SVM_movq_T1_im(s->pc - s->cs_base);
2003 gen_jmp_im(pc_start - s->cs_base);
2005 gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
/* the helper may clobber flags, so drop the cached cc_op */
2006 s->cc_op = CC_OP_DYNAMIC;
2007 /* FIXME: maybe we could move the io intercept vector to the TB as well
2008 so we know if this is an EOB or not ... let's assume it's not
2015 static inline int svm_is_rep(int prefixes)
2017 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
/* Emit an SVM intercept check for exit code 'type', with additional
   exit information in 'param'.  Bails out immediately when no SVM
   intercept is active.  (Interior lines elided in this excerpt.) */
2021 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2022 uint64_t type, uint64_t param)
2024 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2025 /* no SVM activated */
2028 /* CRx and DRx reads/writes */
2029 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2030 if (s->cc_op != CC_OP_DYNAMIC) {
2031 gen_op_set_cc_op(s->cc_op);
2032 s->cc_op = CC_OP_DYNAMIC;
/* commit EIP and pass 'param' (in T1) to the runtime check */
2034 gen_jmp_im(pc_start - s->cs_base);
2035 SVM_movq_T1_im(param);
2037 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2038 /* this is a special case as we do not know if the interception occurs
2039 so we assume there was none */
/* MSR accesses: same commit/check pattern, gated on MSR_PROT */
2042 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2043 if (s->cc_op != CC_OP_DYNAMIC) {
2044 gen_op_set_cc_op(s->cc_op);
2045 s->cc_op = CC_OP_DYNAMIC;
2047 gen_jmp_im(pc_start - s->cs_base);
2048 SVM_movq_T1_im(param);
2050 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2051 /* this is a special case as we do not know if the interception occurs
2052 so we assume there was none */
/* remaining exit codes map 1:1 onto intercept bits relative to
   SVM_EXIT_INTR/INTERCEPT_INTR */
2057 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2058 if (s->cc_op != CC_OP_DYNAMIC) {
2059 gen_op_set_cc_op(s->cc_op);
/* NOTE(review): sets CC_OP_EFLAGS here, unlike CC_OP_DYNAMIC in the
   cases above -- presumably because the vmexit path materializes
   EFLAGS; confirm against the vmexit helper */
2060 s->cc_op = CC_OP_EFLAGS;
2062 gen_jmp_im(pc_start - s->cs_base);
2063 SVM_movq_T1_im(param);
/* unconditional vmexit for this category */
2065 gen_op_svm_vmexit(type >> 32, type);
2066 /* we can optimize this one so TBs don't get longer
2067 than up to vmexit */
/* Convenience wrapper: SVM intercept check with zero extra exit
   information. */
2076 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2078 return gen_svm_check_intercept_param(s, pc_start, type, 0);
/* Adjust the stack pointer by 'addend', using the add width matching
   the current stack mode (64-bit RSP, 32-bit ESP, or 16-bit SP).
   (Interior lines elided in this excerpt.) */
2081 static inline void gen_stack_update(DisasContext *s, int addend)
2083 #ifdef TARGET_X86_64
2085 gen_op_addq_ESP_im(addend);
2089 gen_op_addl_ESP_im(addend);
/* 16-bit stack: update only SP, preserving the high half of ESP */
2091 gen_op_addw_ESP_im(addend);
/* Push the value in T0 onto the guest stack.  The store width and
   address arithmetic depend on dflag (operand size), ss32 (stack
   segment size) and addseg (whether the SS base must be added).
   (Interior lines elided in this excerpt.) */
2095 /* generate a push. It depends on ss32, addseg and dflag */
2096 static void gen_push_T0(DisasContext *s)
2098 #ifdef TARGET_X86_64
2100 gen_op_movq_A0_reg(R_ESP);
/* 64-bit mode: 8-byte push unless a 16-bit operand override */
2102 gen_op_addq_A0_im(-8);
2103 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2105 gen_op_addq_A0_im(-2);
2106 gen_op_st_T0_A0(OT_WORD + s->mem_index);
/* write the decremented address back to RSP */
2108 gen_op_mov_reg_A0(2, R_ESP);
2112 gen_op_movl_A0_reg(R_ESP);
2114 gen_op_addl_A0_im(-2);
2116 gen_op_addl_A0_im(-4);
/* keep the raw (segment-relative) ESP value in T1 before the base */
2119 gen_op_movl_T1_A0();
2120 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap the offset to 64K, then add the SS base */
2123 gen_op_andl_A0_ffff();
2124 gen_op_movl_T1_A0();
2125 gen_op_addl_A0_seg(R_SS);
2127 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
/* when no base was added, A0 already holds the new ESP */
2128 if (s->ss32 && !s->addseg)
2129 gen_op_mov_reg_A0(1, R_ESP);
2131 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
/* Push the value in T1 onto the guest stack; T0-free variant used by
   "call Ev" where T0 holds the branch target.
   (Interior lines elided in this excerpt.) */
2135 /* generate a push. It depends on ss32, addseg and dflag */
2136 /* slower version for T1, only used for call Ev */
2137 static void gen_push_T1(DisasContext *s)
2139 #ifdef TARGET_X86_64
2141 gen_op_movq_A0_reg(R_ESP);
2143 gen_op_addq_A0_im(-8);
2144 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2146 gen_op_addq_A0_im(-2);
/* NOTE(review): stores T0 although every other store in this
   T1-push helper uses T1 -- looks like it should be
   gen_op_st_T1_A0 (16-bit push in 64-bit mode path); confirm */
2147 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2149 gen_op_mov_reg_A0(2, R_ESP);
2153 gen_op_movl_A0_reg(R_ESP);
2155 gen_op_addl_A0_im(-2);
2157 gen_op_addl_A0_im(-4);
2160 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap the offset to 64K before adding the SS base */
2163 gen_op_andl_A0_ffff();
2164 gen_op_addl_A0_seg(R_SS);
2166 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2168 if (s->ss32 && !s->addseg)
2169 gen_op_mov_reg_A0(1, R_ESP);
/* unlike gen_push_T0, ESP is adjusted via gen_stack_update here */
2171 gen_stack_update(s, (-2) << s->dflag);
/* Load the top-of-stack value into T0 without adjusting ESP; the
   pointer update is done separately by gen_pop_update() so a fault
   during the load leaves ESP unchanged (precise exceptions).
   (Interior lines elided in this excerpt.) */
2175 /* two step pop is necessary for precise exceptions */
2176 static void gen_pop_T0(DisasContext *s)
2178 #ifdef TARGET_X86_64
2180 gen_op_movq_A0_reg(R_ESP);
2181 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2185 gen_op_movl_A0_reg(R_ESP);
2188 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap the offset to 64K before adding the SS base */
2190 gen_op_andl_A0_ffff();
2191 gen_op_addl_A0_seg(R_SS);
2193 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
/* Second half of a pop: bump the stack pointer by the operand size
   (8 in 64-bit mode with default operand size, else 2 or 4).
   (Interior lines elided in this excerpt.) */
2197 static void gen_pop_update(DisasContext *s)
2199 #ifdef TARGET_X86_64
2200 if (CODE64(s) && s->dflag) {
2201 gen_stack_update(s, 8);
2205 gen_stack_update(s, 2 << s->dflag);
/* Compute the linear address of the stack top into A0, keeping the
   raw offset in T1.  (Interior lines elided in this excerpt.) */
2209 static void gen_stack_A0(DisasContext *s)
2211 gen_op_movl_A0_reg(R_ESP);
/* 16-bit stack: wrap the offset to 64K */
2213 gen_op_andl_A0_ffff();
2214 gen_op_movl_T1_A0();
2216 gen_op_addl_A0_seg(R_SS);
/* PUSHA/PUSHAD: store all eight general registers (EDI first in
   memory order, EAX last pushed) below the current stack top, then
   write the new stack pointer from T1.
   (Interior lines elided in this excerpt.) */
2238 /* NOTE: wrap around in 16 bit not fully handled */
2239 static void gen_pusha(DisasContext *s)
/* A0 = ESP minus the full frame size (8 regs * operand size) */
2223 gen_op_movl_A0_reg(R_ESP);
2224 gen_op_addl_A0_im(-16 << s->dflag);
2226 gen_op_andl_A0_ffff();
/* T1 keeps the new ESP offset before the SS base is applied */
2227 gen_op_movl_T1_A0();
2229 gen_op_addl_A0_seg(R_SS);
2230 for(i = 0;i < 8; i++) {
2231 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2232 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2233 gen_op_addl_A0_im(2 << s->dflag);
2235 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* POPA/POPAD: reload the general registers from the stack (EDI is at
   the lowest address); the popped ESP slot is skipped, and the new
   stack pointer (old ESP + frame size, kept in T1) is written last.
   (Interior lines elided in this excerpt.) */
2238 /* NOTE: wrap around in 16 bit not fully handled */
2239 static void gen_popa(DisasContext *s)
2242 gen_op_movl_A0_reg(R_ESP);
2244 gen_op_andl_A0_ffff();
2245 gen_op_movl_T1_A0();
/* T1 = final ESP = old ESP + 8 * operand size */
2246 gen_op_addl_T1_im(16 << s->dflag);
2248 gen_op_addl_A0_seg(R_SS);
2249 for(i = 0;i < 8; i++) {
2250 /* ESP is not reloaded */
2252 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2253 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2255 gen_op_addl_A0_im(2 << s->dflag);
2257 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* ENTER instruction: push EBP, optionally copy 'level' frame
   pointers via a helper, set EBP to the new frame base and reserve
   'esp_addend' bytes of locals.  Separate 64-bit and legacy paths.
   (Interior lines elided in this excerpt.) */
2260 static void gen_enter(DisasContext *s, int esp_addend, int level)
2265 #ifdef TARGET_X86_64
2267 ot = s->dflag ? OT_QUAD : OT_WORD;
/* A0 = RSP - opsize: slot for the saved RBP; T1 remembers it */
2270 gen_op_movl_A0_reg(R_ESP);
2271 gen_op_addq_A0_im(-opsize);
2272 gen_op_movl_T1_A0();
/* push rbp */
2275 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2276 gen_op_st_T0_A0(ot + s->mem_index);
2278 /* XXX: must save state */
2279 tcg_gen_helper_0_2(helper_enter64_level,
2280 tcg_const_i32(level),
2281 tcg_const_i32((ot == OT_QUAD)));
/* new frame base, then drop RSP past locals and copied pointers */
2283 gen_op_mov_reg_T1(ot, R_EBP);
2284 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2285 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2289 ot = s->dflag + OT_WORD;
2290 opsize = 2 << s->dflag;
2292 gen_op_movl_A0_reg(R_ESP);
2293 gen_op_addl_A0_im(-opsize);
/* 16-bit stack: wrap the offset to 64K */
2295 gen_op_andl_A0_ffff();
2296 gen_op_movl_T1_A0();
2298 gen_op_addl_A0_seg(R_SS);
/* push ebp */
2300 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2301 gen_op_st_T0_A0(ot + s->mem_index);
2303 /* XXX: must save state */
2304 tcg_gen_helper_0_2(helper_enter_level,
2305 tcg_const_i32(level),
2306 tcg_const_i32(s->dflag));
2308 gen_op_mov_reg_T1(ot, R_EBP);
2309 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2310 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* Raise guest exception 'trapno' at EIP 'cur_eip': commit the cached
   condition-code state and EIP, then call the raise_exception
   helper.  (Interior lines elided in this excerpt.) */
2314 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2316 if (s->cc_op != CC_OP_DYNAMIC)
2317 gen_op_set_cc_op(s->cc_op);
2318 gen_jmp_im(cur_eip);
2319 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno))
/* Raise software interrupt 'intno' (e.g. INT n): commit flags and
   EIP, then call the helper with the instruction length so the
   return address (next_eip) can be pushed.
   (Interior lines elided in this excerpt.) */
2323 /* an interrupt is different from an exception because of the
2325 static void gen_interrupt(DisasContext *s, int intno,
2326 target_ulong cur_eip, target_ulong next_eip)
2328 if (s->cc_op != CC_OP_DYNAMIC)
2329 gen_op_set_cc_op(s->cc_op);
2330 gen_jmp_im(cur_eip);
2331 tcg_gen_helper_0_2(helper_raise_interrupt,
2332 tcg_const_i32(intno),
2333 tcg_const_i32(next_eip - cur_eip));
/* Stop execution at 'cur_eip' and enter the debugger: commit flags
   and EIP, then call the debug helper.
   (Interior lines elided in this excerpt.) */
2337 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2339 if (s->cc_op != CC_OP_DYNAMIC)
2340 gen_op_set_cc_op(s->cc_op);
2341 gen_jmp_im(cur_eip);
2342 tcg_gen_helper_0_0(helper_debug);
/* Generic end-of-block: commit flags, clear the interrupt-inhibit
   flag if this TB set it, and honour single-step/debug modes.
   (Interior lines elided in this excerpt.) */
2346 /* generate a generic end of block. Trace exception is also generated
2348 static void gen_eob(DisasContext *s)
2350 if (s->cc_op != CC_OP_DYNAMIC)
2351 gen_op_set_cc_op(s->cc_op);
/* this TB ran with IRQs inhibited (e.g. after mov ss); re-enable */
2352 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2353 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2355 if (s->singlestep_enabled) {
2356 tcg_gen_helper_0_0(helper_debug);
2358 tcg_gen_helper_0_0(helper_single_step);
/* Emit a direct jump to guest address 'eip' via translation-block
   chaining slot 'tb_num'.  (Interior lines elided in this excerpt.) */
2365 /* generate a jump to eip. No segment change must happen before as a
2366 direct call to the next block may occur */
2367 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2370 if (s->cc_op != CC_OP_DYNAMIC) {
2371 gen_op_set_cc_op(s->cc_op);
2372 s->cc_op = CC_OP_DYNAMIC;
2374 gen_goto_tb(s, tb_num, eip);
/* Jump to 'eip' using chaining slot 0. */
2382 static void gen_jmp(DisasContext *s, target_ulong eip)
2384 gen_jmp_tb(s, eip, 0);
/* Load a 64-bit value from guest address A0 into the CPU-state field
   at 'offset'.  'idx' carries the memory index in its upper bits. */
2387 static inline void gen_ldq_env_A0(int idx, int offset)
2389 int mem_index = (idx >> 2) - 1;
2390 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2391 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
/* Store the 64-bit CPU-state field at 'offset' to guest address A0. */
2394 static inline void gen_stq_env_A0(int idx, int offset)
2396 int mem_index = (idx >> 2) - 1;
2397 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2398 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
/* Load a full 128-bit XMM value (two 64-bit halves) from guest
   address A0 into the XMMReg at 'offset'. */
2401 static inline void gen_ldo_env_A0(int idx, int offset)
2403 int mem_index = (idx >> 2) - 1;
2404 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2405 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
/* high quadword lives 8 bytes above A0 */
2406 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2407 tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2408 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
/* Store a full 128-bit XMM value (two 64-bit halves) from the XMMReg
   at 'offset' to guest address A0. */
2411 static inline void gen_sto_env_A0(int idx, int offset)
2413 int mem_index = (idx >> 2) - 1;
2414 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2415 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
/* high quadword lives 8 bytes above A0 */
2416 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2417 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2418 tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
/* Copy a 128-bit (octaword) value between two CPU-state offsets,
   64 bits at a time. */
2421 static inline void gen_op_movo(int d_offset, int s_offset)
2423 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2424 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2425 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2426 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
/* Copy a 64-bit value between two CPU-state offsets. */
2429 static inline void gen_op_movq(int d_offset, int s_offset)
2431 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2432 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
/* Copy a 32-bit value between two CPU-state offsets. */
2435 static inline void gen_op_movl(int d_offset, int s_offset)
2437 tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2438 tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
/* Zero the 64-bit CPU-state field at 'd_offset' (used to clear the
   high half of XMM registers). */
2441 static inline void gen_op_movq_env_0(int d_offset)
2443 tcg_gen_movi_i64(cpu_tmp1, 0);
2444 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2447 #define SSE_SPECIAL ((void *)1)
2448 #define SSE_DUMMY ((void *)2)
2450 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2451 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2452 helper_ ## x ## ss, helper_ ## x ## sd, }
/* Dispatch table for the 0x0F two-byte opcode space (MMX/SSE).  The
   second index selects the prefix variant, in SSE_FOP order:
   ps (no prefix / MMX), pd (0x66), ss (0xF3), sd (0xF2).
   SSE_SPECIAL entries are decoded by hand in gen_sse(); SSE_DUMMY
   marks 3DNow!-related stubs.  NULL means invalid encoding. */
2454 static void *sse_op_table1[256][4] = {
2455 /* 3DNow! extensions */
2456 [0x0e] = { SSE_DUMMY }, /* femms */
2457 [0x0f] = { SSE_DUMMY }, /* pf... */
2458 /* pure SSE operations */
2459 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2460 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2461 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2462 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2463 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2464 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2465 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2466 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2468 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2469 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2470 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2471 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2472 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2473 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2474 [0x2e] = { helper_ucomiss, helper_ucomisd },
2475 [0x2f] = { helper_comiss, helper_comisd },
2476 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2477 [0x51] = SSE_FOP(sqrt),
2478 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2479 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2480 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2481 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2482 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2483 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2484 [0x58] = SSE_FOP(add),
2485 [0x59] = SSE_FOP(mul),
2486 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2487 helper_cvtss2sd, helper_cvtsd2ss },
2488 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2489 [0x5c] = SSE_FOP(sub),
2490 [0x5d] = SSE_FOP(min),
2491 [0x5e] = SSE_FOP(div),
2492 [0x5f] = SSE_FOP(max),
2494 [0xc2] = SSE_FOP(cmpeq),
2495 [0xc6] = { helper_shufps, helper_shufpd },
2497 /* MMX ops and their SSE extensions */
2498 [0x60] = MMX_OP2(punpcklbw),
2499 [0x61] = MMX_OP2(punpcklwd),
2500 [0x62] = MMX_OP2(punpckldq),
2501 [0x63] = MMX_OP2(packsswb),
2502 [0x64] = MMX_OP2(pcmpgtb),
2503 [0x65] = MMX_OP2(pcmpgtw),
2504 [0x66] = MMX_OP2(pcmpgtl),
2505 [0x67] = MMX_OP2(packuswb),
2506 [0x68] = MMX_OP2(punpckhbw),
2507 [0x69] = MMX_OP2(punpckhwd),
2508 [0x6a] = MMX_OP2(punpckhdq),
2509 [0x6b] = MMX_OP2(packssdw),
2510 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2511 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2512 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2513 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2514 [0x70] = { helper_pshufw_mmx,
2517 helper_pshuflw_xmm },
2518 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2519 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2520 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2521 [0x74] = MMX_OP2(pcmpeqb),
2522 [0x75] = MMX_OP2(pcmpeqw),
2523 [0x76] = MMX_OP2(pcmpeql),
2524 [0x77] = { SSE_DUMMY }, /* emms */
2525 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2526 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2527 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2528 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2529 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2530 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2531 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2532 [0xd1] = MMX_OP2(psrlw),
2533 [0xd2] = MMX_OP2(psrld),
2534 [0xd3] = MMX_OP2(psrlq),
2535 [0xd4] = MMX_OP2(paddq),
2536 [0xd5] = MMX_OP2(pmullw),
2537 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2538 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2539 [0xd8] = MMX_OP2(psubusb),
2540 [0xd9] = MMX_OP2(psubusw),
2541 [0xda] = MMX_OP2(pminub),
2542 [0xdb] = MMX_OP2(pand),
2543 [0xdc] = MMX_OP2(paddusb),
2544 [0xdd] = MMX_OP2(paddusw),
2545 [0xde] = MMX_OP2(pmaxub),
2546 [0xdf] = MMX_OP2(pandn),
2547 [0xe0] = MMX_OP2(pavgb),
2548 [0xe1] = MMX_OP2(psraw),
2549 [0xe2] = MMX_OP2(psrad),
2550 [0xe3] = MMX_OP2(pavgw),
2551 [0xe4] = MMX_OP2(pmulhuw),
2552 [0xe5] = MMX_OP2(pmulhw),
2553 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2554 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
2555 [0xe8] = MMX_OP2(psubsb),
2556 [0xe9] = MMX_OP2(psubsw),
2557 [0xea] = MMX_OP2(pminsw),
2558 [0xeb] = MMX_OP2(por),
2559 [0xec] = MMX_OP2(paddsb),
2560 [0xed] = MMX_OP2(paddsw),
2561 [0xee] = MMX_OP2(pmaxsw),
2562 [0xef] = MMX_OP2(pxor),
2563 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2564 [0xf1] = MMX_OP2(psllw),
2565 [0xf2] = MMX_OP2(pslld),
2566 [0xf3] = MMX_OP2(psllq),
2567 [0xf4] = MMX_OP2(pmuludq),
2568 [0xf5] = MMX_OP2(pmaddwd),
2569 [0xf6] = MMX_OP2(psadbw),
2570 [0xf7] = MMX_OP2(maskmov),
2571 [0xf8] = MMX_OP2(psubb),
2572 [0xf9] = MMX_OP2(psubw),
2573 [0xfa] = MMX_OP2(psubl),
2574 [0xfb] = MMX_OP2(psubq),
2575 [0xfc] = MMX_OP2(paddb),
2576 [0xfd] = MMX_OP2(paddw),
2577 [0xfe] = MMX_OP2(paddl),
/* Shift-by-immediate groups (opcodes 0x71/0x72/0x73): row index is
   8 * ((opcode - 1) & 3) + modrm /r field (see the lookup in
   gen_sse()); column 0 = MMX form, column 1 = XMM form. */
2580 static void *sse_op_table2[3 * 8][2] = {
2581 [0 + 2] = MMX_OP2(psrlw),
2582 [0 + 4] = MMX_OP2(psraw),
2583 [0 + 6] = MMX_OP2(psllw),
2584 [8 + 2] = MMX_OP2(psrld),
2585 [8 + 4] = MMX_OP2(psrad),
2586 [8 + 6] = MMX_OP2(pslld),
2587 [16 + 2] = MMX_OP2(psrlq),
/* psrldq/pslldq exist only in the XMM (0x66-prefixed) form */
2588 [16 + 3] = { NULL, helper_psrldq_xmm },
2589 [16 + 6] = MMX_OP2(psllq),
2590 [16 + 7] = { NULL, helper_pslldq_xmm },
/* Scalar int<->float conversions (cvtsi2ss/sd, cvttss/sd2si,
   cvtss/sd2si).  Entries shown here are the 64-bit-integer variants,
   present only on x86-64 builds (X86_64_ONLY expands to NULL
   otherwise).  (Interior lines elided in this excerpt.) */
2593 static void *sse_op_table3[4 * 3] = {
2596 X86_64_ONLY(helper_cvtsq2ss),
2597 X86_64_ONLY(helper_cvtsq2sd),
2601 X86_64_ONLY(helper_cvttss2sq),
2602 X86_64_ONLY(helper_cvttsd2sq),
2606 X86_64_ONLY(helper_cvtss2sq),
2607 X86_64_ONLY(helper_cvtsd2sq),
/* 8 x 4 helper dispatch table (contents elided in this excerpt). */
2610 static void *sse_op_table4[8][4] = {
/* 3DNow! dispatch table, indexed by the instruction's trailing
   immediate opcode byte.  Unlisted entries are NULL (invalid). */
2621 static void *sse_op_table5[256] = {
2622 [0x0c] = helper_pi2fw,
2623 [0x0d] = helper_pi2fd,
2624 [0x1c] = helper_pf2iw,
2625 [0x1d] = helper_pf2id,
2626 [0x8a] = helper_pfnacc,
2627 [0x8e] = helper_pfpnacc,
2628 [0x90] = helper_pfcmpge,
2629 [0x94] = helper_pfmin,
2630 [0x96] = helper_pfrcp,
2631 [0x97] = helper_pfrsqrt,
2632 [0x9a] = helper_pfsub,
2633 [0x9e] = helper_pfadd,
2634 [0xa0] = helper_pfcmpgt,
2635 [0xa4] = helper_pfmax,
2636 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2637 [0xa7] = helper_movq, /* pfrsqit1 */
2638 [0xaa] = helper_pfsubr,
2639 [0xae] = helper_pfacc,
2640 [0xb0] = helper_pfcmpeq,
2641 [0xb4] = helper_pfmul,
2642 [0xb6] = helper_movq, /* pfrcpit2 */
2643 [0xb7] = helper_pmulhrw_mmx,
2644 [0xbb] = helper_pswapd,
2645 [0xbf] = helper_pavgb_mmx /* pavgusb */
2648 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2650 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2651 int modrm, mod, rm, reg, reg_addr, offset_addr;
2655 if (s->prefix & PREFIX_DATA)
2657 else if (s->prefix & PREFIX_REPZ)
2659 else if (s->prefix & PREFIX_REPNZ)
2663 sse_op2 = sse_op_table1[b][b1];
2666 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2676 /* simple MMX/SSE operation */
2677 if (s->flags & HF_TS_MASK) {
2678 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2681 if (s->flags & HF_EM_MASK) {
2683 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2686 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2689 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2692 tcg_gen_helper_0_0(helper_emms);
2697 tcg_gen_helper_0_0(helper_emms);
2700 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2701 the static cpu state) */
2703 tcg_gen_helper_0_0(helper_enter_mmx);
2706 modrm = ldub_code(s->pc++);
2707 reg = ((modrm >> 3) & 7);
2710 mod = (modrm >> 6) & 3;
2711 if (sse_op2 == SSE_SPECIAL) {
2714 case 0x0e7: /* movntq */
2717 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2718 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2720 case 0x1e7: /* movntdq */
2721 case 0x02b: /* movntps */
2722 case 0x12b: /* movntps */
2723 case 0x3f0: /* lddqu */
2726 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2727 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2729 case 0x6e: /* movd mm, ea */
2730 #ifdef TARGET_X86_64
2731 if (s->dflag == 2) {
2732 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2733 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2737 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2738 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2739 offsetof(CPUX86State,fpregs[reg].mmx));
2740 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2743 case 0x16e: /* movd xmm, ea */
2744 #ifdef TARGET_X86_64
2745 if (s->dflag == 2) {
2746 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2747 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2748 offsetof(CPUX86State,xmm_regs[reg]));
2749 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2753 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2754 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2755 offsetof(CPUX86State,xmm_regs[reg]));
2756 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2757 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2760 case 0x6f: /* movq mm, ea */
2762 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2763 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2766 tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2767 offsetof(CPUX86State,fpregs[rm].mmx));
2768 tcg_gen_st_i64(cpu_tmp1, cpu_env,
2769 offsetof(CPUX86State,fpregs[reg].mmx));
2772 case 0x010: /* movups */
2773 case 0x110: /* movupd */
2774 case 0x028: /* movaps */
2775 case 0x128: /* movapd */
2776 case 0x16f: /* movdqa xmm, ea */
2777 case 0x26f: /* movdqu xmm, ea */
2779 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2780 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2782 rm = (modrm & 7) | REX_B(s);
2783 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2784 offsetof(CPUX86State,xmm_regs[rm]));
2787 case 0x210: /* movss xmm, ea */
2789 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2790 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2791 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2793 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2794 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2795 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2797 rm = (modrm & 7) | REX_B(s);
2798 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2799 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2802 case 0x310: /* movsd xmm, ea */
2804 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2805 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2807 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2808 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2810 rm = (modrm & 7) | REX_B(s);
2811 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2812 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2815 case 0x012: /* movlps */
2816 case 0x112: /* movlpd */
2818 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2819 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2822 rm = (modrm & 7) | REX_B(s);
2823 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2824 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2827 case 0x212: /* movsldup */
2829 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2830 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2832 rm = (modrm & 7) | REX_B(s);
2833 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2834 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2835 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2836 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2838 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2839 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2840 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2841 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2843 case 0x312: /* movddup */
2845 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2846 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2848 rm = (modrm & 7) | REX_B(s);
2849 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2850 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2852 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2853 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2855 case 0x016: /* movhps */
2856 case 0x116: /* movhpd */
2858 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2859 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2862 rm = (modrm & 7) | REX_B(s);
2863 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2864 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2867 case 0x216: /* movshdup */
2869 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2870 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2872 rm = (modrm & 7) | REX_B(s);
2873 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2874 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2875 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2876 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2878 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2879 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2880 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2881 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2883 case 0x7e: /* movd ea, mm */
2884 #ifdef TARGET_X86_64
2885 if (s->dflag == 2) {
2886 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2887 offsetof(CPUX86State,fpregs[reg].mmx));
2888 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2892 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2893 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2894 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2897 case 0x17e: /* movd ea, xmm */
2898 #ifdef TARGET_X86_64
2899 if (s->dflag == 2) {
2900 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2901 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2902 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2906 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2907 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2908 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2911 case 0x27e: /* movq xmm, ea */
2913 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2914 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2916 rm = (modrm & 7) | REX_B(s);
2917 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2918 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2920 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2922 case 0x7f: /* movq ea, mm */
2924 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2925 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2928 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2929 offsetof(CPUX86State,fpregs[reg].mmx));
2932 case 0x011: /* movups */
2933 case 0x111: /* movupd */
2934 case 0x029: /* movaps */
2935 case 0x129: /* movapd */
2936 case 0x17f: /* movdqa ea, xmm */
2937 case 0x27f: /* movdqu ea, xmm */
2939 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2940 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2942 rm = (modrm & 7) | REX_B(s);
2943 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2944 offsetof(CPUX86State,xmm_regs[reg]));
2947 case 0x211: /* movss ea, xmm */
2949 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2950 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2951 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2953 rm = (modrm & 7) | REX_B(s);
2954 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2955 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2958 case 0x311: /* movsd ea, xmm */
2960 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2961 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2963 rm = (modrm & 7) | REX_B(s);
2964 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2965 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2968 case 0x013: /* movlps */
2969 case 0x113: /* movlpd */
2971 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2972 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2977 case 0x017: /* movhps */
2978 case 0x117: /* movhpd */
2980 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2981 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2986 case 0x71: /* shift mm, im */
2989 case 0x171: /* shift xmm, im */
2992 val = ldub_code(s->pc++);
2994 gen_op_movl_T0_im(val);
2995 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2997 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2998 op1_offset = offsetof(CPUX86State,xmm_t0);
3000 gen_op_movl_T0_im(val);
3001 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3003 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3004 op1_offset = offsetof(CPUX86State,mmx_t0);
3006 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3010 rm = (modrm & 7) | REX_B(s);
3011 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3014 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3016 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3017 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3018 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3020 case 0x050: /* movmskps */
3021 rm = (modrm & 7) | REX_B(s);
3022 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3023 offsetof(CPUX86State,xmm_regs[rm]));
3024 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3025 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3026 gen_op_mov_reg_T0(OT_LONG, reg);
3028 case 0x150: /* movmskpd */
3029 rm = (modrm & 7) | REX_B(s);
3030 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3031 offsetof(CPUX86State,xmm_regs[rm]));
3032 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3033 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3034 gen_op_mov_reg_T0(OT_LONG, reg);
3036 case 0x02a: /* cvtpi2ps */
3037 case 0x12a: /* cvtpi2pd */
3038 tcg_gen_helper_0_0(helper_enter_mmx);
3040 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3041 op2_offset = offsetof(CPUX86State,mmx_t0);
3042 gen_ldq_env_A0(s->mem_index, op2_offset);
3045 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3047 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3048 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3049 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3052 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3056 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3060 case 0x22a: /* cvtsi2ss */
3061 case 0x32a: /* cvtsi2sd */
3062 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3063 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3064 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3065 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3066 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3067 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3068 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3070 case 0x02c: /* cvttps2pi */
3071 case 0x12c: /* cvttpd2pi */
3072 case 0x02d: /* cvtps2pi */
3073 case 0x12d: /* cvtpd2pi */
3074 tcg_gen_helper_0_0(helper_enter_mmx);
3076 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3077 op2_offset = offsetof(CPUX86State,xmm_t0);
3078 gen_ldo_env_A0(s->mem_index, op2_offset);
3080 rm = (modrm & 7) | REX_B(s);
3081 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3083 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3084 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3085 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3088 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3091 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3094 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3097 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3101 case 0x22c: /* cvttss2si */
3102 case 0x32c: /* cvttsd2si */
3103 case 0x22d: /* cvtss2si */
3104 case 0x32d: /* cvtsd2si */
3105 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3107 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3109 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3111 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3112 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3114 op2_offset = offsetof(CPUX86State,xmm_t0);
3116 rm = (modrm & 7) | REX_B(s);
3117 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3119 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3121 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3122 if (ot == OT_LONG) {
3123 tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3124 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3126 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3128 gen_op_mov_reg_T0(ot, reg);
3130 case 0xc4: /* pinsrw */
3133 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3134 val = ldub_code(s->pc++);
3137 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3138 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3141 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3142 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3145 case 0xc5: /* pextrw */
3149 val = ldub_code(s->pc++);
3152 rm = (modrm & 7) | REX_B(s);
3153 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3154 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3158 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3159 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3161 reg = ((modrm >> 3) & 7) | rex_r;
3162 gen_op_mov_reg_T0(OT_LONG, reg);
3164 case 0x1d6: /* movq ea, xmm */
3166 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3167 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3169 rm = (modrm & 7) | REX_B(s);
3170 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3171 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3172 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3175 case 0x2d6: /* movq2dq */
3176 tcg_gen_helper_0_0(helper_enter_mmx);
3178 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3179 offsetof(CPUX86State,fpregs[rm].mmx));
3180 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3182 case 0x3d6: /* movdq2q */
3183 tcg_gen_helper_0_0(helper_enter_mmx);
3184 rm = (modrm & 7) | REX_B(s);
3185 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3186 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3188 case 0xd7: /* pmovmskb */
3193 rm = (modrm & 7) | REX_B(s);
3194 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3195 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3198 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3199 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3201 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3202 reg = ((modrm >> 3) & 7) | rex_r;
3203 gen_op_mov_reg_T0(OT_LONG, reg);
3209 /* generic MMX or SSE operation */
3212 /* maskmov : we must prepare A0 */
3215 #ifdef TARGET_X86_64
3216 if (s->aflag == 2) {
3217 gen_op_movq_A0_reg(R_EDI);
3221 gen_op_movl_A0_reg(R_EDI);
3223 gen_op_andl_A0_ffff();
3225 gen_add_A0_ds_seg(s);
3227 case 0x70: /* pshufx insn */
3228 case 0xc6: /* pshufx insn */
3229 case 0xc2: /* compare insns */
3236 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3238 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3239 op2_offset = offsetof(CPUX86State,xmm_t0);
3240 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3242 /* specific case for SSE single instructions */
3245 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3246 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3249 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3252 gen_ldo_env_A0(s->mem_index, op2_offset);
3255 rm = (modrm & 7) | REX_B(s);
3256 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3259 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3261 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3262 op2_offset = offsetof(CPUX86State,mmx_t0);
3263 gen_ldq_env_A0(s->mem_index, op2_offset);
3266 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3270 case 0x0f: /* 3DNow! data insns */
3271 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3273 val = ldub_code(s->pc++);
3274 sse_op2 = sse_op_table5[val];
3277 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3278 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3279 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3281 case 0x70: /* pshufx insn */
3282 case 0xc6: /* pshufx insn */
3283 val = ldub_code(s->pc++);
3284 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3285 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3286 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3290 val = ldub_code(s->pc++);
3293 sse_op2 = sse_op_table4[val][b1];
3294 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3295 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3296 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3299 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3300 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3301 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3304 if (b == 0x2e || b == 0x2f) {
3305 /* just to keep the EFLAGS optimization correct */
3307 s->cc_op = CC_OP_EFLAGS;
3312 /* convert one instruction. s->is_jmp is set if the translation must
3313 be stopped. Return the next pc value */
3314 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3316 int b, prefixes, aflag, dflag;
3318 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3319 target_ulong next_eip, tval;
3329 #ifdef TARGET_X86_64
3334 s->rip_offset = 0; /* for relative ip address */
3336 b = ldub_code(s->pc);
3338 /* check prefixes */
3339 #ifdef TARGET_X86_64
3343 prefixes |= PREFIX_REPZ;
3346 prefixes |= PREFIX_REPNZ;
3349 prefixes |= PREFIX_LOCK;
3370 prefixes |= PREFIX_DATA;
3373 prefixes |= PREFIX_ADR;
3377 rex_w = (b >> 3) & 1;
3378 rex_r = (b & 0x4) << 1;
3379 s->rex_x = (b & 0x2) << 2;
3380 REX_B(s) = (b & 0x1) << 3;
3381 x86_64_hregs = 1; /* select uniform byte register addressing */
3385 /* 0x66 is ignored if rex.w is set */
3388 if (prefixes & PREFIX_DATA)
3391 if (!(prefixes & PREFIX_ADR))
3398 prefixes |= PREFIX_REPZ;
3401 prefixes |= PREFIX_REPNZ;
3404 prefixes |= PREFIX_LOCK;
3425 prefixes |= PREFIX_DATA;
3428 prefixes |= PREFIX_ADR;
3431 if (prefixes & PREFIX_DATA)
3433 if (prefixes & PREFIX_ADR)
3437 s->prefix = prefixes;
3441 /* lock generation */
3442 if (prefixes & PREFIX_LOCK)
3445 /* now check op code */
3449 /**************************/
3450 /* extended op code */
3451 b = ldub_code(s->pc++) | 0x100;
3454 /**************************/
3472 ot = dflag + OT_WORD;
3475 case 0: /* OP Ev, Gv */
3476 modrm = ldub_code(s->pc++);
3477 reg = ((modrm >> 3) & 7) | rex_r;
3478 mod = (modrm >> 6) & 3;
3479 rm = (modrm & 7) | REX_B(s);
3481 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3483 } else if (op == OP_XORL && rm == reg) {
3485 /* xor reg, reg optimisation */
3487 s->cc_op = CC_OP_LOGICB + ot;
3488 gen_op_mov_reg_T0(ot, reg);
3489 gen_op_update1_cc();
3494 gen_op_mov_TN_reg(ot, 1, reg);
3495 gen_op(s, op, ot, opreg);
3497 case 1: /* OP Gv, Ev */
3498 modrm = ldub_code(s->pc++);
3499 mod = (modrm >> 6) & 3;
3500 reg = ((modrm >> 3) & 7) | rex_r;
3501 rm = (modrm & 7) | REX_B(s);
3503 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3504 gen_op_ld_T1_A0(ot + s->mem_index);
3505 } else if (op == OP_XORL && rm == reg) {
3508 gen_op_mov_TN_reg(ot, 1, rm);
3510 gen_op(s, op, ot, reg);
3512 case 2: /* OP A, Iv */
3513 val = insn_get(s, ot);
3514 gen_op_movl_T1_im(val);
3515 gen_op(s, op, ot, OR_EAX);
3521 case 0x80: /* GRP1 */
3531 ot = dflag + OT_WORD;
3533 modrm = ldub_code(s->pc++);
3534 mod = (modrm >> 6) & 3;
3535 rm = (modrm & 7) | REX_B(s);
3536 op = (modrm >> 3) & 7;
3542 s->rip_offset = insn_const_size(ot);
3543 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3554 val = insn_get(s, ot);
3557 val = (int8_t)insn_get(s, OT_BYTE);
3560 gen_op_movl_T1_im(val);
3561 gen_op(s, op, ot, opreg);
3565 /**************************/
3566 /* inc, dec, and other misc arith */
3567 case 0x40 ... 0x47: /* inc Gv */
3568 ot = dflag ? OT_LONG : OT_WORD;
3569 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3571 case 0x48 ... 0x4f: /* dec Gv */
3572 ot = dflag ? OT_LONG : OT_WORD;
3573 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3575 case 0xf6: /* GRP3 */
3580 ot = dflag + OT_WORD;
3582 modrm = ldub_code(s->pc++);
3583 mod = (modrm >> 6) & 3;
3584 rm = (modrm & 7) | REX_B(s);
3585 op = (modrm >> 3) & 7;
3588 s->rip_offset = insn_const_size(ot);
3589 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3590 gen_op_ld_T0_A0(ot + s->mem_index);
3592 gen_op_mov_TN_reg(ot, 0, rm);
3597 val = insn_get(s, ot);
3598 gen_op_movl_T1_im(val);
3599 gen_op_testl_T0_T1_cc();
3600 s->cc_op = CC_OP_LOGICB + ot;
3605 gen_op_st_T0_A0(ot + s->mem_index);
3607 gen_op_mov_reg_T0(ot, rm);
3613 gen_op_st_T0_A0(ot + s->mem_index);
3615 gen_op_mov_reg_T0(ot, rm);
3617 gen_op_update_neg_cc();
3618 s->cc_op = CC_OP_SUBB + ot;
3623 gen_op_mulb_AL_T0();
3624 s->cc_op = CC_OP_MULB;
3627 gen_op_mulw_AX_T0();
3628 s->cc_op = CC_OP_MULW;
3632 gen_op_mull_EAX_T0();
3633 s->cc_op = CC_OP_MULL;
3635 #ifdef TARGET_X86_64
3637 gen_op_mulq_EAX_T0();
3638 s->cc_op = CC_OP_MULQ;
3646 gen_op_imulb_AL_T0();
3647 s->cc_op = CC_OP_MULB;
3650 gen_op_imulw_AX_T0();
3651 s->cc_op = CC_OP_MULW;
3655 gen_op_imull_EAX_T0();
3656 s->cc_op = CC_OP_MULL;
3658 #ifdef TARGET_X86_64
3660 gen_op_imulq_EAX_T0();
3661 s->cc_op = CC_OP_MULQ;
3669 gen_jmp_im(pc_start - s->cs_base);
3670 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3673 gen_jmp_im(pc_start - s->cs_base);
3674 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3678 gen_jmp_im(pc_start - s->cs_base);
3679 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3681 #ifdef TARGET_X86_64
3683 gen_jmp_im(pc_start - s->cs_base);
3684 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3692 gen_jmp_im(pc_start - s->cs_base);
3693 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3696 gen_jmp_im(pc_start - s->cs_base);
3697 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3701 gen_jmp_im(pc_start - s->cs_base);
3702 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3704 #ifdef TARGET_X86_64
3706 gen_jmp_im(pc_start - s->cs_base);
3707 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3717 case 0xfe: /* GRP4 */
3718 case 0xff: /* GRP5 */
3722 ot = dflag + OT_WORD;
3724 modrm = ldub_code(s->pc++);
3725 mod = (modrm >> 6) & 3;
3726 rm = (modrm & 7) | REX_B(s);
3727 op = (modrm >> 3) & 7;
3728 if (op >= 2 && b == 0xfe) {
3732 if (op == 2 || op == 4) {
3733 /* operand size for jumps is 64 bit */
3735 } else if (op == 3 || op == 5) {
3736 /* for calls, the operand is 16 or 32 bit, even
3738 ot = dflag ? OT_LONG : OT_WORD;
3739 } else if (op == 6) {
3740 /* default push size is 64 bit */
3741 ot = dflag ? OT_QUAD : OT_WORD;
3745 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3746 if (op >= 2 && op != 3 && op != 5)
3747 gen_op_ld_T0_A0(ot + s->mem_index);
3749 gen_op_mov_TN_reg(ot, 0, rm);
3753 case 0: /* inc Ev */
3758 gen_inc(s, ot, opreg, 1);
3760 case 1: /* dec Ev */
3765 gen_inc(s, ot, opreg, -1);
3767 case 2: /* call Ev */
3768 /* XXX: optimize if memory (no 'and' is necessary) */
3770 gen_op_andl_T0_ffff();
3771 next_eip = s->pc - s->cs_base;
3772 gen_movtl_T1_im(next_eip);
3777 case 3: /* lcall Ev */
3778 gen_op_ld_T1_A0(ot + s->mem_index);
3779 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3780 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3782 if (s->pe && !s->vm86) {
3783 if (s->cc_op != CC_OP_DYNAMIC)
3784 gen_op_set_cc_op(s->cc_op);
3785 gen_jmp_im(pc_start - s->cs_base);
3786 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3788 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3792 case 4: /* jmp Ev */
3794 gen_op_andl_T0_ffff();
3798 case 5: /* ljmp Ev */
3799 gen_op_ld_T1_A0(ot + s->mem_index);
3800 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3801 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3803 if (s->pe && !s->vm86) {
3804 if (s->cc_op != CC_OP_DYNAMIC)
3805 gen_op_set_cc_op(s->cc_op);
3806 gen_jmp_im(pc_start - s->cs_base);
3807 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3809 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3810 gen_op_movl_T0_T1();
3815 case 6: /* push Ev */
3823 case 0x84: /* test Ev, Gv */
3828 ot = dflag + OT_WORD;
3830 modrm = ldub_code(s->pc++);
3831 mod = (modrm >> 6) & 3;
3832 rm = (modrm & 7) | REX_B(s);
3833 reg = ((modrm >> 3) & 7) | rex_r;
3835 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3836 gen_op_mov_TN_reg(ot, 1, reg);
3837 gen_op_testl_T0_T1_cc();
3838 s->cc_op = CC_OP_LOGICB + ot;
3841 case 0xa8: /* test eAX, Iv */
3846 ot = dflag + OT_WORD;
3847 val = insn_get(s, ot);
3849 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3850 gen_op_movl_T1_im(val);
3851 gen_op_testl_T0_T1_cc();
3852 s->cc_op = CC_OP_LOGICB + ot;
3855 case 0x98: /* CWDE/CBW */
3856 #ifdef TARGET_X86_64
3858 gen_op_movslq_RAX_EAX();
3862 gen_op_movswl_EAX_AX();
3864 gen_op_movsbw_AX_AL();
3866 case 0x99: /* CDQ/CWD */
3867 #ifdef TARGET_X86_64
3869 gen_op_movsqo_RDX_RAX();
3873 gen_op_movslq_EDX_EAX();
3875 gen_op_movswl_DX_AX();
3877 case 0x1af: /* imul Gv, Ev */
3878 case 0x69: /* imul Gv, Ev, I */
3880 ot = dflag + OT_WORD;
3881 modrm = ldub_code(s->pc++);
3882 reg = ((modrm >> 3) & 7) | rex_r;
3884 s->rip_offset = insn_const_size(ot);
3887 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3889 val = insn_get(s, ot);
3890 gen_op_movl_T1_im(val);
3891 } else if (b == 0x6b) {
3892 val = (int8_t)insn_get(s, OT_BYTE);
3893 gen_op_movl_T1_im(val);
3895 gen_op_mov_TN_reg(ot, 1, reg);
3898 #ifdef TARGET_X86_64
3899 if (ot == OT_QUAD) {
3900 gen_op_imulq_T0_T1();
3903 if (ot == OT_LONG) {
3904 gen_op_imull_T0_T1();
3906 gen_op_imulw_T0_T1();
3908 gen_op_mov_reg_T0(ot, reg);
3909 s->cc_op = CC_OP_MULB + ot;
3912 case 0x1c1: /* xadd Ev, Gv */
3916 ot = dflag + OT_WORD;
3917 modrm = ldub_code(s->pc++);
3918 reg = ((modrm >> 3) & 7) | rex_r;
3919 mod = (modrm >> 6) & 3;
3921 rm = (modrm & 7) | REX_B(s);
3922 gen_op_mov_TN_reg(ot, 0, reg);
3923 gen_op_mov_TN_reg(ot, 1, rm);
3924 gen_op_addl_T0_T1();
3925 gen_op_mov_reg_T1(ot, reg);
3926 gen_op_mov_reg_T0(ot, rm);
3928 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3929 gen_op_mov_TN_reg(ot, 0, reg);
3930 gen_op_ld_T1_A0(ot + s->mem_index);
3931 gen_op_addl_T0_T1();
3932 gen_op_st_T0_A0(ot + s->mem_index);
3933 gen_op_mov_reg_T1(ot, reg);
3935 gen_op_update2_cc();
3936 s->cc_op = CC_OP_ADDB + ot;
3939 case 0x1b1: /* cmpxchg Ev, Gv */
3943 ot = dflag + OT_WORD;
3944 modrm = ldub_code(s->pc++);
3945 reg = ((modrm >> 3) & 7) | rex_r;
3946 mod = (modrm >> 6) & 3;
3947 gen_op_mov_TN_reg(ot, 1, reg);
3949 rm = (modrm & 7) | REX_B(s);
3950 gen_op_mov_TN_reg(ot, 0, rm);
3951 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3952 gen_op_mov_reg_T0(ot, rm);
3954 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3955 gen_op_ld_T0_A0(ot + s->mem_index);
3956 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3958 s->cc_op = CC_OP_SUBB + ot;
3960 case 0x1c7: /* cmpxchg8b */
3961 modrm = ldub_code(s->pc++);
3962 mod = (modrm >> 6) & 3;
3963 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3965 gen_jmp_im(pc_start - s->cs_base);
3966 if (s->cc_op != CC_OP_DYNAMIC)
3967 gen_op_set_cc_op(s->cc_op);
3968 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3970 s->cc_op = CC_OP_EFLAGS;
3973 /**************************/
3975 case 0x50 ... 0x57: /* push */
3976 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3979 case 0x58 ... 0x5f: /* pop */
3981 ot = dflag ? OT_QUAD : OT_WORD;
3983 ot = dflag + OT_WORD;
3986 /* NOTE: order is important for pop %sp */
3988 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3990 case 0x60: /* pusha */
3995 case 0x61: /* popa */
4000 case 0x68: /* push Iv */
4003 ot = dflag ? OT_QUAD : OT_WORD;
4005 ot = dflag + OT_WORD;
4008 val = insn_get(s, ot);
4010 val = (int8_t)insn_get(s, OT_BYTE);
4011 gen_op_movl_T0_im(val);
4014 case 0x8f: /* pop Ev */
4016 ot = dflag ? OT_QUAD : OT_WORD;
4018 ot = dflag + OT_WORD;
4020 modrm = ldub_code(s->pc++);
4021 mod = (modrm >> 6) & 3;
4024 /* NOTE: order is important for pop %sp */
4026 rm = (modrm & 7) | REX_B(s);
4027 gen_op_mov_reg_T0(ot, rm);
4029 /* NOTE: order is important too for MMU exceptions */
4030 s->popl_esp_hack = 1 << ot;
4031 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4032 s->popl_esp_hack = 0;
4036 case 0xc8: /* enter */
4039 val = lduw_code(s->pc);
4041 level = ldub_code(s->pc++);
4042 gen_enter(s, val, level);
4045 case 0xc9: /* leave */
4046 /* XXX: exception not precise (ESP is updated before potential exception) */
4048 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4049 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4050 } else if (s->ss32) {
4051 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4052 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4054 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4055 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4059 ot = dflag ? OT_QUAD : OT_WORD;
4061 ot = dflag + OT_WORD;
4063 gen_op_mov_reg_T0(ot, R_EBP);
4066 case 0x06: /* push es */
4067 case 0x0e: /* push cs */
4068 case 0x16: /* push ss */
4069 case 0x1e: /* push ds */
4072 gen_op_movl_T0_seg(b >> 3);
4075 case 0x1a0: /* push fs */
4076 case 0x1a8: /* push gs */
4077 gen_op_movl_T0_seg((b >> 3) & 7);
4080 case 0x07: /* pop es */
4081 case 0x17: /* pop ss */
4082 case 0x1f: /* pop ds */
4087 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4090 /* if reg == SS, inhibit interrupts/trace. */
4091 /* If several instructions disable interrupts, only the
4093 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4094 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4098 gen_jmp_im(s->pc - s->cs_base);
4102 case 0x1a1: /* pop fs */
4103 case 0x1a9: /* pop gs */
4105 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4108 gen_jmp_im(s->pc - s->cs_base);
4113 /**************************/
4116 case 0x89: /* mov Gv, Ev */
4120 ot = dflag + OT_WORD;
4121 modrm = ldub_code(s->pc++);
4122 reg = ((modrm >> 3) & 7) | rex_r;
4124 /* generate a generic store */
4125 gen_ldst_modrm(s, modrm, ot, reg, 1);
4128 case 0xc7: /* mov Ev, Iv */
4132 ot = dflag + OT_WORD;
4133 modrm = ldub_code(s->pc++);
4134 mod = (modrm >> 6) & 3;
4136 s->rip_offset = insn_const_size(ot);
4137 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4139 val = insn_get(s, ot);
4140 gen_op_movl_T0_im(val);
4142 gen_op_st_T0_A0(ot + s->mem_index);
4144 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4147 case 0x8b: /* mov Ev, Gv */
4151 ot = OT_WORD + dflag;
4152 modrm = ldub_code(s->pc++);
4153 reg = ((modrm >> 3) & 7) | rex_r;
4155 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4156 gen_op_mov_reg_T0(ot, reg);
4158 case 0x8e: /* mov seg, Gv */
4159 modrm = ldub_code(s->pc++);
4160 reg = (modrm >> 3) & 7;
4161 if (reg >= 6 || reg == R_CS)
4163 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4164 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4166 /* if reg == SS, inhibit interrupts/trace */
4167 /* If several instructions disable interrupts, only the
4169 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4170 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4174 gen_jmp_im(s->pc - s->cs_base);
4178 case 0x8c: /* mov Gv, seg */
4179 modrm = ldub_code(s->pc++);
4180 reg = (modrm >> 3) & 7;
4181 mod = (modrm >> 6) & 3;
4184 gen_op_movl_T0_seg(reg);
4186 ot = OT_WORD + dflag;
4189 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4192 case 0x1b6: /* movzbS Gv, Eb */
4193 case 0x1b7: /* movzwS Gv, Eb */
4194 case 0x1be: /* movsbS Gv, Eb */
4195 case 0x1bf: /* movswS Gv, Eb */
4198 /* d_ot is the size of destination */
4199 d_ot = dflag + OT_WORD;
4200 /* ot is the size of source */
4201 ot = (b & 1) + OT_BYTE;
4202 modrm = ldub_code(s->pc++);
4203 reg = ((modrm >> 3) & 7) | rex_r;
4204 mod = (modrm >> 6) & 3;
4205 rm = (modrm & 7) | REX_B(s);
4208 gen_op_mov_TN_reg(ot, 0, rm);
4209 switch(ot | (b & 8)) {
4211 gen_op_movzbl_T0_T0();
4214 gen_op_movsbl_T0_T0();
4217 gen_op_movzwl_T0_T0();
4221 gen_op_movswl_T0_T0();
4224 gen_op_mov_reg_T0(d_ot, reg);
4226 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4228 gen_op_lds_T0_A0(ot + s->mem_index);
4230 gen_op_ldu_T0_A0(ot + s->mem_index);
4232 gen_op_mov_reg_T0(d_ot, reg);
4237 case 0x8d: /* lea */
4238 ot = dflag + OT_WORD;
4239 modrm = ldub_code(s->pc++);
4240 mod = (modrm >> 6) & 3;
4243 reg = ((modrm >> 3) & 7) | rex_r;
4244 /* we must ensure that no segment is added */
4248 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4250 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4253 case 0xa0: /* mov EAX, Ov */
4255 case 0xa2: /* mov Ov, EAX */
4258 target_ulong offset_addr;
4263 ot = dflag + OT_WORD;
4264 #ifdef TARGET_X86_64
4265 if (s->aflag == 2) {
4266 offset_addr = ldq_code(s->pc);
4268 gen_op_movq_A0_im(offset_addr);
4273 offset_addr = insn_get(s, OT_LONG);
4275 offset_addr = insn_get(s, OT_WORD);
4277 gen_op_movl_A0_im(offset_addr);
4279 gen_add_A0_ds_seg(s);
4281 gen_op_ld_T0_A0(ot + s->mem_index);
4282 gen_op_mov_reg_T0(ot, R_EAX);
4284 gen_op_mov_TN_reg(ot, 0, R_EAX);
4285 gen_op_st_T0_A0(ot + s->mem_index);
4289 case 0xd7: /* xlat */
4290 #ifdef TARGET_X86_64
4291 if (s->aflag == 2) {
4292 gen_op_movq_A0_reg(R_EBX);
4293 gen_op_addq_A0_AL();
4297 gen_op_movl_A0_reg(R_EBX);
4298 gen_op_addl_A0_AL();
4300 gen_op_andl_A0_ffff();
4302 gen_add_A0_ds_seg(s);
4303 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4304 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4306 case 0xb0 ... 0xb7: /* mov R, Ib */
4307 val = insn_get(s, OT_BYTE);
4308 gen_op_movl_T0_im(val);
4309 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4311 case 0xb8 ... 0xbf: /* mov R, Iv */
4312 #ifdef TARGET_X86_64
4316 tmp = ldq_code(s->pc);
4318 reg = (b & 7) | REX_B(s);
4319 gen_movtl_T0_im(tmp);
4320 gen_op_mov_reg_T0(OT_QUAD, reg);
4324 ot = dflag ? OT_LONG : OT_WORD;
4325 val = insn_get(s, ot);
4326 reg = (b & 7) | REX_B(s);
4327 gen_op_movl_T0_im(val);
4328 gen_op_mov_reg_T0(ot, reg);
4332 case 0x91 ... 0x97: /* xchg R, EAX */
4333 ot = dflag + OT_WORD;
4334 reg = (b & 7) | REX_B(s);
4338 case 0x87: /* xchg Ev, Gv */
4342 ot = dflag + OT_WORD;
4343 modrm = ldub_code(s->pc++);
4344 reg = ((modrm >> 3) & 7) | rex_r;
4345 mod = (modrm >> 6) & 3;
4347 rm = (modrm & 7) | REX_B(s);
4349 gen_op_mov_TN_reg(ot, 0, reg);
4350 gen_op_mov_TN_reg(ot, 1, rm);
4351 gen_op_mov_reg_T0(ot, rm);
4352 gen_op_mov_reg_T1(ot, reg);
4354 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4355 gen_op_mov_TN_reg(ot, 0, reg);
4356 /* for xchg, lock is implicit */
4357 if (!(prefixes & PREFIX_LOCK))
4359 gen_op_ld_T1_A0(ot + s->mem_index);
4360 gen_op_st_T0_A0(ot + s->mem_index);
4361 if (!(prefixes & PREFIX_LOCK))
4363 gen_op_mov_reg_T1(ot, reg);
4366 case 0xc4: /* les Gv */
4371 case 0xc5: /* lds Gv */
4376 case 0x1b2: /* lss Gv */
4379 case 0x1b4: /* lfs Gv */
4382 case 0x1b5: /* lgs Gv */
4385 ot = dflag ? OT_LONG : OT_WORD;
4386 modrm = ldub_code(s->pc++);
4387 reg = ((modrm >> 3) & 7) | rex_r;
4388 mod = (modrm >> 6) & 3;
4391 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4392 gen_op_ld_T1_A0(ot + s->mem_index);
4393 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4394 /* load the segment first to handle exceptions properly */
4395 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4396 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4397 /* then put the data */
4398 gen_op_mov_reg_T1(ot, reg);
4400 gen_jmp_im(s->pc - s->cs_base);
4405 /************************/
4416 ot = dflag + OT_WORD;
4418 modrm = ldub_code(s->pc++);
4419 mod = (modrm >> 6) & 3;
4420 op = (modrm >> 3) & 7;
4426 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4429 opreg = (modrm & 7) | REX_B(s);
4434 gen_shift(s, op, ot, opreg, OR_ECX);
4437 shift = ldub_code(s->pc++);
4439 gen_shifti(s, op, ot, opreg, shift);
4454 case 0x1a4: /* shld imm */
4458 case 0x1a5: /* shld cl */
4462 case 0x1ac: /* shrd imm */
4466 case 0x1ad: /* shrd cl */
4470 ot = dflag + OT_WORD;
4471 modrm = ldub_code(s->pc++);
4472 mod = (modrm >> 6) & 3;
4473 rm = (modrm & 7) | REX_B(s);
4474 reg = ((modrm >> 3) & 7) | rex_r;
4477 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4478 gen_op_ld_T0_A0(ot + s->mem_index);
4480 gen_op_mov_TN_reg(ot, 0, rm);
4482 gen_op_mov_TN_reg(ot, 1, reg);
4485 val = ldub_code(s->pc++);
4492 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4494 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4495 if (op == 0 && ot != OT_WORD)
4496 s->cc_op = CC_OP_SHLB + ot;
4498 s->cc_op = CC_OP_SARB + ot;
4501 if (s->cc_op != CC_OP_DYNAMIC)
4502 gen_op_set_cc_op(s->cc_op);
4504 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4506 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4507 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4510 gen_op_mov_reg_T0(ot, rm);
4514 /************************/
4517 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4518 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4519 /* XXX: what to do if illegal op ? */
4520 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4523 modrm = ldub_code(s->pc++);
4524 mod = (modrm >> 6) & 3;
4526 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4529 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4531 case 0x00 ... 0x07: /* fxxxs */
4532 case 0x10 ... 0x17: /* fixxxl */
4533 case 0x20 ... 0x27: /* fxxxl */
4534 case 0x30 ... 0x37: /* fixxx */
4541 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4542 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4543 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
4546 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4547 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4548 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4551 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4552 (s->mem_index >> 2) - 1);
4553 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
4557 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4558 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4559 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4563 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4565 /* fcomp needs pop */
4566 tcg_gen_helper_0_0(helper_fpop);
4570 case 0x08: /* flds */
4571 case 0x0a: /* fsts */
4572 case 0x0b: /* fstps */
4573 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4574 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4575 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4580 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4581 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4582 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
4585 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4586 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4587 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4590 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4591 (s->mem_index >> 2) - 1);
4592 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
4596 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4597 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4598 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4603 /* XXX: the corresponding CPUID bit must be tested ! */
4606 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
4607 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4608 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4611 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
4612 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4613 (s->mem_index >> 2) - 1);
4617 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
4618 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4619 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4622 tcg_gen_helper_0_0(helper_fpop);
4627 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
4628 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4629 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4632 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
4633 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4634 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4637 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
4638 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4639 (s->mem_index >> 2) - 1);
4643 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
4644 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4645 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4649 tcg_gen_helper_0_0(helper_fpop);
4653 case 0x0c: /* fldenv mem */
4654 if (s->cc_op != CC_OP_DYNAMIC)
4655 gen_op_set_cc_op(s->cc_op);
4656 gen_jmp_im(pc_start - s->cs_base);
4657 tcg_gen_helper_0_2(helper_fldenv,
4658 cpu_A0, tcg_const_i32(s->dflag));
4660 case 0x0d: /* fldcw mem */
4661 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4662 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4663 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
4665 case 0x0e: /* fnstenv mem */
4666 if (s->cc_op != CC_OP_DYNAMIC)
4667 gen_op_set_cc_op(s->cc_op);
4668 gen_jmp_im(pc_start - s->cs_base);
4669 tcg_gen_helper_0_2(helper_fstenv,
4670 cpu_A0, tcg_const_i32(s->dflag));
4672 case 0x0f: /* fnstcw mem */
4673 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
4674 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4675 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4677 case 0x1d: /* fldt mem */
4678 if (s->cc_op != CC_OP_DYNAMIC)
4679 gen_op_set_cc_op(s->cc_op);
4680 gen_jmp_im(pc_start - s->cs_base);
4681 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4683 case 0x1f: /* fstpt mem */
4684 if (s->cc_op != CC_OP_DYNAMIC)
4685 gen_op_set_cc_op(s->cc_op);
4686 gen_jmp_im(pc_start - s->cs_base);
4687 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4688 tcg_gen_helper_0_0(helper_fpop);
4690 case 0x2c: /* frstor mem */
4691 if (s->cc_op != CC_OP_DYNAMIC)
4692 gen_op_set_cc_op(s->cc_op);
4693 gen_jmp_im(pc_start - s->cs_base);
4694 tcg_gen_helper_0_2(helper_frstor,
4695 cpu_A0, tcg_const_i32(s->dflag));
4697 case 0x2e: /* fnsave mem */
4698 if (s->cc_op != CC_OP_DYNAMIC)
4699 gen_op_set_cc_op(s->cc_op);
4700 gen_jmp_im(pc_start - s->cs_base);
4701 tcg_gen_helper_0_2(helper_fsave,
4702 cpu_A0, tcg_const_i32(s->dflag));
4704 case 0x2f: /* fnstsw mem */
4705 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4706 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4707 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4709 case 0x3c: /* fbld */
4710 if (s->cc_op != CC_OP_DYNAMIC)
4711 gen_op_set_cc_op(s->cc_op);
4712 gen_jmp_im(pc_start - s->cs_base);
4713 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4715 case 0x3e: /* fbstp */
4716 if (s->cc_op != CC_OP_DYNAMIC)
4717 gen_op_set_cc_op(s->cc_op);
4718 gen_jmp_im(pc_start - s->cs_base);
4719 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4720 tcg_gen_helper_0_0(helper_fpop);
4722 case 0x3d: /* fildll */
4723 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4724 (s->mem_index >> 2) - 1);
4725 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
4727 case 0x3f: /* fistpll */
4728 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
4729 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4730 (s->mem_index >> 2) - 1);
4731 tcg_gen_helper_0_0(helper_fpop);
4737 /* register float ops */
4741 case 0x08: /* fld sti */
4742 tcg_gen_helper_0_0(helper_fpush);
4743 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4745 case 0x09: /* fxchg sti */
4746 case 0x29: /* fxchg4 sti, undocumented op */
4747 case 0x39: /* fxchg7 sti, undocumented op */
4748 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4750 case 0x0a: /* grp d9/2 */
4753 /* check exceptions (FreeBSD FPU probe) */
4754 if (s->cc_op != CC_OP_DYNAMIC)
4755 gen_op_set_cc_op(s->cc_op);
4756 gen_jmp_im(pc_start - s->cs_base);
4757 tcg_gen_helper_0_0(helper_fwait);
4763 case 0x0c: /* grp d9/4 */
4766 tcg_gen_helper_0_0(helper_fchs_ST0);
4769 tcg_gen_helper_0_0(helper_fabs_ST0);
4772 tcg_gen_helper_0_0(helper_fldz_FT0);
4773 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4776 tcg_gen_helper_0_0(helper_fxam_ST0);
4782 case 0x0d: /* grp d9/5 */
4786 tcg_gen_helper_0_0(helper_fpush);
4787 tcg_gen_helper_0_0(helper_fld1_ST0);
4790 tcg_gen_helper_0_0(helper_fpush);
4791 tcg_gen_helper_0_0(helper_fldl2t_ST0);
4794 tcg_gen_helper_0_0(helper_fpush);
4795 tcg_gen_helper_0_0(helper_fldl2e_ST0);
4798 tcg_gen_helper_0_0(helper_fpush);
4799 tcg_gen_helper_0_0(helper_fldpi_ST0);
4802 tcg_gen_helper_0_0(helper_fpush);
4803 tcg_gen_helper_0_0(helper_fldlg2_ST0);
4806 tcg_gen_helper_0_0(helper_fpush);
4807 tcg_gen_helper_0_0(helper_fldln2_ST0);
4810 tcg_gen_helper_0_0(helper_fpush);
4811 tcg_gen_helper_0_0(helper_fldz_ST0);
4818 case 0x0e: /* grp d9/6 */
4821 tcg_gen_helper_0_0(helper_f2xm1);
4824 tcg_gen_helper_0_0(helper_fyl2x);
4827 tcg_gen_helper_0_0(helper_fptan);
4829 case 3: /* fpatan */
4830 tcg_gen_helper_0_0(helper_fpatan);
4832 case 4: /* fxtract */
4833 tcg_gen_helper_0_0(helper_fxtract);
4835 case 5: /* fprem1 */
4836 tcg_gen_helper_0_0(helper_fprem1);
4838 case 6: /* fdecstp */
4839 tcg_gen_helper_0_0(helper_fdecstp);
4842 case 7: /* fincstp */
4843 tcg_gen_helper_0_0(helper_fincstp);
4847 case 0x0f: /* grp d9/7 */
4850 tcg_gen_helper_0_0(helper_fprem);
4852 case 1: /* fyl2xp1 */
4853 tcg_gen_helper_0_0(helper_fyl2xp1);
4856 tcg_gen_helper_0_0(helper_fsqrt);
4858 case 3: /* fsincos */
4859 tcg_gen_helper_0_0(helper_fsincos);
4861 case 5: /* fscale */
4862 tcg_gen_helper_0_0(helper_fscale);
4864 case 4: /* frndint */
4865 tcg_gen_helper_0_0(helper_frndint);
4868 tcg_gen_helper_0_0(helper_fsin);
4872 tcg_gen_helper_0_0(helper_fcos);
4876 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4877 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4878 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4884 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
4886 tcg_gen_helper_0_0(helper_fpop);
4888 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4889 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4893 case 0x02: /* fcom */
4894 case 0x22: /* fcom2, undocumented op */
4895 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4896 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4898 case 0x03: /* fcomp */
4899 case 0x23: /* fcomp3, undocumented op */
4900 case 0x32: /* fcomp5, undocumented op */
4901 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4902 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4903 tcg_gen_helper_0_0(helper_fpop);
4905 case 0x15: /* da/5 */
4907 case 1: /* fucompp */
4908 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4909 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4910 tcg_gen_helper_0_0(helper_fpop);
4911 tcg_gen_helper_0_0(helper_fpop);
4919 case 0: /* feni (287 only, just do nop here) */
4921 case 1: /* fdisi (287 only, just do nop here) */
4924 tcg_gen_helper_0_0(helper_fclex);
4926 case 3: /* fninit */
4927 tcg_gen_helper_0_0(helper_fninit);
4929 case 4: /* fsetpm (287 only, just do nop here) */
4935 case 0x1d: /* fucomi */
4936 if (s->cc_op != CC_OP_DYNAMIC)
4937 gen_op_set_cc_op(s->cc_op);
4938 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4939 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
4940 gen_op_fcomi_dummy();
4941 s->cc_op = CC_OP_EFLAGS;
4943 case 0x1e: /* fcomi */
4944 if (s->cc_op != CC_OP_DYNAMIC)
4945 gen_op_set_cc_op(s->cc_op);
4946 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4947 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
4948 gen_op_fcomi_dummy();
4949 s->cc_op = CC_OP_EFLAGS;
4951 case 0x28: /* ffree sti */
4952 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4954 case 0x2a: /* fst sti */
4955 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4957 case 0x2b: /* fstp sti */
4958 case 0x0b: /* fstp1 sti, undocumented op */
4959 case 0x3a: /* fstp8 sti, undocumented op */
4960 case 0x3b: /* fstp9 sti, undocumented op */
4961 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4962 tcg_gen_helper_0_0(helper_fpop);
4964 case 0x2c: /* fucom st(i) */
4965 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4966 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4968 case 0x2d: /* fucomp st(i) */
4969 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4970 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4971 tcg_gen_helper_0_0(helper_fpop);
4973 case 0x33: /* de/3 */
4975 case 1: /* fcompp */
4976 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4977 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4978 tcg_gen_helper_0_0(helper_fpop);
4979 tcg_gen_helper_0_0(helper_fpop);
4985 case 0x38: /* ffreep sti, undocumented op */
4986 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4987 tcg_gen_helper_0_0(helper_fpop);
4989 case 0x3c: /* df/4 */
4992 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4993 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4994 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5000 case 0x3d: /* fucomip */
5001 if (s->cc_op != CC_OP_DYNAMIC)
5002 gen_op_set_cc_op(s->cc_op);
5003 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5004 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5005 tcg_gen_helper_0_0(helper_fpop);
5006 gen_op_fcomi_dummy();
5007 s->cc_op = CC_OP_EFLAGS;
5009 case 0x3e: /* fcomip */
5010 if (s->cc_op != CC_OP_DYNAMIC)
5011 gen_op_set_cc_op(s->cc_op);
5012 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5013 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5014 tcg_gen_helper_0_0(helper_fpop);
5015 gen_op_fcomi_dummy();
5016 s->cc_op = CC_OP_EFLAGS;
5018 case 0x10 ... 0x13: /* fcmovxx */
5022 const static uint8_t fcmov_cc[8] = {
5028 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5030 l1 = gen_new_label();
5031 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5032 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5041 /************************/
5044 case 0xa4: /* movsS */
5049 ot = dflag + OT_WORD;
5051 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5052 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5058 case 0xaa: /* stosS */
5063 ot = dflag + OT_WORD;
5065 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5066 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5071 case 0xac: /* lodsS */
5076 ot = dflag + OT_WORD;
5077 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5078 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5083 case 0xae: /* scasS */
5088 ot = dflag + OT_WORD;
5089 if (prefixes & PREFIX_REPNZ) {
5090 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5091 } else if (prefixes & PREFIX_REPZ) {
5092 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5095 s->cc_op = CC_OP_SUBB + ot;
5099 case 0xa6: /* cmpsS */
5104 ot = dflag + OT_WORD;
5105 if (prefixes & PREFIX_REPNZ) {
5106 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5107 } else if (prefixes & PREFIX_REPZ) {
5108 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5111 s->cc_op = CC_OP_SUBB + ot;
5114 case 0x6c: /* insS */
5119 ot = dflag ? OT_LONG : OT_WORD;
5120 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5121 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5122 gen_op_andl_T0_ffff();
5123 if (gen_svm_check_io(s, pc_start,
5124 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
5125 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
5127 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5128 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5133 case 0x6e: /* outsS */
5138 ot = dflag ? OT_LONG : OT_WORD;
5139 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5140 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5141 gen_op_andl_T0_ffff();
5142 if (gen_svm_check_io(s, pc_start,
5143 (1 << (4+ot)) | svm_is_rep(prefixes) |
5144 4 | (1 << (7+s->aflag))))
5146 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5147 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5153 /************************/
5161 ot = dflag ? OT_LONG : OT_WORD;
5162 val = ldub_code(s->pc++);
5163 gen_op_movl_T0_im(val);
5164 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5165 if (gen_svm_check_io(s, pc_start,
5166 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5170 gen_op_mov_reg_T1(ot, R_EAX);
5177 ot = dflag ? OT_LONG : OT_WORD;
5178 val = ldub_code(s->pc++);
5179 gen_op_movl_T0_im(val);
5180 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5181 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5184 gen_op_mov_TN_reg(ot, 1, R_EAX);
5192 ot = dflag ? OT_LONG : OT_WORD;
5193 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5194 gen_op_andl_T0_ffff();
5195 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5196 if (gen_svm_check_io(s, pc_start,
5197 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5201 gen_op_mov_reg_T1(ot, R_EAX);
5208 ot = dflag ? OT_LONG : OT_WORD;
5209 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5210 gen_op_andl_T0_ffff();
5211 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5212 if (gen_svm_check_io(s, pc_start,
5213 svm_is_rep(prefixes) | (1 << (4+ot))))
5215 gen_op_mov_TN_reg(ot, 1, R_EAX);
5219 /************************/
5221 case 0xc2: /* ret im */
5222 val = ldsw_code(s->pc);
5225 if (CODE64(s) && s->dflag)
5227 gen_stack_update(s, val + (2 << s->dflag));
5229 gen_op_andl_T0_ffff();
5233 case 0xc3: /* ret */
5237 gen_op_andl_T0_ffff();
5241 case 0xca: /* lret im */
5242 val = ldsw_code(s->pc);
5245 if (s->pe && !s->vm86) {
5246 if (s->cc_op != CC_OP_DYNAMIC)
5247 gen_op_set_cc_op(s->cc_op);
5248 gen_jmp_im(pc_start - s->cs_base);
5249 gen_op_lret_protected(s->dflag, val);
5253 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5255 gen_op_andl_T0_ffff();
5256 /* NOTE: keeping EIP updated is not a problem in case of
5260 gen_op_addl_A0_im(2 << s->dflag);
5261 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5262 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5263 /* add stack offset */
5264 gen_stack_update(s, val + (4 << s->dflag));
5268 case 0xcb: /* lret */
5271 case 0xcf: /* iret */
5272 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5276 gen_op_iret_real(s->dflag);
5277 s->cc_op = CC_OP_EFLAGS;
5278 } else if (s->vm86) {
5280 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5282 gen_op_iret_real(s->dflag);
5283 s->cc_op = CC_OP_EFLAGS;
5286 if (s->cc_op != CC_OP_DYNAMIC)
5287 gen_op_set_cc_op(s->cc_op);
5288 gen_jmp_im(pc_start - s->cs_base);
5289 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5290 s->cc_op = CC_OP_EFLAGS;
5294 case 0xe8: /* call im */
5297 tval = (int32_t)insn_get(s, OT_LONG);
5299 tval = (int16_t)insn_get(s, OT_WORD);
5300 next_eip = s->pc - s->cs_base;
5304 gen_movtl_T0_im(next_eip);
5309 case 0x9a: /* lcall im */
5311 unsigned int selector, offset;
5315 ot = dflag ? OT_LONG : OT_WORD;
5316 offset = insn_get(s, ot);
5317 selector = insn_get(s, OT_WORD);
5319 gen_op_movl_T0_im(selector);
5320 gen_op_movl_T1_imu(offset);
5323 case 0xe9: /* jmp im */
5325 tval = (int32_t)insn_get(s, OT_LONG);
5327 tval = (int16_t)insn_get(s, OT_WORD);
5328 tval += s->pc - s->cs_base;
5333 case 0xea: /* ljmp im */
5335 unsigned int selector, offset;
5339 ot = dflag ? OT_LONG : OT_WORD;
5340 offset = insn_get(s, ot);
5341 selector = insn_get(s, OT_WORD);
5343 gen_op_movl_T0_im(selector);
5344 gen_op_movl_T1_imu(offset);
5347 case 0xeb: /* jmp Jb */
5348 tval = (int8_t)insn_get(s, OT_BYTE);
5349 tval += s->pc - s->cs_base;
5354 case 0x70 ... 0x7f: /* jcc Jb */
5355 tval = (int8_t)insn_get(s, OT_BYTE);
5357 case 0x180 ... 0x18f: /* jcc Jv */
5359 tval = (int32_t)insn_get(s, OT_LONG);
5361 tval = (int16_t)insn_get(s, OT_WORD);
5364 next_eip = s->pc - s->cs_base;
5368 gen_jcc(s, b, tval, next_eip);
5371 case 0x190 ... 0x19f: /* setcc Gv */
5372 modrm = ldub_code(s->pc++);
5374 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5376 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5377 ot = dflag + OT_WORD;
5378 modrm = ldub_code(s->pc++);
5379 reg = ((modrm >> 3) & 7) | rex_r;
5380 mod = (modrm >> 6) & 3;
5383 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5384 gen_op_ld_T1_A0(ot + s->mem_index);
5386 rm = (modrm & 7) | REX_B(s);
5387 gen_op_mov_TN_reg(ot, 1, rm);
5389 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5392 /************************/
5394 case 0x9c: /* pushf */
5395 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5397 if (s->vm86 && s->iopl != 3) {
5398 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5400 if (s->cc_op != CC_OP_DYNAMIC)
5401 gen_op_set_cc_op(s->cc_op);
5402 gen_op_movl_T0_eflags();
5406 case 0x9d: /* popf */
5407 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5409 if (s->vm86 && s->iopl != 3) {
5410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5415 gen_op_movl_eflags_T0_cpl0();
5417 gen_op_movw_eflags_T0_cpl0();
5420 if (s->cpl <= s->iopl) {
5422 gen_op_movl_eflags_T0_io();
5424 gen_op_movw_eflags_T0_io();
5428 gen_op_movl_eflags_T0();
5430 gen_op_movw_eflags_T0();
5435 s->cc_op = CC_OP_EFLAGS;
5436 /* abort translation because TF flag may change */
5437 gen_jmp_im(s->pc - s->cs_base);
5441 case 0x9e: /* sahf */
5444 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5445 if (s->cc_op != CC_OP_DYNAMIC)
5446 gen_op_set_cc_op(s->cc_op);
5447 gen_op_movb_eflags_T0();
5448 s->cc_op = CC_OP_EFLAGS;
5450 case 0x9f: /* lahf */
5453 if (s->cc_op != CC_OP_DYNAMIC)
5454 gen_op_set_cc_op(s->cc_op);
5455 gen_op_movl_T0_eflags();
5456 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5458 case 0xf5: /* cmc */
5459 if (s->cc_op != CC_OP_DYNAMIC)
5460 gen_op_set_cc_op(s->cc_op);
5462 s->cc_op = CC_OP_EFLAGS;
5464 case 0xf8: /* clc */
5465 if (s->cc_op != CC_OP_DYNAMIC)
5466 gen_op_set_cc_op(s->cc_op);
5468 s->cc_op = CC_OP_EFLAGS;
5470 case 0xf9: /* stc */
5471 if (s->cc_op != CC_OP_DYNAMIC)
5472 gen_op_set_cc_op(s->cc_op);
5474 s->cc_op = CC_OP_EFLAGS;
5476 case 0xfc: /* cld */
5477 tcg_gen_movi_i32(cpu_tmp2, 1);
5478 tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5480 case 0xfd: /* std */
5481 tcg_gen_movi_i32(cpu_tmp2, -1);
5482 tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5485 /************************/
5486 /* bit operations */
5487 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5488 ot = dflag + OT_WORD;
5489 modrm = ldub_code(s->pc++);
5490 op = (modrm >> 3) & 7;
5491 mod = (modrm >> 6) & 3;
5492 rm = (modrm & 7) | REX_B(s);
5495 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5496 gen_op_ld_T0_A0(ot + s->mem_index);
5498 gen_op_mov_TN_reg(ot, 0, rm);
5501 val = ldub_code(s->pc++);
5502 gen_op_movl_T1_im(val);
5506 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5507 s->cc_op = CC_OP_SARB + ot;
5510 gen_op_st_T0_A0(ot + s->mem_index);
5512 gen_op_mov_reg_T0(ot, rm);
5513 gen_op_update_bt_cc();
5516 case 0x1a3: /* bt Gv, Ev */
5519 case 0x1ab: /* bts */
5522 case 0x1b3: /* btr */
5525 case 0x1bb: /* btc */
5528 ot = dflag + OT_WORD;
5529 modrm = ldub_code(s->pc++);
5530 reg = ((modrm >> 3) & 7) | rex_r;
5531 mod = (modrm >> 6) & 3;
5532 rm = (modrm & 7) | REX_B(s);
5533 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5535 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5536 /* specific case: we need to add a displacement */
5537 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5538 gen_op_ld_T0_A0(ot + s->mem_index);
5540 gen_op_mov_TN_reg(ot, 0, rm);
5542 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5543 s->cc_op = CC_OP_SARB + ot;
5546 gen_op_st_T0_A0(ot + s->mem_index);
5548 gen_op_mov_reg_T0(ot, rm);
5549 gen_op_update_bt_cc();
5552 case 0x1bc: /* bsf */
5553 case 0x1bd: /* bsr */
5554 ot = dflag + OT_WORD;
5555 modrm = ldub_code(s->pc++);
5556 reg = ((modrm >> 3) & 7) | rex_r;
5557 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5558 /* NOTE: in order to handle the 0 case, we must load the
5559 result. It could be optimized with a generated jump */
5560 gen_op_mov_TN_reg(ot, 1, reg);
5561 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5562 gen_op_mov_reg_T1(ot, reg);
5563 s->cc_op = CC_OP_LOGICB + ot;
5565 /************************/
5567 case 0x27: /* daa */
5570 if (s->cc_op != CC_OP_DYNAMIC)
5571 gen_op_set_cc_op(s->cc_op);
5573 s->cc_op = CC_OP_EFLAGS;
5575 case 0x2f: /* das */
5578 if (s->cc_op != CC_OP_DYNAMIC)
5579 gen_op_set_cc_op(s->cc_op);
5581 s->cc_op = CC_OP_EFLAGS;
5583 case 0x37: /* aaa */
5586 if (s->cc_op != CC_OP_DYNAMIC)
5587 gen_op_set_cc_op(s->cc_op);
5589 s->cc_op = CC_OP_EFLAGS;
5591 case 0x3f: /* aas */
5594 if (s->cc_op != CC_OP_DYNAMIC)
5595 gen_op_set_cc_op(s->cc_op);
5597 s->cc_op = CC_OP_EFLAGS;
5599 case 0xd4: /* aam */
5602 val = ldub_code(s->pc++);
5604 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5607 s->cc_op = CC_OP_LOGICB;
5610 case 0xd5: /* aad */
5613 val = ldub_code(s->pc++);
5615 s->cc_op = CC_OP_LOGICB;
5617 /************************/
5619 case 0x90: /* nop */
5620 /* XXX: xchg + rex handling */
5621 /* XXX: correct lock test for all insn */
5622 if (prefixes & PREFIX_LOCK)
5624 if (prefixes & PREFIX_REPZ) {
5625 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5628 case 0x9b: /* fwait */
5629 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5630 (HF_MP_MASK | HF_TS_MASK)) {
5631 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5633 if (s->cc_op != CC_OP_DYNAMIC)
5634 gen_op_set_cc_op(s->cc_op);
5635 gen_jmp_im(pc_start - s->cs_base);
5636 tcg_gen_helper_0_0(helper_fwait);
5639 case 0xcc: /* int3 */
5640 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5642 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5644 case 0xcd: /* int N */
5645 val = ldub_code(s->pc++);
5646 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5648 if (s->vm86 && s->iopl != 3) {
5649 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5651 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5654 case 0xce: /* into */
5657 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5659 if (s->cc_op != CC_OP_DYNAMIC)
5660 gen_op_set_cc_op(s->cc_op);
5661 gen_jmp_im(pc_start - s->cs_base);
5662 gen_op_into(s->pc - pc_start);
5664 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5665 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5668 gen_debug(s, pc_start - s->cs_base);
5671 tb_flush(cpu_single_env);
5672 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5675 case 0xfa: /* cli */
5677 if (s->cpl <= s->iopl) {
5678 tcg_gen_helper_0_0(helper_cli);
5680 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5684 tcg_gen_helper_0_0(helper_cli);
5686 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5690 case 0xfb: /* sti */
5692 if (s->cpl <= s->iopl) {
5694 tcg_gen_helper_0_0(helper_sti);
5695 /* interruptions are enabled only the first insn after sti */
5696 /* If several instructions disable interrupts, only the
5698 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5699 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5700 /* give a chance to handle pending irqs */
5701 gen_jmp_im(s->pc - s->cs_base);
5704 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5710 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5714 case 0x62: /* bound */
5717 ot = dflag ? OT_LONG : OT_WORD;
5718 modrm = ldub_code(s->pc++);
5719 reg = (modrm >> 3) & 7;
5720 mod = (modrm >> 6) & 3;
5723 gen_op_mov_TN_reg(ot, 0, reg);
5724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5725 gen_jmp_im(pc_start - s->cs_base);
5727 tcg_gen_helper_0_0(helper_boundw);
5729 tcg_gen_helper_0_0(helper_boundl);
5731 case 0x1c8 ... 0x1cf: /* bswap reg */
5732 reg = (b & 7) | REX_B(s);
5733 #ifdef TARGET_X86_64
5735 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5736 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5737 gen_op_mov_reg_T0(OT_QUAD, reg);
5741 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5743 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5744 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5745 tcg_gen_bswap_i32(tmp0, tmp0);
5746 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5747 gen_op_mov_reg_T0(OT_LONG, reg);
5751 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5752 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5753 gen_op_mov_reg_T0(OT_LONG, reg);
5757 case 0xd6: /* salc */
5760 if (s->cc_op != CC_OP_DYNAMIC)
5761 gen_op_set_cc_op(s->cc_op);
5764 case 0xe0: /* loopnz */
5765 case 0xe1: /* loopz */
5766 if (s->cc_op != CC_OP_DYNAMIC)
5767 gen_op_set_cc_op(s->cc_op);
5769 case 0xe2: /* loop */
5770 case 0xe3: /* jecxz */
5774 tval = (int8_t)insn_get(s, OT_BYTE);
5775 next_eip = s->pc - s->cs_base;
5780 l1 = gen_new_label();
5781 l2 = gen_new_label();
5784 gen_op_jz_ecx[s->aflag](l1);
5786 gen_op_dec_ECX[s->aflag]();
5789 gen_op_loop[s->aflag][b](l1);
5792 gen_jmp_im(next_eip);
5793 gen_op_jmp_label(l2);
5800 case 0x130: /* wrmsr */
5801 case 0x132: /* rdmsr */
5803 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5807 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5808 tcg_gen_helper_0_0(helper_rdmsr);
5810 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5811 tcg_gen_helper_0_0(helper_wrmsr);
5817 case 0x131: /* rdtsc */
5818 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5820 gen_jmp_im(pc_start - s->cs_base);
5821 tcg_gen_helper_0_0(helper_rdtsc);
5823 case 0x133: /* rdpmc */
5824 gen_jmp_im(pc_start - s->cs_base);
5825 tcg_gen_helper_0_0(helper_rdpmc);
5827 case 0x134: /* sysenter */
5831 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5833 if (s->cc_op != CC_OP_DYNAMIC) {
5834 gen_op_set_cc_op(s->cc_op);
5835 s->cc_op = CC_OP_DYNAMIC;
5837 gen_jmp_im(pc_start - s->cs_base);
5838 tcg_gen_helper_0_0(helper_sysenter);
5842 case 0x135: /* sysexit */
5846 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5848 if (s->cc_op != CC_OP_DYNAMIC) {
5849 gen_op_set_cc_op(s->cc_op);
5850 s->cc_op = CC_OP_DYNAMIC;
5852 gen_jmp_im(pc_start - s->cs_base);
5853 tcg_gen_helper_0_0(helper_sysexit);
5857 #ifdef TARGET_X86_64
5858 case 0x105: /* syscall */
5859 /* XXX: is it usable in real mode ? */
5860 if (s->cc_op != CC_OP_DYNAMIC) {
5861 gen_op_set_cc_op(s->cc_op);
5862 s->cc_op = CC_OP_DYNAMIC;
5864 gen_jmp_im(pc_start - s->cs_base);
5865 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
5868 case 0x107: /* sysret */
5870 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5872 if (s->cc_op != CC_OP_DYNAMIC) {
5873 gen_op_set_cc_op(s->cc_op);
5874 s->cc_op = CC_OP_DYNAMIC;
5876 gen_jmp_im(pc_start - s->cs_base);
5877 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
5878 /* condition codes are modified only in long mode */
5880 s->cc_op = CC_OP_EFLAGS;
5885 case 0x1a2: /* cpuid */
5886 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5888 tcg_gen_helper_0_0(helper_cpuid);
5890 case 0xf4: /* hlt */
5892 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5894 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5896 if (s->cc_op != CC_OP_DYNAMIC)
5897 gen_op_set_cc_op(s->cc_op);
5898 gen_jmp_im(s->pc - s->cs_base);
5899 tcg_gen_helper_0_0(helper_hlt);
5904 modrm = ldub_code(s->pc++);
5905 mod = (modrm >> 6) & 3;
5906 op = (modrm >> 3) & 7;
5909 if (!s->pe || s->vm86)
5911 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5913 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5917 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5920 if (!s->pe || s->vm86)
5923 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5925 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5927 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5928 gen_jmp_im(pc_start - s->cs_base);
5929 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5930 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2);
5934 if (!s->pe || s->vm86)
5936 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5938 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5942 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5945 if (!s->pe || s->vm86)
5948 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5950 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5952 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5953 gen_jmp_im(pc_start - s->cs_base);
5954 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5955 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2);
5960 if (!s->pe || s->vm86)
5962 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5963 if (s->cc_op != CC_OP_DYNAMIC)
5964 gen_op_set_cc_op(s->cc_op);
5969 s->cc_op = CC_OP_EFLAGS;
5976 modrm = ldub_code(s->pc++);
5977 mod = (modrm >> 6) & 3;
5978 op = (modrm >> 3) & 7;
5984 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5986 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5987 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5988 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5989 gen_add_A0_im(s, 2);
5990 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5992 gen_op_andl_T0_im(0xffffff);
5993 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5998 case 0: /* monitor */
5999 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6002 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6004 gen_jmp_im(pc_start - s->cs_base);
6005 #ifdef TARGET_X86_64
6006 if (s->aflag == 2) {
6007 gen_op_movq_A0_reg(R_EBX);
6008 gen_op_addq_A0_AL();
6012 gen_op_movl_A0_reg(R_EBX);
6013 gen_op_addl_A0_AL();
6015 gen_op_andl_A0_ffff();
6017 gen_add_A0_ds_seg(s);
6018 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6021 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6024 if (s->cc_op != CC_OP_DYNAMIC) {
6025 gen_op_set_cc_op(s->cc_op);
6026 s->cc_op = CC_OP_DYNAMIC;
6028 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6030 gen_jmp_im(s->pc - s->cs_base);
6031 tcg_gen_helper_0_0(helper_mwait);
6038 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6040 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6041 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6042 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6043 gen_add_A0_im(s, 2);
6044 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6046 gen_op_andl_T0_im(0xffffff);
6047 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6055 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6057 if (s->cc_op != CC_OP_DYNAMIC)
6058 gen_op_set_cc_op(s->cc_op);
6059 gen_jmp_im(s->pc - s->cs_base);
6060 tcg_gen_helper_0_0(helper_vmrun);
6061 s->cc_op = CC_OP_EFLAGS;
6064 case 1: /* VMMCALL */
6065 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6067 /* FIXME: cause #UD if hflags & SVM */
6068 tcg_gen_helper_0_0(helper_vmmcall);
6070 case 2: /* VMLOAD */
6071 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6073 tcg_gen_helper_0_0(helper_vmload);
6075 case 3: /* VMSAVE */
6076 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6078 tcg_gen_helper_0_0(helper_vmsave);
6081 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6083 tcg_gen_helper_0_0(helper_stgi);
6086 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6088 tcg_gen_helper_0_0(helper_clgi);
6090 case 6: /* SKINIT */
6091 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6093 tcg_gen_helper_0_0(helper_skinit);
6095 case 7: /* INVLPGA */
6096 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6098 tcg_gen_helper_0_0(helper_invlpga);
6103 } else if (s->cpl != 0) {
6104 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6106 if (gen_svm_check_intercept(s, pc_start,
6107 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6109 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6110 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6111 gen_add_A0_im(s, 2);
6112 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6114 gen_op_andl_T0_im(0xffffff);
6116 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6117 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6119 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6120 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6125 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6127 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6128 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6132 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6134 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6136 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6138 gen_jmp_im(s->pc - s->cs_base);
6142 case 7: /* invlpg */
6144 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6147 #ifdef TARGET_X86_64
6148 if (CODE64(s) && rm == 0) {
6150 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6151 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6152 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6153 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6160 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6162 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6163 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6164 gen_jmp_im(s->pc - s->cs_base);
6173 case 0x108: /* invd */
6174 case 0x109: /* wbinvd */
6176 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6178 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6183 case 0x63: /* arpl or movslS (x86_64) */
6184 #ifdef TARGET_X86_64
6187 /* d_ot is the size of destination */
6188 d_ot = dflag + OT_WORD;
6190 modrm = ldub_code(s->pc++);
6191 reg = ((modrm >> 3) & 7) | rex_r;
6192 mod = (modrm >> 6) & 3;
6193 rm = (modrm & 7) | REX_B(s);
6196 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6198 if (d_ot == OT_QUAD)
6199 gen_op_movslq_T0_T0();
6200 gen_op_mov_reg_T0(d_ot, reg);
6202 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6203 if (d_ot == OT_QUAD) {
6204 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6206 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6208 gen_op_mov_reg_T0(d_ot, reg);
6213 if (!s->pe || s->vm86)
6215 ot = dflag ? OT_LONG : OT_WORD;
6216 modrm = ldub_code(s->pc++);
6217 reg = (modrm >> 3) & 7;
6218 mod = (modrm >> 6) & 3;
6221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6222 gen_op_ld_T0_A0(ot + s->mem_index);
6224 gen_op_mov_TN_reg(ot, 0, rm);
6226 if (s->cc_op != CC_OP_DYNAMIC)
6227 gen_op_set_cc_op(s->cc_op);
6229 s->cc_op = CC_OP_EFLAGS;
6231 gen_op_st_T0_A0(ot + s->mem_index);
6233 gen_op_mov_reg_T0(ot, rm);
6235 gen_op_arpl_update();
6238 case 0x102: /* lar */
6239 case 0x103: /* lsl */
6240 if (!s->pe || s->vm86)
6242 ot = dflag ? OT_LONG : OT_WORD;
6243 modrm = ldub_code(s->pc++);
6244 reg = ((modrm >> 3) & 7) | rex_r;
6245 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6246 gen_op_mov_TN_reg(ot, 1, reg);
6247 if (s->cc_op != CC_OP_DYNAMIC)
6248 gen_op_set_cc_op(s->cc_op);
6253 s->cc_op = CC_OP_EFLAGS;
6254 gen_op_mov_reg_T1(ot, reg);
6257 modrm = ldub_code(s->pc++);
6258 mod = (modrm >> 6) & 3;
6259 op = (modrm >> 3) & 7;
6261 case 0: /* prefetchnta */
6262 case 1: /* prefetchnt0 */
6263 case 2: /* prefetchnt0 */
6264 case 3: /* prefetchnt0 */
6267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6268 /* nothing more to do */
6270 default: /* nop (multi byte) */
6271 gen_nop_modrm(s, modrm);
6275 case 0x119 ... 0x11f: /* nop (multi byte) */
6276 modrm = ldub_code(s->pc++);
6277 gen_nop_modrm(s, modrm);
6279 case 0x120: /* mov reg, crN */
6280 case 0x122: /* mov crN, reg */
6282 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6284 modrm = ldub_code(s->pc++);
6285 if ((modrm & 0xc0) != 0xc0)
6287 rm = (modrm & 7) | REX_B(s);
6288 reg = ((modrm >> 3) & 7) | rex_r;
6300 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6301 gen_op_mov_TN_reg(ot, 0, rm);
6302 gen_op_movl_crN_T0(reg);
6303 gen_jmp_im(s->pc - s->cs_base);
6306 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6307 #if !defined(CONFIG_USER_ONLY)
6309 gen_op_movtl_T0_cr8();
6312 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6313 gen_op_mov_reg_T0(ot, rm);
6321 case 0x121: /* mov reg, drN */
6322 case 0x123: /* mov drN, reg */
6324 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6326 modrm = ldub_code(s->pc++);
6327 if ((modrm & 0xc0) != 0xc0)
6329 rm = (modrm & 7) | REX_B(s);
6330 reg = ((modrm >> 3) & 7) | rex_r;
6335 /* XXX: do it dynamically with CR4.DE bit */
6336 if (reg == 4 || reg == 5 || reg >= 8)
6339 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6340 gen_op_mov_TN_reg(ot, 0, rm);
6341 gen_op_movl_drN_T0(reg);
6342 gen_jmp_im(s->pc - s->cs_base);
6345 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6346 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6347 gen_op_mov_reg_T0(ot, rm);
6351 case 0x106: /* clts */
6353 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6355 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6357 /* abort block because static cpu state changed */
6358 gen_jmp_im(s->pc - s->cs_base);
6362 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6363 case 0x1c3: /* MOVNTI reg, mem */
6364 if (!(s->cpuid_features & CPUID_SSE2))
6366 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6367 modrm = ldub_code(s->pc++);
6368 mod = (modrm >> 6) & 3;
6371 reg = ((modrm >> 3) & 7) | rex_r;
6372 /* generate a generic store */
6373 gen_ldst_modrm(s, modrm, ot, reg, 1);
6376 modrm = ldub_code(s->pc++);
6377 mod = (modrm >> 6) & 3;
6378 op = (modrm >> 3) & 7;
6380 case 0: /* fxsave */
6381 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6382 (s->flags & HF_EM_MASK))
6384 if (s->flags & HF_TS_MASK) {
6385 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6389 if (s->cc_op != CC_OP_DYNAMIC)
6390 gen_op_set_cc_op(s->cc_op);
6391 gen_jmp_im(pc_start - s->cs_base);
6392 tcg_gen_helper_0_2(helper_fxsave,
6393 cpu_A0, tcg_const_i32((s->dflag == 2)));
6395 case 1: /* fxrstor */
6396 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6397 (s->flags & HF_EM_MASK))
6399 if (s->flags & HF_TS_MASK) {
6400 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6403 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6404 if (s->cc_op != CC_OP_DYNAMIC)
6405 gen_op_set_cc_op(s->cc_op);
6406 gen_jmp_im(pc_start - s->cs_base);
6407 tcg_gen_helper_0_2(helper_fxrstor,
6408 cpu_A0, tcg_const_i32((s->dflag == 2)));
6410 case 2: /* ldmxcsr */
6411 case 3: /* stmxcsr */
6412 if (s->flags & HF_TS_MASK) {
6413 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6416 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6419 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6421 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6422 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6424 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6425 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6428 case 5: /* lfence */
6429 case 6: /* mfence */
6430 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6433 case 7: /* sfence / clflush */
6434 if ((modrm & 0xc7) == 0xc0) {
6436 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6437 if (!(s->cpuid_features & CPUID_SSE))
6441 if (!(s->cpuid_features & CPUID_CLFLUSH))
6443 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6450 case 0x10d: /* 3DNow! prefetch(w) */
6451 modrm = ldub_code(s->pc++);
6452 mod = (modrm >> 6) & 3;
6455 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6456 /* ignore for now */
6458 case 0x1aa: /* rsm */
6459 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6461 if (!(s->flags & HF_SMM_MASK))
6463 if (s->cc_op != CC_OP_DYNAMIC) {
6464 gen_op_set_cc_op(s->cc_op);
6465 s->cc_op = CC_OP_DYNAMIC;
6467 gen_jmp_im(s->pc - s->cs_base);
6468 tcg_gen_helper_0_0(helper_rsm);
6471 case 0x10e ... 0x10f:
6472 /* 3DNow! instructions, ignore prefixes */
6473 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6474 case 0x110 ... 0x117:
6475 case 0x128 ... 0x12f:
6476 case 0x150 ... 0x177:
6477 case 0x17c ... 0x17f:
6479 case 0x1c4 ... 0x1c6:
6480 case 0x1d0 ... 0x1fe:
6481 gen_sse(s, b, pc_start, rex_r);
6486 /* lock generation */
6487 if (s->prefix & PREFIX_LOCK)
6491 if (s->prefix & PREFIX_LOCK)
6493 /* XXX: ensure that no lock was generated */
6494 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6498 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6499 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6501 /* flags read by an operation */
6502 static uint16_t opc_read_flags[NB_OPS] = {
6503 [INDEX_op_aas] = CC_A,
6504 [INDEX_op_aaa] = CC_A,
6505 [INDEX_op_das] = CC_A | CC_C,
6506 [INDEX_op_daa] = CC_A | CC_C,
6508 /* subtle: due to the incl/decl implementation, C is used */
6509 [INDEX_op_update_inc_cc] = CC_C,
6511 [INDEX_op_into] = CC_O,
6513 [INDEX_op_jb_subb] = CC_C,
6514 [INDEX_op_jb_subw] = CC_C,
6515 [INDEX_op_jb_subl] = CC_C,
6517 [INDEX_op_jz_subb] = CC_Z,
6518 [INDEX_op_jz_subw] = CC_Z,
6519 [INDEX_op_jz_subl] = CC_Z,
6521 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6522 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6523 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6525 [INDEX_op_js_subb] = CC_S,
6526 [INDEX_op_js_subw] = CC_S,
6527 [INDEX_op_js_subl] = CC_S,
6529 [INDEX_op_jl_subb] = CC_O | CC_S,
6530 [INDEX_op_jl_subw] = CC_O | CC_S,
6531 [INDEX_op_jl_subl] = CC_O | CC_S,
6533 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6534 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6535 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6537 [INDEX_op_loopnzw] = CC_Z,
6538 [INDEX_op_loopnzl] = CC_Z,
6539 [INDEX_op_loopzw] = CC_Z,
6540 [INDEX_op_loopzl] = CC_Z,
6542 [INDEX_op_seto_T0_cc] = CC_O,
6543 [INDEX_op_setb_T0_cc] = CC_C,
6544 [INDEX_op_setz_T0_cc] = CC_Z,
6545 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6546 [INDEX_op_sets_T0_cc] = CC_S,
6547 [INDEX_op_setp_T0_cc] = CC_P,
6548 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6549 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6551 [INDEX_op_setb_T0_subb] = CC_C,
6552 [INDEX_op_setb_T0_subw] = CC_C,
6553 [INDEX_op_setb_T0_subl] = CC_C,
6555 [INDEX_op_setz_T0_subb] = CC_Z,
6556 [INDEX_op_setz_T0_subw] = CC_Z,
6557 [INDEX_op_setz_T0_subl] = CC_Z,
6559 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6560 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6561 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6563 [INDEX_op_sets_T0_subb] = CC_S,
6564 [INDEX_op_sets_T0_subw] = CC_S,
6565 [INDEX_op_sets_T0_subl] = CC_S,
6567 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6568 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6569 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6571 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6572 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6573 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6575 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6576 [INDEX_op_cmc] = CC_C,
6577 [INDEX_op_salc] = CC_C,
6579 /* needed for correct flag optimisation before string ops */
6580 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6581 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6582 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6583 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6585 #ifdef TARGET_X86_64
6586 [INDEX_op_jb_subq] = CC_C,
6587 [INDEX_op_jz_subq] = CC_Z,
6588 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6589 [INDEX_op_js_subq] = CC_S,
6590 [INDEX_op_jl_subq] = CC_O | CC_S,
6591 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6593 [INDEX_op_loopnzq] = CC_Z,
6594 [INDEX_op_loopzq] = CC_Z,
6596 [INDEX_op_setb_T0_subq] = CC_C,
6597 [INDEX_op_setz_T0_subq] = CC_Z,
6598 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6599 [INDEX_op_sets_T0_subq] = CC_S,
6600 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6601 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6603 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6604 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6607 #define DEF_READF(SUFFIX)\
6608 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6609 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6610 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6611 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6612 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6613 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6614 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6615 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6617 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6618 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6619 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6620 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6621 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6622 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6623 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6624 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6628 #ifndef CONFIG_USER_ONLY
6634 /* flags written by an operation */
/* opc_write_flags[op] is the mask of condition-code bits that micro-op
   'op' PRODUCES.  Counterpart of opc_read_flags above; consulted by
   optimize_flags() to decide when a flag-generating op can be replaced
   by its flagless form from opc_simpler. */
6635 static uint16_t opc_write_flags[NB_OPS] = {
6636 [INDEX_op_update2_cc] = CC_OSZAPC,
6637 [INDEX_op_update1_cc] = CC_OSZAPC,
6638 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6639 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6640 /* subtle: due to the incl/decl implementation, C is used */
6641 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6642 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6644 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6645 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6646 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6647 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6648 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6649 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6650 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6651 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6652 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6653 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6654 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
/* NOTE(review): the same index is deliberately initialized four times
   below -- presumably placeholders standing in for several float
   compare ops sharing one dummy slot.  Later entries override earlier
   ones with the identical value (harmless, but warns under
   -Woverride-init) -- confirm against the full source. */
6657 [INDEX_op_com_dummy] = CC_OSZAPC,
6658 [INDEX_op_com_dummy] = CC_OSZAPC,
6659 [INDEX_op_com_dummy] = CC_OSZAPC,
6660 [INDEX_op_com_dummy] = CC_OSZAPC,
6663 [INDEX_op_aam] = CC_OSZAPC,
6664 [INDEX_op_aad] = CC_OSZAPC,
6665 [INDEX_op_aas] = CC_OSZAPC,
6666 [INDEX_op_aaa] = CC_OSZAPC,
6667 [INDEX_op_das] = CC_OSZAPC,
6668 [INDEX_op_daa] = CC_OSZAPC,
/* eflags loads: the byte form (sahf-style) cannot touch OF. */
6670 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6671 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6672 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6673 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6674 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6675 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6676 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6677 [INDEX_op_clc] = CC_C,
6678 [INDEX_op_stc] = CC_C,
6679 [INDEX_op_cmc] = CC_C,
/* bit test / scan ops. */
6681 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6682 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6683 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6684 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6685 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6686 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6687 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6688 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6689 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6690 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6691 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6692 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6694 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6695 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6696 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6697 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6698 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6699 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6701 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6702 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6703 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6704 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
/* ops that only set ZF (segment checks, cmpxchg8b). */
6706 [INDEX_op_cmpxchg8b] = CC_Z,
6707 [INDEX_op_lar] = CC_Z,
6708 [INDEX_op_lsl] = CC_Z,
6709 [INDEX_op_verr] = CC_Z,
6710 [INDEX_op_verw] = CC_Z,
/* NOTE(review): duplicated index, same pattern as INDEX_op_com_dummy
   above -- confirm against the full source. */
6711 [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6712 [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
/* Per-MMU-mode initializer entries for the read-modify-write forms of
   the arithmetic/shift ops; expanded inside this initializer for each
   memory-access suffix (expansions not visible in this excerpt). */
6714 #define DEF_WRITEF(SUFFIX)\
6715 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6716 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6717 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6718 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6719 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6720 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6721 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6722 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6724 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6725 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6726 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6727 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6728 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6729 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6730 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6731 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6733 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6734 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6735 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6736 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6737 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6738 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6739 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6740 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6742 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6743 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6744 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6745 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6747 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6748 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6749 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6750 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6752 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6753 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6754 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6755 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6757 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6758 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6759 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6760 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6761 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6762 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6764 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6765 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6766 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6767 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6768 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6769 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6771 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6772 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6773 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6774 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6779 #ifndef CONFIG_USER_ONLY
6785 /* simpler form of an operation if no flags need to be generated */
/* opc_simpler[op] gives the cheaper replacement micro-op to emit when
   optimize_flags() proves none of the flags written by 'op' are live.
   A zero entry means "no simpler form"; optimize_flags_init() fills
   those defaults so unlisted ops map to themselves. */
6786 static uint16_t opc_simpler[NB_OPS] = {
/* pure flag-update ops become nops when their flags are dead. */
6787 [INDEX_op_update2_cc] = INDEX_op_nop,
6788 [INDEX_op_update1_cc] = INDEX_op_nop,
6789 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6791 /* broken: CC_OP logic must be rewritten */
6792 [INDEX_op_update_inc_cc] = INDEX_op_nop,
/* shift ops: drop the _cc suffix variant when flags are dead. */
6795 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6796 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6797 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6798 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6800 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6801 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6802 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6803 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6805 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6806 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6807 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6808 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* Same flag-dropping mapping for the per-MMU-mode rotate variants;
   expanded inside this initializer for each memory-access suffix. */
6810 #define DEF_SIMPLER(SUFFIX)\
6811 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6812 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6813 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6814 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6816 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6817 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6818 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6819 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6823 #ifndef CONFIG_USER_ONLY
6824 DEF_SIMPLER(_kernel)
/* Macro-expansion hook registered with tcg_set_macro_func() in
   optimize_flags_init(): emits the real TCG ops for a deferred macro
   id.  NOTE(review): only the helper_divl_EAX_T0 expansion is visible
   in this excerpt -- the dispatch on macro_id is elided here. */
6829 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6834         tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
/* One-time translator initialization: complete the opc_simpler
   defaults, install the TCG macro hook and create the global TCG
   registers (env, T0/T1, A0) used by the generated code. */
6840 void optimize_flags_init(void)
6843     /* put default values in arrays */
/* any op without an explicit simpler form maps to itself. */
6844     for(i = 0; i < NB_OPS; i++) {
6845         if (opc_simpler[i] == 0)
6849     tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6851     cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6852 #if TARGET_LONG_BITS > HOST_LONG_BITS
/* target word wider than host word: keep T0/T1/A0 in CPUState memory
   slots instead of host registers. */
6853     cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6854                                   TCG_AREG0, offsetof(CPUState, t0), "T0");
6855     cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6856                                   TCG_AREG0, offsetof(CPUState, t1), "T1");
6857     cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6858                                   TCG_AREG0, offsetof(CPUState, t2), "A0");
/* otherwise pin them in the fixed host registers AREG1..AREG3. */
6860     cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6861     cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6862     cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6863     cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6867 /* CPU flags computation optimization: we move backward thru the
6868    generated code to see which flags are needed. The operation is
6869    modified if suitable */
/* Backward liveness pass over the micro-op buffer: using the
   opc_read_flags / opc_write_flags tables, rewrite any op whose
   written flags are all dead with its flagless form from opc_simpler. */
6870 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6873     int live_flags, write_flags, op;
6875     opc_ptr = opc_buf + opc_buf_len;
6876     /* live_flags contains the flags needed by the next instructions
6877        in the code. At the end of the block, we consider that all the
/* ...flags are live (conservative: a successor block may read any). */
6879     live_flags = CC_OSZAPC;
6880     while (opc_ptr > opc_buf) {
6882         /* if none of the flags written by the instruction is used,
6883            then we can try to find a simpler instruction */
6884         write_flags = opc_write_flags[op];
6885         if ((live_flags & write_flags) == 0) {
6886             *opc_ptr = opc_simpler[op];
6888         /* compute the live flags before the instruction */
/* written flags are killed, read flags become live. */
6889         live_flags &= ~write_flags;
6890         live_flags |= opc_read_flags[op];
6894 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6895    basic block 'tb'. If search_pc is TRUE, also generate PC
6896    information for each intermediate instruction. */
/* Core of the x86 translator: builds a DisasContext from the TB's
   cached CPU flags, then disassembles guest instructions one by one
   (disas_insn) until a jump, an exception condition, single-step, or a
   buffer/page limit stops the block, and finally runs the flag
   liveness optimizer.  Returns 0 on success. */
6897 static inline int gen_intermediate_code_internal(CPUState *env,
6898                                                  TranslationBlock *tb,
6901     DisasContext dc1, *dc = &dc1;
6902     target_ulong pc_ptr;
6903     uint16_t *gen_opc_end;
6906     target_ulong pc_start;
6907     target_ulong cs_base;
6909     /* generate intermediate code */
6911     cs_base = tb->cs_base;
6913     cflags = tb->cflags;
/* decode the per-TB CPU state flags into the disassembly context. */
6915     dc->pe = (flags >> HF_PE_SHIFT) & 1;
6916     dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6917     dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6918     dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6920     dc->vm86 = (flags >> VM_SHIFT) & 1;
6921     dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6922     dc->iopl = (flags >> IOPL_SHIFT) & 3;
6923     dc->tf = (flags >> TF_SHIFT) & 1;
6924     dc->singlestep_enabled = env->singlestep_enabled;
6925     dc->cc_op = CC_OP_DYNAMIC;
6926     dc->cs_base = cs_base;
6928     dc->popl_esp_hack = 0;
6929     /* select memory access functions */
/* mem_index is added to the operand-size index to select the memory
   access micro-op variant (kernel vs user softmmu -- stride of 4 per
   mode; see uses like gen_op_ld_T0_A0(OT_LONG + s->mem_index)). */
6931     if (flags & HF_SOFTMMU_MASK) {
6933         dc->mem_index = 2 * 4;
6935         dc->mem_index = 1 * 4;
6937     dc->cpuid_features = env->cpuid_features;
6938     dc->cpuid_ext_features = env->cpuid_ext_features;
6939     dc->cpuid_ext2_features = env->cpuid_ext2_features;
6940 #ifdef TARGET_X86_64
6941     dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6942     dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* direct TB chaining is unsafe under trap flag, debugger single-step
   or pending-IRQ inhibition. */
6945     dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6946                     (flags & HF_INHIBIT_IRQ_MASK)
6947 #ifndef CONFIG_SOFTMMU
6948                     || (flags & HF_SOFTMMU_MASK)
6952     /* check addseg logic */
6953     if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6954         printf("ERROR addseg\n");
/* allocate the per-TB TCG temporaries used by the helpers. */
6957     cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6958 #if TARGET_LONG_BITS > HOST_LONG_BITS
6959     cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
6961     cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
6962     cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6963     cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6965     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6967     dc->is_jmp = DISAS_NEXT;
/* main translation loop: emit a debug exception at any breakpoint... */
6972         if (env->nb_breakpoints > 0) {
6973             for(j = 0; j < env->nb_breakpoints; j++) {
6974                 if (env->breakpoints[j] == pc_ptr) {
6975                     gen_debug(dc, pc_ptr - dc->cs_base);
/* ...record pc/cc_op per op when search_pc is requested... */
6981             j = gen_opc_ptr - gen_opc_buf;
6985                     gen_opc_instr_start[lj++] = 0;
6987             gen_opc_pc[lj] = pc_ptr;
6988             gen_opc_cc_op[lj] = dc->cc_op;
6989             gen_opc_instr_start[lj] = 1;
/* ...and translate one guest instruction. */
6991         pc_ptr = disas_insn(dc, pc_ptr);
6992         /* stop translation if indicated */
6995         /* if single step mode, we generate only one instruction and
6996            generate an exception */
6997         /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6998            the flag and abort the translation to give the irqs a
6999            change to be happen */
7000         if (dc->tf || dc->singlestep_enabled ||
7001             (flags & HF_INHIBIT_IRQ_MASK) ||
7002             (cflags & CF_SINGLE_INSN)) {
7003             gen_jmp_im(pc_ptr - dc->cs_base);
7007         /* if too long translation, stop generation too */
/* leave slack so one more insn can never cross the page boundary. */
7008         if (gen_opc_ptr >= gen_opc_end ||
7009             (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7010             gen_jmp_im(pc_ptr - dc->cs_base);
7015     *gen_opc_ptr = INDEX_op_end;
7016     /* we don't forget to fill the last values */
7018         j = gen_opc_ptr - gen_opc_buf;
7021             gen_opc_instr_start[lj++] = 0;
/* optional debug logging of CPU state, guest asm and pre-opt ops. */
7025     if (loglevel & CPU_LOG_TB_CPU) {
7026         cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7028     if (loglevel & CPU_LOG_TB_IN_ASM) {
7030         fprintf(logfile, "----------------\n");
7031         fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7032 #ifdef TARGET_X86_64
7037         disas_flags = !dc->code32;
7038         target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7039         fprintf(logfile, "\n");
7040         if (loglevel & CPU_LOG_TB_OP_OPT) {
7041             fprintf(logfile, "OP before opt:\n");
7042             tcg_dump_ops(&tcg_ctx, logfile);
7043             fprintf(logfile, "\n");
7048     /* optimize flag computations */
7049     optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
7052     tb->size = pc_ptr - pc_start;
/* Public entry point: translate one basic block, without per-op PC
   search information. */
7056 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7058     return gen_intermediate_code_internal(env, tb, 0);
/* Public entry point: same translation, but also records per-op PC
   information (search_pc=1) so a faulting host PC can be mapped back
   to a guest eip by gen_pc_load(). */
7061 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7063     return gen_intermediate_code_internal(env, tb, 1);
7066 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7067 unsigned long searched_pc, int pc_pos, void *puc)
7071 if (loglevel & CPU_LOG_TB_OP) {
7073 fprintf(logfile, "RESTORE:\n");
7074 for(i = 0;i <= pc_pos; i++) {
7075 if (gen_opc_instr_start[i]) {
7076 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7079 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7080 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7081 (uint32_t)tb->cs_base);
7084 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7085 cc_op = gen_opc_cc_op[pc_pos];
7086 if (cc_op != CC_OP_DYNAMIC)