4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Bit flags recording which legacy instruction prefixes were seen while
   decoding the current instruction (OR-ed into a prefix mask). */
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
/* Helpers that expand to their argument only when building the x86_64
   target; the second pair of definitions further below is presumably the
   !TARGET_X86_64 branch of an elided #ifdef — confirm against full file. */
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
/* Non-x86_64 variants: 64-bit-only entries become NULL / empty. */
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
28 /* global register indexes */
/* TCG values shared by all generated code: env pointer, the two generic
   temporaries T0/T1, and the address accumulator A0. */
61 static TCGv cpu_env, cpu_T[2], cpu_A0;
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_tmp3, cpu_ptr0, cpu_ptr1;
/* Non-zero when a REX prefix makes SPL/BPL/SIL/DIL addressable instead of
   AH/CH/DH/BH (x86_64 high-byte register selection). */
66 static int x86_64_hregs;
/* Per-translation-block decoder state: everything the instruction decoder
   needs about the current insn and the CPU mode the block was translated
   for.  (Closing brace of the struct is outside this excerpt.) */
69 typedef struct DisasContext {
70 /* current insn context */
71 int override; /* -1 if no override */
74 target_ulong pc; /* pc = eip + cs_base */
75 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
76 static state change (stop translation) */
77 /* current block context */
78 target_ulong cs_base; /* base of CS segment */
79 int pe; /* protected mode */
80 int code32; /* 32 bit code segment */
82 int lma; /* long mode active */
83 int code64; /* 64 bit code segment */
86 int ss32; /* 32 bit stack segment */
87 int cc_op; /* current CC operation */
88 int addseg; /* non zero if either DS/ES/SS have a non zero base */
89 int f_st; /* currently unused */
90 int vm86; /* vm86 mode */
93 int tf; /* TF cpu flag */
94 int singlestep_enabled; /* "hardware" single step enabled */
95 int jmp_opt; /* use direct block chaining for direct jumps */
96 int mem_index; /* select memory access functions */
97 uint64_t flags; /* all execution flags */
98 struct TranslationBlock *tb;
99 int popl_esp_hack; /* for correct popl with esp base handling */
100 int rip_offset; /* only used in x86_64, but left for simplicity */
102 int cpuid_ext_features;
103 int cpuid_ext2_features;
/* Forward declarations for the control-flow emitters used throughout. */
106 static void gen_eob(DisasContext *s);
107 static void gen_jmp(DisasContext *s, target_ulong eip);
108 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
110 /* i386 arith/logic operations */
/* Fragment of the op enum; OP_SHL1 is the undocumented SAL encoding. */
130 OP_SHL1, /* undocumented */
143 /* I386 int registers */
/* Operand-register enum fragment: values above the architectural
   registers name the pseudo operands T0/A0. */
144 OR_EAX, /* MUST be even numbered */
153 OR_TMP0 = 16, /* temporary operand register */
155 OR_A0, /* temporary register used when doing address evaluation */
/* Thin wrappers that emit a TCG move/and of an immediate into the T0/T1
   temporaries or the A0 address register.  The signed/unsigned (_im/_imu)
   variants only differ in the C type of the immediate. */
158 static inline void gen_op_movl_T0_0(void)
160 tcg_gen_movi_tl(cpu_T[0], 0);
163 static inline void gen_op_movl_T0_im(int32_t val)
165 tcg_gen_movi_tl(cpu_T[0], val);
168 static inline void gen_op_movl_T0_imu(uint32_t val)
170 tcg_gen_movi_tl(cpu_T[0], val);
173 static inline void gen_op_movl_T1_im(int32_t val)
175 tcg_gen_movi_tl(cpu_T[1], val);
178 static inline void gen_op_movl_T1_imu(uint32_t val)
180 tcg_gen_movi_tl(cpu_T[1], val);
183 static inline void gen_op_movl_A0_im(uint32_t val)
185 tcg_gen_movi_tl(cpu_A0, val);
189 static inline void gen_op_movq_A0_im(int64_t val)
191 tcg_gen_movi_tl(cpu_A0, val);
/* target_ulong-wide immediates (64-bit on x86_64 hosts/targets). */
195 static inline void gen_movtl_T0_im(target_ulong val)
197 tcg_gen_movi_tl(cpu_T[0], val);
200 static inline void gen_movtl_T1_im(target_ulong val)
202 tcg_gen_movi_tl(cpu_T[1], val);
/* Mask to 16 bits — used when operating in 16-bit operand/address size. */
205 static inline void gen_op_andl_T0_ffff(void)
207 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
210 static inline void gen_op_andl_T0_im(uint32_t val)
212 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
215 static inline void gen_op_movl_T0_T1(void)
217 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
220 static inline void gen_op_andl_A0_ffff(void)
222 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
/* Operand-size count and per-register table expander.  On x86_64 there are
   four operand sizes (b/w/l/q) and sixteen GPRs; otherwise three sizes and
   eight GPRs.  DEF_REGS stamps out one table entry per register by token
   pasting, in architectural register-number order. */
227 #define NB_OP_SIZES 4
229 #define DEF_REGS(prefix, suffix) \
230 prefix ## EAX ## suffix,\
231 prefix ## ECX ## suffix,\
232 prefix ## EDX ## suffix,\
233 prefix ## EBX ## suffix,\
234 prefix ## ESP ## suffix,\
235 prefix ## EBP ## suffix,\
236 prefix ## ESI ## suffix,\
237 prefix ## EDI ## suffix,\
238 prefix ## R8 ## suffix,\
239 prefix ## R9 ## suffix,\
240 prefix ## R10 ## suffix,\
241 prefix ## R11 ## suffix,\
242 prefix ## R12 ## suffix,\
243 prefix ## R13 ## suffix,\
244 prefix ## R14 ## suffix,\
245 prefix ## R15 ## suffix,
247 #else /* !TARGET_X86_64 */
249 #define NB_OP_SIZES 3
251 #define DEF_REGS(prefix, suffix) \
252 prefix ## EAX ## suffix,\
253 prefix ## ECX ## suffix,\
254 prefix ## EDX ## suffix,\
255 prefix ## EBX ## suffix,\
256 prefix ## ESP ## suffix,\
257 prefix ## EBP ## suffix,\
258 prefix ## ESI ## suffix,\
259 prefix ## EDI ## suffix,
261 #endif /* !TARGET_X86_64 */
/* Byte offsets of the 8/16/32-bit sub-registers within a target_ulong
   CPU register slot; they depend on host endianness.  REG_H_OFFSET is the
   high byte (AH..BH), REG_LH_OFFSET the upper 32-bit half on 64-bit. */
263 #if defined(WORDS_BIGENDIAN)
264 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
265 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
266 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
267 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
268 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
/* Little-endian host: sub-registers live at the low addresses. */
270 #define REG_B_OFFSET 0
271 #define REG_H_OFFSET 1
272 #define REG_W_OFFSET 0
273 #define REG_L_OFFSET 0
274 #define REG_LH_OFFSET 4
/* Store temporary T[t_index] into guest register 'reg' at operand size
   'ot'.  Byte stores must distinguish the low byte from the legacy high
   byte (AH..BH): reg >= 4 selects the high byte unless REX-style byte
   registers are active (x86_64_hregs).  32-bit stores also zero the upper
   half of the 64-bit register, as the architecture requires. */
277 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
281 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
282 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET)
284 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
288 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
292 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
293 /* high part of register set to zero */
294 tcg_gen_movi_tl(cpu_tmp0, 0);
295 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
299 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
304 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
/* Convenience wrappers for the two temporaries. */
310 static inline void gen_op_mov_reg_T0(int ot, int reg)
312 gen_op_mov_reg_TN(ot, 0, reg);
315 static inline void gen_op_mov_reg_T1(int ot, int reg)
317 gen_op_mov_reg_TN(ot, 1, reg);
/* Store the address register A0 into guest register 'reg' (no byte size;
   addresses are at least 16 bits wide). */
320 static inline void gen_op_mov_reg_A0(int size, int reg)
324 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
328 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
329 /* high part of register set to zero */
330 tcg_gen_movi_tl(cpu_tmp0, 0);
331 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
335 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
340 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
/* Load guest register 'reg' into T[t_index], with the same high-byte
   selection rule as the store path above. */
346 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
350 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
353 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
358 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
/* Address computation helpers.  The 'l' variants keep A0 truncated to
   32 bits (the andi with 0xffffffff below — presumably under an elided
   TARGET_X86_64 conditional); the 'q' variants use the full width. */
363 static inline void gen_op_movl_A0_reg(int reg)
365 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
368 static inline void gen_op_addl_A0_im(int32_t val)
370 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
372 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
377 static inline void gen_op_addq_A0_im(int64_t val)
379 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
/* Dispatch on CODE64(s) between the q/l displacement-add variants. */
383 static void gen_add_A0_im(DisasContext *s, int val)
387 gen_op_addq_A0_im(val);
390 gen_op_addl_A0_im(val);
393 static inline void gen_op_addl_T0_T1(void)
395 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Write T0 to env->eip (indirect jump target). */
398 static inline void gen_op_jmp_T0(void)
400 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
/* ESP adjustment at each stack width; the 16-bit version stores only the
   low word so the upper bits of ESP are preserved. */
403 static inline void gen_op_addw_ESP_im(int32_t val)
405 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
406 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
410 static inline void gen_op_addl_ESP_im(int32_t val)
412 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
413 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
415 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
417 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
421 static inline void gen_op_addq_ESP_im(int32_t val)
423 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
424 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
425 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
/* Record the lazily-evaluated condition-code operation in env->cc_op. */
429 static inline void gen_op_set_cc_op(int32_t val)
431 tcg_gen_movi_tl(cpu_tmp0, val);
432 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
/* A0 += reg << shift (scaled-index addressing), 32-bit masked. */
435 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
437 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
440 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
442 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
/* Segment-base address helpers: load or add segs[reg].base into A0.
   The 'l' forms truncate to 32 bits, the 'q' forms do not. */
446 static inline void gen_op_movl_A0_seg(int reg)
448 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
451 static inline void gen_op_addl_A0_seg(int reg)
453 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
454 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
456 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
461 static inline void gen_op_movq_A0_seg(int reg)
463 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
466 static inline void gen_op_addq_A0_seg(int reg)
468 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
469 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
472 static inline void gen_op_movq_A0_reg(int reg)
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
/* 64-bit scaled-index add: A0 += reg << shift, no truncation. */
477 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
481 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
482 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
/* CMOV dispatch table, indexed [operand size - 1][destination register];
   byte size has no cmov, hence NB_OP_SIZES - 1 rows. */
486 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
488 DEF_REGS(gen_op_cmovw_, _T1_T0)
491 DEF_REGS(gen_op_cmovl_, _T1_T0)
495 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* ADC/SBB op tables (carry-dependent arithmetic), one row per operand
   size b/w/l/q; the SUFFIX selects the memory-access variant
   (_raw/_kernel/_user), hence the 3*4 rows of the _mem table. */
500 #define DEF_ARITHC(SUFFIX)\
502 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
503 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
506 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
507 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
510 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
511 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
514 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
515 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
518 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
522 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
524 #ifndef CONFIG_USER_ONLY
/* cc_op value produced by each byte-width ALU op (lazy flags). */
530 static const int cc_op_arithb[8] = {
/* CMPXCHG op tables, one entry per operand size. */
541 #define DEF_CMPXCHG(SUFFIX)\
542 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
543 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
544 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
545 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
547 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
551 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
553 #ifndef CONFIG_USER_ONLY
/* Shift/rotate op tables.  Row order follows the x86 group-2 /reg
   encoding: ROL, ROR, RCL, RCR, SHL, SHR, SHL (slot 6 is the undocumented
   SAL alias of SHL — the duplicate entry is intentional), SAR. */
559 #define DEF_SHIFT(SUFFIX)\
561 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
562 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
563 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
565 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
566 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
568 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
571 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
572 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
573 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
574 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
575 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
576 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
577 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
578 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
581 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
584 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
587 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
588 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
591 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
592 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
593 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
594 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
595 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
596 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
597 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
598 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
601 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
605 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
607 #ifndef CONFIG_USER_ONLY
/* Double-precision shifts (SHLD/SHRD); 'op' is the count source, either
   an immediate ('im') or ECX. */
613 #define DEF_SHIFTD(SUFFIX, op)\
619 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
623 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
627 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
628 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
631 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
635 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
639 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
641 #ifndef CONFIG_USER_ONLY
642 DEF_SHIFTD(_kernel, im)
643 DEF_SHIFTD(_user, im)
647 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
648 DEF_SHIFTD(_raw, ECX)
649 #ifndef CONFIG_USER_ONLY
650 DEF_SHIFTD(_kernel, ECX)
651 DEF_SHIFTD(_user, ECX)
/* Bit-test op tables (BT/BTS/BTR/BTC), rows indexed by operand size
   (w/l/q — no byte form), columns by operation. */
655 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
658 gen_op_btsw_T0_T1_cc,
659 gen_op_btrw_T0_T1_cc,
660 gen_op_btcw_T0_T1_cc,
664 gen_op_btsl_T0_T1_cc,
665 gen_op_btrl_T0_T1_cc,
666 gen_op_btcl_T0_T1_cc,
671 gen_op_btsq_T0_T1_cc,
672 gen_op_btrq_T0_T1_cc,
673 gen_op_btcq_T0_T1_cc,
/* A0 += bit-offset-derived byte displacement, for memory bit tests. */
678 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
679 gen_op_add_bitw_A0_T1,
680 gen_op_add_bitl_A0_T1,
681 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* BSF/BSR (bit scan) dispatch, [size][direction]. */
684 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* Guest memory access helpers.  'idx' packs the operand size in its low
   two bits and the softmmu mem_index in the bits above ((idx >> 2) - 1).
   _lds: sign-extending load into T0. */
701 static inline void gen_op_lds_T0_A0(int idx)
703 int mem_index = (idx >> 2) - 1;
706 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
709 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
713 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Zero-extending load into T0 at the encoded operand size. */
719 static inline void gen_op_ld_T0_A0(int idx)
721 int mem_index = (idx >> 2) - 1;
724 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
727 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
730 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
734 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
739 static inline void gen_op_ldu_T0_A0(int idx)
741 gen_op_ld_T0_A0(idx);
/* Same as gen_op_ld_T0_A0 but targets T1. */
744 static inline void gen_op_ld_T1_A0(int idx)
746 int mem_index = (idx >> 2) - 1;
749 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
752 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
755 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
759 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
/* Stores of T0/T1 to [A0] at the encoded operand size. */
764 static inline void gen_op_st_T0_A0(int idx)
766 int mem_index = (idx >> 2) - 1;
769 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
772 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
775 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
779 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
784 static inline void gen_op_st_T1_A0(int idx)
786 int mem_index = (idx >> 2) - 1;
789 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
792 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
795 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
799 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
/* Set env->eip to an immediate (used before helper calls / block exits). */
804 static inline void gen_jmp_im(target_ulong pc)
806 tcg_gen_movi_tl(cpu_tmp0, pc);
807 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
/* Compute the source address for string instructions into A0:
   (segment override or DS) base + ESI, at the current address size.
   In 16-bit addressing the segment base is always added. */
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
814 override = s->override;
818 gen_op_movq_A0_seg(override);
819 gen_op_addq_A0_reg_sN(0, R_ESI);
821 gen_op_movq_A0_reg(R_ESI);
827 if (s->addseg && override < 0)
830 gen_op_movl_A0_seg(override);
831 gen_op_addl_A0_reg_sN(0, R_ESI);
833 gen_op_movl_A0_reg(R_ESI);
836 /* 16 address, always override */
839 gen_op_movl_A0_reg(R_ESI);
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(override);
/* Destination address for string instructions: ES:EDI — the ES segment
   cannot be overridden for the destination operand. */
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
849 gen_op_movq_A0_reg(R_EDI);
854 gen_op_movl_A0_seg(R_ES);
855 gen_op_addl_A0_reg_sN(0, R_EDI);
857 gen_op_movl_A0_reg(R_EDI);
860 gen_op_movl_A0_reg(R_EDI);
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(R_ES);
/* Load the per-size direction increment (+/- element size, depending on
   EFLAGS.DF) into T0, indexed by operand size. */
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* Conditional jumps on (E/R)CX and ECX decrement, indexed by address
   size (aflag) — used by REP prefixes and LOOP. */
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
876 X86_64_ONLY(gen_op_jnz_ecxq),
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
882 X86_64_ONLY(gen_op_jz_ecxq),
885 static GenOpFunc *gen_op_dec_ECX[3] = {
888 X86_64_ONLY(gen_op_decq_ECX),
/* ZF-conditional jumps after CMPS/SCAS, [nz][operand size]. */
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
896 X86_64_ONLY(gen_op_jnz_subq),
902 X86_64_ONLY(gen_op_jz_subq),
/* I/O port access and permission-check helpers, indexed by operand size. */
906 static void *helper_in_func[3] = {
912 static void *helper_out_func[3] = {
918 static void *gen_check_io_func[3] = {
/* Emit the I/O permission check for IN/OUT-family instructions: a TSS
   bitmap check when CPL > IOPL or in vm86 mode, plus the SVM IOIO
   intercept check when that intercept is enabled. */
924 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
928 target_ulong next_eip;
931 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
/* flush lazy condition codes before calling a helper that may raise */
932 if (s->cc_op != CC_OP_DYNAMIC)
933 gen_op_set_cc_op(s->cc_op);
936 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
937 tcg_gen_helper_0_1(gen_check_io_func[ot],
940 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
942 if (s->cc_op != CC_OP_DYNAMIC)
943 gen_op_set_cc_op(s->cc_op);
/* encode the access size into the SVM exit-info flags */
947 svm_flags |= (1 << (4 + ot));
948 next_eip = s->pc - s->cs_base;
949 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
950 tcg_gen_helper_0_3(helper_svm_check_io,
952 tcg_const_i32(svm_flags),
953 tcg_const_i32(next_eip - cur_eip));
/* Emit one MOVS iteration: load from DS:(E)SI, store to ES:(E)DI, then
   advance both index registers by the direction increment in T0. */
957 static inline void gen_movs(DisasContext *s, int ot)
959 gen_string_movl_A0_ESI(s);
960 gen_op_ld_T0_A0(ot + s->mem_index);
961 gen_string_movl_A0_EDI(s);
962 gen_op_st_T0_A0(ot + s->mem_index);
963 gen_op_movl_T0_Dshift[ot]();
966 gen_op_addq_ESI_T0();
967 gen_op_addq_EDI_T0();
971 gen_op_addl_ESI_T0();
972 gen_op_addl_EDI_T0();
974 gen_op_addw_ESI_T0();
975 gen_op_addw_EDI_T0();
/* Materialize the lazy condition codes and mark them dynamic for the
   rest of the block. */
979 static inline void gen_update_cc_op(DisasContext *s)
981 if (s->cc_op != CC_OP_DYNAMIC) {
982 gen_op_set_cc_op(s->cc_op);
983 s->cc_op = CC_OP_DYNAMIC;
987 /* XXX: does not work with gdbstub "ice" single step - not a
/* Emit the "skip the whole string op when (E)CX == 0" test used by REP:
   falls through to the op when CX != 0, otherwise jumps to next_eip.
   Returns the label the looping code jumps back to. */
989 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
993 l1 = gen_new_label();
994 l2 = gen_new_label();
995 gen_op_jnz_ecx[s->aflag](l1);
997 gen_jmp_tb(s, next_eip, 1);
/* One STOS iteration: store AL/AX/EAX (loaded full-width from EAX; the
   store truncates to 'ot') to ES:(E)DI, then advance EDI. */
1002 static inline void gen_stos(DisasContext *s, int ot)
1004 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1005 gen_string_movl_A0_EDI(s);
1006 gen_op_st_T0_A0(ot + s->mem_index);
1007 gen_op_movl_T0_Dshift[ot]();
1008 #ifdef TARGET_X86_64
1009 if (s->aflag == 2) {
1010 gen_op_addq_EDI_T0();
1014 gen_op_addl_EDI_T0();
1016 gen_op_addw_EDI_T0();
/* One LODS iteration: load from DS:(E)SI into EAX, advance ESI. */
1020 static inline void gen_lods(DisasContext *s, int ot)
1022 gen_string_movl_A0_ESI(s);
1023 gen_op_ld_T0_A0(ot + s->mem_index);
1024 gen_op_mov_reg_T0(ot, R_EAX);
1025 gen_op_movl_T0_Dshift[ot]();
1026 #ifdef TARGET_X86_64
1027 if (s->aflag == 2) {
1028 gen_op_addq_ESI_T0();
1032 gen_op_addl_ESI_T0();
1034 gen_op_addw_ESI_T0();
/* One SCAS iteration: compare EAX with [ES:(E)DI], set flags, advance
   EDI.  Flag update is via gen_op_cmpl_T0_T1_cc (lazy cc). */
1038 static inline void gen_scas(DisasContext *s, int ot)
1040 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1041 gen_string_movl_A0_EDI(s);
1042 gen_op_ld_T1_A0(ot + s->mem_index);
1043 gen_op_cmpl_T0_T1_cc();
1044 gen_op_movl_T0_Dshift[ot]();
1045 #ifdef TARGET_X86_64
1046 if (s->aflag == 2) {
1047 gen_op_addq_EDI_T0();
1051 gen_op_addl_EDI_T0();
1053 gen_op_addw_EDI_T0();
/* One CMPS iteration: compare [DS:(E)SI] with [ES:(E)DI], set flags,
   advance both index registers. */
1057 static inline void gen_cmps(DisasContext *s, int ot)
1059 gen_string_movl_A0_ESI(s);
1060 gen_op_ld_T0_A0(ot + s->mem_index);
1061 gen_string_movl_A0_EDI(s);
1062 gen_op_ld_T1_A0(ot + s->mem_index);
1063 gen_op_cmpl_T0_T1_cc();
1064 gen_op_movl_T0_Dshift[ot]();
1065 #ifdef TARGET_X86_64
1066 if (s->aflag == 2) {
1067 gen_op_addq_ESI_T0();
1068 gen_op_addq_EDI_T0();
1072 gen_op_addl_ESI_T0();
1073 gen_op_addl_EDI_T0();
1075 gen_op_addw_ESI_T0();
1076 gen_op_addw_EDI_T0();
/* One INS iteration: read from port DX (low 16 bits of EDX) via the
   per-size in-helper, store the value to ES:(E)DI, advance EDI.  Note
   the store at line 1084 appears before the port read — presumably a
   dummy write to fault early on a bad destination page; confirm against
   the unelided file. */
1080 static inline void gen_ins(DisasContext *s, int ot)
1082 gen_string_movl_A0_EDI(s);
1084 gen_op_st_T0_A0(ot + s->mem_index);
1085 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1086 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
1087 tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
1088 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2);
1089 gen_op_st_T0_A0(ot + s->mem_index);
1090 gen_op_movl_T0_Dshift[ot]();
1091 #ifdef TARGET_X86_64
1092 if (s->aflag == 2) {
1093 gen_op_addq_EDI_T0();
1097 gen_op_addl_EDI_T0();
1099 gen_op_addw_EDI_T0();
/* One OUTS iteration: load from DS:(E)SI, write it to port DX via the
   per-size out-helper, advance ESI. */
1103 static inline void gen_outs(DisasContext *s, int ot)
1105 gen_string_movl_A0_ESI(s);
1106 gen_op_ld_T0_A0(ot + s->mem_index);
1108 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1109 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
1110 tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
1111 tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[0]);
1112 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
1114 gen_op_movl_T0_Dshift[ot]();
1115 #ifdef TARGET_X86_64
1116 if (s->aflag == 2) {
1117 gen_op_addq_ESI_T0();
1121 gen_op_addl_ESI_T0();
1123 gen_op_addw_ESI_T0();
1127 /* same method as Valgrind : we generate jumps to current or next
/* GEN_REPZ: wrap a string-op emitter in the REP loop skeleton — skip if
   (E)CX == 0, run one iteration, decrement CX, loop back via gen_jmp so
   single-stepping sees one exception per iteration. */
1129 #define GEN_REPZ(op) \
1130 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1131 target_ulong cur_eip, target_ulong next_eip) \
1134 gen_update_cc_op(s); \
1135 l2 = gen_jz_ecx_string(s, next_eip); \
1136 gen_ ## op(s, ot); \
1137 gen_op_dec_ECX[s->aflag](); \
1138 /* a loop would cause two single step exceptions if ECX = 1 \
1139 before rep string_insn */ \
1141 gen_op_jz_ecx[s->aflag](l2); \
1142 gen_jmp(s, cur_eip); \
/* GEN_REPZ2: same skeleton plus the ZF test after the iteration, for
   REPZ/REPNZ CMPS and SCAS ('nz' selects the exit polarity). */
1145 #define GEN_REPZ2(op) \
1146 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1147 target_ulong cur_eip, \
1148 target_ulong next_eip, \
1152 gen_update_cc_op(s); \
1153 l2 = gen_jz_ecx_string(s, next_eip); \
1154 gen_ ## op(s, ot); \
1155 gen_op_dec_ECX[s->aflag](); \
1156 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1157 gen_op_string_jnz_sub[nz][ot](l2);\
1159 gen_op_jz_ecx[s->aflag](l2); \
1160 gen_jmp(s, cur_eip); \
/* Conditional-jump emitters for the optimized cmp/jcc case, indexed
   [operand size][condition].  BUGGY_64 entries are NULL (forces the slow
   path) per the push/pop workaround noted near the top of the file. */
1182 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1213 #ifdef TARGET_X86_64
1216 BUGGY_64(gen_op_jb_subq),
1218 BUGGY_64(gen_op_jbe_subq),
1221 BUGGY_64(gen_op_jl_subq),
1222 BUGGY_64(gen_op_jle_subq),
/* LOOP/LOOPZ/LOOPNZ/JCXZ emitters, [address size][variant]. */
1226 static GenOpFunc1 *gen_op_loop[3][4] = {
1237 #ifdef TARGET_X86_64
/* SETcc fallback using fully-computed flags, indexed by condition. */
1246 static GenOpFunc *gen_setcc_slow[8] = {
/* SETcc fast path after a subtract, [operand size][condition]. */
1257 static GenOpFunc *gen_setcc_sub[4][8] = {
1260 gen_op_setb_T0_subb,
1261 gen_op_setz_T0_subb,
1262 gen_op_setbe_T0_subb,
1263 gen_op_sets_T0_subb,
1265 gen_op_setl_T0_subb,
1266 gen_op_setle_T0_subb,
1270 gen_op_setb_T0_subw,
1271 gen_op_setz_T0_subw,
1272 gen_op_setbe_T0_subw,
1273 gen_op_sets_T0_subw,
1275 gen_op_setl_T0_subw,
1276 gen_op_setle_T0_subw,
1280 gen_op_setb_T0_subl,
1281 gen_op_setz_T0_subl,
1282 gen_op_setbe_T0_subl,
1283 gen_op_sets_T0_subl,
1285 gen_op_setl_T0_subl,
1286 gen_op_setle_T0_subl,
1288 #ifdef TARGET_X86_64
1291 gen_op_setb_T0_subq,
1292 gen_op_setz_T0_subq,
1293 gen_op_setbe_T0_subq,
1294 gen_op_sets_T0_subq,
1296 gen_op_setl_T0_subq,
1297 gen_op_setle_T0_subq,
/* x87 arithmetic helper tables, indexed by the 3-bit op field of the
   FPU instruction group (fadd/fmul/fcom/fcomp/fsub/fsubr/fdiv/fdivr). */
1302 static void *helper_fp_arith_ST0_FT0[8] = {
1303 helper_fadd_ST0_FT0,
1304 helper_fmul_ST0_FT0,
1305 helper_fcom_ST0_FT0,
1306 helper_fcom_ST0_FT0,
1307 helper_fsub_ST0_FT0,
1308 helper_fsubr_ST0_FT0,
1309 helper_fdiv_ST0_FT0,
1310 helper_fdivr_ST0_FT0,
1313 /* NOTE the exception in "r" op ordering */
/* STN-destination variants: sub/subr and div/divr are swapped relative
   to the table above, as the x86 encoding requires. */
1314 static void *helper_fp_arith_STN_ST0[8] = {
1315 helper_fadd_STN_ST0,
1316 helper_fmul_STN_ST0,
1319 helper_fsubr_STN_ST0,
1320 helper_fsub_STN_ST0,
1321 helper_fdivr_STN_ST0,
1322 helper_fdiv_STN_ST0,
1325 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one two-operand ALU op (ADD/ADC/SBB/SUB/AND/OR/XOR/CMP) of T1
   into operand 'd' at size 'ot'.  Loads the destination (register or
   memory), performs the op, writes back (except CMP), and records the
   lazy condition-code state in s1->cc_op. */
1326 static void gen_op(DisasContext *s1, int op, int ot, int d)
1328 GenOpFunc *gen_update_cc;
1331 gen_op_mov_TN_reg(ot, 0, d);
1333 gen_op_ld_T0_A0(ot + s1->mem_index);
/* ADC/SBB consume the current carry, so flags must be materialized
   before the op; afterwards cc_op becomes dynamic. */
1338 if (s1->cc_op != CC_OP_DYNAMIC)
1339 gen_op_set_cc_op(s1->cc_op);
1341 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1342 gen_op_mov_reg_T0(ot, d);
1344 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1346 s1->cc_op = CC_OP_DYNAMIC;
1349 gen_op_addl_T0_T1();
1350 s1->cc_op = CC_OP_ADDB + ot;
1351 gen_update_cc = gen_op_update2_cc;
1354 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355 s1->cc_op = CC_OP_SUBB + ot;
1356 gen_update_cc = gen_op_update2_cc;
1360 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1361 s1->cc_op = CC_OP_LOGICB + ot;
1362 gen_update_cc = gen_op_update1_cc;
1365 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1366 s1->cc_op = CC_OP_LOGICB + ot;
1367 gen_update_cc = gen_op_update1_cc;
1370 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1371 s1->cc_op = CC_OP_LOGICB + ot;
1372 gen_update_cc = gen_op_update1_cc;
1375 gen_op_cmpl_T0_T1_cc();
1376 s1->cc_op = CC_OP_SUBB + ot;
1377 gen_update_cc = NULL;
/* CMP discards the result; everything else writes it back. */
1380 if (op != OP_CMPL) {
1382 gen_op_mov_reg_T0(ot, d);
1384 gen_op_st_T0_A0(ot + s1->mem_index);
1386 /* the flags update must happen after the memory write (precise
1387 exception support) */
1393 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC for operand 'd' at size 'ot'.  Flags are
   flushed first because INC/DEC preserve CF from the previous op. */
1394 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1397 gen_op_mov_TN_reg(ot, 0, d);
1399 gen_op_ld_T0_A0(ot + s1->mem_index);
1400 if (s1->cc_op != CC_OP_DYNAMIC)
1401 gen_op_set_cc_op(s1->cc_op);
1404 s1->cc_op = CC_OP_INCB + ot;
1407 s1->cc_op = CC_OP_DECB + ot;
1410 gen_op_mov_reg_T0(ot, d);
1412 gen_op_st_T0_A0(ot + s1->mem_index);
1413 gen_op_update_inc_cc();
/* Emit a shift/rotate of operand 'd' by the count in operand 's', via
   the gen_op_shift tables.  A zero count leaves flags unchanged, so
   cc_op must be flushed before and is dynamic after. */
1416 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1419 gen_op_mov_TN_reg(ot, 0, d);
1421 gen_op_ld_T0_A0(ot + s1->mem_index);
1423 gen_op_mov_TN_reg(ot, 1, s);
1424 /* for zero counts, flags are not updated, so must do it dynamically */
1425 if (s1->cc_op != CC_OP_DYNAMIC)
1426 gen_op_set_cc_op(s1->cc_op);
1429 gen_op_shift_T0_T1_cc[ot][op]();
1431 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1433 gen_op_mov_reg_T0(ot, d);
1434 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Immediate-count shift: routed through gen_shift via T1. */
1437 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1439 /* currently not optimized */
1440 gen_op_movl_T1_im(c)
1441 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the ModRM/SIB/displacement bytes of a memory operand and emit
   code leaving the effective address in A0, including segment-base
   addition when required.  Handles 16/32/64-bit addressing, RIP-relative
   (CODE64 with mod=00 rm=101), and the popl-with-ESP-base hack. */
1444 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1452 int mod, rm, code, override, must_add_seg;
1454 override = s->override;
1455 must_add_seg = s->addseg;
1458 mod = (modrm >> 6) & 3;
/* SIB byte: scale/index/base fields; index is extended by REX.X. */
1470 code = ldub_code(s->pc++);
1471 scale = (code >> 6) & 3;
1472 index = ((code >> 3) & 7) | REX_X(s);
1479 if ((base & 7) == 5) {
1481 disp = (int32_t)ldl_code(s->pc);
/* no base and no SIB in 64-bit mode => RIP-relative addressing */
1483 if (CODE64(s) && !havesib) {
1484 disp += s->pc + s->rip_offset;
1491 disp = (int8_t)ldub_code(s->pc++);
1495 disp = ldl_code(s->pc);
1501 /* for correct popl handling with esp */
1502 if (base == 4 && s->popl_esp_hack)
1503 disp += s->popl_esp_hack;
1504 #ifdef TARGET_X86_64
1505 if (s->aflag == 2) {
1506 gen_op_movq_A0_reg(base);
1508 gen_op_addq_A0_im(disp);
1513 gen_op_movl_A0_reg(base);
1515 gen_op_addl_A0_im(disp);
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_movq_A0_im(disp);
1524 gen_op_movl_A0_im(disp);
1527 /* XXX: index == 4 is always invalid */
1528 if (havesib && (index != 4 || scale != 0)) {
1529 #ifdef TARGET_X86_64
1530 if (s->aflag == 2) {
1531 gen_op_addq_A0_reg_sN(scale, index);
1535 gen_op_addl_A0_reg_sN(scale, index);
/* EBP/ESP bases default to the SS segment when no override is given. */
1540 if (base == R_EBP || base == R_ESP)
1545 #ifdef TARGET_X86_64
1546 if (s->aflag == 2) {
1547 gen_op_addq_A0_seg(override);
1551 gen_op_addl_A0_seg(override);
/* 16-bit addressing: the classic BX/BP + SI/DI + disp combinations. */
1558 disp = lduw_code(s->pc);
1560 gen_op_movl_A0_im(disp);
1561 rm = 0; /* avoid SS override */
1568 disp = (int8_t)ldub_code(s->pc++);
1572 disp = lduw_code(s->pc);
1578 gen_op_movl_A0_reg(R_EBX);
1579 gen_op_addl_A0_reg_sN(0, R_ESI);
1582 gen_op_movl_A0_reg(R_EBX);
1583 gen_op_addl_A0_reg_sN(0, R_EDI);
1586 gen_op_movl_A0_reg(R_EBP);
1587 gen_op_addl_A0_reg_sN(0, R_ESI);
1590 gen_op_movl_A0_reg(R_EBP);
1591 gen_op_addl_A0_reg_sN(0, R_EDI);
1594 gen_op_movl_A0_reg(R_ESI);
1597 gen_op_movl_A0_reg(R_EDI);
1600 gen_op_movl_A0_reg(R_EBP);
1604 gen_op_movl_A0_reg(R_EBX);
1608 gen_op_addl_A0_im(disp);
1609 gen_op_andl_A0_ffff();
/* rm 2/3/6 involve BP and therefore default to SS. */
1613 if (rm == 2 || rm == 3 || rm == 6)
1618 gen_op_addl_A0_seg(override);
/* Consume the ModRM bytes of an operand whose value is not needed
   (used by multi-byte NOP / prefetch encodings). */
1628 static void gen_nop_modrm(DisasContext *s, int modrm)
1630 int mod, rm, base, code;
1632 mod = (modrm >> 6) & 3;
1642 code = ldub_code(s->pc++);
1678 /* used for LEA and MOV AX, mem */
/* Add the DS (or overridden) segment base to A0 when segmentation
   requires it. */
1679 static void gen_add_A0_ds_seg(DisasContext *s)
1681 int override, must_add_seg;
1682 must_add_seg = s->addseg;
1684 if (s->override >= 0) {
1685 override = s->override;
1691 #ifdef TARGET_X86_64
1693 gen_op_addq_A0_seg(override);
1697 gen_op_addl_A0_seg(override);
1702 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* Emit a register<->register or register<->memory move for a decoded
   ModRM operand; is_store selects the direction. */
1704 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1706 int mod, rm, opreg, disp;
1708 mod = (modrm >> 6) & 3;
1709 rm = (modrm & 7) | REX_B(s);
1713 gen_op_mov_TN_reg(ot, 0, reg);
1714 gen_op_mov_reg_T0(ot, rm);
1716 gen_op_mov_TN_reg(ot, 0, rm);
1718 gen_op_mov_reg_T0(ot, reg);
1721 gen_lea_modrm(s, modrm, &opreg, &disp);
1724 gen_op_mov_TN_reg(ot, 0, reg);
1725 gen_op_st_T0_A0(ot + s->mem_index);
1727 gen_op_ld_T0_A0(ot + s->mem_index);
1729 gen_op_mov_reg_T0(ot, reg);
/* Fetch an immediate of size 'ot' from the instruction stream. */
1734 static inline uint32_t insn_get(DisasContext *s, int ot)
1740 ret = ldub_code(s->pc);
1744 ret = lduw_code(s->pc);
1749 ret = ldl_code(s->pc);
/* Byte length of an immediate of the given operand size. */
1756 static inline int insn_const_size(unsigned int ot)
/* Emit a jump to 'eip': direct block chaining (goto_tb/exit_tb) when the
   target lies in one of the pages this TB already spans, otherwise the
   unchained slow path. */
1764 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1766 TranslationBlock *tb;
1769 pc = s->cs_base + eip;
1771 /* NOTE: we handle the case where the TB spans two pages here */
1772 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1773 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1774 /* jump to same page: we can use a direct jump */
1775 tcg_gen_goto_tb(tb_num);
1777 tcg_gen_exit_tb((long)tb + tb_num);
1779 /* jump to another page: currently not optimized */
/* Emit a conditional jump to 'val' (taken) / 'next_eip' (fallthrough).
   Uses the fast gen_jcc_sub path when the lazy flags came from a
   compatible subtract/arith op; otherwise computes the condition with
   gen_setcc_slow and tests T0.  With jmp_opt set, both edges use
   chained goto_tb exits. */
1785 static inline void gen_jcc(DisasContext *s, int b,
1786 target_ulong val, target_ulong next_eip)
1788 TranslationBlock *tb;
1795 jcc_op = (b >> 1) & 7;
1799 /* we optimize the cmp/jcc case */
1804 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1807 /* some jumps are easy to compute */
/* derive the size row from the arith cc_op family */
1849 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1852 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1864 if (s->cc_op != CC_OP_DYNAMIC) {
1865 gen_op_set_cc_op(s->cc_op);
1866 s->cc_op = CC_OP_DYNAMIC;
1870 gen_setcc_slow[jcc_op]();
1871 func = gen_op_jnz_T0_label;
1881 l1 = gen_new_label();
1884 gen_goto_tb(s, 0, next_eip);
1887 gen_goto_tb(s, 1, val);
/* non-chained variant: set eip on each edge and end the block */
1892 if (s->cc_op != CC_OP_DYNAMIC) {
1893 gen_op_set_cc_op(s->cc_op);
1894 s->cc_op = CC_OP_DYNAMIC;
1896 gen_setcc_slow[jcc_op]();
1902 l1 = gen_new_label();
1903 l2 = gen_new_label();
1904 gen_op_jnz_T0_label(l1);
1905 gen_jmp_im(next_eip);
1906 gen_op_jmp_label(l2);
/* Emit SETcc: T0 := condition result, with the same fast/slow path
   selection as gen_jcc. */
1914 static void gen_setcc(DisasContext *s, int b)
1920 jcc_op = (b >> 1) & 7;
1922 /* we optimize the cmp/jcc case */
1927 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1932 /* some jumps are easy to compute */
1959 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1962 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1970 if (s->cc_op != CC_OP_DYNAMIC)
1971 gen_op_set_cc_op(s->cc_op);
1972 func = gen_setcc_slow[jcc_op];
1981 /* move T0 to seg_reg and compute if the CPU state may change. Never
1982 call this function with seg_reg == R_CS */
1983 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
/* In protected mode (and not vm86) the load may fault, so flags and
   EIP are synchronized and the helper does the full descriptor load;
   otherwise a simple selector move suffices. */
1985 if (s->pe && !s->vm86) {
1986 /* XXX: optimize by finding processor state dynamically */
1987 if (s->cc_op != CC_OP_DYNAMIC)
1988 gen_op_set_cc_op(s->cc_op);
1989 gen_jmp_im(cur_eip);
1990 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
1991 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2)
1992 /* abort translation because the addseg value may change or
1993 because ss32 may change. For R_SS, translation must always
1994 stop as a special handling must be done to disable hardware
1995 interrupts for the next instruction */
1996 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
/* Real/vm86 mode: direct selector write, no descriptor checks. */
1999 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
2000 if (seg_reg == R_SS)
2005 static inline int svm_is_rep(int prefixes)
/* Return the SVM exit-info REP-prefix flag: 8 when a REPZ/REPNZ
   prefix is present on the intercepted instruction, else 0. */
2007 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2011 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2012 uint32_t type, uint64_t param)
/* Emit an SVM intercept check for exit code 'type' with additional
   exit-info 'param'.  Fast-exits when no SVM intercepts are active.
   NOTE(review): the switch statement and several case labels are
   elided in this excerpt. */
2014 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2015 /* no SVM activated */
2018 /* CRx and DRx reads/writes */
2019 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2020 if (s->cc_op != CC_OP_DYNAMIC) {
2021 gen_op_set_cc_op(s->cc_op);
2023 gen_jmp_im(pc_start - s->cs_base);
2024 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2025 tcg_const_i32(type), tcg_const_i64(param));
2026 /* this is a special case as we do not know if the interception occurs
2027 so we assume there was none */
/* MSR accesses: only checked when MSR protection intercept is set. */
2030 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2031 if (s->cc_op != CC_OP_DYNAMIC) {
2032 gen_op_set_cc_op(s->cc_op);
2034 gen_jmp_im(pc_start - s->cs_base);
2035 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2036 tcg_const_i32(type), tcg_const_i64(param));
2037 /* this is a special case as we do not know if the interception occurs
2038 so we assume there was none */
/* Other intercepts: if enabled, the exit is unconditional, so call
   helper_vmexit directly. */
2043 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2044 if (s->cc_op != CC_OP_DYNAMIC) {
2045 gen_op_set_cc_op(s->cc_op);
2047 gen_jmp_im(pc_start - s->cs_base);
2048 tcg_gen_helper_0_2(helper_vmexit,
2049 tcg_const_i32(type), tcg_const_i64(param));
2050 /* we can optimize this one so TBs don't get longer
2051 than up to vmexit */
/* Convenience wrapper: intercept check with no extra exit-info param. */
2060 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2062 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2065 static inline void gen_stack_update(DisasContext *s, int addend)
/* Add 'addend' to ESP/RSP using the width implied by the current
   code/stack size (64-bit, 32-bit, or 16-bit variants).
   NOTE(review): the selecting if/else lines are elided here. */
2067 #ifdef TARGET_X86_64
2069 gen_op_addq_ESP_im(addend);
2073 gen_op_addl_ESP_im(addend);
2075 gen_op_addw_ESP_im(addend);
2079 /* generate a push. It depends on ss32, addseg and dflag */
2080 static void gen_push_T0(DisasContext *s)
/* Push T0 on the guest stack.  64-bit mode stores 8 or 2 bytes
   directly; legacy mode computes the new ESP into A0/T1, applies the
   SS base when needed (addseg / !ss32), stores, then writes ESP back.
   NOTE(review): braces and some branch lines are elided here. */
2082 #ifdef TARGET_X86_64
2084 gen_op_movq_A0_reg(R_ESP);
2086 gen_op_addq_A0_im(-8);
2087 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2089 gen_op_addq_A0_im(-2);
2090 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2092 gen_op_mov_reg_A0(2, R_ESP);
2096 gen_op_movl_A0_reg(R_ESP);
2098 gen_op_addl_A0_im(-2);
2100 gen_op_addl_A0_im(-4);
/* 32-bit stack with addseg: keep the new offset in T1, add SS base. */
2103 gen_op_movl_T1_A0();
2104 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap offset to 64K before adding SS base. */
2107 gen_op_andl_A0_ffff();
2108 gen_op_movl_T1_A0();
2109 gen_op_addl_A0_seg(R_SS);
2111 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2112 if (s->ss32 && !s->addseg)
2113 gen_op_mov_reg_A0(1, R_ESP);
2115 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2119 /* generate a push. It depends on ss32, addseg and dflag */
2120 /* slower version for T1, only used for call Ev */
2121 static void gen_push_T1(DisasContext *s)
/* Same structure as gen_push_T0 but the pushed value lives in T1
   (T0 is needed elsewhere by the call Ev sequence).
   NOTE(review): braces and some branch lines are elided here. */
2123 #ifdef TARGET_X86_64
2125 gen_op_movq_A0_reg(R_ESP);
2127 gen_op_addq_A0_im(-8);
2128 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2130 gen_op_addq_A0_im(-2);
/* BUG(review): this stores T0, but this function pushes T1 — the
   OT_QUAD path above uses gen_op_st_T1_A0.  The 16-bit-operand push
   in 64-bit mode would push the wrong value; likely should be
   gen_op_st_T1_A0. */
2131 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2133 gen_op_mov_reg_A0(2, R_ESP);
2137 gen_op_movl_A0_reg(R_ESP);
2139 gen_op_addl_A0_im(-2);
2141 gen_op_addl_A0_im(-4);
2144 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap offset to 64K before adding SS base. */
2147 gen_op_andl_A0_ffff();
2148 gen_op_addl_A0_seg(R_SS);
2150 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2152 if (s->ss32 && !s->addseg)
2153 gen_op_mov_reg_A0(1, R_ESP);
2155 gen_stack_update(s, (-2) << s->dflag);
2159 /* two step pop is necessary for precise exceptions */
2160 static void gen_pop_T0(DisasContext *s)
/* Load the top of stack into T0 without updating ESP; the caller
   adjusts ESP afterwards via gen_pop_update so a faulting load leaves
   the stack pointer unchanged (precise exceptions). */
2162 #ifdef TARGET_X86_64
2164 gen_op_movq_A0_reg(R_ESP);
2165 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2169 gen_op_movl_A0_reg(R_ESP);
2172 gen_op_addl_A0_seg(R_SS);
/* 16-bit stack: wrap offset before applying the SS base. */
2174 gen_op_andl_A0_ffff();
2175 gen_op_addl_A0_seg(R_SS);
2177 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2181 static void gen_pop_update(DisasContext *s)
/* Second half of a pop: bump ESP by the operand size just read
   (8 bytes in 64-bit mode with dflag set, otherwise 2 << dflag). */
2183 #ifdef TARGET_X86_64
2184 if (CODE64(s) && s->dflag) {
2185 gen_stack_update(s, 8);
2189 gen_stack_update(s, 2 << s->dflag);
2193 static void gen_stack_A0(DisasContext *s)
/* Compute the linear address of the stack top into A0 (offset also
   kept in T1); applies the 64K wrap and SS base for legacy stacks.
   NOTE(review): the guarding conditionals are elided here. */
2195 gen_op_movl_A0_reg(R_ESP);
2197 gen_op_andl_A0_ffff();
2198 gen_op_movl_T1_A0();
2200 gen_op_addl_A0_seg(R_SS);
2203 /* NOTE: wrap around in 16 bit not fully handled */
2204 static void gen_pusha(DisasContext *s)
/* PUSHA/PUSHAD: pre-decrement ESP by 8 registers' worth, then store
   EDI..EAX (register 7 down to 0) upward from the new top. */
2207 gen_op_movl_A0_reg(R_ESP);
2208 gen_op_addl_A0_im(-16 << s->dflag);
2210 gen_op_andl_A0_ffff();
2211 gen_op_movl_T1_A0();
2213 gen_op_addl_A0_seg(R_SS);
2214 for(i = 0;i < 8; i++) {
2215 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2216 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2217 gen_op_addl_A0_im(2 << s->dflag);
/* Finally commit the new stack pointer saved in T1. */
2219 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2222 /* NOTE: wrap around in 16 bit not fully handled */
2223 static void gen_popa(DisasContext *s)
/* POPA/POPAD: load registers EDI..EAX from the stack; the ESP slot is
   skipped, and the final ESP comes from T1 (old ESP + frame size). */
2226 gen_op_movl_A0_reg(R_ESP);
2228 gen_op_andl_A0_ffff();
2229 gen_op_movl_T1_A0();
2230 gen_op_addl_T1_im(16 << s->dflag);
2232 gen_op_addl_A0_seg(R_SS);
2233 for(i = 0;i < 8; i++) {
2234 /* ESP is not reloaded */
2236 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2237 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2239 gen_op_addl_A0_im(2 << s->dflag);
2241 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2244 static void gen_enter(DisasContext *s, int esp_addend, int level)
/* Translate ENTER: push EBP, optionally copy 'level' frame pointers
   via a helper, set EBP to the new frame, and reserve 'esp_addend'
   bytes of locals.  Separate 64-bit and legacy code paths.
   NOTE(review): braces, declarations and some lines are elided. */
2249 #ifdef TARGET_X86_64
2251 ot = s->dflag ? OT_QUAD : OT_WORD;
2254 gen_op_movl_A0_reg(R_ESP);
2255 gen_op_addq_A0_im(-opsize);
2256 gen_op_movl_T1_A0();
/* push EBP at the new top of stack */
2259 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2260 gen_op_st_T0_A0(ot + s->mem_index);
2262 /* XXX: must save state */
2263 tcg_gen_helper_0_3(helper_enter64_level,
2264 tcg_const_i32(level),
2265 tcg_const_i32((ot == OT_QUAD)),
2268 gen_op_mov_reg_T1(ot, R_EBP);
2269 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2270 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
/* Legacy (16/32-bit) path. */
2274 ot = s->dflag + OT_WORD;
2275 opsize = 2 << s->dflag;
2277 gen_op_movl_A0_reg(R_ESP);
2278 gen_op_addl_A0_im(-opsize);
2280 gen_op_andl_A0_ffff();
2281 gen_op_movl_T1_A0();
2283 gen_op_addl_A0_seg(R_SS);
2285 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2286 gen_op_st_T0_A0(ot + s->mem_index);
2288 /* XXX: must save state */
2289 tcg_gen_helper_0_3(helper_enter_level,
2290 tcg_const_i32(level),
2291 tcg_const_i32(s->dflag),
2294 gen_op_mov_reg_T1(ot, R_EBP);
2295 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2296 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2300 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
/* Raise guest exception 'trapno' at 'cur_eip': flush the lazy flags
   state, synchronize EIP, then call the raise helper (never returns
   to the TB). */
2302 if (s->cc_op != CC_OP_DYNAMIC)
2303 gen_op_set_cc_op(s->cc_op);
2304 gen_jmp_im(cur_eip);
2305 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2309 /* an interrupt is different from an exception because of the
2311 static void gen_interrupt(DisasContext *s, int intno,
2312 target_ulong cur_eip, target_ulong next_eip)
/* Raise software interrupt 'intno'.  Unlike an exception, the helper
   also needs the instruction length (next_eip - cur_eip) so the
   pushed return EIP points past the INT instruction. */
2314 if (s->cc_op != CC_OP_DYNAMIC)
2315 gen_op_set_cc_op(s->cc_op);
2316 gen_jmp_im(cur_eip);
2317 tcg_gen_helper_0_2(helper_raise_interrupt,
2318 tcg_const_i32(intno),
2319 tcg_const_i32(next_eip - cur_eip));
2323 static void gen_debug(DisasContext *s, target_ulong cur_eip)
/* Stop execution and enter the debugger at 'cur_eip': sync flags and
   EIP, then call the debug helper. */
2325 if (s->cc_op != CC_OP_DYNAMIC)
2326 gen_op_set_cc_op(s->cc_op);
2327 gen_jmp_im(cur_eip);
2328 tcg_gen_helper_0_0(helper_debug);
2332 /* generate a generic end of block. Trace exception is also generated
2334 static void gen_eob(DisasContext *s)
/* Generic end-of-TB: flush lazy flags, drop the one-insn IRQ inhibit
   (set e.g. by MOV SS), honor single-step/debug, and exit the TB.
   NOTE(review): the final exit_tb lines are elided in this excerpt. */
2336 if (s->cc_op != CC_OP_DYNAMIC)
2337 gen_op_set_cc_op(s->cc_op);
2338 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2339 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2341 if (s->singlestep_enabled) {
2342 tcg_gen_helper_0_0(helper_debug);
2344 tcg_gen_helper_0_0(helper_single_step);
2351 /* generate a jump to eip. No segment change must happen before as a
2352 direct call to the next block may occur */
2353 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
/* Unconditional jump within the same segment: flush flags state if
   needed, then emit a (possibly chained) jump via gen_goto_tb. */
2356 if (s->cc_op != CC_OP_DYNAMIC) {
2357 gen_op_set_cc_op(s->cc_op);
2358 s->cc_op = CC_OP_DYNAMIC;
2360 gen_goto_tb(s, tb_num, eip);
2368 static void gen_jmp(DisasContext *s, target_ulong eip)
/* Shorthand for gen_jmp_tb with TB slot 0. */
2370 gen_jmp_tb(s, eip, 0);
2373 static inline void gen_ldq_env_A0(int idx, int offset)
/* Load a 64-bit value from guest address A0 into env field 'offset'.
   'idx' is an OT_* + s->mem_index encoding; (idx >> 2) - 1 recovers
   the TCG memory index. */
2375 int mem_index = (idx >> 2) - 1;
2376 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2377 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
2380 static inline void gen_stq_env_A0(int idx, int offset)
/* Store the 64-bit env field at 'offset' to guest address A0
   (inverse of gen_ldq_env_A0). */
2382 int mem_index = (idx >> 2) - 1;
2383 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2384 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2387 static inline void gen_ldo_env_A0(int idx, int offset)
/* Load a 128-bit (octa) XMM value from guest address A0 into the
   XMMReg at env offset 'offset', as two 64-bit halves. */
2389 int mem_index = (idx >> 2) - 1;
2390 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2391 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2392 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2393 tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2394 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2397 static inline void gen_sto_env_A0(int idx, int offset)
/* Store a 128-bit XMM value from env offset 'offset' to guest address
   A0 (inverse of gen_ldo_env_A0). */
2399 int mem_index = (idx >> 2) - 1;
2400 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2401 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2402 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2403 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2404 tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
2407 static inline void gen_op_movo(int d_offset, int s_offset)
/* Copy 128 bits between two env offsets (XMM register move), done as
   two 64-bit loads/stores. */
2409 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2410 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2411 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2412 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
2415 static inline void gen_op_movq(int d_offset, int s_offset)
/* Copy 64 bits between two env offsets. */
2417 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2418 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2421 static inline void gen_op_movl(int d_offset, int s_offset)
/* Copy 32 bits between two env offsets. */
2423 tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2424 tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
2427 static inline void gen_op_movq_env_0(int d_offset)
/* Zero the 64-bit env field at 'd_offset' (e.g. clearing the high
   half of an XMM register). */
2429 tcg_gen_movi_i64(cpu_tmp1, 0);
2430 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
/* Sentinel table values: SSE_SPECIAL ops are decoded by hand in
   gen_sse(); SSE_DUMMY marks ops (femms/emms/3DNow!) handled before
   table dispatch. */
2433 #define SSE_SPECIAL ((void *)1)
2434 #define SSE_DUMMY ((void *)2)
/* MMX_OP2: {mmx, xmm} helper pair.  SSE_FOP: the four FP variants
   {ps, pd, ss, sd} for a scalar/packed float op. */
2436 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2437 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2438 helper_ ## x ## ss, helper_ ## x ## sd, }
/* Primary 0x0F-prefixed SSE/MMX dispatch table, indexed by
   [opcode byte][b1] where b1 selects the mandatory-prefix variant:
   0 = none, 1 = 0x66 (PREFIX_DATA), 2 = 0xF3 (PREFIX_REPZ),
   3 = 0xF2 (PREFIX_REPNZ) — see the prefix test in gen_sse(). */
2440 static void *sse_op_table1[256][4] = {
2441 /* 3DNow! extensions */
2442 [0x0e] = { SSE_DUMMY }, /* femms */
2443 [0x0f] = { SSE_DUMMY }, /* pf... */
2444 /* pure SSE operations */
2445 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2446 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2447 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2448 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2449 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2450 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2451 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2452 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2454 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2455 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2456 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2457 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2458 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2459 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2460 [0x2e] = { helper_ucomiss, helper_ucomisd },
2461 [0x2f] = { helper_comiss, helper_comisd },
2462 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2463 [0x51] = SSE_FOP(sqrt),
2464 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2465 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2466 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2467 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2468 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2469 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2470 [0x58] = SSE_FOP(add),
2471 [0x59] = SSE_FOP(mul),
2472 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2473 helper_cvtss2sd, helper_cvtsd2ss },
2474 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2475 [0x5c] = SSE_FOP(sub),
2476 [0x5d] = SSE_FOP(min),
2477 [0x5e] = SSE_FOP(div),
2478 [0x5f] = SSE_FOP(max),
2480 [0xc2] = SSE_FOP(cmpeq), /* cmpps etc.: predicate from imm8 */
2481 [0xc6] = { helper_shufps, helper_shufpd },
2483 /* MMX ops and their SSE extensions */
2484 [0x60] = MMX_OP2(punpcklbw),
2485 [0x61] = MMX_OP2(punpcklwd),
2486 [0x62] = MMX_OP2(punpckldq),
2487 [0x63] = MMX_OP2(packsswb),
2488 [0x64] = MMX_OP2(pcmpgtb),
2489 [0x65] = MMX_OP2(pcmpgtw),
2490 [0x66] = MMX_OP2(pcmpgtl),
2491 [0x67] = MMX_OP2(packuswb),
2492 [0x68] = MMX_OP2(punpckhbw),
2493 [0x69] = MMX_OP2(punpckhwd),
2494 [0x6a] = MMX_OP2(punpckhdq),
2495 [0x6b] = MMX_OP2(packssdw),
2496 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2497 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2498 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2499 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2500 [0x70] = { helper_pshufw_mmx,
2503 helper_pshuflw_xmm },
2504 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2505 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2506 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2507 [0x74] = MMX_OP2(pcmpeqb),
2508 [0x75] = MMX_OP2(pcmpeqw),
2509 [0x76] = MMX_OP2(pcmpeql),
2510 [0x77] = { SSE_DUMMY }, /* emms */
2511 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2512 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2513 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2514 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2515 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2516 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2517 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2518 [0xd1] = MMX_OP2(psrlw),
2519 [0xd2] = MMX_OP2(psrld),
2520 [0xd3] = MMX_OP2(psrlq),
2521 [0xd4] = MMX_OP2(paddq),
2522 [0xd5] = MMX_OP2(pmullw),
2523 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2524 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2525 [0xd8] = MMX_OP2(psubusb),
2526 [0xd9] = MMX_OP2(psubusw),
2527 [0xda] = MMX_OP2(pminub),
2528 [0xdb] = MMX_OP2(pand),
2529 [0xdc] = MMX_OP2(paddusb),
2530 [0xdd] = MMX_OP2(paddusw),
2531 [0xde] = MMX_OP2(pmaxub),
2532 [0xdf] = MMX_OP2(pandn),
2533 [0xe0] = MMX_OP2(pavgb),
2534 [0xe1] = MMX_OP2(psraw),
2535 [0xe2] = MMX_OP2(psrad),
2536 [0xe3] = MMX_OP2(pavgw),
2537 [0xe4] = MMX_OP2(pmulhuw),
2538 [0xe5] = MMX_OP2(pmulhw),
2539 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2540 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */
2541 [0xe8] = MMX_OP2(psubsb),
2542 [0xe9] = MMX_OP2(psubsw),
2543 [0xea] = MMX_OP2(pminsw),
2544 [0xeb] = MMX_OP2(por),
2545 [0xec] = MMX_OP2(paddsb),
2546 [0xed] = MMX_OP2(paddsw),
2547 [0xee] = MMX_OP2(pmaxsw),
2548 [0xef] = MMX_OP2(pxor),
2549 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2550 [0xf1] = MMX_OP2(psllw),
2551 [0xf2] = MMX_OP2(pslld),
2552 [0xf3] = MMX_OP2(psllq),
2553 [0xf4] = MMX_OP2(pmuludq),
2554 [0xf5] = MMX_OP2(pmaddwd),
2555 [0xf6] = MMX_OP2(psadbw),
2556 [0xf7] = MMX_OP2(maskmov),
2557 [0xf8] = MMX_OP2(psubb),
2558 [0xf9] = MMX_OP2(psubw),
2559 [0xfa] = MMX_OP2(psubl),
2560 [0xfb] = MMX_OP2(psubq),
2561 [0xfc] = MMX_OP2(paddb),
2562 [0xfd] = MMX_OP2(paddw),
2563 [0xfe] = MMX_OP2(paddl),
/* Immediate shift group (0x71/0x72/0x73): indexed by
   8 * (opcode group: word/dword/qword) + modrm.reg (2=srl, 4=sra,
   6=sll; 3/7 are the XMM-only psrldq/pslldq byte shifts). */
2566 static void *sse_op_table2[3 * 8][2] = {
2567 [0 + 2] = MMX_OP2(psrlw),
2568 [0 + 4] = MMX_OP2(psraw),
2569 [0 + 6] = MMX_OP2(psllw),
2570 [8 + 2] = MMX_OP2(psrld),
2571 [8 + 4] = MMX_OP2(psrad),
2572 [8 + 6] = MMX_OP2(pslld),
2573 [16 + 2] = MMX_OP2(psrlq),
2574 [16 + 3] = { NULL, helper_psrldq_xmm },
2575 [16 + 6] = MMX_OP2(psllq),
2576 [16 + 7] = { NULL, helper_pslldq_xmm },
/* Scalar int<->float conversion helpers (cvtsi2ss/sd, cvtt*2si,
   cvt*2si); the 64-bit-source/destination variants exist only on
   x86-64 builds (X86_64_ONLY expands to NULL otherwise).
   NOTE(review): the 32-bit entries of this table are elided in this
   excerpt. */
2579 static void *sse_op_table3[4 * 3] = {
2582 X86_64_ONLY(helper_cvtsq2ss),
2583 X86_64_ONLY(helper_cvtsq2sd),
2587 X86_64_ONLY(helper_cvttss2sq),
2588 X86_64_ONLY(helper_cvttsd2sq),
2592 X86_64_ONLY(helper_cvtss2sq),
2593 X86_64_ONLY(helper_cvtsd2sq),
/* NOTE(review): the rows of this table are elided in this excerpt. */
2596 static void *sse_op_table4[8][4] = {
/* 3DNow! dispatch table, indexed by the instruction's trailing
   opcode/imm byte. */
2607 static void *sse_op_table5[256] = {
2608 [0x0c] = helper_pi2fw,
2609 [0x0d] = helper_pi2fd,
2610 [0x1c] = helper_pf2iw,
2611 [0x1d] = helper_pf2id,
2612 [0x8a] = helper_pfnacc,
2613 [0x8e] = helper_pfpnacc,
2614 [0x90] = helper_pfcmpge,
2615 [0x94] = helper_pfmin,
2616 [0x96] = helper_pfrcp,
2617 [0x97] = helper_pfrsqrt,
2618 [0x9a] = helper_pfsub,
2619 [0x9e] = helper_pfadd,
2620 [0xa0] = helper_pfcmpgt,
2621 [0xa4] = helper_pfmax,
2622 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2623 [0xa7] = helper_movq, /* pfrsqit1 */
2624 [0xaa] = helper_pfsubr,
2625 [0xae] = helper_pfacc,
2626 [0xb0] = helper_pfcmpeq,
2627 [0xb4] = helper_pfmul,
2628 [0xb6] = helper_movq, /* pfrcpit2 */
2629 [0xb7] = helper_pmulhrw_mmx,
2630 [0xbb] = helper_pswapd,
2631 [0xbf] = helper_pavgb_mmx /* pavgusb */
2634 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2636 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2637 int modrm, mod, rm, reg, reg_addr, offset_addr;
2641 if (s->prefix & PREFIX_DATA)
2643 else if (s->prefix & PREFIX_REPZ)
2645 else if (s->prefix & PREFIX_REPNZ)
2649 sse_op2 = sse_op_table1[b][b1];
2652 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2662 /* simple MMX/SSE operation */
2663 if (s->flags & HF_TS_MASK) {
2664 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2667 if (s->flags & HF_EM_MASK) {
2669 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2672 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2675 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2678 tcg_gen_helper_0_0(helper_emms);
2683 tcg_gen_helper_0_0(helper_emms);
2686 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2687 the static cpu state) */
2689 tcg_gen_helper_0_0(helper_enter_mmx);
2692 modrm = ldub_code(s->pc++);
2693 reg = ((modrm >> 3) & 7);
2696 mod = (modrm >> 6) & 3;
2697 if (sse_op2 == SSE_SPECIAL) {
2700 case 0x0e7: /* movntq */
2703 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2704 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2706 case 0x1e7: /* movntdq */
2707 case 0x02b: /* movntps */
2708 case 0x12b: /* movntps */
2709 case 0x3f0: /* lddqu */
2712 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2713 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2715 case 0x6e: /* movd mm, ea */
2716 #ifdef TARGET_X86_64
2717 if (s->dflag == 2) {
2718 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2719 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2723 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2724 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2725 offsetof(CPUX86State,fpregs[reg].mmx));
2726 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2729 case 0x16e: /* movd xmm, ea */
2730 #ifdef TARGET_X86_64
2731 if (s->dflag == 2) {
2732 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2733 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2734 offsetof(CPUX86State,xmm_regs[reg]));
2735 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2739 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2740 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2741 offsetof(CPUX86State,xmm_regs[reg]));
2742 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2743 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2746 case 0x6f: /* movq mm, ea */
2748 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2749 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2752 tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2753 offsetof(CPUX86State,fpregs[rm].mmx));
2754 tcg_gen_st_i64(cpu_tmp1, cpu_env,
2755 offsetof(CPUX86State,fpregs[reg].mmx));
2758 case 0x010: /* movups */
2759 case 0x110: /* movupd */
2760 case 0x028: /* movaps */
2761 case 0x128: /* movapd */
2762 case 0x16f: /* movdqa xmm, ea */
2763 case 0x26f: /* movdqu xmm, ea */
2765 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2766 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2768 rm = (modrm & 7) | REX_B(s);
2769 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2770 offsetof(CPUX86State,xmm_regs[rm]));
2773 case 0x210: /* movss xmm, ea */
2775 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2776 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2777 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2779 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2780 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2781 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2783 rm = (modrm & 7) | REX_B(s);
2784 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2785 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2788 case 0x310: /* movsd xmm, ea */
2790 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2791 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2793 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2794 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2796 rm = (modrm & 7) | REX_B(s);
2797 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2798 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2801 case 0x012: /* movlps */
2802 case 0x112: /* movlpd */
2804 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2805 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2808 rm = (modrm & 7) | REX_B(s);
2809 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2810 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2813 case 0x212: /* movsldup */
2815 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2816 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2818 rm = (modrm & 7) | REX_B(s);
2819 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2820 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2821 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2822 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2824 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2825 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2826 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2827 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2829 case 0x312: /* movddup */
2831 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2832 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2834 rm = (modrm & 7) | REX_B(s);
2835 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2836 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2838 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2839 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2841 case 0x016: /* movhps */
2842 case 0x116: /* movhpd */
2844 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2845 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2848 rm = (modrm & 7) | REX_B(s);
2849 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2850 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2853 case 0x216: /* movshdup */
2855 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2856 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2858 rm = (modrm & 7) | REX_B(s);
2859 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2860 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2861 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2862 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2864 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2865 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2866 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2867 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2869 case 0x7e: /* movd ea, mm */
2870 #ifdef TARGET_X86_64
2871 if (s->dflag == 2) {
2872 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2873 offsetof(CPUX86State,fpregs[reg].mmx));
2874 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2878 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2879 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2880 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2883 case 0x17e: /* movd ea, xmm */
2884 #ifdef TARGET_X86_64
2885 if (s->dflag == 2) {
2886 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2887 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2888 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2892 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2893 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2894 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2897 case 0x27e: /* movq xmm, ea */
2899 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2900 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2902 rm = (modrm & 7) | REX_B(s);
2903 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2904 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2906 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2908 case 0x7f: /* movq ea, mm */
2910 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2911 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2914 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2915 offsetof(CPUX86State,fpregs[reg].mmx));
2918 case 0x011: /* movups */
2919 case 0x111: /* movupd */
2920 case 0x029: /* movaps */
2921 case 0x129: /* movapd */
2922 case 0x17f: /* movdqa ea, xmm */
2923 case 0x27f: /* movdqu ea, xmm */
2925 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2926 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2928 rm = (modrm & 7) | REX_B(s);
2929 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2930 offsetof(CPUX86State,xmm_regs[reg]));
2933 case 0x211: /* movss ea, xmm */
2935 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2936 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2937 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2939 rm = (modrm & 7) | REX_B(s);
2940 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2941 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2944 case 0x311: /* movsd ea, xmm */
2946 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2947 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2949 rm = (modrm & 7) | REX_B(s);
2950 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2951 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2954 case 0x013: /* movlps */
2955 case 0x113: /* movlpd */
2957 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2958 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2963 case 0x017: /* movhps */
2964 case 0x117: /* movhpd */
2966 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2967 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2972 case 0x71: /* shift mm, im */
2975 case 0x171: /* shift xmm, im */
2978 val = ldub_code(s->pc++);
2980 gen_op_movl_T0_im(val);
2981 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2983 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2984 op1_offset = offsetof(CPUX86State,xmm_t0);
2986 gen_op_movl_T0_im(val);
2987 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2989 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2990 op1_offset = offsetof(CPUX86State,mmx_t0);
2992 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2996 rm = (modrm & 7) | REX_B(s);
2997 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3000 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3002 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3003 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3004 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3006 case 0x050: /* movmskps */
3007 rm = (modrm & 7) | REX_B(s);
3008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3009 offsetof(CPUX86State,xmm_regs[rm]));
3010 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3011 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3012 gen_op_mov_reg_T0(OT_LONG, reg);
3014 case 0x150: /* movmskpd */
3015 rm = (modrm & 7) | REX_B(s);
3016 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3017 offsetof(CPUX86State,xmm_regs[rm]));
3018 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3019 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3020 gen_op_mov_reg_T0(OT_LONG, reg);
3022 case 0x02a: /* cvtpi2ps */
3023 case 0x12a: /* cvtpi2pd */
3024 tcg_gen_helper_0_0(helper_enter_mmx);
3026 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3027 op2_offset = offsetof(CPUX86State,mmx_t0);
3028 gen_ldq_env_A0(s->mem_index, op2_offset);
3031 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3033 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3034 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3035 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3038 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3042 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3046 case 0x22a: /* cvtsi2ss */
3047 case 0x32a: /* cvtsi2sd */
3048 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3049 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3050 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3051 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3052 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3053 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3054 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3056 case 0x02c: /* cvttps2pi */
3057 case 0x12c: /* cvttpd2pi */
3058 case 0x02d: /* cvtps2pi */
3059 case 0x12d: /* cvtpd2pi */
3060 tcg_gen_helper_0_0(helper_enter_mmx);
3062 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3063 op2_offset = offsetof(CPUX86State,xmm_t0);
3064 gen_ldo_env_A0(s->mem_index, op2_offset);
3066 rm = (modrm & 7) | REX_B(s);
3067 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3069 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3070 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3071 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3074 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3077 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3080 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3083 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3087 case 0x22c: /* cvttss2si */
3088 case 0x32c: /* cvttsd2si */
3089 case 0x22d: /* cvtss2si */
3090 case 0x32d: /* cvtsd2si */
3091 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3093 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3095 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3097 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3098 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3100 op2_offset = offsetof(CPUX86State,xmm_t0);
3102 rm = (modrm & 7) | REX_B(s);
3103 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3105 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3107 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3108 if (ot == OT_LONG) {
3109 tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3110 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3112 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3114 gen_op_mov_reg_T0(ot, reg);
3116 case 0xc4: /* pinsrw */
3119 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3120 val = ldub_code(s->pc++);
3123 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3124 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3127 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3128 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3131 case 0xc5: /* pextrw */
3135 val = ldub_code(s->pc++);
3138 rm = (modrm & 7) | REX_B(s);
3139 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3140 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3144 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3145 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3147 reg = ((modrm >> 3) & 7) | rex_r;
3148 gen_op_mov_reg_T0(OT_LONG, reg);
3150 case 0x1d6: /* movq ea, xmm */
3152 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3153 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3155 rm = (modrm & 7) | REX_B(s);
3156 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3157 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3158 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3161 case 0x2d6: /* movq2dq */
3162 tcg_gen_helper_0_0(helper_enter_mmx);
3164 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3165 offsetof(CPUX86State,fpregs[rm].mmx));
3166 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3168 case 0x3d6: /* movdq2q */
3169 tcg_gen_helper_0_0(helper_enter_mmx);
3170 rm = (modrm & 7) | REX_B(s);
3171 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3172 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3174 case 0xd7: /* pmovmskb */
3179 rm = (modrm & 7) | REX_B(s);
3180 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3181 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3184 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3185 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3187 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3188 reg = ((modrm >> 3) & 7) | rex_r;
3189 gen_op_mov_reg_T0(OT_LONG, reg);
3195 /* generic MMX or SSE operation */
3197 case 0x70: /* pshufx insn */
3198 case 0xc6: /* pshufx insn */
3199 case 0xc2: /* compare insns */
3206 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3208 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3209 op2_offset = offsetof(CPUX86State,xmm_t0);
3210 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3212 /* specific case for SSE single instructions */
3215 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3216 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3219 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3222 gen_ldo_env_A0(s->mem_index, op2_offset);
3225 rm = (modrm & 7) | REX_B(s);
3226 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3229 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3231 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3232 op2_offset = offsetof(CPUX86State,mmx_t0);
3233 gen_ldq_env_A0(s->mem_index, op2_offset);
3236 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3240 case 0x0f: /* 3DNow! data insns */
3241 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3243 val = ldub_code(s->pc++);
3244 sse_op2 = sse_op_table5[val];
3247 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3248 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3249 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3251 case 0x70: /* pshufx insn */
3252 case 0xc6: /* pshufx insn */
3253 val = ldub_code(s->pc++);
3254 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3255 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3256 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3260 val = ldub_code(s->pc++);
3263 sse_op2 = sse_op_table4[val][b1];
3264 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3265 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3266 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3269 /* maskmov : we must prepare A0 */
3272 #ifdef TARGET_X86_64
3273 if (s->aflag == 2) {
3274 gen_op_movq_A0_reg(R_EDI);
3278 gen_op_movl_A0_reg(R_EDI);
3280 gen_op_andl_A0_ffff();
3282 gen_add_A0_ds_seg(s);
3284 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3285 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3286 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3289 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3290 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3291 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3294 if (b == 0x2e || b == 0x2f) {
3295 /* just to keep the EFLAGS optimization correct */
3297 s->cc_op = CC_OP_EFLAGS;
3302 /* convert one instruction. s->is_jmp is set if the translation must
3303 be stopped. Return the next pc value */
3304 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3306 int b, prefixes, aflag, dflag;
3308 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3309 target_ulong next_eip, tval;
3319 #ifdef TARGET_X86_64
3324 s->rip_offset = 0; /* for relative ip address */
3326 b = ldub_code(s->pc);
3328 /* check prefixes */
3329 #ifdef TARGET_X86_64
3333 prefixes |= PREFIX_REPZ;
3336 prefixes |= PREFIX_REPNZ;
3339 prefixes |= PREFIX_LOCK;
3360 prefixes |= PREFIX_DATA;
3363 prefixes |= PREFIX_ADR;
3367 rex_w = (b >> 3) & 1;
3368 rex_r = (b & 0x4) << 1;
3369 s->rex_x = (b & 0x2) << 2;
3370 REX_B(s) = (b & 0x1) << 3;
3371 x86_64_hregs = 1; /* select uniform byte register addressing */
3375 /* 0x66 is ignored if rex.w is set */
3378 if (prefixes & PREFIX_DATA)
3381 if (!(prefixes & PREFIX_ADR))
3388 prefixes |= PREFIX_REPZ;
3391 prefixes |= PREFIX_REPNZ;
3394 prefixes |= PREFIX_LOCK;
3415 prefixes |= PREFIX_DATA;
3418 prefixes |= PREFIX_ADR;
3421 if (prefixes & PREFIX_DATA)
3423 if (prefixes & PREFIX_ADR)
3427 s->prefix = prefixes;
3431 /* lock generation */
3432 if (prefixes & PREFIX_LOCK)
3433 tcg_gen_helper_0_0(helper_lock);
3435 /* now check op code */
3439 /**************************/
3440 /* extended op code */
3441 b = ldub_code(s->pc++) | 0x100;
3444 /**************************/
3462 ot = dflag + OT_WORD;
3465 case 0: /* OP Ev, Gv */
3466 modrm = ldub_code(s->pc++);
3467 reg = ((modrm >> 3) & 7) | rex_r;
3468 mod = (modrm >> 6) & 3;
3469 rm = (modrm & 7) | REX_B(s);
3471 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3473 } else if (op == OP_XORL && rm == reg) {
3475 /* xor reg, reg optimisation */
3477 s->cc_op = CC_OP_LOGICB + ot;
3478 gen_op_mov_reg_T0(ot, reg);
3479 gen_op_update1_cc();
3484 gen_op_mov_TN_reg(ot, 1, reg);
3485 gen_op(s, op, ot, opreg);
3487 case 1: /* OP Gv, Ev */
3488 modrm = ldub_code(s->pc++);
3489 mod = (modrm >> 6) & 3;
3490 reg = ((modrm >> 3) & 7) | rex_r;
3491 rm = (modrm & 7) | REX_B(s);
3493 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3494 gen_op_ld_T1_A0(ot + s->mem_index);
3495 } else if (op == OP_XORL && rm == reg) {
3498 gen_op_mov_TN_reg(ot, 1, rm);
3500 gen_op(s, op, ot, reg);
3502 case 2: /* OP A, Iv */
3503 val = insn_get(s, ot);
3504 gen_op_movl_T1_im(val);
3505 gen_op(s, op, ot, OR_EAX);
3511 case 0x80: /* GRP1 */
3521 ot = dflag + OT_WORD;
3523 modrm = ldub_code(s->pc++);
3524 mod = (modrm >> 6) & 3;
3525 rm = (modrm & 7) | REX_B(s);
3526 op = (modrm >> 3) & 7;
3532 s->rip_offset = insn_const_size(ot);
3533 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3544 val = insn_get(s, ot);
3547 val = (int8_t)insn_get(s, OT_BYTE);
3550 gen_op_movl_T1_im(val);
3551 gen_op(s, op, ot, opreg);
3555 /**************************/
3556 /* inc, dec, and other misc arith */
3557 case 0x40 ... 0x47: /* inc Gv */
3558 ot = dflag ? OT_LONG : OT_WORD;
3559 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3561 case 0x48 ... 0x4f: /* dec Gv */
3562 ot = dflag ? OT_LONG : OT_WORD;
3563 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3565 case 0xf6: /* GRP3 */
3570 ot = dflag + OT_WORD;
3572 modrm = ldub_code(s->pc++);
3573 mod = (modrm >> 6) & 3;
3574 rm = (modrm & 7) | REX_B(s);
3575 op = (modrm >> 3) & 7;
3578 s->rip_offset = insn_const_size(ot);
3579 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3580 gen_op_ld_T0_A0(ot + s->mem_index);
3582 gen_op_mov_TN_reg(ot, 0, rm);
3587 val = insn_get(s, ot);
3588 gen_op_movl_T1_im(val);
3589 gen_op_testl_T0_T1_cc();
3590 s->cc_op = CC_OP_LOGICB + ot;
3595 gen_op_st_T0_A0(ot + s->mem_index);
3597 gen_op_mov_reg_T0(ot, rm);
3603 gen_op_st_T0_A0(ot + s->mem_index);
3605 gen_op_mov_reg_T0(ot, rm);
3607 gen_op_update_neg_cc();
3608 s->cc_op = CC_OP_SUBB + ot;
3613 gen_op_mulb_AL_T0();
3614 s->cc_op = CC_OP_MULB;
3617 gen_op_mulw_AX_T0();
3618 s->cc_op = CC_OP_MULW;
3622 gen_op_mull_EAX_T0();
3623 s->cc_op = CC_OP_MULL;
3625 #ifdef TARGET_X86_64
3627 gen_op_mulq_EAX_T0();
3628 s->cc_op = CC_OP_MULQ;
3636 gen_op_imulb_AL_T0();
3637 s->cc_op = CC_OP_MULB;
3640 gen_op_imulw_AX_T0();
3641 s->cc_op = CC_OP_MULW;
3645 gen_op_imull_EAX_T0();
3646 s->cc_op = CC_OP_MULL;
3648 #ifdef TARGET_X86_64
3650 gen_op_imulq_EAX_T0();
3651 s->cc_op = CC_OP_MULQ;
3659 gen_jmp_im(pc_start - s->cs_base);
3660 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3663 gen_jmp_im(pc_start - s->cs_base);
3664 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3668 gen_jmp_im(pc_start - s->cs_base);
3669 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3671 #ifdef TARGET_X86_64
3673 gen_jmp_im(pc_start - s->cs_base);
3674 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3682 gen_jmp_im(pc_start - s->cs_base);
3683 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3686 gen_jmp_im(pc_start - s->cs_base);
3687 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3691 gen_jmp_im(pc_start - s->cs_base);
3692 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3694 #ifdef TARGET_X86_64
3696 gen_jmp_im(pc_start - s->cs_base);
3697 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3707 case 0xfe: /* GRP4 */
3708 case 0xff: /* GRP5 */
3712 ot = dflag + OT_WORD;
3714 modrm = ldub_code(s->pc++);
3715 mod = (modrm >> 6) & 3;
3716 rm = (modrm & 7) | REX_B(s);
3717 op = (modrm >> 3) & 7;
3718 if (op >= 2 && b == 0xfe) {
3722 if (op == 2 || op == 4) {
3723 /* operand size for jumps is 64 bit */
3725 } else if (op == 3 || op == 5) {
3726 /* for lcall/ljmp, the operand is 16 or 32 bit, even
3728 ot = dflag ? OT_LONG : OT_WORD;
3729 } else if (op == 6) {
3730 /* default push size is 64 bit */
3731 ot = dflag ? OT_QUAD : OT_WORD;
3735 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3736 if (op >= 2 && op != 3 && op != 5)
3737 gen_op_ld_T0_A0(ot + s->mem_index);
3739 gen_op_mov_TN_reg(ot, 0, rm);
3743 case 0: /* inc Ev */
3748 gen_inc(s, ot, opreg, 1);
3750 case 1: /* dec Ev */
3755 gen_inc(s, ot, opreg, -1);
3757 case 2: /* call Ev */
3758 /* XXX: optimize if memory (no 'and' is necessary) */
3760 gen_op_andl_T0_ffff();
3761 next_eip = s->pc - s->cs_base;
3762 gen_movtl_T1_im(next_eip);
3767 case 3: /* lcall Ev */
3768 gen_op_ld_T1_A0(ot + s->mem_index);
3769 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3770 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3772 if (s->pe && !s->vm86) {
3773 if (s->cc_op != CC_OP_DYNAMIC)
3774 gen_op_set_cc_op(s->cc_op);
3775 gen_jmp_im(pc_start - s->cs_base);
3776 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3777 tcg_gen_helper_0_4(helper_lcall_protected,
3779 tcg_const_i32(dflag),
3780 tcg_const_i32(s->pc - pc_start));
3782 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3783 tcg_gen_helper_0_4(helper_lcall_real,
3785 tcg_const_i32(dflag),
3786 tcg_const_i32(s->pc - s->cs_base));
3790 case 4: /* jmp Ev */
3792 gen_op_andl_T0_ffff();
3796 case 5: /* ljmp Ev */
3797 gen_op_ld_T1_A0(ot + s->mem_index);
3798 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3799 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3801 if (s->pe && !s->vm86) {
3802 if (s->cc_op != CC_OP_DYNAMIC)
3803 gen_op_set_cc_op(s->cc_op);
3804 gen_jmp_im(pc_start - s->cs_base);
3805 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3806 tcg_gen_helper_0_3(helper_ljmp_protected,
3809 tcg_const_i32(s->pc - pc_start));
3811 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3812 gen_op_movl_T0_T1();
3817 case 6: /* push Ev */
3825 case 0x84: /* test Ev, Gv */
3830 ot = dflag + OT_WORD;
3832 modrm = ldub_code(s->pc++);
3833 mod = (modrm >> 6) & 3;
3834 rm = (modrm & 7) | REX_B(s);
3835 reg = ((modrm >> 3) & 7) | rex_r;
3837 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3838 gen_op_mov_TN_reg(ot, 1, reg);
3839 gen_op_testl_T0_T1_cc();
3840 s->cc_op = CC_OP_LOGICB + ot;
3843 case 0xa8: /* test eAX, Iv */
3848 ot = dflag + OT_WORD;
3849 val = insn_get(s, ot);
3851 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3852 gen_op_movl_T1_im(val);
3853 gen_op_testl_T0_T1_cc();
3854 s->cc_op = CC_OP_LOGICB + ot;
3857 case 0x98: /* CWDE/CBW */
3858 #ifdef TARGET_X86_64
3860 gen_op_movslq_RAX_EAX();
3864 gen_op_movswl_EAX_AX();
3866 gen_op_movsbw_AX_AL();
3868 case 0x99: /* CDQ/CWD */
3869 #ifdef TARGET_X86_64
3871 gen_op_movsqo_RDX_RAX();
3875 gen_op_movslq_EDX_EAX();
3877 gen_op_movswl_DX_AX();
3879 case 0x1af: /* imul Gv, Ev */
3880 case 0x69: /* imul Gv, Ev, I */
3882 ot = dflag + OT_WORD;
3883 modrm = ldub_code(s->pc++);
3884 reg = ((modrm >> 3) & 7) | rex_r;
3886 s->rip_offset = insn_const_size(ot);
3889 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3891 val = insn_get(s, ot);
3892 gen_op_movl_T1_im(val);
3893 } else if (b == 0x6b) {
3894 val = (int8_t)insn_get(s, OT_BYTE);
3895 gen_op_movl_T1_im(val);
3897 gen_op_mov_TN_reg(ot, 1, reg);
3900 #ifdef TARGET_X86_64
3901 if (ot == OT_QUAD) {
3902 gen_op_imulq_T0_T1();
3905 if (ot == OT_LONG) {
3906 gen_op_imull_T0_T1();
3908 gen_op_imulw_T0_T1();
3910 gen_op_mov_reg_T0(ot, reg);
3911 s->cc_op = CC_OP_MULB + ot;
3914 case 0x1c1: /* xadd Ev, Gv */
3918 ot = dflag + OT_WORD;
3919 modrm = ldub_code(s->pc++);
3920 reg = ((modrm >> 3) & 7) | rex_r;
3921 mod = (modrm >> 6) & 3;
3923 rm = (modrm & 7) | REX_B(s);
3924 gen_op_mov_TN_reg(ot, 0, reg);
3925 gen_op_mov_TN_reg(ot, 1, rm);
3926 gen_op_addl_T0_T1();
3927 gen_op_mov_reg_T1(ot, reg);
3928 gen_op_mov_reg_T0(ot, rm);
3930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3931 gen_op_mov_TN_reg(ot, 0, reg);
3932 gen_op_ld_T1_A0(ot + s->mem_index);
3933 gen_op_addl_T0_T1();
3934 gen_op_st_T0_A0(ot + s->mem_index);
3935 gen_op_mov_reg_T1(ot, reg);
3937 gen_op_update2_cc();
3938 s->cc_op = CC_OP_ADDB + ot;
3941 case 0x1b1: /* cmpxchg Ev, Gv */
3945 ot = dflag + OT_WORD;
3946 modrm = ldub_code(s->pc++);
3947 reg = ((modrm >> 3) & 7) | rex_r;
3948 mod = (modrm >> 6) & 3;
3949 gen_op_mov_TN_reg(ot, 1, reg);
3951 rm = (modrm & 7) | REX_B(s);
3952 gen_op_mov_TN_reg(ot, 0, rm);
3953 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3954 gen_op_mov_reg_T0(ot, rm);
3956 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3957 gen_op_ld_T0_A0(ot + s->mem_index);
3958 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3960 s->cc_op = CC_OP_SUBB + ot;
3962 case 0x1c7: /* cmpxchg8b */
3963 modrm = ldub_code(s->pc++);
3964 mod = (modrm >> 6) & 3;
3965 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3967 gen_jmp_im(pc_start - s->cs_base);
3968 if (s->cc_op != CC_OP_DYNAMIC)
3969 gen_op_set_cc_op(s->cc_op);
3970 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3972 s->cc_op = CC_OP_EFLAGS;
3975 /**************************/
3977 case 0x50 ... 0x57: /* push */
3978 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3981 case 0x58 ... 0x5f: /* pop */
3983 ot = dflag ? OT_QUAD : OT_WORD;
3985 ot = dflag + OT_WORD;
3988 /* NOTE: order is important for pop %sp */
3990 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3992 case 0x60: /* pusha */
3997 case 0x61: /* popa */
4002 case 0x68: /* push Iv */
4005 ot = dflag ? OT_QUAD : OT_WORD;
4007 ot = dflag + OT_WORD;
4010 val = insn_get(s, ot);
4012 val = (int8_t)insn_get(s, OT_BYTE);
4013 gen_op_movl_T0_im(val);
4016 case 0x8f: /* pop Ev */
4018 ot = dflag ? OT_QUAD : OT_WORD;
4020 ot = dflag + OT_WORD;
4022 modrm = ldub_code(s->pc++);
4023 mod = (modrm >> 6) & 3;
4026 /* NOTE: order is important for pop %sp */
4028 rm = (modrm & 7) | REX_B(s);
4029 gen_op_mov_reg_T0(ot, rm);
4031 /* NOTE: order is important too for MMU exceptions */
4032 s->popl_esp_hack = 1 << ot;
4033 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4034 s->popl_esp_hack = 0;
4038 case 0xc8: /* enter */
4041 val = lduw_code(s->pc);
4043 level = ldub_code(s->pc++);
4044 gen_enter(s, val, level);
4047 case 0xc9: /* leave */
4048 /* XXX: exception not precise (ESP is updated before potential exception) */
4050 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4051 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4052 } else if (s->ss32) {
4053 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4054 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4056 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4057 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4061 ot = dflag ? OT_QUAD : OT_WORD;
4063 ot = dflag + OT_WORD;
4065 gen_op_mov_reg_T0(ot, R_EBP);
4068 case 0x06: /* push es */
4069 case 0x0e: /* push cs */
4070 case 0x16: /* push ss */
4071 case 0x1e: /* push ds */
4074 gen_op_movl_T0_seg(b >> 3);
4077 case 0x1a0: /* push fs */
4078 case 0x1a8: /* push gs */
4079 gen_op_movl_T0_seg((b >> 3) & 7);
4082 case 0x07: /* pop es */
4083 case 0x17: /* pop ss */
4084 case 0x1f: /* pop ds */
4089 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4092 /* if reg == SS, inhibit interrupts/trace. */
4093 /* If several instructions disable interrupts, only the
4095 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4096 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4100 gen_jmp_im(s->pc - s->cs_base);
4104 case 0x1a1: /* pop fs */
4105 case 0x1a9: /* pop gs */
4107 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4110 gen_jmp_im(s->pc - s->cs_base);
4115 /**************************/
4118 case 0x89: /* mov Gv, Ev */
4122 ot = dflag + OT_WORD;
4123 modrm = ldub_code(s->pc++);
4124 reg = ((modrm >> 3) & 7) | rex_r;
4126 /* generate a generic store */
4127 gen_ldst_modrm(s, modrm, ot, reg, 1);
4130 case 0xc7: /* mov Ev, Iv */
4134 ot = dflag + OT_WORD;
4135 modrm = ldub_code(s->pc++);
4136 mod = (modrm >> 6) & 3;
4138 s->rip_offset = insn_const_size(ot);
4139 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4141 val = insn_get(s, ot);
4142 gen_op_movl_T0_im(val);
4144 gen_op_st_T0_A0(ot + s->mem_index);
4146 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4149 case 0x8b: /* mov Ev, Gv */
4153 ot = OT_WORD + dflag;
4154 modrm = ldub_code(s->pc++);
4155 reg = ((modrm >> 3) & 7) | rex_r;
4157 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4158 gen_op_mov_reg_T0(ot, reg);
4160 case 0x8e: /* mov seg, Gv */
4161 modrm = ldub_code(s->pc++);
4162 reg = (modrm >> 3) & 7;
4163 if (reg >= 6 || reg == R_CS)
4165 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4166 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4168 /* if reg == SS, inhibit interrupts/trace */
4169 /* If several instructions disable interrupts, only the
4171 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4172 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4176 gen_jmp_im(s->pc - s->cs_base);
4180 case 0x8c: /* mov Gv, seg */
4181 modrm = ldub_code(s->pc++);
4182 reg = (modrm >> 3) & 7;
4183 mod = (modrm >> 6) & 3;
4186 gen_op_movl_T0_seg(reg);
4188 ot = OT_WORD + dflag;
4191 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4194 case 0x1b6: /* movzbS Gv, Eb */
4195 case 0x1b7: /* movzwS Gv, Eb */
4196 case 0x1be: /* movsbS Gv, Eb */
4197 case 0x1bf: /* movswS Gv, Eb */
4200 /* d_ot is the size of destination */
4201 d_ot = dflag + OT_WORD;
4202 /* ot is the size of source */
4203 ot = (b & 1) + OT_BYTE;
4204 modrm = ldub_code(s->pc++);
4205 reg = ((modrm >> 3) & 7) | rex_r;
4206 mod = (modrm >> 6) & 3;
4207 rm = (modrm & 7) | REX_B(s);
4210 gen_op_mov_TN_reg(ot, 0, rm);
4211 switch(ot | (b & 8)) {
4213 gen_op_movzbl_T0_T0();
4216 gen_op_movsbl_T0_T0();
4219 gen_op_movzwl_T0_T0();
4223 gen_op_movswl_T0_T0();
4226 gen_op_mov_reg_T0(d_ot, reg);
4228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4230 gen_op_lds_T0_A0(ot + s->mem_index);
4232 gen_op_ldu_T0_A0(ot + s->mem_index);
4234 gen_op_mov_reg_T0(d_ot, reg);
4239 case 0x8d: /* lea */
4240 ot = dflag + OT_WORD;
4241 modrm = ldub_code(s->pc++);
4242 mod = (modrm >> 6) & 3;
4245 reg = ((modrm >> 3) & 7) | rex_r;
4246 /* we must ensure that no segment is added */
4250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4252 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4255 case 0xa0: /* mov EAX, Ov */
4257 case 0xa2: /* mov Ov, EAX */
4260 target_ulong offset_addr;
4265 ot = dflag + OT_WORD;
4266 #ifdef TARGET_X86_64
4267 if (s->aflag == 2) {
4268 offset_addr = ldq_code(s->pc);
4270 gen_op_movq_A0_im(offset_addr);
4275 offset_addr = insn_get(s, OT_LONG);
4277 offset_addr = insn_get(s, OT_WORD);
4279 gen_op_movl_A0_im(offset_addr);
4281 gen_add_A0_ds_seg(s);
4283 gen_op_ld_T0_A0(ot + s->mem_index);
4284 gen_op_mov_reg_T0(ot, R_EAX);
4286 gen_op_mov_TN_reg(ot, 0, R_EAX);
4287 gen_op_st_T0_A0(ot + s->mem_index);
4291 case 0xd7: /* xlat */
4292 #ifdef TARGET_X86_64
4293 if (s->aflag == 2) {
4294 gen_op_movq_A0_reg(R_EBX);
4295 gen_op_addq_A0_AL();
4299 gen_op_movl_A0_reg(R_EBX);
4300 gen_op_addl_A0_AL();
4302 gen_op_andl_A0_ffff();
4304 gen_add_A0_ds_seg(s);
4305 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4306 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4308 case 0xb0 ... 0xb7: /* mov R, Ib */
4309 val = insn_get(s, OT_BYTE);
4310 gen_op_movl_T0_im(val);
4311 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4313 case 0xb8 ... 0xbf: /* mov R, Iv */
4314 #ifdef TARGET_X86_64
4318 tmp = ldq_code(s->pc);
4320 reg = (b & 7) | REX_B(s);
4321 gen_movtl_T0_im(tmp);
4322 gen_op_mov_reg_T0(OT_QUAD, reg);
4326 ot = dflag ? OT_LONG : OT_WORD;
4327 val = insn_get(s, ot);
4328 reg = (b & 7) | REX_B(s);
4329 gen_op_movl_T0_im(val);
4330 gen_op_mov_reg_T0(ot, reg);
4334 case 0x91 ... 0x97: /* xchg R, EAX */
4335 ot = dflag + OT_WORD;
4336 reg = (b & 7) | REX_B(s);
4340 case 0x87: /* xchg Ev, Gv */
4344 ot = dflag + OT_WORD;
4345 modrm = ldub_code(s->pc++);
4346 reg = ((modrm >> 3) & 7) | rex_r;
4347 mod = (modrm >> 6) & 3;
4349 rm = (modrm & 7) | REX_B(s);
4351 gen_op_mov_TN_reg(ot, 0, reg);
4352 gen_op_mov_TN_reg(ot, 1, rm);
4353 gen_op_mov_reg_T0(ot, rm);
4354 gen_op_mov_reg_T1(ot, reg);
4356 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4357 gen_op_mov_TN_reg(ot, 0, reg);
4358 /* for xchg, lock is implicit */
4359 if (!(prefixes & PREFIX_LOCK))
4360 tcg_gen_helper_0_0(helper_lock);
4361 gen_op_ld_T1_A0(ot + s->mem_index);
4362 gen_op_st_T0_A0(ot + s->mem_index);
4363 if (!(prefixes & PREFIX_LOCK))
4364 tcg_gen_helper_0_0(helper_unlock);
4365 gen_op_mov_reg_T1(ot, reg);
4368 case 0xc4: /* les Gv */
4373 case 0xc5: /* lds Gv */
4378 case 0x1b2: /* lss Gv */
4381 case 0x1b4: /* lfs Gv */
4384 case 0x1b5: /* lgs Gv */
4387 ot = dflag ? OT_LONG : OT_WORD;
4388 modrm = ldub_code(s->pc++);
4389 reg = ((modrm >> 3) & 7) | rex_r;
4390 mod = (modrm >> 6) & 3;
4393 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4394 gen_op_ld_T1_A0(ot + s->mem_index);
4395 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4396 /* load the segment first to handle exceptions properly */
4397 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4398 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4399 /* then put the data */
4400 gen_op_mov_reg_T1(ot, reg);
4402 gen_jmp_im(s->pc - s->cs_base);
4407 /************************/
4418 ot = dflag + OT_WORD;
4420 modrm = ldub_code(s->pc++);
4421 mod = (modrm >> 6) & 3;
4422 op = (modrm >> 3) & 7;
4428 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4431 opreg = (modrm & 7) | REX_B(s);
4436 gen_shift(s, op, ot, opreg, OR_ECX);
4439 shift = ldub_code(s->pc++);
4441 gen_shifti(s, op, ot, opreg, shift);
4456 case 0x1a4: /* shld imm */
4460 case 0x1a5: /* shld cl */
4464 case 0x1ac: /* shrd imm */
4468 case 0x1ad: /* shrd cl */
4472 ot = dflag + OT_WORD;
4473 modrm = ldub_code(s->pc++);
4474 mod = (modrm >> 6) & 3;
4475 rm = (modrm & 7) | REX_B(s);
4476 reg = ((modrm >> 3) & 7) | rex_r;
4479 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4480 gen_op_ld_T0_A0(ot + s->mem_index);
4482 gen_op_mov_TN_reg(ot, 0, rm);
4484 gen_op_mov_TN_reg(ot, 1, reg);
4487 val = ldub_code(s->pc++);
4494 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4496 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4497 if (op == 0 && ot != OT_WORD)
4498 s->cc_op = CC_OP_SHLB + ot;
4500 s->cc_op = CC_OP_SARB + ot;
4503 if (s->cc_op != CC_OP_DYNAMIC)
4504 gen_op_set_cc_op(s->cc_op);
4506 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4508 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4509 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4512 gen_op_mov_reg_T0(ot, rm);
4516 /************************/
4519 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4520 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4521 /* XXX: what to do if illegal op ? */
4522 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4525 modrm = ldub_code(s->pc++);
4526 mod = (modrm >> 6) & 3;
4528 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4531 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4533 case 0x00 ... 0x07: /* fxxxs */
4534 case 0x10 ... 0x17: /* fixxxl */
4535 case 0x20 ... 0x27: /* fxxxl */
4536 case 0x30 ... 0x37: /* fixxx */
4543 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4544 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4545 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
4548 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4549 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4550 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4553 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4554 (s->mem_index >> 2) - 1);
4555 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
4559 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4560 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4561 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4565 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4567 /* fcomp needs pop */
4568 tcg_gen_helper_0_0(helper_fpop);
4572 case 0x08: /* flds */
4573 case 0x0a: /* fsts */
4574 case 0x0b: /* fstps */
4575 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4576 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4577 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4582 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4583 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4584 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
4587 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4588 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4589 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4592 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4593 (s->mem_index >> 2) - 1);
4594 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
4598 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4599 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4600 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4605 /* XXX: the corresponding CPUID bit must be tested ! */
4608 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
4609 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4610 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4613 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
4614 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4615 (s->mem_index >> 2) - 1);
4619 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
4620 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4621 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4624 tcg_gen_helper_0_0(helper_fpop);
4629 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
4630 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4631 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4634 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
4635 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4636 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4639 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
4640 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4641 (s->mem_index >> 2) - 1);
4645 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
4646 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4647 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4651 tcg_gen_helper_0_0(helper_fpop);
4655 case 0x0c: /* fldenv mem */
4656 if (s->cc_op != CC_OP_DYNAMIC)
4657 gen_op_set_cc_op(s->cc_op);
4658 gen_jmp_im(pc_start - s->cs_base);
4659 tcg_gen_helper_0_2(helper_fldenv,
4660 cpu_A0, tcg_const_i32(s->dflag));
4662 case 0x0d: /* fldcw mem */
4663 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4664 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4665 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
4667 case 0x0e: /* fnstenv mem */
4668 if (s->cc_op != CC_OP_DYNAMIC)
4669 gen_op_set_cc_op(s->cc_op);
4670 gen_jmp_im(pc_start - s->cs_base);
4671 tcg_gen_helper_0_2(helper_fstenv,
4672 cpu_A0, tcg_const_i32(s->dflag));
4674 case 0x0f: /* fnstcw mem */
4675 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
4676 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4677 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4679 case 0x1d: /* fldt mem */
4680 if (s->cc_op != CC_OP_DYNAMIC)
4681 gen_op_set_cc_op(s->cc_op);
4682 gen_jmp_im(pc_start - s->cs_base);
4683 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4685 case 0x1f: /* fstpt mem */
4686 if (s->cc_op != CC_OP_DYNAMIC)
4687 gen_op_set_cc_op(s->cc_op);
4688 gen_jmp_im(pc_start - s->cs_base);
4689 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4690 tcg_gen_helper_0_0(helper_fpop);
4692 case 0x2c: /* frstor mem */
4693 if (s->cc_op != CC_OP_DYNAMIC)
4694 gen_op_set_cc_op(s->cc_op);
4695 gen_jmp_im(pc_start - s->cs_base);
4696 tcg_gen_helper_0_2(helper_frstor,
4697 cpu_A0, tcg_const_i32(s->dflag));
4699 case 0x2e: /* fnsave mem */
4700 if (s->cc_op != CC_OP_DYNAMIC)
4701 gen_op_set_cc_op(s->cc_op);
4702 gen_jmp_im(pc_start - s->cs_base);
4703 tcg_gen_helper_0_2(helper_fsave,
4704 cpu_A0, tcg_const_i32(s->dflag));
4706 case 0x2f: /* fnstsw mem */
4707 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4708 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4709 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4711 case 0x3c: /* fbld */
4712 if (s->cc_op != CC_OP_DYNAMIC)
4713 gen_op_set_cc_op(s->cc_op);
4714 gen_jmp_im(pc_start - s->cs_base);
4715 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4717 case 0x3e: /* fbstp */
4718 if (s->cc_op != CC_OP_DYNAMIC)
4719 gen_op_set_cc_op(s->cc_op);
4720 gen_jmp_im(pc_start - s->cs_base);
4721 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4722 tcg_gen_helper_0_0(helper_fpop);
4724 case 0x3d: /* fildll */
4725 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
4726 (s->mem_index >> 2) - 1);
4727 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
4729 case 0x3f: /* fistpll */
4730 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
4731 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
4732 (s->mem_index >> 2) - 1);
4733 tcg_gen_helper_0_0(helper_fpop);
4739 /* register float ops */
4743 case 0x08: /* fld sti */
4744 tcg_gen_helper_0_0(helper_fpush);
4745 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4747 case 0x09: /* fxchg sti */
4748 case 0x29: /* fxchg4 sti, undocumented op */
4749 case 0x39: /* fxchg7 sti, undocumented op */
4750 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4752 case 0x0a: /* grp d9/2 */
4755 /* check exceptions (FreeBSD FPU probe) */
4756 if (s->cc_op != CC_OP_DYNAMIC)
4757 gen_op_set_cc_op(s->cc_op);
4758 gen_jmp_im(pc_start - s->cs_base);
4759 tcg_gen_helper_0_0(helper_fwait);
4765 case 0x0c: /* grp d9/4 */
4768 tcg_gen_helper_0_0(helper_fchs_ST0);
4771 tcg_gen_helper_0_0(helper_fabs_ST0);
4774 tcg_gen_helper_0_0(helper_fldz_FT0);
4775 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4778 tcg_gen_helper_0_0(helper_fxam_ST0);
4784 case 0x0d: /* grp d9/5 */
4788 tcg_gen_helper_0_0(helper_fpush);
4789 tcg_gen_helper_0_0(helper_fld1_ST0);
4792 tcg_gen_helper_0_0(helper_fpush);
4793 tcg_gen_helper_0_0(helper_fldl2t_ST0);
4796 tcg_gen_helper_0_0(helper_fpush);
4797 tcg_gen_helper_0_0(helper_fldl2e_ST0);
4800 tcg_gen_helper_0_0(helper_fpush);
4801 tcg_gen_helper_0_0(helper_fldpi_ST0);
4804 tcg_gen_helper_0_0(helper_fpush);
4805 tcg_gen_helper_0_0(helper_fldlg2_ST0);
4808 tcg_gen_helper_0_0(helper_fpush);
4809 tcg_gen_helper_0_0(helper_fldln2_ST0);
4812 tcg_gen_helper_0_0(helper_fpush);
4813 tcg_gen_helper_0_0(helper_fldz_ST0);
4820 case 0x0e: /* grp d9/6 */
4823 tcg_gen_helper_0_0(helper_f2xm1);
4826 tcg_gen_helper_0_0(helper_fyl2x);
4829 tcg_gen_helper_0_0(helper_fptan);
4831 case 3: /* fpatan */
4832 tcg_gen_helper_0_0(helper_fpatan);
4834 case 4: /* fxtract */
4835 tcg_gen_helper_0_0(helper_fxtract);
4837 case 5: /* fprem1 */
4838 tcg_gen_helper_0_0(helper_fprem1);
4840 case 6: /* fdecstp */
4841 tcg_gen_helper_0_0(helper_fdecstp);
4844 case 7: /* fincstp */
4845 tcg_gen_helper_0_0(helper_fincstp);
4849 case 0x0f: /* grp d9/7 */
4852 tcg_gen_helper_0_0(helper_fprem);
4854 case 1: /* fyl2xp1 */
4855 tcg_gen_helper_0_0(helper_fyl2xp1);
4858 tcg_gen_helper_0_0(helper_fsqrt);
4860 case 3: /* fsincos */
4861 tcg_gen_helper_0_0(helper_fsincos);
4863 case 5: /* fscale */
4864 tcg_gen_helper_0_0(helper_fscale);
4866 case 4: /* frndint */
4867 tcg_gen_helper_0_0(helper_frndint);
4870 tcg_gen_helper_0_0(helper_fsin);
4874 tcg_gen_helper_0_0(helper_fcos);
4878 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4879 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4880 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4886 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
4888 tcg_gen_helper_0_0(helper_fpop);
4890 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4891 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4895 case 0x02: /* fcom */
4896 case 0x22: /* fcom2, undocumented op */
4897 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4898 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4900 case 0x03: /* fcomp */
4901 case 0x23: /* fcomp3, undocumented op */
4902 case 0x32: /* fcomp5, undocumented op */
4903 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4904 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4905 tcg_gen_helper_0_0(helper_fpop);
4907 case 0x15: /* da/5 */
4909 case 1: /* fucompp */
4910 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4911 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4912 tcg_gen_helper_0_0(helper_fpop);
4913 tcg_gen_helper_0_0(helper_fpop);
4921 case 0: /* feni (287 only, just do nop here) */
4923 case 1: /* fdisi (287 only, just do nop here) */
4926 tcg_gen_helper_0_0(helper_fclex);
4928 case 3: /* fninit */
4929 tcg_gen_helper_0_0(helper_fninit);
4931 case 4: /* fsetpm (287 only, just do nop here) */
4937 case 0x1d: /* fucomi */
4938 if (s->cc_op != CC_OP_DYNAMIC)
4939 gen_op_set_cc_op(s->cc_op);
4940 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4941 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
4942 gen_op_fcomi_dummy();
4943 s->cc_op = CC_OP_EFLAGS;
4945 case 0x1e: /* fcomi */
4946 if (s->cc_op != CC_OP_DYNAMIC)
4947 gen_op_set_cc_op(s->cc_op);
4948 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4949 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
4950 gen_op_fcomi_dummy();
4951 s->cc_op = CC_OP_EFLAGS;
4953 case 0x28: /* ffree sti */
4954 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4956 case 0x2a: /* fst sti */
4957 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4959 case 0x2b: /* fstp sti */
4960 case 0x0b: /* fstp1 sti, undocumented op */
4961 case 0x3a: /* fstp8 sti, undocumented op */
4962 case 0x3b: /* fstp9 sti, undocumented op */
4963 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4964 tcg_gen_helper_0_0(helper_fpop);
4966 case 0x2c: /* fucom st(i) */
4967 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4968 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4970 case 0x2d: /* fucomp st(i) */
4971 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4972 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4973 tcg_gen_helper_0_0(helper_fpop);
4975 case 0x33: /* de/3 */
4977 case 1: /* fcompp */
4978 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4979 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4980 tcg_gen_helper_0_0(helper_fpop);
4981 tcg_gen_helper_0_0(helper_fpop);
4987 case 0x38: /* ffreep sti, undocumented op */
4988 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4989 tcg_gen_helper_0_0(helper_fpop);
4991 case 0x3c: /* df/4 */
4994 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4995 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4996 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5002 case 0x3d: /* fucomip */
5003 if (s->cc_op != CC_OP_DYNAMIC)
5004 gen_op_set_cc_op(s->cc_op);
5005 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5006 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5007 tcg_gen_helper_0_0(helper_fpop);
5008 gen_op_fcomi_dummy();
5009 s->cc_op = CC_OP_EFLAGS;
5011 case 0x3e: /* fcomip */
5012 if (s->cc_op != CC_OP_DYNAMIC)
5013 gen_op_set_cc_op(s->cc_op);
5014 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5015 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5016 tcg_gen_helper_0_0(helper_fpop);
5017 gen_op_fcomi_dummy();
5018 s->cc_op = CC_OP_EFLAGS;
5020 case 0x10 ... 0x13: /* fcmovxx */
5024 const static uint8_t fcmov_cc[8] = {
5030 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5032 l1 = gen_new_label();
5033 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5034 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5043 /************************/
5046 case 0xa4: /* movsS */
5051 ot = dflag + OT_WORD;
5053 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5054 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5060 case 0xaa: /* stosS */
5065 ot = dflag + OT_WORD;
5067 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5068 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5073 case 0xac: /* lodsS */
5078 ot = dflag + OT_WORD;
5079 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5080 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5085 case 0xae: /* scasS */
5090 ot = dflag + OT_WORD;
5091 if (prefixes & PREFIX_REPNZ) {
5092 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5093 } else if (prefixes & PREFIX_REPZ) {
5094 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5097 s->cc_op = CC_OP_SUBB + ot;
5101 case 0xa6: /* cmpsS */
5106 ot = dflag + OT_WORD;
5107 if (prefixes & PREFIX_REPNZ) {
5108 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5109 } else if (prefixes & PREFIX_REPZ) {
5110 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5113 s->cc_op = CC_OP_SUBB + ot;
5116 case 0x6c: /* insS */
5121 ot = dflag ? OT_LONG : OT_WORD;
5122 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5123 gen_op_andl_T0_ffff();
5124 gen_check_io(s, ot, pc_start - s->cs_base,
5125 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5126 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5127 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5132 case 0x6e: /* outsS */
5137 ot = dflag ? OT_LONG : OT_WORD;
5138 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5139 gen_op_andl_T0_ffff();
5140 gen_check_io(s, ot, pc_start - s->cs_base,
5141 svm_is_rep(prefixes) | 4);
5142 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5143 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5149 /************************/
5157 ot = dflag ? OT_LONG : OT_WORD;
5158 val = ldub_code(s->pc++);
5159 gen_op_movl_T0_im(val);
5160 gen_check_io(s, ot, pc_start - s->cs_base,
5161 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5162 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5163 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
5164 gen_op_mov_reg_T1(ot, R_EAX);
5171 ot = dflag ? OT_LONG : OT_WORD;
5172 val = ldub_code(s->pc++);
5173 gen_op_movl_T0_im(val);
5174 gen_check_io(s, ot, pc_start - s->cs_base,
5175 svm_is_rep(prefixes));
5176 gen_op_mov_TN_reg(ot, 1, R_EAX);
5178 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5179 tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
5180 tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
5181 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
5188 ot = dflag ? OT_LONG : OT_WORD;
5189 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5190 gen_op_andl_T0_ffff();
5191 gen_check_io(s, ot, pc_start - s->cs_base,
5192 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5193 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5194 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
5195 gen_op_mov_reg_T1(ot, R_EAX);
5202 ot = dflag ? OT_LONG : OT_WORD;
5203 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5204 gen_op_andl_T0_ffff();
5205 gen_check_io(s, ot, pc_start - s->cs_base,
5206 svm_is_rep(prefixes));
5207 gen_op_mov_TN_reg(ot, 1, R_EAX);
5209 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5210 tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
5211 tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
5212 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
5215 /************************/
5217 case 0xc2: /* ret im */
5218 val = ldsw_code(s->pc);
5221 if (CODE64(s) && s->dflag)
5223 gen_stack_update(s, val + (2 << s->dflag));
5225 gen_op_andl_T0_ffff();
5229 case 0xc3: /* ret */
5233 gen_op_andl_T0_ffff();
5237 case 0xca: /* lret im */
5238 val = ldsw_code(s->pc);
5241 if (s->pe && !s->vm86) {
5242 if (s->cc_op != CC_OP_DYNAMIC)
5243 gen_op_set_cc_op(s->cc_op);
5244 gen_jmp_im(pc_start - s->cs_base);
5245 tcg_gen_helper_0_2(helper_lret_protected,
5246 tcg_const_i32(s->dflag),
5247 tcg_const_i32(val));
5251 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5253 gen_op_andl_T0_ffff();
5254 /* NOTE: keeping EIP updated is not a problem in case of
5258 gen_op_addl_A0_im(2 << s->dflag);
5259 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5260 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5261 /* add stack offset */
5262 gen_stack_update(s, val + (4 << s->dflag));
5266 case 0xcb: /* lret */
5269 case 0xcf: /* iret */
5270 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5274 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5275 s->cc_op = CC_OP_EFLAGS;
5276 } else if (s->vm86) {
5278 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5280 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5281 s->cc_op = CC_OP_EFLAGS;
5284 if (s->cc_op != CC_OP_DYNAMIC)
5285 gen_op_set_cc_op(s->cc_op);
5286 gen_jmp_im(pc_start - s->cs_base);
5287 tcg_gen_helper_0_2(helper_iret_protected,
5288 tcg_const_i32(s->dflag),
5289 tcg_const_i32(s->pc - s->cs_base));
5290 s->cc_op = CC_OP_EFLAGS;
5294 case 0xe8: /* call im */
5297 tval = (int32_t)insn_get(s, OT_LONG);
5299 tval = (int16_t)insn_get(s, OT_WORD);
5300 next_eip = s->pc - s->cs_base;
5304 gen_movtl_T0_im(next_eip);
5309 case 0x9a: /* lcall im */
5311 unsigned int selector, offset;
5315 ot = dflag ? OT_LONG : OT_WORD;
5316 offset = insn_get(s, ot);
5317 selector = insn_get(s, OT_WORD);
5319 gen_op_movl_T0_im(selector);
5320 gen_op_movl_T1_imu(offset);
5323 case 0xe9: /* jmp im */
5325 tval = (int32_t)insn_get(s, OT_LONG);
5327 tval = (int16_t)insn_get(s, OT_WORD);
5328 tval += s->pc - s->cs_base;
5333 case 0xea: /* ljmp im */
5335 unsigned int selector, offset;
5339 ot = dflag ? OT_LONG : OT_WORD;
5340 offset = insn_get(s, ot);
5341 selector = insn_get(s, OT_WORD);
5343 gen_op_movl_T0_im(selector);
5344 gen_op_movl_T1_imu(offset);
5347 case 0xeb: /* jmp Jb */
5348 tval = (int8_t)insn_get(s, OT_BYTE);
5349 tval += s->pc - s->cs_base;
5354 case 0x70 ... 0x7f: /* jcc Jb */
5355 tval = (int8_t)insn_get(s, OT_BYTE);
5357 case 0x180 ... 0x18f: /* jcc Jv */
5359 tval = (int32_t)insn_get(s, OT_LONG);
5361 tval = (int16_t)insn_get(s, OT_WORD);
5364 next_eip = s->pc - s->cs_base;
5368 gen_jcc(s, b, tval, next_eip);
5371 case 0x190 ... 0x19f: /* setcc Gv */
5372 modrm = ldub_code(s->pc++);
5374 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5376 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5377 ot = dflag + OT_WORD;
5378 modrm = ldub_code(s->pc++);
5379 reg = ((modrm >> 3) & 7) | rex_r;
5380 mod = (modrm >> 6) & 3;
5383 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5384 gen_op_ld_T1_A0(ot + s->mem_index);
5386 rm = (modrm & 7) | REX_B(s);
5387 gen_op_mov_TN_reg(ot, 1, rm);
5389 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5392 /************************/
5394 case 0x9c: /* pushf */
5395 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5397 if (s->vm86 && s->iopl != 3) {
5398 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5400 if (s->cc_op != CC_OP_DYNAMIC)
5401 gen_op_set_cc_op(s->cc_op);
5402 gen_op_movl_T0_eflags();
5406 case 0x9d: /* popf */
5407 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5409 if (s->vm86 && s->iopl != 3) {
5410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5415 gen_op_movl_eflags_T0_cpl0();
5417 gen_op_movw_eflags_T0_cpl0();
5420 if (s->cpl <= s->iopl) {
5422 gen_op_movl_eflags_T0_io();
5424 gen_op_movw_eflags_T0_io();
5428 gen_op_movl_eflags_T0();
5430 gen_op_movw_eflags_T0();
5435 s->cc_op = CC_OP_EFLAGS;
5436 /* abort translation because TF flag may change */
5437 gen_jmp_im(s->pc - s->cs_base);
5441 case 0x9e: /* sahf */
5444 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5445 if (s->cc_op != CC_OP_DYNAMIC)
5446 gen_op_set_cc_op(s->cc_op);
5447 gen_op_movb_eflags_T0();
5448 s->cc_op = CC_OP_EFLAGS;
5450 case 0x9f: /* lahf */
5453 if (s->cc_op != CC_OP_DYNAMIC)
5454 gen_op_set_cc_op(s->cc_op);
5455 gen_op_movl_T0_eflags();
5456 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5458 case 0xf5: /* cmc */
5459 if (s->cc_op != CC_OP_DYNAMIC)
5460 gen_op_set_cc_op(s->cc_op);
5462 s->cc_op = CC_OP_EFLAGS;
5464 case 0xf8: /* clc */
5465 if (s->cc_op != CC_OP_DYNAMIC)
5466 gen_op_set_cc_op(s->cc_op);
5468 s->cc_op = CC_OP_EFLAGS;
5470 case 0xf9: /* stc */
5471 if (s->cc_op != CC_OP_DYNAMIC)
5472 gen_op_set_cc_op(s->cc_op);
5474 s->cc_op = CC_OP_EFLAGS;
5476 case 0xfc: /* cld */
5477 tcg_gen_movi_i32(cpu_tmp2, 1);
5478 tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5480 case 0xfd: /* std */
5481 tcg_gen_movi_i32(cpu_tmp2, -1);
5482 tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5485 /************************/
5486 /* bit operations */
5487 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5488 ot = dflag + OT_WORD;
5489 modrm = ldub_code(s->pc++);
5490 op = (modrm >> 3) & 7;
5491 mod = (modrm >> 6) & 3;
5492 rm = (modrm & 7) | REX_B(s);
5495 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5496 gen_op_ld_T0_A0(ot + s->mem_index);
5498 gen_op_mov_TN_reg(ot, 0, rm);
5501 val = ldub_code(s->pc++);
5502 gen_op_movl_T1_im(val);
5506 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5507 s->cc_op = CC_OP_SARB + ot;
5510 gen_op_st_T0_A0(ot + s->mem_index);
5512 gen_op_mov_reg_T0(ot, rm);
5513 gen_op_update_bt_cc();
5516 case 0x1a3: /* bt Gv, Ev */
5519 case 0x1ab: /* bts */
5522 case 0x1b3: /* btr */
5525 case 0x1bb: /* btc */
5528 ot = dflag + OT_WORD;
5529 modrm = ldub_code(s->pc++);
5530 reg = ((modrm >> 3) & 7) | rex_r;
5531 mod = (modrm >> 6) & 3;
5532 rm = (modrm & 7) | REX_B(s);
5533 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5535 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5536 /* specific case: we need to add a displacement */
5537 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5538 gen_op_ld_T0_A0(ot + s->mem_index);
5540 gen_op_mov_TN_reg(ot, 0, rm);
5542 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5543 s->cc_op = CC_OP_SARB + ot;
5546 gen_op_st_T0_A0(ot + s->mem_index);
5548 gen_op_mov_reg_T0(ot, rm);
5549 gen_op_update_bt_cc();
5552 case 0x1bc: /* bsf */
5553 case 0x1bd: /* bsr */
5554 ot = dflag + OT_WORD;
5555 modrm = ldub_code(s->pc++);
5556 reg = ((modrm >> 3) & 7) | rex_r;
5557 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5558 /* NOTE: in order to handle the 0 case, we must load the
5559 result. It could be optimized with a generated jump */
5560 gen_op_mov_TN_reg(ot, 1, reg);
5561 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5562 gen_op_mov_reg_T1(ot, reg);
5563 s->cc_op = CC_OP_LOGICB + ot;
5565 /************************/
5567 case 0x27: /* daa */
5570 if (s->cc_op != CC_OP_DYNAMIC)
5571 gen_op_set_cc_op(s->cc_op);
5573 s->cc_op = CC_OP_EFLAGS;
5575 case 0x2f: /* das */
5578 if (s->cc_op != CC_OP_DYNAMIC)
5579 gen_op_set_cc_op(s->cc_op);
5581 s->cc_op = CC_OP_EFLAGS;
5583 case 0x37: /* aaa */
5586 if (s->cc_op != CC_OP_DYNAMIC)
5587 gen_op_set_cc_op(s->cc_op);
5589 s->cc_op = CC_OP_EFLAGS;
5591 case 0x3f: /* aas */
5594 if (s->cc_op != CC_OP_DYNAMIC)
5595 gen_op_set_cc_op(s->cc_op);
5597 s->cc_op = CC_OP_EFLAGS;
5599 case 0xd4: /* aam */
5602 val = ldub_code(s->pc++);
5604 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5607 s->cc_op = CC_OP_LOGICB;
5610 case 0xd5: /* aad */
5613 val = ldub_code(s->pc++);
5615 s->cc_op = CC_OP_LOGICB;
5617 /************************/
5619 case 0x90: /* nop */
5620 /* XXX: xchg + rex handling */
5621 /* XXX: correct lock test for all insn */
5622 if (prefixes & PREFIX_LOCK)
5624 if (prefixes & PREFIX_REPZ) {
5625 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5628 case 0x9b: /* fwait */
5629 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5630 (HF_MP_MASK | HF_TS_MASK)) {
5631 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5633 if (s->cc_op != CC_OP_DYNAMIC)
5634 gen_op_set_cc_op(s->cc_op);
5635 gen_jmp_im(pc_start - s->cs_base);
5636 tcg_gen_helper_0_0(helper_fwait);
5639 case 0xcc: /* int3 */
5640 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5642 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5644 case 0xcd: /* int N */
5645 val = ldub_code(s->pc++);
5646 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5648 if (s->vm86 && s->iopl != 3) {
5649 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5651 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5654 case 0xce: /* into */
5657 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5659 if (s->cc_op != CC_OP_DYNAMIC)
5660 gen_op_set_cc_op(s->cc_op);
5661 gen_jmp_im(pc_start - s->cs_base);
5662 gen_op_into(s->pc - pc_start);
5664 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5665 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5668 gen_debug(s, pc_start - s->cs_base);
5671 tb_flush(cpu_single_env);
5672 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5675 case 0xfa: /* cli */
5677 if (s->cpl <= s->iopl) {
5678 tcg_gen_helper_0_0(helper_cli);
5680 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5684 tcg_gen_helper_0_0(helper_cli);
5686 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5690 case 0xfb: /* sti */
5692 if (s->cpl <= s->iopl) {
5694 tcg_gen_helper_0_0(helper_sti);
5695 /* interruptions are enabled only the first insn after sti */
5696 /* If several instructions disable interrupts, only the
5698 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5699 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5700 /* give a chance to handle pending irqs */
5701 gen_jmp_im(s->pc - s->cs_base);
5704 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5710 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5714 case 0x62: /* bound */
5717 ot = dflag ? OT_LONG : OT_WORD;
5718 modrm = ldub_code(s->pc++);
5719 reg = (modrm >> 3) & 7;
5720 mod = (modrm >> 6) & 3;
5723 gen_op_mov_TN_reg(ot, 0, reg);
5724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5725 gen_jmp_im(pc_start - s->cs_base);
5726 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5728 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2);
5730 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2);
5732 case 0x1c8 ... 0x1cf: /* bswap reg */
5733 reg = (b & 7) | REX_B(s);
5734 #ifdef TARGET_X86_64
5736 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5737 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5738 gen_op_mov_reg_T0(OT_QUAD, reg);
5742 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5744 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5745 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5746 tcg_gen_bswap_i32(tmp0, tmp0);
5747 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5748 gen_op_mov_reg_T0(OT_LONG, reg);
5752 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5753 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5754 gen_op_mov_reg_T0(OT_LONG, reg);
5758 case 0xd6: /* salc */
5761 if (s->cc_op != CC_OP_DYNAMIC)
5762 gen_op_set_cc_op(s->cc_op);
5765 case 0xe0: /* loopnz */
5766 case 0xe1: /* loopz */
5767 if (s->cc_op != CC_OP_DYNAMIC)
5768 gen_op_set_cc_op(s->cc_op);
5770 case 0xe2: /* loop */
5771 case 0xe3: /* jecxz */
5775 tval = (int8_t)insn_get(s, OT_BYTE);
5776 next_eip = s->pc - s->cs_base;
5781 l1 = gen_new_label();
5782 l2 = gen_new_label();
5785 gen_op_jz_ecx[s->aflag](l1);
5787 gen_op_dec_ECX[s->aflag]();
5790 gen_op_loop[s->aflag][b](l1);
5793 gen_jmp_im(next_eip);
5794 gen_op_jmp_label(l2);
5801 case 0x130: /* wrmsr */
5802 case 0x132: /* rdmsr */
5804 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5808 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5809 tcg_gen_helper_0_0(helper_rdmsr);
5811 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5812 tcg_gen_helper_0_0(helper_wrmsr);
5818 case 0x131: /* rdtsc */
5819 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5821 gen_jmp_im(pc_start - s->cs_base);
5822 tcg_gen_helper_0_0(helper_rdtsc);
5824 case 0x133: /* rdpmc */
5825 gen_jmp_im(pc_start - s->cs_base);
5826 tcg_gen_helper_0_0(helper_rdpmc);
5828 case 0x134: /* sysenter */
5832 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5834 if (s->cc_op != CC_OP_DYNAMIC) {
5835 gen_op_set_cc_op(s->cc_op);
5836 s->cc_op = CC_OP_DYNAMIC;
5838 gen_jmp_im(pc_start - s->cs_base);
5839 tcg_gen_helper_0_0(helper_sysenter);
5843 case 0x135: /* sysexit */
5847 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5849 if (s->cc_op != CC_OP_DYNAMIC) {
5850 gen_op_set_cc_op(s->cc_op);
5851 s->cc_op = CC_OP_DYNAMIC;
5853 gen_jmp_im(pc_start - s->cs_base);
5854 tcg_gen_helper_0_0(helper_sysexit);
5858 #ifdef TARGET_X86_64
5859 case 0x105: /* syscall */
5860 /* XXX: is it usable in real mode ? */
5861 if (s->cc_op != CC_OP_DYNAMIC) {
5862 gen_op_set_cc_op(s->cc_op);
5863 s->cc_op = CC_OP_DYNAMIC;
5865 gen_jmp_im(pc_start - s->cs_base);
5866 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
5869 case 0x107: /* sysret */
5871 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5873 if (s->cc_op != CC_OP_DYNAMIC) {
5874 gen_op_set_cc_op(s->cc_op);
5875 s->cc_op = CC_OP_DYNAMIC;
5877 gen_jmp_im(pc_start - s->cs_base);
5878 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
5879 /* condition codes are modified only in long mode */
5881 s->cc_op = CC_OP_EFLAGS;
5886 case 0x1a2: /* cpuid */
5887 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5889 tcg_gen_helper_0_0(helper_cpuid);
5891 case 0xf4: /* hlt */
5893 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5895 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5897 if (s->cc_op != CC_OP_DYNAMIC)
5898 gen_op_set_cc_op(s->cc_op);
5899 gen_jmp_im(s->pc - s->cs_base);
5900 tcg_gen_helper_0_0(helper_hlt);
5905 modrm = ldub_code(s->pc++);
5906 mod = (modrm >> 6) & 3;
5907 op = (modrm >> 3) & 7;
5910 if (!s->pe || s->vm86)
5912 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5914 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5918 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5921 if (!s->pe || s->vm86)
5924 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5926 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5928 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5929 gen_jmp_im(pc_start - s->cs_base);
5930 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5931 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2);
5935 if (!s->pe || s->vm86)
5937 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5939 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5943 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5946 if (!s->pe || s->vm86)
5949 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5951 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5953 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5954 gen_jmp_im(pc_start - s->cs_base);
5955 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5956 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2);
5961 if (!s->pe || s->vm86)
5963 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5964 if (s->cc_op != CC_OP_DYNAMIC)
5965 gen_op_set_cc_op(s->cc_op);
5970 s->cc_op = CC_OP_EFLAGS;
5977 modrm = ldub_code(s->pc++);
5978 mod = (modrm >> 6) & 3;
5979 op = (modrm >> 3) & 7;
5985 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5987 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5988 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5989 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5990 gen_add_A0_im(s, 2);
5991 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5993 gen_op_andl_T0_im(0xffffff);
5994 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5999 case 0: /* monitor */
6000 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6003 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6005 gen_jmp_im(pc_start - s->cs_base);
6006 #ifdef TARGET_X86_64
6007 if (s->aflag == 2) {
6008 gen_op_movq_A0_reg(R_EBX);
6009 gen_op_addq_A0_AL();
6013 gen_op_movl_A0_reg(R_EBX);
6014 gen_op_addl_A0_AL();
6016 gen_op_andl_A0_ffff();
6018 gen_add_A0_ds_seg(s);
6019 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6022 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6025 if (s->cc_op != CC_OP_DYNAMIC) {
6026 gen_op_set_cc_op(s->cc_op);
6027 s->cc_op = CC_OP_DYNAMIC;
6029 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6031 gen_jmp_im(s->pc - s->cs_base);
6032 tcg_gen_helper_0_0(helper_mwait);
6039 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6041 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6042 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6043 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6044 gen_add_A0_im(s, 2);
6045 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6047 gen_op_andl_T0_im(0xffffff);
6048 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6056 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6058 if (s->cc_op != CC_OP_DYNAMIC)
6059 gen_op_set_cc_op(s->cc_op);
6060 gen_jmp_im(s->pc - s->cs_base);
6061 tcg_gen_helper_0_0(helper_vmrun);
6062 s->cc_op = CC_OP_EFLAGS;
6065 case 1: /* VMMCALL */
6066 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6068 /* FIXME: cause #UD if hflags & SVM */
6069 tcg_gen_helper_0_0(helper_vmmcall);
6071 case 2: /* VMLOAD */
6072 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6074 tcg_gen_helper_0_0(helper_vmload);
6076 case 3: /* VMSAVE */
6077 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6079 tcg_gen_helper_0_0(helper_vmsave);
6082 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6084 tcg_gen_helper_0_0(helper_stgi);
6087 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6089 tcg_gen_helper_0_0(helper_clgi);
6091 case 6: /* SKINIT */
6092 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6094 tcg_gen_helper_0_0(helper_skinit);
6096 case 7: /* INVLPGA */
6097 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6099 tcg_gen_helper_0_0(helper_invlpga);
6104 } else if (s->cpl != 0) {
6105 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6107 if (gen_svm_check_intercept(s, pc_start,
6108 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6110 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6111 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6112 gen_add_A0_im(s, 2);
6113 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6115 gen_op_andl_T0_im(0xffffff);
6117 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6118 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6120 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6121 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6126 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6128 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6129 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6133 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6135 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6137 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6138 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6139 gen_jmp_im(s->pc - s->cs_base);
6143 case 7: /* invlpg */
6145 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6148 #ifdef TARGET_X86_64
6149 if (CODE64(s) && rm == 0) {
6151 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6152 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6153 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6154 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6161 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6163 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6164 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6165 gen_jmp_im(s->pc - s->cs_base);
6174 case 0x108: /* invd */
6175 case 0x109: /* wbinvd */
6177 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6179 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6184 case 0x63: /* arpl or movslS (x86_64) */
6185 #ifdef TARGET_X86_64
6188 /* d_ot is the size of destination */
6189 d_ot = dflag + OT_WORD;
6191 modrm = ldub_code(s->pc++);
6192 reg = ((modrm >> 3) & 7) | rex_r;
6193 mod = (modrm >> 6) & 3;
6194 rm = (modrm & 7) | REX_B(s);
6197 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6199 if (d_ot == OT_QUAD)
6200 gen_op_movslq_T0_T0();
6201 gen_op_mov_reg_T0(d_ot, reg);
6203 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6204 if (d_ot == OT_QUAD) {
6205 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6207 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6209 gen_op_mov_reg_T0(d_ot, reg);
6214 if (!s->pe || s->vm86)
6216 ot = dflag ? OT_LONG : OT_WORD;
6217 modrm = ldub_code(s->pc++);
6218 reg = (modrm >> 3) & 7;
6219 mod = (modrm >> 6) & 3;
6222 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6223 gen_op_ld_T0_A0(ot + s->mem_index);
6225 gen_op_mov_TN_reg(ot, 0, rm);
6227 gen_op_mov_TN_reg(ot, 1, reg);
6228 if (s->cc_op != CC_OP_DYNAMIC)
6229 gen_op_set_cc_op(s->cc_op);
6231 s->cc_op = CC_OP_EFLAGS;
6233 gen_op_st_T0_A0(ot + s->mem_index);
6235 gen_op_mov_reg_T0(ot, rm);
6237 gen_op_arpl_update();
6240 case 0x102: /* lar */
6241 case 0x103: /* lsl */
6242 if (!s->pe || s->vm86)
6244 ot = dflag ? OT_LONG : OT_WORD;
6245 modrm = ldub_code(s->pc++);
6246 reg = ((modrm >> 3) & 7) | rex_r;
6247 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6248 gen_op_mov_TN_reg(ot, 1, reg);
6249 if (s->cc_op != CC_OP_DYNAMIC)
6250 gen_op_set_cc_op(s->cc_op);
6255 s->cc_op = CC_OP_EFLAGS;
6256 gen_op_mov_reg_T1(ot, reg);
6259 modrm = ldub_code(s->pc++);
6260 mod = (modrm >> 6) & 3;
6261 op = (modrm >> 3) & 7;
6263 case 0: /* prefetchnta */
6264 case 1: /* prefetchnt0 */
6265 case 2: /* prefetchnt0 */
6266 case 3: /* prefetchnt0 */
6269 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6270 /* nothing more to do */
6272 default: /* nop (multi byte) */
6273 gen_nop_modrm(s, modrm);
6277 case 0x119 ... 0x11f: /* nop (multi byte) */
6278 modrm = ldub_code(s->pc++);
6279 gen_nop_modrm(s, modrm);
6281 case 0x120: /* mov reg, crN */
6282 case 0x122: /* mov crN, reg */
6284 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6286 modrm = ldub_code(s->pc++);
6287 if ((modrm & 0xc0) != 0xc0)
6289 rm = (modrm & 7) | REX_B(s);
6290 reg = ((modrm >> 3) & 7) | rex_r;
6302 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6303 gen_op_mov_TN_reg(ot, 0, rm);
6304 tcg_gen_helper_0_2(helper_movl_crN_T0,
6305 tcg_const_i32(reg), cpu_T[0]);
6306 gen_jmp_im(s->pc - s->cs_base);
6309 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6310 #if !defined(CONFIG_USER_ONLY)
6312 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6315 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6316 gen_op_mov_reg_T0(ot, rm);
6324 case 0x121: /* mov reg, drN */
6325 case 0x123: /* mov drN, reg */
6327 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6329 modrm = ldub_code(s->pc++);
6330 if ((modrm & 0xc0) != 0xc0)
6332 rm = (modrm & 7) | REX_B(s);
6333 reg = ((modrm >> 3) & 7) | rex_r;
6338 /* XXX: do it dynamically with CR4.DE bit */
6339 if (reg == 4 || reg == 5 || reg >= 8)
6342 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6343 gen_op_mov_TN_reg(ot, 0, rm);
6344 tcg_gen_helper_0_2(helper_movl_drN_T0,
6345 tcg_const_i32(reg), cpu_T[0]);
6346 gen_jmp_im(s->pc - s->cs_base);
6349 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6350 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6351 gen_op_mov_reg_T0(ot, rm);
6355 case 0x106: /* clts */
6357 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6359 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6360 tcg_gen_helper_0_0(helper_clts);
6361 /* abort block because static cpu state changed */
6362 gen_jmp_im(s->pc - s->cs_base);
6366 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6367 case 0x1c3: /* MOVNTI reg, mem */
6368 if (!(s->cpuid_features & CPUID_SSE2))
6370 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6371 modrm = ldub_code(s->pc++);
6372 mod = (modrm >> 6) & 3;
6375 reg = ((modrm >> 3) & 7) | rex_r;
6376 /* generate a generic store */
6377 gen_ldst_modrm(s, modrm, ot, reg, 1);
6380 modrm = ldub_code(s->pc++);
6381 mod = (modrm >> 6) & 3;
6382 op = (modrm >> 3) & 7;
6384 case 0: /* fxsave */
6385 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6386 (s->flags & HF_EM_MASK))
6388 if (s->flags & HF_TS_MASK) {
6389 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6392 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6393 if (s->cc_op != CC_OP_DYNAMIC)
6394 gen_op_set_cc_op(s->cc_op);
6395 gen_jmp_im(pc_start - s->cs_base);
6396 tcg_gen_helper_0_2(helper_fxsave,
6397 cpu_A0, tcg_const_i32((s->dflag == 2)));
6399 case 1: /* fxrstor */
6400 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6401 (s->flags & HF_EM_MASK))
6403 if (s->flags & HF_TS_MASK) {
6404 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6407 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6408 if (s->cc_op != CC_OP_DYNAMIC)
6409 gen_op_set_cc_op(s->cc_op);
6410 gen_jmp_im(pc_start - s->cs_base);
6411 tcg_gen_helper_0_2(helper_fxrstor,
6412 cpu_A0, tcg_const_i32((s->dflag == 2)));
6414 case 2: /* ldmxcsr */
6415 case 3: /* stmxcsr */
6416 if (s->flags & HF_TS_MASK) {
6417 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6420 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6423 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6425 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6426 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6428 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6429 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6432 case 5: /* lfence */
6433 case 6: /* mfence */
6434 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6437 case 7: /* sfence / clflush */
6438 if ((modrm & 0xc7) == 0xc0) {
6440 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6441 if (!(s->cpuid_features & CPUID_SSE))
6445 if (!(s->cpuid_features & CPUID_CLFLUSH))
6447 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6454 case 0x10d: /* 3DNow! prefetch(w) */
6455 modrm = ldub_code(s->pc++);
6456 mod = (modrm >> 6) & 3;
6459 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6460 /* ignore for now */
6462 case 0x1aa: /* rsm */
6463 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6465 if (!(s->flags & HF_SMM_MASK))
6467 if (s->cc_op != CC_OP_DYNAMIC) {
6468 gen_op_set_cc_op(s->cc_op);
6469 s->cc_op = CC_OP_DYNAMIC;
6471 gen_jmp_im(s->pc - s->cs_base);
6472 tcg_gen_helper_0_0(helper_rsm);
6475 case 0x10e ... 0x10f:
6476 /* 3DNow! instructions, ignore prefixes */
6477 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6478 case 0x110 ... 0x117:
6479 case 0x128 ... 0x12f:
6480 case 0x150 ... 0x177:
6481 case 0x17c ... 0x17f:
6483 case 0x1c4 ... 0x1c6:
6484 case 0x1d0 ... 0x1fe:
6485 gen_sse(s, b, pc_start, rex_r);
6490 /* lock generation */
6491 if (s->prefix & PREFIX_LOCK)
6492 tcg_gen_helper_0_0(helper_unlock);
6495 if (s->prefix & PREFIX_LOCK)
6496 tcg_gen_helper_0_0(helper_unlock);
6497 /* XXX: ensure that no lock was generated */
6498 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6502 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6503 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6505 /* flags read by an operation */
/*
 * opc_read_flags[]: for each TCG micro-op index, the bitmask of EFLAGS
 * condition bits (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C) that the op READS.
 * Consumed by optimize_flags() below, which walks the generated op
 * stream backwards computing flag liveness.
 * NOTE(review): this listing elides some source lines (gaps in the
 * embedded line numbers), so additional entries may exist in the
 * original file.
 */
6506 static uint16_t opc_read_flags[NB_OPS] = {
/* BCD-adjust ops consume the auxiliary (and for das/daa the carry) flag */
6507 [INDEX_op_aas] = CC_A,
6508 [INDEX_op_aaa] = CC_A,
6509 [INDEX_op_das] = CC_A | CC_C,
6510 [INDEX_op_daa] = CC_A | CC_C,
6512 /* subtle: due to the incl/decl implementation, C is used */
6513 [INDEX_op_update_inc_cc] = CC_C,
6515 [INDEX_op_into] = CC_O,
/* conditional jumps after a sub: each reads only the bits its
   condition actually tests (b=carry, z=zero, s=sign, l/le add O) */
6517 [INDEX_op_jb_subb] = CC_C,
6518 [INDEX_op_jb_subw] = CC_C,
6519 [INDEX_op_jb_subl] = CC_C,
6521 [INDEX_op_jz_subb] = CC_Z,
6522 [INDEX_op_jz_subw] = CC_Z,
6523 [INDEX_op_jz_subl] = CC_Z,
6525 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6526 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6527 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6529 [INDEX_op_js_subb] = CC_S,
6530 [INDEX_op_js_subw] = CC_S,
6531 [INDEX_op_js_subl] = CC_S,
6533 [INDEX_op_jl_subb] = CC_O | CC_S,
6534 [INDEX_op_jl_subw] = CC_O | CC_S,
6535 [INDEX_op_jl_subl] = CC_O | CC_S,
6537 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6538 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6539 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
/* loopnz/loopz test ZF in addition to (E)CX */
6541 [INDEX_op_loopnzw] = CC_Z,
6542 [INDEX_op_loopnzl] = CC_Z,
6543 [INDEX_op_loopzw] = CC_Z,
6544 [INDEX_op_loopzl] = CC_Z,
/* setcc from the cached cc state */
6546 [INDEX_op_seto_T0_cc] = CC_O,
6547 [INDEX_op_setb_T0_cc] = CC_C,
6548 [INDEX_op_setz_T0_cc] = CC_Z,
6549 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6550 [INDEX_op_sets_T0_cc] = CC_S,
6551 [INDEX_op_setp_T0_cc] = CC_P,
6552 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6553 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
/* setcc specialized after a sub */
6555 [INDEX_op_setb_T0_subb] = CC_C,
6556 [INDEX_op_setb_T0_subw] = CC_C,
6557 [INDEX_op_setb_T0_subl] = CC_C,
6559 [INDEX_op_setz_T0_subb] = CC_Z,
6560 [INDEX_op_setz_T0_subw] = CC_Z,
6561 [INDEX_op_setz_T0_subl] = CC_Z,
6563 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6564 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6565 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6567 [INDEX_op_sets_T0_subb] = CC_S,
6568 [INDEX_op_sets_T0_subw] = CC_S,
6569 [INDEX_op_sets_T0_subl] = CC_S,
6571 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6572 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6573 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6575 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6576 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6577 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
/* pushf/lahf-style reads need the whole set */
6579 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6580 [INDEX_op_cmc] = CC_C,
6581 [INDEX_op_salc] = CC_C,
6583 /* needed for correct flag optimisation before string ops */
6584 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6585 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6586 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6587 [INDEX_op_jz_ecxl] = CC_OSZAPC,
/* 64-bit (quad) variants of the entries above */
6589 #ifdef TARGET_X86_64
6590 [INDEX_op_jb_subq] = CC_C,
6591 [INDEX_op_jz_subq] = CC_Z,
6592 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6593 [INDEX_op_js_subq] = CC_S,
6594 [INDEX_op_jl_subq] = CC_O | CC_S,
6595 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6597 [INDEX_op_loopnzq] = CC_Z,
6598 [INDEX_op_loopzq] = CC_Z,
6600 [INDEX_op_setb_T0_subq] = CC_C,
6601 [INDEX_op_setz_T0_subq] = CC_Z,
6602 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6603 [INDEX_op_sets_T0_subq] = CC_S,
6604 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6605 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6607 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6608 [INDEX_op_jz_ecxq] = CC_OSZAPC,
/*
 * DEF_READF instantiates the read-flag entries for one memory-access
 * SUFFIX variant of each op.  adc/sbb and rcl/rcr consume the incoming
 * carry flag.
 */
6611 #define DEF_READF(SUFFIX)\
6612 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6613 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6614 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6615 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6616 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6617 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6618 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6619 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6621 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6622 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6623 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6624 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6625 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6626 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6627 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6628 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
/* kernel/user softmmu variants follow (lines elided in this listing) */
6632 #ifndef CONFIG_USER_ONLY
6638 /* flags written by an operation */
/*
 * opc_write_flags[]: for each TCG micro-op index, the bitmask of EFLAGS
 * condition bits the op WRITES (i.e. which bits it kills for liveness
 * purposes).  Paired with opc_read_flags[] and used by optimize_flags().
 * NOTE(review): this listing elides some source lines (gaps in the
 * embedded line numbers), so additional entries may exist.
 */
6639 static uint16_t opc_write_flags[NB_OPS] = {
6640 [INDEX_op_update2_cc] = CC_OSZAPC,
6641 [INDEX_op_update1_cc] = CC_OSZAPC,
6642 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6643 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6644 /* subtle: due to the incl/decl implementation, C is used */
6645 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6646 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
/* multiplies clobber the full flag set */
6648 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6649 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6650 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6651 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6652 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6653 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6654 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6655 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6656 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6657 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6658 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
/*
 * NOTE(review): four duplicate designated initializers for the same
 * index INDEX_op_com_dummy.  All assign the same value so the result is
 * unaffected, but the repeats look like placeholders for several
 * distinct ops folded into one dummy index -- worth confirming upstream.
 */
6661 [INDEX_op_com_dummy] = CC_OSZAPC,
6662 [INDEX_op_com_dummy] = CC_OSZAPC,
6663 [INDEX_op_com_dummy] = CC_OSZAPC,
6664 [INDEX_op_com_dummy] = CC_OSZAPC,
/* BCD adjusts rewrite the whole flag set */
6667 [INDEX_op_aam] = CC_OSZAPC,
6668 [INDEX_op_aad] = CC_OSZAPC,
6669 [INDEX_op_aas] = CC_OSZAPC,
6670 [INDEX_op_aaa] = CC_OSZAPC,
6671 [INDEX_op_das] = CC_OSZAPC,
6672 [INDEX_op_daa] = CC_OSZAPC,
/* popf/sahf-style flag loads; the byte form cannot touch OF */
6674 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6675 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6676 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6677 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6678 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6679 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6680 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6681 [INDEX_op_clc] = CC_C,
6682 [INDEX_op_stc] = CC_C,
6683 [INDEX_op_cmc] = CC_C,
/* bit test ops */
6685 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6686 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6687 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6688 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6689 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6690 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6691 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6692 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6693 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6694 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6695 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6696 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
/* bit scan ops */
6698 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6699 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6700 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6701 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6702 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6703 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6705 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6706 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6707 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6708 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
/* ops that only produce ZF (segment checks) or ZF/PF/CF (fcomi) */
6710 [INDEX_op_cmpxchg8b] = CC_Z,
6711 [INDEX_op_lar] = CC_Z,
6712 [INDEX_op_lsl] = CC_Z,
6713 [INDEX_op_verr] = CC_Z,
6714 [INDEX_op_verw] = CC_Z,
/* NOTE(review): duplicate initializer for INDEX_op_fcomi_dummy (same
   value twice) -- same placeholder pattern as com_dummy above. */
6715 [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6716 [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
/*
 * DEF_WRITEF instantiates the write-flag entries for one memory-access
 * SUFFIX variant of each op.  Rotates only define O and C; shifts and
 * the arithmetic/cmpxchg ops clobber the full set.
 */
6718 #define DEF_WRITEF(SUFFIX)\
6719 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6720 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6721 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6722 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6723 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6724 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6725 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6726 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6728 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6729 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6730 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6731 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6732 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6733 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6734 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6735 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6737 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6738 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6739 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6740 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6741 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6742 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6743 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6744 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6746 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6747 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6748 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6749 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6751 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6752 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6753 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6754 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6756 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6757 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6758 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6759 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6761 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6762 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6763 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6764 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6765 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6766 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6768 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6769 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6770 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6771 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6772 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6773 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6775 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6776 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6777 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6778 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
/* kernel/user softmmu variants follow (lines elided in this listing) */
6783 #ifndef CONFIG_USER_ONLY
6789 /* simpler form of an operation if no flags need to be generated */
/*
 * opc_simpler[]: maps a flag-producing micro-op to an equivalent op
 * that skips the flag computation (or INDEX_op_nop when the op does
 * nothing but update flags).  optimize_flags() substitutes these when
 * none of the flags an op writes are live.  Entries left at 0 are
 * filled with the identity mapping by optimize_flags_init().
 */
6790 static uint16_t opc_simpler[NB_OPS] = {
6791 [INDEX_op_update2_cc] = INDEX_op_nop,
6792 [INDEX_op_update1_cc] = INDEX_op_nop,
6793 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6795 /* broken: CC_OP logic must be rewritten */
6796 [INDEX_op_update_inc_cc] = INDEX_op_nop,
/* shifts: flagged form -> flagless form */
6799 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6800 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6801 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6802 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6804 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6805 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6806 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6807 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6809 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6810 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6811 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6812 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* same mapping for the rotate ops, per memory-access SUFFIX variant */
6814 #define DEF_SIMPLER(SUFFIX)\
6815 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6816 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6817 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6818 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6820 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6821 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6822 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6823 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
/* softmmu kernel-mode variant (other variants elided in this listing) */
6827 #ifndef CONFIG_USER_ONLY
6828 DEF_SIMPLER(_kernel)
/*
 * tcg_macro_func: TCG macro-expansion callback, registered with
 * tcg_set_macro_func() in optimize_flags_init().
 * NOTE(review): the body is mostly elided in this listing; the one
 * surviving statement expands a macro op into a call to
 * helper_divl_EAX_T0 with T0 as the argument.  Presumably macro_id is
 * dispatched on in the elided code -- confirm against the full source.
 */
6833 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6838 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
/*
 * optimize_flags_init: one-time translator initialisation.
 *  - fills the remaining opc_simpler[] slots with the identity mapping
 *    (the elided line inside the loop presumably assigns opc_simpler[i]
 *    = i -- confirm against the full source);
 *  - registers tcg_macro_func as the TCG macro expander;
 *  - allocates the fixed TCG globals: env pointer, and T0/T1/A0 either
 *    as CPUState memory slots (when the guest word is wider than the
 *    host word) or as dedicated host registers AREG1..AREG3;
 *  - on 32-bit i386 hosts, maps the 64-bit tmp1 onto the AREG1/AREG2
 *    register pair.
 */
6844 void optimize_flags_init(void)
6847 /* put default values in arrays */
6848 for(i = 0; i < NB_OPS; i++) {
6849 if (opc_simpler[i] == 0)
6853 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6855 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6856 #if TARGET_LONG_BITS > HOST_LONG_BITS
/* guest word wider than host word: keep T0/T1/A0 in CPUState memory */
6857 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6858 TCG_AREG0, offsetof(CPUState, t0), "T0");
6859 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6860 TCG_AREG0, offsetof(CPUState, t1), "T1");
6861 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6862 TCG_AREG0, offsetof(CPUState, t2), "A0");
/* otherwise pin them to fixed host registers (#else elided in listing) */
6864 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6865 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6866 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6868 #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6869 /* XXX: must be suppressed once there are less fixed registers */
6870 cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6874 /* CPU flags computation optimization: we move backward thru the
6875 generated code to see which flags are needed. The operation is
6876 modified if suitable */
/*
 * optimize_flags: backward liveness pass over the generated micro-op
 * buffer.  Starting from the end of the block with all EFLAGS bits
 * assumed live, walk backwards; whenever an op writes only dead flag
 * bits, replace it with its flagless equivalent from opc_simpler[].
 * NOTE(review): the statement that fetches 'op' (presumably
 * op = *--opc_ptr;) is elided in this listing, as is the declaration
 * of opc_ptr -- confirm against the full source.
 */
6877 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6880 int live_flags, write_flags, op;
6882 opc_ptr = opc_buf + opc_buf_len;
6883 /* live_flags contains the flags needed by the next instructions
6884 in the code. At the end of the block, we consider that all the
6886 live_flags = CC_OSZAPC;
6887 while (opc_ptr > opc_buf) {
6889 /* if none of the flags written by the instruction is used,
6890 then we can try to find a simpler instruction */
6891 write_flags = opc_write_flags[op];
6892 if ((live_flags & write_flags) == 0) {
6893 *opc_ptr = opc_simpler[op];
6895 /* compute the live flags before the instruction */
/* flags this op writes are dead before it; flags it reads become live */
6896 live_flags &= ~write_flags;
6897 live_flags |= opc_read_flags[op];
6901 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6902 basic block 'tb'. If search_pc is TRUE, also generate PC
6903 information for each intermediate instruction. */
/*
 * gen_intermediate_code_internal: common worker for
 * gen_intermediate_code() and gen_intermediate_code_pc().
 * Decodes guest x86 instructions for translation block 'tb', emitting
 * TCG micro-ops into gen_opc_buf, until the decoder signals a jump,
 * the op buffer / page-size limit is hit, a breakpoint matches, or a
 * single-step condition forces a stop.  When search_pc is set (the
 * elided third parameter), it also records per-op pc / cc_op / start
 * markers so a host PC can later be mapped back to a guest EIP.
 * Finishes with the flag-liveness optimisation pass and optional
 * logging.  Many lines are elided in this listing (see the gaps in the
 * embedded numbering), including the main loop header and most closing
 * braces.
 */
6904 static inline int gen_intermediate_code_internal(CPUState *env,
6905 TranslationBlock *tb,
6908 DisasContext dc1, *dc = &dc1;
6909 target_ulong pc_ptr;
6910 uint16_t *gen_opc_end;
6913 target_ulong pc_start;
6914 target_ulong cs_base;
6916 /* generate intermediate code */
6918 cs_base = tb->cs_base;
6920 cflags = tb->cflags;
/* unpack the TB's static CPU state flags into the DisasContext */
6922 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6923 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6924 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6925 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6927 dc->vm86 = (flags >> VM_SHIFT) & 1;
6928 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6929 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6930 dc->tf = (flags >> TF_SHIFT) & 1;
6931 dc->singlestep_enabled = env->singlestep_enabled;
6932 dc->cc_op = CC_OP_DYNAMIC;
6933 dc->cs_base = cs_base;
6935 dc->popl_esp_hack = 0;
6936 /* select memory access functions */
/* softmmu: mem_index picks kernel (2*4) vs user (1*4) access routines */
6938 if (flags & HF_SOFTMMU_MASK) {
6940 dc->mem_index = 2 * 4;
6942 dc->mem_index = 1 * 4;
6944 dc->cpuid_features = env->cpuid_features;
6945 dc->cpuid_ext_features = env->cpuid_ext_features;
6946 dc->cpuid_ext2_features = env->cpuid_ext2_features;
6947 #ifdef TARGET_X86_64
6948 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6949 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* direct-jump chaining allowed only when no stepping/irq-inhibit */
6952 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6953 (flags & HF_INHIBIT_IRQ_MASK)
6954 #ifndef CONFIG_SOFTMMU
6955 || (flags & HF_SOFTMMU_MASK)
6959 /* check addseg logic */
6960 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6961 printf("ERROR addseg\n");
/* allocate the per-TB TCG temporaries */
6964 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6965 #if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6966 cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
6968 cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
6969 cpu_tmp3 = tcg_temp_new(TCG_TYPE_I32);
6970 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6971 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6973 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6975 dc->is_jmp = DISAS_NEXT;
/* translation loop body (loop header elided in this listing) */
6980 if (env->nb_breakpoints > 0) {
6981 for(j = 0; j < env->nb_breakpoints; j++) {
6982 if (env->breakpoints[j] == pc_ptr) {
6983 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc bookkeeping: record pc/cc_op for each emitted op slot */
6989 j = gen_opc_ptr - gen_opc_buf;
6993 gen_opc_instr_start[lj++] = 0;
6995 gen_opc_pc[lj] = pc_ptr;
6996 gen_opc_cc_op[lj] = dc->cc_op;
6997 gen_opc_instr_start[lj] = 1;
6999 pc_ptr = disas_insn(dc, pc_ptr);
7000 /* stop translation if indicated */
7003 /* if single step mode, we generate only one instruction and
7004 generate an exception */
7005 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7006 the flag and abort the translation to give the irqs a
7007 change to be happen */
7008 if (dc->tf || dc->singlestep_enabled ||
7009 (flags & HF_INHIBIT_IRQ_MASK) ||
7010 (cflags & CF_SINGLE_INSN)) {
7011 gen_jmp_im(pc_ptr - dc->cs_base);
7015 /* if too long translation, stop generation too */
7016 if (gen_opc_ptr >= gen_opc_end ||
7017 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7018 gen_jmp_im(pc_ptr - dc->cs_base);
7023 *gen_opc_ptr = INDEX_op_end;
7024 /* we don't forget to fill the last values */
7026 j = gen_opc_ptr - gen_opc_buf;
7029 gen_opc_instr_start[lj++] = 0;
/* optional debug logging of CPU state, guest asm, and pre-opt ops */
7033 if (loglevel & CPU_LOG_TB_CPU) {
7034 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7036 if (loglevel & CPU_LOG_TB_IN_ASM) {
7038 fprintf(logfile, "----------------\n");
7039 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7040 #ifdef TARGET_X86_64
7045 disas_flags = !dc->code32;
7046 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7047 fprintf(logfile, "\n");
7048 if (loglevel & CPU_LOG_TB_OP_OPT) {
7049 fprintf(logfile, "OP before opt:\n");
7050 tcg_dump_ops(&tcg_ctx, logfile);
7051 fprintf(logfile, "\n");
7056 /* optimize flag computations */
7057 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
7060 tb->size = pc_ptr - pc_start;
/* Public entry point: translate TB 'tb' without recording PC-search
   information (search_pc = 0). */
7064 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7066 return gen_intermediate_code_internal(env, tb, 0);
/* Public entry point: translate TB 'tb' and record per-op PC mapping
   (search_pc = 1) for later guest-EIP restoration in gen_pc_load(). */
7069 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7071 return gen_intermediate_code_internal(env, tb, 1);
7074 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7075 unsigned long searched_pc, int pc_pos, void *puc)
7079 if (loglevel & CPU_LOG_TB_OP) {
7081 fprintf(logfile, "RESTORE:\n");
7082 for(i = 0;i <= pc_pos; i++) {
7083 if (gen_opc_instr_start[i]) {
7084 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7087 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7088 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7089 (uint32_t)tb->cs_base);
7092 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7093 cc_op = gen_opc_cc_op[pc_pos];
7094 if (cc_op != CC_OP_DYNAMIC)