4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Dyngen opcode/parameter output stream pointers and per-insn prefix flags.
   NOTE(review): this listing is elided — the #ifdef TARGET_X86_64 / #else
   guards that separate the two definitions of X86_64_ONLY/X86_64_DEF below
   are among the missing lines; the duplication is conditional, not a bug. */
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
/* Prefix bits accumulated while decoding a single x86 instruction. */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* x86-64 build: keep 64-bit-only table entries and REX decoding live. */
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
/* 32-bit-only build: 64-bit table entries collapse to NULL / nothing. */
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
/* NOTE(review): presumably flags REX-prefixed byte-register selection
   (SPL/BPL/SIL/DIL vs AH/CH/DH/BH) — the defining context is elided. */
61 static int x86_64_hregs;
64 #ifdef USE_DIRECT_JUMP
/* Pass a TranslationBlock pointer as a dyngen immediate parameter. */
67 #define TBPARAM(x) (long)(x)
/* Decoder state threaded through every gen_* helper below: per-instruction
   fields (pc, prefixes, override) plus per-translation-block CPU mode flags.
   NOTE(review): several field lines and the closing "} DisasContext;" are
   elided from this listing. */
70 typedef struct DisasContext {
71 /* current insn context */
72 int override; /* -1 if no override */
75 target_ulong pc; /* pc = eip + cs_base */
76 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base; /* base of CS segment */
80 int pe; /* protected mode */
81 int code32; /* 32 bit code segment */
83 int lma; /* long mode active */
84 int code64; /* 64 bit code segment */
87 int ss32; /* 32 bit stack segment */
88 int cc_op; /* current CC operation */
89 int addseg; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st; /* currently unused */
91 int vm86; /* vm86 mode */
94 int tf; /* TF cpu flag */
95 int singlestep_enabled; /* "hardware" single step enabled */
96 int jmp_opt; /* use direct block chaining for direct jumps */
97 int mem_index; /* select memory access functions */
98 int flags; /* all execution flags */
99 struct TranslationBlock *tb;
100 int popl_esp_hack; /* for correct popl with esp base handling */
101 int rip_offset; /* only used in x86_64, but left for simplicity */
/* Forward declarations: end-of-block / jump emission used throughout. */
105 static void gen_eob(DisasContext *s);
106 static void gen_jmp(DisasContext *s, target_ulong eip);
107 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109 /* i386 arith/logic operations */
129 OP_SHL1, /* undocumented */
134 #define DEF(s, n, copy_size) INDEX_op_ ## s,
151 /* I386 int registers */
152 OR_EAX, /* MUST be even numbered */
161 OR_TMP0 = 16, /* temporary operand register */
163 OR_A0, /* temporary register used when doing address evaluation */
/* x86-64: 4 operand sizes (byte/word/long/quad), 16 integer registers. */
168 #define NB_OP_SIZES 4
/* Expand to one table entry per integer register, in encoding order. */
170 #define DEF_REGS(prefix, suffix) \
171 prefix ## EAX ## suffix,\
172 prefix ## ECX ## suffix,\
173 prefix ## EDX ## suffix,\
174 prefix ## EBX ## suffix,\
175 prefix ## ESP ## suffix,\
176 prefix ## EBP ## suffix,\
177 prefix ## ESI ## suffix,\
178 prefix ## EDI ## suffix,\
179 prefix ## R8 ## suffix,\
180 prefix ## R9 ## suffix,\
181 prefix ## R10 ## suffix,\
182 prefix ## R11 ## suffix,\
183 prefix ## R12 ## suffix,\
184 prefix ## R13 ## suffix,\
185 prefix ## R14 ## suffix,\
186 prefix ## R15 ## suffix,
/* Byte-register wrappers for regs 4..7: with a REX prefix these encode
   SPL/BPL/SIL/DIL, without it AH/CH/DH/BH.  NOTE(review): the wrapper
   bodies are elided here; presumably each tests x86_64_hregs to choose
   between the prefixb (low-byte) and prefixh (high-byte) op — confirm. */
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
193 prefixb ## ESP ## suffix (); \
195 prefixh ## EAX ## suffix (); \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
201 prefixb ## EBP ## suffix (); \
203 prefixh ## ECX ## suffix (); \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
209 prefixb ## ESI ## suffix (); \
211 prefixh ## EDX ## suffix (); \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
217 prefixb ## EDI ## suffix (); \
219 prefixh ## EBX ## suffix (); \
222 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
227 #else /* !TARGET_X86_64 */
/* 32-bit build: 3 operand sizes, 8 integer registers. */
229 #define NB_OP_SIZES 3
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,
241 #endif /* !TARGET_X86_64 */
/* Register move dispatch tables, indexed [operand size][register index].
   The generated micro-op moves between the named CPU register and the
   translator temporaries T0/T1/A0.  Byte-size rows use the _wrapper
   variants for regs 4..7 (high-byte vs REX low-byte selection). */
243 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
250 gen_op_movb_ESP_T0_wrapper,
251 gen_op_movb_EBP_T0_wrapper,
252 gen_op_movb_ESI_T0_wrapper,
253 gen_op_movb_EDI_T0_wrapper,
270 DEF_REGS(gen_op_movw_, _T0)
273 DEF_REGS(gen_op_movl_, _T0)
277 DEF_REGS(gen_op_movq_, _T0)
282 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
289 gen_op_movb_ESP_T1_wrapper,
290 gen_op_movb_EBP_T1_wrapper,
291 gen_op_movb_ESI_T1_wrapper,
292 gen_op_movb_EDI_T1_wrapper,
309 DEF_REGS(gen_op_movw_, _T1)
312 DEF_REGS(gen_op_movl_, _T1)
316 DEF_REGS(gen_op_movq_, _T1)
/* No byte row for A0 (addresses are >= 16 bits), hence NB_OP_SIZES - 1. */
321 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
323 DEF_REGS(gen_op_movw_, _A0)
326 DEF_REGS(gen_op_movl_, _A0)
330 DEF_REGS(gen_op_movq_, _A0)
/* Load a register into T0 or T1: indexed [size][0 for T0, 1 for T1][reg]. */
335 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
344 gen_op_movl_T0_ESP_wrapper,
345 gen_op_movl_T0_EBP_wrapper,
346 gen_op_movl_T0_ESI_wrapper,
347 gen_op_movl_T0_EDI_wrapper,
369 gen_op_movl_T1_ESP_wrapper,
370 gen_op_movl_T1_EBP_wrapper,
371 gen_op_movl_T1_ESI_wrapper,
372 gen_op_movl_T1_EDI_wrapper,
391 DEF_REGS(gen_op_movl_T0_, )
394 DEF_REGS(gen_op_movl_T1_, )
399 DEF_REGS(gen_op_movl_T0_, )
402 DEF_REGS(gen_op_movl_T1_, )
408 DEF_REGS(gen_op_movl_T0_, )
411 DEF_REGS(gen_op_movl_T1_, )
417 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418 DEF_REGS(gen_op_movl_A0_, )
/* A0 += reg << scale, indexed [scale 0..3][reg] (SIB-style addressing). */
421 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
423 DEF_REGS(gen_op_addl_A0_, )
426 DEF_REGS(gen_op_addl_A0_, _s1)
429 DEF_REGS(gen_op_addl_A0_, _s2)
432 DEF_REGS(gen_op_addl_A0_, _s3)
437 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438 DEF_REGS(gen_op_movq_A0_, )
441 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
443 DEF_REGS(gen_op_addq_A0_, )
446 DEF_REGS(gen_op_addq_A0_, _s1)
449 DEF_REGS(gen_op_addq_A0_, _s2)
452 DEF_REGS(gen_op_addq_A0_, _s3)
/* CMOV: no byte form exists on x86, hence NB_OP_SIZES - 1 rows. */
457 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
459 DEF_REGS(gen_op_cmovw_, _T1_T0)
462 DEF_REGS(gen_op_cmovl_, _T1_T0)
466 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* ALU dispatch tables.  Register-form tables are indexed [operand size]
   (and op where applicable); memory-form tables are indexed
   [ot + s->mem_index] to pick the raw/kernel/user access variant. */
471 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* ADC/SBB need the incoming carry, so they get their own table. */
482 #define DEF_ARITHC(SUFFIX)\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
500 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
504 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
506 #ifndef CONFIG_USER_ONLY
512 static const int cc_op_arithb[8] = {
/* CMPXCHG writes EAX as well as the destination, hence the _EAX ops. */
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
533 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
535 #ifndef CONFIG_USER_ONLY
/* Shift/rotate group in ModRM /reg encoding order: rol, ror, rcl, rcr,
   shl, shr, shl (entry 6: SAL is encoded as an alias of SHL), sar. */
541 #define DEF_SHIFT(SUFFIX)\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
583 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
587 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
589 #ifndef CONFIG_USER_ONLY
/* SHLD/SHRD: count comes either from an immediate ("im") or ECX. */
595 #define DEF_SHIFTD(SUFFIX, op)\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
609 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
613 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
617 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
621 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
623 #ifndef CONFIG_USER_ONLY
624 DEF_SHIFTD(_kernel, im)
625 DEF_SHIFTD(_user, im)
629 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
630 DEF_SHIFTD(_raw, ECX)
631 #ifndef CONFIG_USER_ONLY
632 DEF_SHIFTD(_kernel, ECX)
633 DEF_SHIFTD(_user, ECX)
/* Bit-test group (BT/BTS/BTR/BTC), sizes w/l(/q) only — no byte form. */
637 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
640 gen_op_btsw_T0_T1_cc,
641 gen_op_btrw_T0_T1_cc,
642 gen_op_btcw_T0_T1_cc,
646 gen_op_btsl_T0_T1_cc,
647 gen_op_btrl_T0_T1_cc,
648 gen_op_btcl_T0_T1_cc,
653 gen_op_btsq_T0_T1_cc,
654 gen_op_btrq_T0_T1_cc,
655 gen_op_btcq_T0_T1_cc,
/* A0 += byte offset of the bit index in T1 (for memory bit-test ops). */
660 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
661 gen_op_add_bitw_A0_T1,
662 gen_op_add_bitl_A0_T1,
663 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* BSF/BSR (bit scan forward/reverse). */
666 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* Memory load/store dispatch tables.  Each has 4 size slots (b/w/l/q)
   times 3 access variants in order raw, kernel, user; callers index with
   ot + s->mem_index so mem_index selects the variant block. */
683 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
684 gen_op_ldsb_raw_T0_A0,
685 gen_op_ldsw_raw_T0_A0,
686 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
688 #ifndef CONFIG_USER_ONLY
689 gen_op_ldsb_kernel_T0_A0,
690 gen_op_ldsw_kernel_T0_A0,
691 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
694 gen_op_ldsb_user_T0_A0,
695 gen_op_ldsw_user_T0_A0,
696 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
/* Zero-extending loads into T0. */
701 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
702 gen_op_ldub_raw_T0_A0,
703 gen_op_lduw_raw_T0_A0,
707 #ifndef CONFIG_USER_ONLY
708 gen_op_ldub_kernel_T0_A0,
709 gen_op_lduw_kernel_T0_A0,
713 gen_op_ldub_user_T0_A0,
714 gen_op_lduw_user_T0_A0,
720 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
721 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
722 gen_op_ldub_raw_T0_A0,
723 gen_op_lduw_raw_T0_A0,
724 gen_op_ldl_raw_T0_A0,
725 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
727 #ifndef CONFIG_USER_ONLY
728 gen_op_ldub_kernel_T0_A0,
729 gen_op_lduw_kernel_T0_A0,
730 gen_op_ldl_kernel_T0_A0,
731 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
733 gen_op_ldub_user_T0_A0,
734 gen_op_lduw_user_T0_A0,
735 gen_op_ldl_user_T0_A0,
736 X86_64_ONLY(gen_op_ldq_user_T0_A0),
/* Same, loading into T1. */
740 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
741 gen_op_ldub_raw_T1_A0,
742 gen_op_lduw_raw_T1_A0,
743 gen_op_ldl_raw_T1_A0,
744 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
746 #ifndef CONFIG_USER_ONLY
747 gen_op_ldub_kernel_T1_A0,
748 gen_op_lduw_kernel_T1_A0,
749 gen_op_ldl_kernel_T1_A0,
750 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
752 gen_op_ldub_user_T1_A0,
753 gen_op_lduw_user_T1_A0,
754 gen_op_ldl_user_T1_A0,
755 X86_64_ONLY(gen_op_ldq_user_T1_A0),
/* Stores of T0 / T1 to [A0]. */
759 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
760 gen_op_stb_raw_T0_A0,
761 gen_op_stw_raw_T0_A0,
762 gen_op_stl_raw_T0_A0,
763 X86_64_ONLY(gen_op_stq_raw_T0_A0),
765 #ifndef CONFIG_USER_ONLY
766 gen_op_stb_kernel_T0_A0,
767 gen_op_stw_kernel_T0_A0,
768 gen_op_stl_kernel_T0_A0,
769 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
771 gen_op_stb_user_T0_A0,
772 gen_op_stw_user_T0_A0,
773 gen_op_stl_user_T0_A0,
774 X86_64_ONLY(gen_op_stq_user_T0_A0),
/* NOTE(review): the byte-size entries of this T1 table are elided here. */
778 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
780 gen_op_stw_raw_T1_A0,
781 gen_op_stl_raw_T1_A0,
782 X86_64_ONLY(gen_op_stq_raw_T1_A0),
784 #ifndef CONFIG_USER_ONLY
786 gen_op_stw_kernel_T1_A0,
787 gen_op_stl_kernel_T1_A0,
788 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
791 gen_op_stw_user_T1_A0,
792 gen_op_stl_user_T1_A0,
793 X86_64_ONLY(gen_op_stq_user_T1_A0),
/* Store an immediate EIP/RIP into the CPU state, choosing the narrowest
   micro-op that can encode the value (u32, s32, or full 64 bits).
   NOTE(review): listing elided — the braces and the #ifdef TARGET_X86_64 /
   #else split separating the 64-bit cases from the plain 32-bit store
   (line 808) are missing here. */
797 static inline void gen_jmp_im(target_ulong pc)
800 if (pc == (uint32_t)pc) {
801 gen_op_movl_eip_im(pc);
802 } else if (pc == (int32_t)pc) {
803 gen_op_movq_eip_im(pc);
805 gen_op_movq_eip_im64(pc >> 32, pc);
808 gen_op_movl_eip_im(pc);
/* Compute A0 = effective source address for string instructions:
   ESI plus the (overridable, defaults to DS) segment base.  The 16-bit
   address-size path masks ESI to 16 bits and always adds the segment. */
812 static inline void gen_string_movl_A0_ESI(DisasContext *s)
816 override = s->override;
820 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
821 gen_op_addq_A0_reg_sN[0][R_ESI]();
823 gen_op_movq_A0_reg[R_ESI]();
/* 32-bit path: only add the segment base when addseg says it is non-zero */
829 if (s->addseg && override < 0)
832 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
833 gen_op_addl_A0_reg_sN[0][R_ESI]();
835 gen_op_movl_A0_reg[R_ESI]();
838 /* 16 address, always override */
841 gen_op_movl_A0_reg[R_ESI]();
842 gen_op_andl_A0_ffff();
843 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Destination address: EDI relative to ES — segment overrides never
   apply to the destination of string instructions. */
847 static inline void gen_string_movl_A0_EDI(DisasContext *s)
851 gen_op_movq_A0_reg[R_EDI]();
856 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
857 gen_op_addl_A0_reg_sN[0][R_EDI]();
859 gen_op_movl_A0_reg[R_EDI]();
862 gen_op_movl_A0_reg[R_EDI]();
863 gen_op_andl_A0_ffff();
864 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* T0 = +/-element_size depending on EFLAGS.DF, per operand size; used to
   advance ESI/EDI in string instructions. */
868 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
869 gen_op_movl_T0_Dshiftb,
870 gen_op_movl_T0_Dshiftw,
871 gen_op_movl_T0_Dshiftl,
872 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* Conditional branches on (E/R)CX, indexed by address-size flag. */
875 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
878 X86_64_ONLY(gen_op_jnz_ecxq),
881 static GenOpFunc1 *gen_op_jz_ecx[3] = {
884 X86_64_ONLY(gen_op_jz_ecxq),
887 static GenOpFunc *gen_op_dec_ECX[3] = {
890 X86_64_ONLY(gen_op_decq_ECX),
/* REPZ/REPNZ termination tests on the last compare, [nz][operand size]. */
893 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
898 X86_64_ONLY(gen_op_jnz_subq),
904 X86_64_ONLY(gen_op_jz_subq),
/* I/O port access micro-ops, indexed by operand size. */
908 static GenOpFunc *gen_op_in_DX_T0[3] = {
914 static GenOpFunc *gen_op_out_DX_T0[3] = {
920 static GenOpFunc *gen_op_in[3] = {
926 static GenOpFunc *gen_op_out[3] = {
932 static GenOpFunc *gen_check_io_T0[3] = {
938 static GenOpFunc *gen_check_io_DX[3] = {
/* Emit an I/O permission check (TSS bitmap) before IN/OUT/INS/OUTS when
   protected mode privilege rules require it (CPL > IOPL, or vm86). */
944 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
946 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
/* flags must be up to date: the check may raise an exception */
947 if (s->cc_op != CC_OP_DYNAMIC)
948 gen_op_set_cc_op(s->cc_op);
951 gen_check_io_DX[ot]();
953 gen_check_io_T0[ot]();
/* One MOVS iteration: load from [seg:ESI] into T0, store to [ES:EDI],
   then advance ESI and EDI by the DF-signed element size. */
957 static inline void gen_movs(DisasContext *s, int ot)
959 gen_string_movl_A0_ESI(s);
960 gen_op_ld_T0_A0[ot + s->mem_index]();
961 gen_string_movl_A0_EDI(s);
962 gen_op_st_T0_A0[ot + s->mem_index]();
963 gen_op_movl_T0_Dshift[ot]();
/* 64/32/16-bit index updates (the #ifdef/aflag tests are elided here) */
966 gen_op_addq_ESI_T0();
967 gen_op_addq_EDI_T0();
971 gen_op_addl_ESI_T0();
972 gen_op_addl_EDI_T0();
974 gen_op_addw_ESI_T0();
975 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code state into the CPU and mark it
   dynamic, so subsequent code cannot assume a known cc_op. */
979 static inline void gen_update_cc_op(DisasContext *s)
981 if (s->cc_op != CC_OP_DYNAMIC) {
982 gen_op_set_cc_op(s->cc_op);
983 s->cc_op = CC_OP_DYNAMIC;
987 /* XXX: does not work with gdbstub "ice" single step - not a
/* Emit the "if (E)CX == 0, skip the string op and jump to next_eip"
   prologue used by the REP prefixes; returns the label to branch back to. */
989 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
993 l1 = gen_new_label();
994 l2 = gen_new_label();
995 gen_op_jnz_ecx[s->aflag](l1);
997 gen_jmp_tb(s, next_eip, 1);
/* One STOS iteration: store EAX (in T0) to [ES:EDI], advance EDI. */
1002 static inline void gen_stos(DisasContext *s, int ot)
1004 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1005 gen_string_movl_A0_EDI(s);
1006 gen_op_st_T0_A0[ot + s->mem_index]();
1007 gen_op_movl_T0_Dshift[ot]();
1008 #ifdef TARGET_X86_64
1009 if (s->aflag == 2) {
1010 gen_op_addq_EDI_T0();
1014 gen_op_addl_EDI_T0();
1016 gen_op_addw_EDI_T0();
/* One LODS iteration: load [seg:ESI] into EAX, advance ESI. */
1020 static inline void gen_lods(DisasContext *s, int ot)
1022 gen_string_movl_A0_ESI(s);
1023 gen_op_ld_T0_A0[ot + s->mem_index]();
1024 gen_op_mov_reg_T0[ot][R_EAX]();
1025 gen_op_movl_T0_Dshift[ot]();
1026 #ifdef TARGET_X86_64
1027 if (s->aflag == 2) {
1028 gen_op_addq_ESI_T0();
1032 gen_op_addl_ESI_T0();
1034 gen_op_addw_ESI_T0();
/* One SCAS iteration: compare EAX (T0) with [ES:EDI] (T1), advance EDI. */
1038 static inline void gen_scas(DisasContext *s, int ot)
1040 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1041 gen_string_movl_A0_EDI(s);
1042 gen_op_ld_T1_A0[ot + s->mem_index]();
1043 gen_op_cmpl_T0_T1_cc();
1044 gen_op_movl_T0_Dshift[ot]();
1045 #ifdef TARGET_X86_64
1046 if (s->aflag == 2) {
1047 gen_op_addq_EDI_T0();
1051 gen_op_addl_EDI_T0();
1053 gen_op_addw_EDI_T0();
/* One CMPS iteration: compare [seg:ESI] with [ES:EDI], advance both. */
1057 static inline void gen_cmps(DisasContext *s, int ot)
1059 gen_string_movl_A0_ESI(s);
1060 gen_op_ld_T0_A0[ot + s->mem_index]();
1061 gen_string_movl_A0_EDI(s);
1062 gen_op_ld_T1_A0[ot + s->mem_index]();
1063 gen_op_cmpl_T0_T1_cc();
1064 gen_op_movl_T0_Dshift[ot]();
1065 #ifdef TARGET_X86_64
1066 if (s->aflag == 2) {
1067 gen_op_addq_ESI_T0();
1068 gen_op_addq_EDI_T0();
1072 gen_op_addl_ESI_T0();
1073 gen_op_addl_EDI_T0();
1075 gen_op_addw_ESI_T0();
1076 gen_op_addw_EDI_T0();
/* One INS iteration: read port DX into [ES:EDI], advance EDI.
   NOTE(review): the store at line 1084 happens BEFORE the port read —
   presumably a deliberate dummy store so a page fault fires before the
   I/O side effect (the elided line 1083 likely zeroes T0); confirm. */
1080 static inline void gen_ins(DisasContext *s, int ot)
1082 gen_string_movl_A0_EDI(s);
1084 gen_op_st_T0_A0[ot + s->mem_index]();
1085 gen_op_in_DX_T0[ot]();
1086 gen_op_st_T0_A0[ot + s->mem_index]();
1087 gen_op_movl_T0_Dshift[ot]();
1088 #ifdef TARGET_X86_64
1089 if (s->aflag == 2) {
1090 gen_op_addq_EDI_T0();
1094 gen_op_addl_EDI_T0();
1096 gen_op_addw_EDI_T0();
/* One OUTS iteration: write [seg:ESI] to port DX, advance ESI. */
1100 static inline void gen_outs(DisasContext *s, int ot)
1102 gen_string_movl_A0_ESI(s);
1103 gen_op_ld_T0_A0[ot + s->mem_index]();
1104 gen_op_out_DX_T0[ot]();
1105 gen_op_movl_T0_Dshift[ot]();
1106 #ifdef TARGET_X86_64
1107 if (s->aflag == 2) {
1108 gen_op_addq_ESI_T0();
1112 gen_op_addl_ESI_T0();
1114 gen_op_addw_ESI_T0();
1118 /* same method as Valgrind : we generate jumps to current or next
/* GEN_REPZ wraps a string-op generator into its REP form: test ECX, run
   one iteration, decrement ECX, loop by re-entering the translator at
   cur_eip (jump-based, so single-step sees each iteration). */
1120 #define GEN_REPZ(op) \
1121 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1122 target_ulong cur_eip, target_ulong next_eip) \
1125 gen_update_cc_op(s); \
1126 l2 = gen_jz_ecx_string(s, next_eip); \
1127 gen_ ## op(s, ot); \
1128 gen_op_dec_ECX[s->aflag](); \
1129 /* a loop would cause two single step exceptions if ECX = 1 \
1130 before rep string_insn */ \
1132 gen_op_jz_ecx[s->aflag](l2); \
1133 gen_jmp(s, cur_eip); \
/* GEN_REPZ2: same, plus the REPZ/REPNZ early-exit test on ZF after the
   compare-style ops (CMPS/SCAS); nz selects the polarity. */
1136 #define GEN_REPZ2(op) \
1137 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1138 target_ulong cur_eip, \
1139 target_ulong next_eip, \
1143 gen_update_cc_op(s); \
1144 l2 = gen_jz_ecx_string(s, next_eip); \
1145 gen_ ## op(s, ot); \
1146 gen_op_dec_ECX[s->aflag](); \
1147 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1148 gen_op_string_jnz_sub[nz][ot](l2);\
1150 gen_op_jz_ecx[s->aflag](l2); \
1151 gen_jmp(s, cur_eip); \
/* Fast conditional-jump ops when flags came from a SUB: indexed
   [operand size][jcc_op].  BUGGY_64 entries fall back to the slow path. */
1173 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1204 #ifdef TARGET_X86_64
1207 BUGGY_64(gen_op_jb_subq),
1209 BUGGY_64(gen_op_jbe_subq),
1212 BUGGY_64(gen_op_jl_subq),
1213 BUGGY_64(gen_op_jle_subq),
/* LOOP/LOOPZ/LOOPNZ/JCXZ ops, indexed [address size][variant]. */
1217 static GenOpFunc1 *gen_op_loop[3][4] = {
1228 #ifdef TARGET_X86_64
/* Generic (slow) setcc ops computing the condition from dynamic flags. */
1237 static GenOpFunc *gen_setcc_slow[8] = {
/* Fast SETcc ops when flags came from a SUB: [operand size][jcc_op]. */
1248 static GenOpFunc *gen_setcc_sub[4][8] = {
1251 gen_op_setb_T0_subb,
1252 gen_op_setz_T0_subb,
1253 gen_op_setbe_T0_subb,
1254 gen_op_sets_T0_subb,
1256 gen_op_setl_T0_subb,
1257 gen_op_setle_T0_subb,
1261 gen_op_setb_T0_subw,
1262 gen_op_setz_T0_subw,
1263 gen_op_setbe_T0_subw,
1264 gen_op_sets_T0_subw,
1266 gen_op_setl_T0_subw,
1267 gen_op_setle_T0_subw,
1271 gen_op_setb_T0_subl,
1272 gen_op_setz_T0_subl,
1273 gen_op_setbe_T0_subl,
1274 gen_op_sets_T0_subl,
1276 gen_op_setl_T0_subl,
1277 gen_op_setle_T0_subl,
1279 #ifdef TARGET_X86_64
1282 gen_op_setb_T0_subq,
1283 gen_op_setz_T0_subq,
1284 gen_op_setbe_T0_subq,
1285 gen_op_sets_T0_subq,
1287 gen_op_setl_T0_subq,
1288 gen_op_setle_T0_subq,
/* x87 ST0-op-FT0 dispatch, in instruction /reg encoding order. */
1293 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1294 gen_op_fadd_ST0_FT0,
1295 gen_op_fmul_ST0_FT0,
1296 gen_op_fcom_ST0_FT0,
1297 gen_op_fcom_ST0_FT0,
1298 gen_op_fsub_ST0_FT0,
1299 gen_op_fsubr_ST0_FT0,
1300 gen_op_fdiv_ST0_FT0,
1301 gen_op_fdivr_ST0_FT0,
1304 /* NOTE the exception in "r" op ordering */
1305 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1306 gen_op_fadd_STN_ST0,
1307 gen_op_fmul_STN_ST0,
1310 gen_op_fsubr_STN_ST0,
1311 gen_op_fsub_STN_ST0,
1312 gen_op_fdivr_STN_ST0,
1313 gen_op_fdiv_STN_ST0,
1316 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation "d op= T1" (op in OP_*), tracking the lazy
   condition-code state in s1->cc_op and keeping the flag update AFTER
   the memory write for precise-exception correctness. */
1317 static void gen_op(DisasContext *s1, int op, int ot, int d)
1319 GenOpFunc *gen_update_cc;
1322 gen_op_mov_TN_reg[ot][0][d]();
1324 gen_op_ld_T0_A0[ot + s1->mem_index]();
/* ADC/SBB consume the current carry: materialize flags first */
1329 if (s1->cc_op != CC_OP_DYNAMIC)
1330 gen_op_set_cc_op(s1->cc_op);
1332 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1333 gen_op_mov_reg_T0[ot][d]();
1335 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1337 s1->cc_op = CC_OP_DYNAMIC;
1340 gen_op_addl_T0_T1();
1341 s1->cc_op = CC_OP_ADDB + ot;
1342 gen_update_cc = gen_op_update2_cc;
1345 gen_op_subl_T0_T1();
1346 s1->cc_op = CC_OP_SUBB + ot;
1347 gen_update_cc = gen_op_update2_cc;
/* logic ops (AND/OR/XOR) share one dispatch table */
1353 gen_op_arith_T0_T1_cc[op]();
1354 s1->cc_op = CC_OP_LOGICB + ot;
1355 gen_update_cc = gen_op_update1_cc;
1358 gen_op_cmpl_T0_T1_cc();
1359 s1->cc_op = CC_OP_SUBB + ot;
1360 gen_update_cc = NULL;
/* CMP writes no destination */
1363 if (op != OP_CMPL) {
1365 gen_op_mov_reg_T0[ot][d]();
1367 gen_op_st_T0_A0[ot + s1->mem_index]();
1369 /* the flags update must happen after the memory write (precise
1370 exception support) */
1376 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC, which update all flags except CF. */
1377 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1380 gen_op_mov_TN_reg[ot][0][d]();
1382 gen_op_ld_T0_A0[ot + s1->mem_index]();
1383 if (s1->cc_op != CC_OP_DYNAMIC)
1384 gen_op_set_cc_op(s1->cc_op);
1387 s1->cc_op = CC_OP_INCB + ot;
1390 s1->cc_op = CC_OP_DECB + ot;
1393 gen_op_mov_reg_T0[ot][d]();
1395 gen_op_st_T0_A0[ot + s1->mem_index]();
1396 gen_op_update_inc_cc();
/* Emit a shift/rotate of d by register/temp s (count in T1). */
1399 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1402 gen_op_mov_TN_reg[ot][0][d]();
1404 gen_op_ld_T0_A0[ot + s1->mem_index]();
1406 gen_op_mov_TN_reg[ot][1][s]();
1407 /* for zero counts, flags are not updated, so must do it dynamically */
1408 if (s1->cc_op != CC_OP_DYNAMIC)
1409 gen_op_set_cc_op(s1->cc_op);
1412 gen_op_shift_T0_T1_cc[ot][op]();
1414 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1416 gen_op_mov_reg_T0[ot][d]();
1417 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Immediate-count shift: route through gen_shift with the count in T1. */
1420 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1422 /* currently not optimized */
1423 gen_op_movl_T1_im(c)
1424 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the ModRM (and optional SIB + displacement) addressing bytes at
   s->pc and emit micro-ops leaving the effective address in A0, including
   segment-base addition when required.  reg_ptr/offset_ptr return the
   operand description to the caller.  NOTE(review): many interior lines
   (switch/case scaffolding, braces) are elided from this listing. */
1427 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1435 int mod, rm, code, override, must_add_seg;
1437 override = s->override;
1438 must_add_seg = s->addseg;
1441 mod = (modrm >> 6) & 3;
/* SIB byte: scale / index / base fields (index extended by REX.X) */
1453 code = ldub_code(s->pc++);
1454 scale = (code >> 6) & 3;
1455 index = ((code >> 3) & 7) | REX_X(s);
/* mod == 0, base == 5: 32-bit displacement (RIP-relative in 64-bit
   mode when there is no SIB byte) */
1462 if ((base & 7) == 5) {
1464 disp = (int32_t)ldl_code(s->pc);
1466 if (CODE64(s) && !havesib) {
1467 disp += s->pc + s->rip_offset;
1474 disp = (int8_t)ldub_code(s->pc++);
1478 disp = ldl_code(s->pc);
1484 /* for correct popl handling with esp */
1485 if (base == 4 && s->popl_esp_hack)
1486 disp += s->popl_esp_hack;
1487 #ifdef TARGET_X86_64
1488 if (s->aflag == 2) {
1489 gen_op_movq_A0_reg[base]();
1491 if ((int32_t)disp == disp)
1492 gen_op_addq_A0_im(disp);
1494 gen_op_addq_A0_im64(disp >> 32, disp);
1499 gen_op_movl_A0_reg[base]();
1501 gen_op_addl_A0_im(disp);
/* no base register: A0 = displacement alone */
1504 #ifdef TARGET_X86_64
1505 if (s->aflag == 2) {
1506 if ((int32_t)disp == disp)
1507 gen_op_movq_A0_im(disp);
1509 gen_op_movq_A0_im64(disp >> 32, disp);
1513 gen_op_movl_A0_im(disp);
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN[scale][index]();
1524 gen_op_addl_A0_reg_sN[scale][index]();
/* default segment is SS when the base is EBP or ESP, DS otherwise */
1529 if (base == R_EBP || base == R_ESP)
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1540 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* 16-bit addressing forms: the classic BX/BP/SI/DI combinations */
1547 disp = lduw_code(s->pc);
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1557 disp = (int8_t)ldub_code(s->pc++);
1561 disp = lduw_code(s->pc);
1567 gen_op_movl_A0_reg[R_EBX]();
1568 gen_op_addl_A0_reg_sN[0][R_ESI]();
1571 gen_op_movl_A0_reg[R_EBX]();
1572 gen_op_addl_A0_reg_sN[0][R_EDI]();
1575 gen_op_movl_A0_reg[R_EBP]();
1576 gen_op_addl_A0_reg_sN[0][R_ESI]();
1579 gen_op_movl_A0_reg[R_EBP]();
1580 gen_op_addl_A0_reg_sN[0][R_EDI]();
1583 gen_op_movl_A0_reg[R_ESI]();
1586 gen_op_movl_A0_reg[R_EDI]();
1589 gen_op_movl_A0_reg[R_EBP]();
1593 gen_op_movl_A0_reg[R_EBX]();
1597 gen_op_addl_A0_im(disp);
1598 gen_op_andl_A0_ffff();
/* 16-bit default segment: SS for the BP-based forms */
1602 if (rm == 2 || rm == 3 || rm == 6)
1607 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1617 /* used for LEA and MOV AX, mem */
/* Add the DS (or override) segment base to A0 when segmentation needs it. */
1618 static void gen_add_A0_ds_seg(DisasContext *s)
1620 int override, must_add_seg;
1621 must_add_seg = s->addseg;
1623 if (s->override >= 0) {
1624 override = s->override;
1630 #ifdef TARGET_X86_64
1632 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1636 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1641 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* mod == 3 means register-to-register; otherwise decode the effective
   address and load/store through A0 (is_store selects the direction). */
1643 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1645 int mod, rm, opreg, disp;
1647 mod = (modrm >> 6) & 3;
1648 rm = (modrm & 7) | REX_B(s);
1652 gen_op_mov_TN_reg[ot][0][reg]();
1653 gen_op_mov_reg_T0[ot][rm]();
1655 gen_op_mov_TN_reg[ot][0][rm]();
1657 gen_op_mov_reg_T0[ot][reg]();
1660 gen_lea_modrm(s, modrm, &opreg, &disp);
1663 gen_op_mov_TN_reg[ot][0][reg]();
1664 gen_op_st_T0_A0[ot + s->mem_index]();
1666 gen_op_ld_T0_A0[ot + s->mem_index]();
1668 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of the given operand size from the code stream. */
1673 static inline uint32_t insn_get(DisasContext *s, int ot)
1679 ret = ldub_code(s->pc);
1683 ret = lduw_code(s->pc);
1688 ret = ldl_code(s->pc);
/* Byte length of the immediate for a given operand size (body elided). */
1695 static inline int insn_const_size(unsigned int ot)
/* Emit a conditional jump: taken -> val, not taken -> next_eip.  Uses the
   fast per-cc_op jump ops when the flags provenance is known, otherwise
   computes the condition via gen_setcc_slow; uses direct TB chaining
   (goto_tb) when s->jmp_opt allows it. */
1703 static inline void gen_jcc(DisasContext *s, int b,
1704 target_ulong val, target_ulong next_eip)
1706 TranslationBlock *tb;
/* bit 0 of b inverts the condition; bits 3..1 select jcc_op */
1713 jcc_op = (b >> 1) & 7;
1717 /* we optimize the cmp/jcc case */
1722 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1725 /* some jumps are easy to compute */
1767 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1770 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
/* slow path: materialize flags and test T0 */
1782 if (s->cc_op != CC_OP_DYNAMIC)
1783 gen_op_set_cc_op(s->cc_op);
1786 gen_setcc_slow[jcc_op]();
1787 func = gen_op_jnz_T0_label;
/* direct block chaining: one goto_tb per outgoing edge, T0 = tb | edge# */
1797 l1 = gen_new_label();
1800 gen_op_goto_tb0(TBPARAM(tb));
1801 gen_jmp_im(next_eip);
1802 gen_op_movl_T0_im((long)tb + 0);
1806 gen_op_goto_tb1(TBPARAM(tb));
1808 gen_op_movl_T0_im((long)tb + 1);
/* non-chained fallback: compute condition, branch over the not-taken eip */
1814 if (s->cc_op != CC_OP_DYNAMIC) {
1815 gen_op_set_cc_op(s->cc_op);
1816 s->cc_op = CC_OP_DYNAMIC;
1818 gen_setcc_slow[jcc_op]();
1824 l1 = gen_new_label();
1825 l2 = gen_new_label();
1826 gen_op_jnz_T0_label(l1);
1827 gen_jmp_im(next_eip);
1828 gen_op_jmp_label(l2);
/* Emit SETcc into T0: same fast/slow dispatch as gen_jcc, no branch. */
1836 static void gen_setcc(DisasContext *s, int b)
1842 jcc_op = (b >> 1) & 7;
1844 /* we optimize the cmp/jcc case */
1849 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1854 /* some jumps are easy to compute */
1881 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1884 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1892 if (s->cc_op != CC_OP_DYNAMIC)
1893 gen_op_set_cc_op(s->cc_op);
1894 func = gen_setcc_slow[jcc_op];
1903 /* move T0 to seg_reg and compute if the CPU state may change. Never
1904 call this function with seg_reg == R_CS */
1905 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
/* protected mode: the helper performs descriptor checks and may fault,
   so flags and EIP must be up to date first */
1907 if (s->pe && !s->vm86) {
1908 /* XXX: optimize by finding processor state dynamically */
1909 if (s->cc_op != CC_OP_DYNAMIC)
1910 gen_op_set_cc_op(s->cc_op);
1911 gen_jmp_im(cur_eip);
1912 gen_op_movl_seg_T0(seg_reg);
1913 /* abort translation because the addseg value may change or
1914 because ss32 may change. For R_SS, translation must always
1915 stop as a special handling must be done to disable hardware
1916 interrupts for the next instruction */
1917 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
/* real/vm86 mode: direct selector load, no checks */
1920 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1921 if (seg_reg == R_SS)
/* Adjust (E/R)SP by addend, with specialized ops for the common 2/4/8
   byte cases; width chosen by CODE64/ss32 (guards elided in listing). */
1926 static inline void gen_stack_update(DisasContext *s, int addend)
1928 #ifdef TARGET_X86_64
1931 gen_op_addq_ESP_8();
1933 gen_op_addq_ESP_im(addend);
1938 gen_op_addl_ESP_2();
1939 else if (addend == 4)
1940 gen_op_addl_ESP_4();
1942 gen_op_addl_ESP_im(addend);
1945 gen_op_addw_ESP_2();
1946 else if (addend == 4)
1947 gen_op_addw_ESP_4();
1949 gen_op_addw_ESP_im(addend);
1953 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0: compute the new stack address in A0 (new ESP kept in T1 on
   the segmented paths), store, then write the updated ESP back. */
1954 static void gen_push_T0(DisasContext *s)
1956 #ifdef TARGET_X86_64
1958 gen_op_movq_A0_reg[R_ESP]();
1961 gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
1964 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
1966 gen_op_movq_ESP_A0();
1970 gen_op_movl_A0_reg[R_ESP]();
1977 gen_op_movl_T1_A0();
1978 gen_op_addl_A0_SS();
1981 gen_op_andl_A0_ffff();
1982 gen_op_movl_T1_A0();
1983 gen_op_addl_A0_SS();
1985 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1986 if (s->ss32 && !s->addseg)
1987 gen_op_movl_ESP_A0();
1989 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
1993 /* generate a push. It depends on ss32, addseg and dflag */
1994 /* slower version for T1, only used for call Ev */
1995 static void gen_push_T1(DisasContext *s)
1997 #ifdef TARGET_X86_64
1999 gen_op_movq_A0_reg[R_ESP]();
2002 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
/* NOTE(review): this 16-bit CODE64 path stores T0, not T1 — looks like
   a copy-paste slip from gen_push_T0; verify against upstream history. */
2005 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2007 gen_op_movq_ESP_A0();
2011 gen_op_movl_A0_reg[R_ESP]();
2018 gen_op_addl_A0_SS();
2021 gen_op_andl_A0_ffff();
2022 gen_op_addl_A0_SS();
2024 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2026 if (s->ss32 && !s->addseg)
2027 gen_op_movl_ESP_A0();
2029 gen_stack_update(s, (-2) << s->dflag);
2033 /* two step pop is necessary for precise exceptions */
/* Step 1 of POP: load the top of stack into T0 without moving ESP. */
2034 static void gen_pop_T0(DisasContext *s)
2036 #ifdef TARGET_X86_64
2038 gen_op_movq_A0_reg[R_ESP]();
2039 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2043 gen_op_movl_A0_reg[R_ESP]();
2046 gen_op_addl_A0_SS();
2048 gen_op_andl_A0_ffff();
2049 gen_op_addl_A0_SS();
2051 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
/* Step 2 of POP: commit the ESP adjustment after the load succeeded. */
2055 static void gen_pop_update(DisasContext *s)
2057 #ifdef TARGET_X86_64
2058 if (CODE64(s) && s->dflag) {
2059 gen_stack_update(s, 8);
2063 gen_stack_update(s, 2 << s->dflag);
/* A0 = SS-relative address of the stack top; raw ESP copy kept in T1. */
2067 static void gen_stack_A0(DisasContext *s)
2069 gen_op_movl_A0_ESP();
2071 gen_op_andl_A0_ffff();
2072 gen_op_movl_T1_A0();
2074 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2077 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA: store EAX..EDI (in reverse, 7-i) below ESP, then commit ESP. */
2078 static void gen_pusha(DisasContext *s)
2081 gen_op_movl_A0_ESP();
2082 gen_op_addl_A0_im(-16 << s->dflag);
2084 gen_op_andl_A0_ffff();
2085 gen_op_movl_T1_A0();
2087 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2088 for(i = 0;i < 8; i++) {
2089 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2090 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2091 gen_op_addl_A0_im(2 << s->dflag);
2093 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2096 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA: reload EDI..EAX from the stack; the stored ESP slot is skipped. */
2097 static void gen_popa(DisasContext *s)
2100 gen_op_movl_A0_ESP();
2102 gen_op_andl_A0_ffff();
2103 gen_op_movl_T1_A0();
2104 gen_op_addl_T1_im(16 << s->dflag);
2106 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2107 for(i = 0;i < 8; i++) {
2108 /* ESP is not reloaded */
2110 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2111 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2113 gen_op_addl_A0_im(2 << s->dflag);
2115 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* ENTER instruction: push EBP, optionally copy 'level' nested frame
   pointers (gen_op_enter_level / gen_op_enter64_level), set EBP to the
   new frame base, and reserve esp_addend bytes of locals.
   Two parallel paths: 64-bit (quad operands via RSP) and legacy
   (2<<dflag operands with SS base / 16-bit wrap handling).
   NOTE(review): braces and #else/#endif lines were elided by
   extraction. */
2118 static void gen_enter(DisasContext *s, int esp_addend, int level)
2123 #ifdef TARGET_X86_64
2125 ot = s->dflag ? OT_QUAD : OT_WORD;
2128 gen_op_movl_A0_ESP();
2129 gen_op_addq_A0_im(-opsize);
2130 gen_op_movl_T1_A0();
2133 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2134 gen_op_st_T0_A0[ot + s->mem_index]();
2136 gen_op_enter64_level(level, (ot == OT_QUAD));
2138 gen_op_mov_reg_T1[ot][R_EBP]();
2139 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2140 gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
2144 ot = s->dflag + OT_WORD;
2145 opsize = 2 << s->dflag;
2147 gen_op_movl_A0_ESP();
2148 gen_op_addl_A0_im(-opsize);
2150 gen_op_andl_A0_ffff();
2151 gen_op_movl_T1_A0();
2153 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2155 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2156 gen_op_st_T0_A0[ot + s->mem_index]();
2158 gen_op_enter_level(level, s->dflag);
2160 gen_op_mov_reg_T1[ot][R_EBP]();
2161 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2162 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* Raise guest exception 'trapno' at cur_eip: flush the lazily-computed
   condition codes if needed, set EIP, then emit the raise-exception
   micro-op (which ends the translation block). */
2166 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2168 if (s->cc_op != CC_OP_DYNAMIC)
2169 gen_op_set_cc_op(s->cc_op);
2170 gen_jmp_im(cur_eip);
2171 gen_op_raise_exception(trapno);
2175 /* an interrupt is different from an exception because of the
2176 privilege checks */
/* Raise software interrupt 'intno' (e.g. INT n): flush cc state, set
   EIP to the faulting instruction, and pass the instruction length
   (next_eip - cur_eip) so the handler can compute the return address. */
2177 static void gen_interrupt(DisasContext *s, int intno,
2178 target_ulong cur_eip, target_ulong next_eip)
2180 if (s->cc_op != CC_OP_DYNAMIC)
2181 gen_op_set_cc_op(s->cc_op);
2182 gen_jmp_im(cur_eip);
2183 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
/* Emit a debug trap at cur_eip: flush cc state and set EIP first so the
   debugger sees a consistent CPU state.
   NOTE(review): the trailing raise/exit micro-op line was elided by
   extraction. */
2187 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2189 if (s->cc_op != CC_OP_DYNAMIC)
2190 gen_op_set_cc_op(s->cc_op);
2191 gen_jmp_im(cur_eip);
2196 /* generate a generic end of block. Trace exception is also generated
/* End the translation block generically: flush cc state, clear the
   interrupt-inhibit flag if this TB set it (MOV SS / STI shadow), and
   raise EXCP01_SSTP when single-stepping so the debugger regains
   control after every instruction. */
2198 static void gen_eob(DisasContext *s)
2200 if (s->cc_op != CC_OP_DYNAMIC)
2201 gen_op_set_cc_op(s->cc_op);
2202 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2203 gen_op_reset_inhibit_irq();
2205 if (s->singlestep_enabled) {
2208 gen_op_raise_exception(EXCP01_SSTP);
2216 /* generate a jump to eip. No segment change must happen before as a
2217 direct call to the next block may occur */
/* Direct jump to 'eip' using TB chaining: emit goto_tb0/goto_tb1 (slot
   selected by tb_num) so the next block can be linked in place, and
   store (tb | tb_num) in T0 as the chaining cookie.
   NOTE(review): the surrounding jmp/eob epilogue lines were elided by
   extraction. */
2218 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2220 TranslationBlock *tb = s->tb;
2223 if (s->cc_op != CC_OP_DYNAMIC)
2224 gen_op_set_cc_op(s->cc_op);
2226 gen_op_goto_tb1(TBPARAM(tb));
2228 gen_op_goto_tb0(TBPARAM(tb));
2230 gen_op_movl_T0_im((long)tb + tb_num);
/* Convenience wrapper: jump to 'eip' via TB chaining slot 0. */
2239 static void gen_jmp(DisasContext *s, target_ulong eip)
2241 gen_jmp_tb(s, eip, 0);
/* Load target_ulong immediate 'val' into T0. On x86-64, use the cheap
   32-bit move when the value sign-extends from 32 bits, otherwise the
   64-bit move taking high/low halves. 32-bit targets always use the
   32-bit move. */
2244 static void gen_movtl_T0_im(target_ulong val)
2246 #ifdef TARGET_X86_64
2247 if ((int32_t)val == val) {
2248 gen_op_movl_T0_im(val);
2250 gen_op_movq_T0_im64(val >> 32, val);
2253 gen_op_movl_T0_im(val);
/* Load target_ulong immediate 'val' into T1; same sign-extension
   shortcut as gen_movtl_T0_im. */
2257 static void gen_movtl_T1_im(target_ulong val)
2259 #ifdef TARGET_X86_64
2260 if ((int32_t)val == val) {
2261 gen_op_movl_T1_im(val);
2263 gen_op_movq_T1_im64(val >> 32, val);
2266 gen_op_movl_T1_im(val);
/* Add immediate 'val' to A0 using the quad op in 64-bit code and the
   long op otherwise (the CODE64(s) test line was elided by extraction). */
2270 static void gen_add_A0_im(DisasContext *s, int val)
2272 #ifdef TARGET_X86_64
2274 gen_op_addq_A0_im(val);
2277 gen_op_addl_A0_im(val);
/* 64-bit env load from [A0], indexed by s->mem_index >> 2
   (raw / kernel / user MMU variant; kernel+user only when not
   CONFIG_USER_ONLY). */
2280 static GenOpFunc1 *gen_ldq_env_A0[3] = {
2281 gen_op_ldq_raw_env_A0,
2282 #ifndef CONFIG_USER_ONLY
2283 gen_op_ldq_kernel_env_A0,
2284 gen_op_ldq_user_env_A0,
/* 64-bit env store to [A0]; same raw/kernel/user indexing as
   gen_ldq_env_A0. */
2288 static GenOpFunc1 *gen_stq_env_A0[3] = {
2289 gen_op_stq_raw_env_A0,
2290 #ifndef CONFIG_USER_ONLY
2291 gen_op_stq_kernel_env_A0,
2292 gen_op_stq_user_env_A0,
/* 128-bit (octa/XMM) env load from [A0]; same raw/kernel/user
   indexing. */
2296 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2297 gen_op_ldo_raw_env_A0,
2298 #ifndef CONFIG_USER_ONLY
2299 gen_op_ldo_kernel_env_A0,
2300 gen_op_ldo_user_env_A0,
/* 128-bit (octa/XMM) env store to [A0]; same raw/kernel/user
   indexing. */
2304 static GenOpFunc1 *gen_sto_env_A0[3] = {
2305 gen_op_sto_raw_env_A0,
2306 #ifndef CONFIG_USER_ONLY
2307 gen_op_sto_kernel_env_A0,
2308 gen_op_sto_user_env_A0,
/* SSE_SPECIAL marks table entries that need bespoke handling inside
   gen_sse() rather than a plain two-operand op. MMX_OP2 expands to the
   {mmx, xmm} pair of a micro-op; SSE_FOP to the {ps, pd, ss, sd}
   quadruple of a float op. */
2312 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2314 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2315 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2316 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
/* Main 0x0F-escape SSE/MMX dispatch table, indexed [opcode][b1] where
   b1 selects the mandatory-prefix variant (presumably 0=none, 1=0x66,
   2=F3, 3=F2 — matches the prefix tests in gen_sse; confirm against the
   full source). NULL entries are invalid encodings. */
2318 static GenOpFunc2 *sse_op_table1[256][4] = {
2319 /* pure SSE operations */
2320 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2321 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2322 [0x12] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2323 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2324 [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2325 [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2326 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2327 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2329 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2330 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2331 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2332 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2333 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2334 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2335 [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2336 [0x2f] = { gen_op_comiss, gen_op_comisd },
2337 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2338 [0x51] = SSE_FOP(sqrt),
2339 [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2340 [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2341 [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2342 [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2343 [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2344 [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2345 [0x58] = SSE_FOP(add),
2346 [0x59] = SSE_FOP(mul),
2347 [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
2348 gen_op_cvtss2sd, gen_op_cvtsd2ss },
2349 [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2350 [0x5c] = SSE_FOP(sub),
2351 [0x5d] = SSE_FOP(min),
2352 [0x5e] = SSE_FOP(div),
2353 [0x5f] = SSE_FOP(max),
2355 [0xc2] = SSE_FOP(cmpeq),
2356 [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2358 /* MMX ops and their SSE extensions */
2359 [0x60] = MMX_OP2(punpcklbw),
2360 [0x61] = MMX_OP2(punpcklwd),
2361 [0x62] = MMX_OP2(punpckldq),
2362 [0x63] = MMX_OP2(packsswb),
2363 [0x64] = MMX_OP2(pcmpgtb),
2364 [0x65] = MMX_OP2(pcmpgtw),
2365 [0x66] = MMX_OP2(pcmpgtl),
2366 [0x67] = MMX_OP2(packuswb),
2367 [0x68] = MMX_OP2(punpckhbw),
2368 [0x69] = MMX_OP2(punpckhwd),
2369 [0x6a] = MMX_OP2(punpckhdq),
2370 [0x6b] = MMX_OP2(packssdw),
2371 [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2372 [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2373 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2374 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2375 [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
2376 (GenOpFunc2 *)gen_op_pshufd_xmm,
2377 (GenOpFunc2 *)gen_op_pshufhw_xmm,
2378 (GenOpFunc2 *)gen_op_pshuflw_xmm },
2379 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2380 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2381 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2382 [0x74] = MMX_OP2(pcmpeqb),
2383 [0x75] = MMX_OP2(pcmpeqw),
2384 [0x76] = MMX_OP2(pcmpeql),
2385 [0x77] = { SSE_SPECIAL }, /* emms */
2386 [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2387 [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2388 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2389 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2390 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2391 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2392 [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2393 [0xd1] = MMX_OP2(psrlw),
2394 [0xd2] = MMX_OP2(psrld),
2395 [0xd3] = MMX_OP2(psrlq),
2396 [0xd4] = MMX_OP2(paddq),
2397 [0xd5] = MMX_OP2(pmullw),
2398 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2399 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2400 [0xd8] = MMX_OP2(psubusb),
2401 [0xd9] = MMX_OP2(psubusw),
2402 [0xda] = MMX_OP2(pminub),
2403 [0xdb] = MMX_OP2(pand),
2404 [0xdc] = MMX_OP2(paddusb),
2405 [0xdd] = MMX_OP2(paddusw),
2406 [0xde] = MMX_OP2(pmaxub),
2407 [0xdf] = MMX_OP2(pandn),
2408 [0xe0] = MMX_OP2(pavgb),
2409 [0xe1] = MMX_OP2(psraw),
2410 [0xe2] = MMX_OP2(psrad),
2411 [0xe3] = MMX_OP2(pavgw),
2412 [0xe4] = MMX_OP2(pmulhuw),
2413 [0xe5] = MMX_OP2(pmulhw),
2414 [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2415 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */
2416 [0xe8] = MMX_OP2(psubsb),
2417 [0xe9] = MMX_OP2(psubsw),
2418 [0xea] = MMX_OP2(pminsw),
2419 [0xeb] = MMX_OP2(por),
2420 [0xec] = MMX_OP2(paddsb),
2421 [0xed] = MMX_OP2(paddsw),
2422 [0xee] = MMX_OP2(pmaxsw),
2423 [0xef] = MMX_OP2(pxor),
2424 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
2425 [0xf1] = MMX_OP2(psllw),
2426 [0xf2] = MMX_OP2(pslld),
2427 [0xf3] = MMX_OP2(psllq),
2428 [0xf4] = MMX_OP2(pmuludq),
2429 [0xf5] = MMX_OP2(pmaddwd),
2430 [0xf6] = MMX_OP2(psadbw),
2431 [0xf7] = MMX_OP2(maskmov),
2432 [0xf8] = MMX_OP2(psubb),
2433 [0xf9] = MMX_OP2(psubw),
2434 [0xfa] = MMX_OP2(psubl),
2435 [0xfb] = MMX_OP2(psubq),
2436 [0xfc] = MMX_OP2(paddb),
2437 [0xfd] = MMX_OP2(paddw),
2438 [0xfe] = MMX_OP2(paddl),
/* Immediate-form vector shifts (opcodes 0x71-0x73 /2,/4,/6 plus the
   xmm-only /3 and /7 byte shifts), indexed
   [group*8 + modrm.reg][mmx|xmm] — see the (b-1)&3 * 8 computation in
   gen_sse(). */
2441 static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2442 [0 + 2] = MMX_OP2(psrlw),
2443 [0 + 4] = MMX_OP2(psraw),
2444 [0 + 6] = MMX_OP2(psllw),
2445 [8 + 2] = MMX_OP2(psrld),
2446 [8 + 4] = MMX_OP2(psrad),
2447 [8 + 6] = MMX_OP2(pslld),
2448 [16 + 2] = MMX_OP2(psrlq),
2449 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2450 [16 + 6] = MMX_OP2(psllq),
2451 [16 + 7] = { NULL, gen_op_pslldq_xmm },
/* Scalar int<->float conversions for cvtsi2ss/sd, cvttss/sd2si and
   cvtss/sd2si; indexed by (dflag==2)*2 + variant (+4 per group, see
   gen_sse callers). 64-bit entries exist only with X86_64_ONLY; the
   32-bit entries between groups were elided by extraction. */
2454 static GenOpFunc1 *sse_op_table3[4 * 3] = {
2457 X86_64_ONLY(gen_op_cvtsq2ss),
2458 X86_64_ONLY(gen_op_cvtsq2sd),
2462 X86_64_ONLY(gen_op_cvttss2sq),
2463 X86_64_ONLY(gen_op_cvttsd2sq),
2467 X86_64_ONLY(gen_op_cvtss2sq),
2468 X86_64_ONLY(gen_op_cvtsd2sq),
2471 static GenOpFunc2 *sse_op_table4[8][4] = {
/* Decode and translate one MMX/SSE/SSE2/SSE3 instruction.
   'b' is the opcode byte (with 0x100 set for 0x0F-escaped forms by the
   caller); b1 encodes the mandatory prefix (none/66/F3/F2) and selects
   the column of sse_op_table1[b]. SSE_SPECIAL entries are decoded by
   the big switch below (keyed on prefix<<8 | opcode, e.g. 0x16f =
   66 0F 6F movdqa); everything else falls through to the generic
   two-operand path at the bottom.
   NOTE(review): many lines (braces, else branches, gen_op calls) were
   elided by extraction, and "®_addr" is mojibake for "&reg_addr"
   (an HTML &reg; entity) — restore both before compiling. */
2482 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2484 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2485 int modrm, mod, rm, reg, reg_addr, offset_addr;
2486 GenOpFunc2 *sse_op2;
2487 GenOpFunc3 *sse_op3;
/* select prefix column: 66 / F3 / F2, default none */
2490 if (s->prefix & PREFIX_DATA)
2492 else if (s->prefix & PREFIX_REPZ)
2494 else if (s->prefix & PREFIX_REPNZ)
2498 sse_op2 = sse_op_table1[b][b1];
2501 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2511 /* simple MMX/SSE operation */
/* #NM if CR0.TS is set; #UD if emulation forced or, for SSE,
   if OSFXSR is not enabled */
2512 if (s->flags & HF_TS_MASK) {
2513 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2516 if (s->flags & HF_EM_MASK) {
2518 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2521 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2528 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2529 the static cpu state) */
2534 modrm = ldub_code(s->pc++);
2535 reg = ((modrm >> 3) & 7);
2538 mod = (modrm >> 6) & 3;
2539 if (sse_op2 == SSE_SPECIAL) {
/* special-cased encodings follow; case keys are prefix<<8 | op */
2542 case 0x0e7: /* movntq */
2545 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2546 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2548 case 0x1e7: /* movntdq */
2549 case 0x02b: /* movntps */
2550 case 0x12b: /* movntps */
2551 case 0x2f0: /* lddqu */
2554 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2555 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2557 case 0x6e: /* movd mm, ea */
2558 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2559 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2561 case 0x16e: /* movd xmm, ea */
2562 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2563 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2565 case 0x6f: /* movq mm, ea */
2567 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2568 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2571 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2572 offsetof(CPUX86State,fpregs[rm].mmx));
2575 case 0x010: /* movups */
2576 case 0x110: /* movupd */
2577 case 0x028: /* movaps */
2578 case 0x128: /* movapd */
2579 case 0x16f: /* movdqa xmm, ea */
2580 case 0x26f: /* movdqu xmm, ea */
2582 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2583 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2585 rm = (modrm & 7) | REX_B(s);
2586 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2587 offsetof(CPUX86State,xmm_regs[rm]));
2590 case 0x210: /* movss xmm, ea */
2592 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2593 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2594 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
/* memory form zeroes the upper three dwords */
2596 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2597 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2598 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2600 rm = (modrm & 7) | REX_B(s);
2601 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2602 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2605 case 0x310: /* movsd xmm, ea */
2607 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2608 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2610 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2611 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2613 rm = (modrm & 7) | REX_B(s);
2614 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2615 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2618 case 0x012: /* movlps */
2619 case 0x112: /* movlpd */
2621 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2622 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
/* register form is movhlps: high quad of rm -> low quad of reg */
2625 rm = (modrm & 7) | REX_B(s);
2626 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2627 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2630 case 0x016: /* movhps */
2631 case 0x116: /* movhpd */
2633 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2634 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
/* register form is movlhps: low quad of rm -> high quad of reg */
2637 rm = (modrm & 7) | REX_B(s);
2638 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2639 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2642 case 0x216: /* movshdup */
2644 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2645 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2647 rm = (modrm & 7) | REX_B(s);
2648 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2649 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2650 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2651 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
/* duplicate the odd dwords into the even slots */
2653 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2654 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2655 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2656 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2658 case 0x7e: /* movd ea, mm */
2659 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2660 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2662 case 0x17e: /* movd ea, xmm */
2663 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2664 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2666 case 0x27e: /* movq xmm, ea */
2668 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2669 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2671 rm = (modrm & 7) | REX_B(s);
2672 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2673 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
/* movq always zeroes the upper quad of the destination */
2675 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2677 case 0x7f: /* movq ea, mm */
2679 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2680 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2683 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2684 offsetof(CPUX86State,fpregs[reg].mmx));
2687 case 0x011: /* movups */
2688 case 0x111: /* movupd */
2689 case 0x029: /* movaps */
2690 case 0x129: /* movapd */
2691 case 0x17f: /* movdqa ea, xmm */
2692 case 0x27f: /* movdqu ea, xmm */
2694 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2695 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2697 rm = (modrm & 7) | REX_B(s);
2698 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2699 offsetof(CPUX86State,xmm_regs[reg]));
2702 case 0x211: /* movss ea, xmm */
2704 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2705 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2706 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2708 rm = (modrm & 7) | REX_B(s);
2709 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2710 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2713 case 0x311: /* movsd ea, xmm */
2715 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2716 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2718 rm = (modrm & 7) | REX_B(s);
2719 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2720 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2723 case 0x013: /* movlps */
2724 case 0x113: /* movlpd */
2726 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2727 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2732 case 0x017: /* movhps */
2733 case 0x117: /* movhpd */
2735 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2736 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2741 case 0x71: /* shift mm, im */
2744 case 0x171: /* shift xmm, im */
/* immediate shift: materialise the count in xmm_t0/mmx_t0 and
   dispatch through sse_op_table2 */
2747 val = ldub_code(s->pc++);
2749 gen_op_movl_T0_im(val);
2750 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2752 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2753 op1_offset = offsetof(CPUX86State,xmm_t0);
2755 gen_op_movl_T0_im(val);
2756 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2758 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2759 op1_offset = offsetof(CPUX86State,mmx_t0);
2761 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2765 rm = (modrm & 7) | REX_B(s);
2766 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2769 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2771 sse_op2(op2_offset, op1_offset);
2773 case 0x050: /* movmskps */
2774 rm = (modrm & 7) | REX_B(s);
2775 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2776 gen_op_mov_reg_T0[OT_LONG][reg]();
2778 case 0x150: /* movmskpd */
2779 rm = (modrm & 7) | REX_B(s);
2780 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2781 gen_op_mov_reg_T0[OT_LONG][reg]();
2783 case 0x02a: /* cvtpi2ps */
2784 case 0x12a: /* cvtpi2pd */
2787 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2788 op2_offset = offsetof(CPUX86State,mmx_t0);
2789 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2792 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2794 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2797 gen_op_cvtpi2ps(op1_offset, op2_offset);
2801 gen_op_cvtpi2pd(op1_offset, op2_offset);
2805 case 0x22a: /* cvtsi2ss */
2806 case 0x32a: /* cvtsi2sd */
2807 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2808 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2809 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2810 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2812 case 0x02c: /* cvttps2pi */
2813 case 0x12c: /* cvttpd2pi */
2814 case 0x02d: /* cvtps2pi */
2815 case 0x12d: /* cvtpd2pi */
2818 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2819 op2_offset = offsetof(CPUX86State,xmm_t0);
2820 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2822 rm = (modrm & 7) | REX_B(s);
2823 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2825 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2828 gen_op_cvttps2pi(op1_offset, op2_offset);
2831 gen_op_cvttpd2pi(op1_offset, op2_offset);
2834 gen_op_cvtps2pi(op1_offset, op2_offset);
2837 gen_op_cvtpd2pi(op1_offset, op2_offset);
2841 case 0x22c: /* cvttss2si */
2842 case 0x32c: /* cvttsd2si */
2843 case 0x22d: /* cvtss2si */
2844 case 0x32d: /* cvtsd2si */
2845 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2847 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2849 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2851 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2852 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2854 op2_offset = offsetof(CPUX86State,xmm_t0);
2856 rm = (modrm & 7) | REX_B(s);
2857 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2859 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2860 (b & 1) * 4](op2_offset);
2861 gen_op_mov_reg_T0[ot][reg]();
2863 case 0xc4: /* pinsrw */
2865 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2866 val = ldub_code(s->pc++);
2869 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2872 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2875 case 0xc5: /* pextrw */
2879 val = ldub_code(s->pc++);
2882 rm = (modrm & 7) | REX_B(s);
2883 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2887 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2889 reg = ((modrm >> 3) & 7) | rex_r;
2890 gen_op_mov_reg_T0[OT_LONG][reg]();
2892 case 0x1d6: /* movq ea, xmm */
2894 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2895 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2897 rm = (modrm & 7) | REX_B(s);
2898 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2899 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2900 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2903 case 0x2d6: /* movq2dq */
2905 rm = (modrm & 7) | REX_B(s);
2906 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2907 offsetof(CPUX86State,fpregs[reg & 7].mmx));
2908 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2910 case 0x3d6: /* movdq2q */
2913 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2914 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2916 case 0xd7: /* pmovmskb */
2921 rm = (modrm & 7) | REX_B(s);
2922 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2925 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2927 reg = ((modrm >> 3) & 7) | rex_r;
2928 gen_op_mov_reg_T0[OT_LONG][reg]();
2934 /* generic MMX or SSE operation */
2936 /* maskmov : we must prepare A0 */
/* maskmovq/maskmovdqu write through [E]DI; build A0 from EDI with
   the usual addressing-size mask and DS segment bias */
2939 #ifdef TARGET_X86_64
2940 if (s->aflag == 2) {
2941 gen_op_movq_A0_reg[R_EDI]();
2945 gen_op_movl_A0_reg[R_EDI]();
2947 gen_op_andl_A0_ffff();
2949 gen_add_A0_ds_seg(s);
2952 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2954 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2955 op2_offset = offsetof(CPUX86State,xmm_t0);
2956 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2958 /* specific case for SSE single instructions */
/* scalar ss/sd ops load only 4 or 8 bytes into xmm_t0 */
2961 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2962 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2965 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2968 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2971 rm = (modrm & 7) | REX_B(s);
2972 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2975 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2977 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2978 op2_offset = offsetof(CPUX86State,mmx_t0);
2979 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2982 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2986 case 0x70: /* pshufx insn */
2987 case 0xc6: /* pshufx insn */
/* three-operand forms take an extra imm8 */
2988 val = ldub_code(s->pc++);
2989 sse_op3 = (GenOpFunc3 *)sse_op2;
2990 sse_op3(op1_offset, op2_offset, val);
2994 val = ldub_code(s->pc++);
2997 sse_op2 = sse_op_table4[val][b1];
2998 sse_op2(op1_offset, op2_offset);
3001 sse_op2(op1_offset, op2_offset);
/* comiss/comisd update EFLAGS directly */
3004 if (b == 0x2e || b == 0x2f) {
3005 s->cc_op = CC_OP_EFLAGS;
3011 /* convert one instruction. s->is_jmp is set if the translation must
3012 be stopped. Return the next pc value */
3013 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3015 int b, prefixes, aflag, dflag;
3017 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3018 target_ulong next_eip, tval;
3028 #ifdef TARGET_X86_64
3033 s->rip_offset = 0; /* for relative ip address */
3035 b = ldub_code(s->pc);
3037 /* check prefixes */
3038 #ifdef TARGET_X86_64
3042 prefixes |= PREFIX_REPZ;
3045 prefixes |= PREFIX_REPNZ;
3048 prefixes |= PREFIX_LOCK;
3069 prefixes |= PREFIX_DATA;
3072 prefixes |= PREFIX_ADR;
3076 rex_w = (b >> 3) & 1;
3077 rex_r = (b & 0x4) << 1;
3078 s->rex_x = (b & 0x2) << 2;
3079 REX_B(s) = (b & 0x1) << 3;
3080 x86_64_hregs = 1; /* select uniform byte register addressing */
3084 /* 0x66 is ignored if rex.w is set */
3087 if (prefixes & PREFIX_DATA)
3090 if (!(prefixes & PREFIX_ADR))
3097 prefixes |= PREFIX_REPZ;
3100 prefixes |= PREFIX_REPNZ;
3103 prefixes |= PREFIX_LOCK;
3124 prefixes |= PREFIX_DATA;
3127 prefixes |= PREFIX_ADR;
3130 if (prefixes & PREFIX_DATA)
3132 if (prefixes & PREFIX_ADR)
3136 s->prefix = prefixes;
3140 /* lock generation */
3141 if (prefixes & PREFIX_LOCK)
3144 /* now check op code */
3148 /**************************/
3149 /* extended op code */
3150 b = ldub_code(s->pc++) | 0x100;
3153 /**************************/
3171 ot = dflag + OT_WORD;
3174 case 0: /* OP Ev, Gv */
3175 modrm = ldub_code(s->pc++);
3176 reg = ((modrm >> 3) & 7) | rex_r;
3177 mod = (modrm >> 6) & 3;
3178 rm = (modrm & 7) | REX_B(s);
3180 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3182 } else if (op == OP_XORL && rm == reg) {
3184 /* xor reg, reg optimisation */
3186 s->cc_op = CC_OP_LOGICB + ot;
3187 gen_op_mov_reg_T0[ot][reg]();
3188 gen_op_update1_cc();
3193 gen_op_mov_TN_reg[ot][1][reg]();
3194 gen_op(s, op, ot, opreg);
3196 case 1: /* OP Gv, Ev */
3197 modrm = ldub_code(s->pc++);
3198 mod = (modrm >> 6) & 3;
3199 reg = ((modrm >> 3) & 7) | rex_r;
3200 rm = (modrm & 7) | REX_B(s);
3202 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3203 gen_op_ld_T1_A0[ot + s->mem_index]();
3204 } else if (op == OP_XORL && rm == reg) {
3207 gen_op_mov_TN_reg[ot][1][rm]();
3209 gen_op(s, op, ot, reg);
3211 case 2: /* OP A, Iv */
3212 val = insn_get(s, ot);
3213 gen_op_movl_T1_im(val);
3214 gen_op(s, op, ot, OR_EAX);
3220 case 0x80: /* GRP1 */
3230 ot = dflag + OT_WORD;
3232 modrm = ldub_code(s->pc++);
3233 mod = (modrm >> 6) & 3;
3234 rm = (modrm & 7) | REX_B(s);
3235 op = (modrm >> 3) & 7;
3241 s->rip_offset = insn_const_size(ot);
3242 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3253 val = insn_get(s, ot);
3256 val = (int8_t)insn_get(s, OT_BYTE);
3259 gen_op_movl_T1_im(val);
3260 gen_op(s, op, ot, opreg);
3264 /**************************/
3265 /* inc, dec, and other misc arith */
3266 case 0x40 ... 0x47: /* inc Gv */
3267 ot = dflag ? OT_LONG : OT_WORD;
3268 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3270 case 0x48 ... 0x4f: /* dec Gv */
3271 ot = dflag ? OT_LONG : OT_WORD;
3272 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3274 case 0xf6: /* GRP3 */
3279 ot = dflag + OT_WORD;
3281 modrm = ldub_code(s->pc++);
3282 mod = (modrm >> 6) & 3;
3283 rm = (modrm & 7) | REX_B(s);
3284 op = (modrm >> 3) & 7;
3287 s->rip_offset = insn_const_size(ot);
3288 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3289 gen_op_ld_T0_A0[ot + s->mem_index]();
3291 gen_op_mov_TN_reg[ot][0][rm]();
3296 val = insn_get(s, ot);
3297 gen_op_movl_T1_im(val);
3298 gen_op_testl_T0_T1_cc();
3299 s->cc_op = CC_OP_LOGICB + ot;
3304 gen_op_st_T0_A0[ot + s->mem_index]();
3306 gen_op_mov_reg_T0[ot][rm]();
3312 gen_op_st_T0_A0[ot + s->mem_index]();
3314 gen_op_mov_reg_T0[ot][rm]();
3316 gen_op_update_neg_cc();
3317 s->cc_op = CC_OP_SUBB + ot;
3322 gen_op_mulb_AL_T0();
3323 s->cc_op = CC_OP_MULB;
3326 gen_op_mulw_AX_T0();
3327 s->cc_op = CC_OP_MULW;
3331 gen_op_mull_EAX_T0();
3332 s->cc_op = CC_OP_MULL;
3334 #ifdef TARGET_X86_64
3336 gen_op_mulq_EAX_T0();
3337 s->cc_op = CC_OP_MULQ;
3345 gen_op_imulb_AL_T0();
3346 s->cc_op = CC_OP_MULB;
3349 gen_op_imulw_AX_T0();
3350 s->cc_op = CC_OP_MULW;
3354 gen_op_imull_EAX_T0();
3355 s->cc_op = CC_OP_MULL;
3357 #ifdef TARGET_X86_64
3359 gen_op_imulq_EAX_T0();
3360 s->cc_op = CC_OP_MULQ;
3368 gen_jmp_im(pc_start - s->cs_base);
3369 gen_op_divb_AL_T0();
3372 gen_jmp_im(pc_start - s->cs_base);
3373 gen_op_divw_AX_T0();
3377 gen_jmp_im(pc_start - s->cs_base);
3378 gen_op_divl_EAX_T0();
3380 #ifdef TARGET_X86_64
3382 gen_jmp_im(pc_start - s->cs_base);
3383 gen_op_divq_EAX_T0();
3391 gen_jmp_im(pc_start - s->cs_base);
3392 gen_op_idivb_AL_T0();
3395 gen_jmp_im(pc_start - s->cs_base);
3396 gen_op_idivw_AX_T0();
3400 gen_jmp_im(pc_start - s->cs_base);
3401 gen_op_idivl_EAX_T0();
3403 #ifdef TARGET_X86_64
3405 gen_jmp_im(pc_start - s->cs_base);
3406 gen_op_idivq_EAX_T0();
3416 case 0xfe: /* GRP4 */
3417 case 0xff: /* GRP5 */
3421 ot = dflag + OT_WORD;
3423 modrm = ldub_code(s->pc++);
3424 mod = (modrm >> 6) & 3;
3425 rm = (modrm & 7) | REX_B(s);
3426 op = (modrm >> 3) & 7;
3427 if (op >= 2 && b == 0xfe) {
3431 if (op == 2 || op == 4) {
3432 /* operand size for jumps is 64 bit */
3434 } else if (op == 3 || op == 5) {
3435 /* for call calls, the operand is 16 or 32 bit, even
3437 ot = dflag ? OT_LONG : OT_WORD;
3438 } else if (op == 6) {
3439 /* default push size is 64 bit */
3440 ot = dflag ? OT_QUAD : OT_WORD;
3444 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3445 if (op >= 2 && op != 3 && op != 5)
3446 gen_op_ld_T0_A0[ot + s->mem_index]();
3448 gen_op_mov_TN_reg[ot][0][rm]();
3452 case 0: /* inc Ev */
3457 gen_inc(s, ot, opreg, 1);
3459 case 1: /* dec Ev */
3464 gen_inc(s, ot, opreg, -1);
3466 case 2: /* call Ev */
3467 /* XXX: optimize if memory (no 'and' is necessary) */
3469 gen_op_andl_T0_ffff();
3470 next_eip = s->pc - s->cs_base;
3471 gen_movtl_T1_im(next_eip);
3476 case 3: /* lcall Ev */
3477 gen_op_ld_T1_A0[ot + s->mem_index]();
3478 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3479 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3481 if (s->pe && !s->vm86) {
3482 if (s->cc_op != CC_OP_DYNAMIC)
3483 gen_op_set_cc_op(s->cc_op);
3484 gen_jmp_im(pc_start - s->cs_base);
3485 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3487 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3491 case 4: /* jmp Ev */
3493 gen_op_andl_T0_ffff();
3497 case 5: /* ljmp Ev */
3498 gen_op_ld_T1_A0[ot + s->mem_index]();
3499 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3500 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3502 if (s->pe && !s->vm86) {
3503 if (s->cc_op != CC_OP_DYNAMIC)
3504 gen_op_set_cc_op(s->cc_op);
3505 gen_jmp_im(pc_start - s->cs_base);
3506 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3508 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3509 gen_op_movl_T0_T1();
3514 case 6: /* push Ev */
3522 case 0x84: /* test Ev, Gv */
3527 ot = dflag + OT_WORD;
3529 modrm = ldub_code(s->pc++);
3530 mod = (modrm >> 6) & 3;
3531 rm = (modrm & 7) | REX_B(s);
3532 reg = ((modrm >> 3) & 7) | rex_r;
3534 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3535 gen_op_mov_TN_reg[ot][1][reg]();
3536 gen_op_testl_T0_T1_cc();
3537 s->cc_op = CC_OP_LOGICB + ot;
3540 case 0xa8: /* test eAX, Iv */
3545 ot = dflag + OT_WORD;
3546 val = insn_get(s, ot);
3548 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3549 gen_op_movl_T1_im(val);
3550 gen_op_testl_T0_T1_cc();
3551 s->cc_op = CC_OP_LOGICB + ot;
3554 case 0x98: /* CWDE/CBW */
3555 #ifdef TARGET_X86_64
3557 gen_op_movslq_RAX_EAX();
3561 gen_op_movswl_EAX_AX();
3563 gen_op_movsbw_AX_AL();
3565 case 0x99: /* CDQ/CWD */
3566 #ifdef TARGET_X86_64
3568 gen_op_movsqo_RDX_RAX();
3572 gen_op_movslq_EDX_EAX();
3574 gen_op_movswl_DX_AX();
3576 case 0x1af: /* imul Gv, Ev */
3577 case 0x69: /* imul Gv, Ev, I */
3579 ot = dflag + OT_WORD;
3580 modrm = ldub_code(s->pc++);
3581 reg = ((modrm >> 3) & 7) | rex_r;
3583 s->rip_offset = insn_const_size(ot);
3586 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3588 val = insn_get(s, ot);
3589 gen_op_movl_T1_im(val);
3590 } else if (b == 0x6b) {
3591 val = (int8_t)insn_get(s, OT_BYTE);
3592 gen_op_movl_T1_im(val);
3594 gen_op_mov_TN_reg[ot][1][reg]();
3597 #ifdef TARGET_X86_64
3598 if (ot == OT_QUAD) {
3599 gen_op_imulq_T0_T1();
3602 if (ot == OT_LONG) {
3603 gen_op_imull_T0_T1();
3605 gen_op_imulw_T0_T1();
3607 gen_op_mov_reg_T0[ot][reg]();
3608 s->cc_op = CC_OP_MULB + ot;
3611 case 0x1c1: /* xadd Ev, Gv */
3615 ot = dflag + OT_WORD;
3616 modrm = ldub_code(s->pc++);
3617 reg = ((modrm >> 3) & 7) | rex_r;
3618 mod = (modrm >> 6) & 3;
3620 rm = (modrm & 7) | REX_B(s);
3621 gen_op_mov_TN_reg[ot][0][reg]();
3622 gen_op_mov_TN_reg[ot][1][rm]();
3623 gen_op_addl_T0_T1();
3624 gen_op_mov_reg_T1[ot][reg]();
3625 gen_op_mov_reg_T0[ot][rm]();
3627 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3628 gen_op_mov_TN_reg[ot][0][reg]();
3629 gen_op_ld_T1_A0[ot + s->mem_index]();
3630 gen_op_addl_T0_T1();
3631 gen_op_st_T0_A0[ot + s->mem_index]();
3632 gen_op_mov_reg_T1[ot][reg]();
3634 gen_op_update2_cc();
3635 s->cc_op = CC_OP_ADDB + ot;
3638 case 0x1b1: /* cmpxchg Ev, Gv */
3642 ot = dflag + OT_WORD;
3643 modrm = ldub_code(s->pc++);
3644 reg = ((modrm >> 3) & 7) | rex_r;
3645 mod = (modrm >> 6) & 3;
3646 gen_op_mov_TN_reg[ot][1][reg]();
3648 rm = (modrm & 7) | REX_B(s);
3649 gen_op_mov_TN_reg[ot][0][rm]();
3650 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3651 gen_op_mov_reg_T0[ot][rm]();
3653 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3654 gen_op_ld_T0_A0[ot + s->mem_index]();
3655 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3657 s->cc_op = CC_OP_SUBB + ot;
3659 case 0x1c7: /* cmpxchg8b */
3660 modrm = ldub_code(s->pc++);
3661 mod = (modrm >> 6) & 3;
3664 if (s->cc_op != CC_OP_DYNAMIC)
3665 gen_op_set_cc_op(s->cc_op);
3666 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3668 s->cc_op = CC_OP_EFLAGS;
3671 /**************************/
3673 case 0x50 ... 0x57: /* push */
3674 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3677 case 0x58 ... 0x5f: /* pop */
3679 ot = dflag ? OT_QUAD : OT_WORD;
3681 ot = dflag + OT_WORD;
3684 /* NOTE: order is important for pop %sp */
3686 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3688 case 0x60: /* pusha */
3693 case 0x61: /* popa */
3698 case 0x68: /* push Iv */
3701 ot = dflag ? OT_QUAD : OT_WORD;
3703 ot = dflag + OT_WORD;
3706 val = insn_get(s, ot);
3708 val = (int8_t)insn_get(s, OT_BYTE);
3709 gen_op_movl_T0_im(val);
3712 case 0x8f: /* pop Ev */
3714 ot = dflag ? OT_QUAD : OT_WORD;
3716 ot = dflag + OT_WORD;
3718 modrm = ldub_code(s->pc++);
3719 mod = (modrm >> 6) & 3;
3722 /* NOTE: order is important for pop %sp */
3724 rm = (modrm & 7) | REX_B(s);
3725 gen_op_mov_reg_T0[ot][rm]();
3727 /* NOTE: order is important too for MMU exceptions */
3728 s->popl_esp_hack = 1 << ot;
3729 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3730 s->popl_esp_hack = 0;
3734 case 0xc8: /* enter */
3737 val = lduw_code(s->pc);
3739 level = ldub_code(s->pc++);
3740 gen_enter(s, val, level);
3743 case 0xc9: /* leave */
3744 /* XXX: exception not precise (ESP is updated before potential exception) */
3746 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3747 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3748 } else if (s->ss32) {
3749 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3750 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3752 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3753 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3757 ot = dflag ? OT_QUAD : OT_WORD;
3759 ot = dflag + OT_WORD;
3761 gen_op_mov_reg_T0[ot][R_EBP]();
3764 case 0x06: /* push es */
3765 case 0x0e: /* push cs */
3766 case 0x16: /* push ss */
3767 case 0x1e: /* push ds */
3770 gen_op_movl_T0_seg(b >> 3);
3773 case 0x1a0: /* push fs */
3774 case 0x1a8: /* push gs */
3775 gen_op_movl_T0_seg((b >> 3) & 7);
3778 case 0x07: /* pop es */
3779 case 0x17: /* pop ss */
3780 case 0x1f: /* pop ds */
3785 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3788 /* if reg == SS, inhibit interrupts/trace. */
3789 /* If several instructions disable interrupts, only the
3791 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3792 gen_op_set_inhibit_irq();
3796 gen_jmp_im(s->pc - s->cs_base);
3800 case 0x1a1: /* pop fs */
3801 case 0x1a9: /* pop gs */
3803 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3806 gen_jmp_im(s->pc - s->cs_base);
3811 /**************************/
3814 case 0x89: /* mov Gv, Ev */
3818 ot = dflag + OT_WORD;
3819 modrm = ldub_code(s->pc++);
3820 reg = ((modrm >> 3) & 7) | rex_r;
3822 /* generate a generic store */
3823 gen_ldst_modrm(s, modrm, ot, reg, 1);
3826 case 0xc7: /* mov Ev, Iv */
3830 ot = dflag + OT_WORD;
3831 modrm = ldub_code(s->pc++);
3832 mod = (modrm >> 6) & 3;
3834 s->rip_offset = insn_const_size(ot);
3835 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3837 val = insn_get(s, ot);
3838 gen_op_movl_T0_im(val);
3840 gen_op_st_T0_A0[ot + s->mem_index]();
3842 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3845 case 0x8b: /* mov Ev, Gv */
3849 ot = OT_WORD + dflag;
3850 modrm = ldub_code(s->pc++);
3851 reg = ((modrm >> 3) & 7) | rex_r;
3853 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3854 gen_op_mov_reg_T0[ot][reg]();
3856 case 0x8e: /* mov seg, Gv */
3857 modrm = ldub_code(s->pc++);
3858 reg = (modrm >> 3) & 7;
3859 if (reg >= 6 || reg == R_CS)
3861 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3862 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3864 /* if reg == SS, inhibit interrupts/trace */
3865 /* If several instructions disable interrupts, only the
3867 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3868 gen_op_set_inhibit_irq();
3872 gen_jmp_im(s->pc - s->cs_base);
3876 case 0x8c: /* mov Gv, seg */
3877 modrm = ldub_code(s->pc++);
3878 reg = (modrm >> 3) & 7;
3879 mod = (modrm >> 6) & 3;
3882 gen_op_movl_T0_seg(reg);
3884 ot = OT_WORD + dflag;
3887 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3890 case 0x1b6: /* movzbS Gv, Eb */
3891 case 0x1b7: /* movzwS Gv, Eb */
3892 case 0x1be: /* movsbS Gv, Eb */
3893 case 0x1bf: /* movswS Gv, Eb */
3896 /* d_ot is the size of destination */
3897 d_ot = dflag + OT_WORD;
3898 /* ot is the size of source */
3899 ot = (b & 1) + OT_BYTE;
3900 modrm = ldub_code(s->pc++);
3901 reg = ((modrm >> 3) & 7) | rex_r;
3902 mod = (modrm >> 6) & 3;
3903 rm = (modrm & 7) | REX_B(s);
3906 gen_op_mov_TN_reg[ot][0][rm]();
3907 switch(ot | (b & 8)) {
3909 gen_op_movzbl_T0_T0();
3912 gen_op_movsbl_T0_T0();
3915 gen_op_movzwl_T0_T0();
3919 gen_op_movswl_T0_T0();
3922 gen_op_mov_reg_T0[d_ot][reg]();
3924 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3926 gen_op_lds_T0_A0[ot + s->mem_index]();
3928 gen_op_ldu_T0_A0[ot + s->mem_index]();
3930 gen_op_mov_reg_T0[d_ot][reg]();
3935 case 0x8d: /* lea */
3936 ot = dflag + OT_WORD;
3937 modrm = ldub_code(s->pc++);
3938 mod = (modrm >> 6) & 3;
3941 reg = ((modrm >> 3) & 7) | rex_r;
3942 /* we must ensure that no segment is added */
3946 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3948 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3951 case 0xa0: /* mov EAX, Ov */
3953 case 0xa2: /* mov Ov, EAX */
3956 target_ulong offset_addr;
3961 ot = dflag + OT_WORD;
3962 #ifdef TARGET_X86_64
3963 if (s->aflag == 2) {
3964 offset_addr = ldq_code(s->pc);
3966 if (offset_addr == (int32_t)offset_addr)
3967 gen_op_movq_A0_im(offset_addr);
3969 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3974 offset_addr = insn_get(s, OT_LONG);
3976 offset_addr = insn_get(s, OT_WORD);
3978 gen_op_movl_A0_im(offset_addr);
3980 gen_add_A0_ds_seg(s);
3982 gen_op_ld_T0_A0[ot + s->mem_index]();
3983 gen_op_mov_reg_T0[ot][R_EAX]();
3985 gen_op_mov_TN_reg[ot][0][R_EAX]();
3986 gen_op_st_T0_A0[ot + s->mem_index]();
3990 case 0xd7: /* xlat */
3991 #ifdef TARGET_X86_64
3992 if (s->aflag == 2) {
3993 gen_op_movq_A0_reg[R_EBX]();
3994 gen_op_addq_A0_AL();
3998 gen_op_movl_A0_reg[R_EBX]();
3999 gen_op_addl_A0_AL();
4001 gen_op_andl_A0_ffff();
4003 gen_add_A0_ds_seg(s);
4004 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4005 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4007 case 0xb0 ... 0xb7: /* mov R, Ib */
4008 val = insn_get(s, OT_BYTE);
4009 gen_op_movl_T0_im(val);
4010 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4012 case 0xb8 ... 0xbf: /* mov R, Iv */
4013 #ifdef TARGET_X86_64
4017 tmp = ldq_code(s->pc);
4019 reg = (b & 7) | REX_B(s);
4020 gen_movtl_T0_im(tmp);
4021 gen_op_mov_reg_T0[OT_QUAD][reg]();
4025 ot = dflag ? OT_LONG : OT_WORD;
4026 val = insn_get(s, ot);
4027 reg = (b & 7) | REX_B(s);
4028 gen_op_movl_T0_im(val);
4029 gen_op_mov_reg_T0[ot][reg]();
4033 case 0x91 ... 0x97: /* xchg R, EAX */
4034 ot = dflag + OT_WORD;
4035 reg = (b & 7) | REX_B(s);
4039 case 0x87: /* xchg Ev, Gv */
4043 ot = dflag + OT_WORD;
4044 modrm = ldub_code(s->pc++);
4045 reg = ((modrm >> 3) & 7) | rex_r;
4046 mod = (modrm >> 6) & 3;
4048 rm = (modrm & 7) | REX_B(s);
4050 gen_op_mov_TN_reg[ot][0][reg]();
4051 gen_op_mov_TN_reg[ot][1][rm]();
4052 gen_op_mov_reg_T0[ot][rm]();
4053 gen_op_mov_reg_T1[ot][reg]();
4055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4056 gen_op_mov_TN_reg[ot][0][reg]();
4057 /* for xchg, lock is implicit */
4058 if (!(prefixes & PREFIX_LOCK))
4060 gen_op_ld_T1_A0[ot + s->mem_index]();
4061 gen_op_st_T0_A0[ot + s->mem_index]();
4062 if (!(prefixes & PREFIX_LOCK))
4064 gen_op_mov_reg_T1[ot][reg]();
4067 case 0xc4: /* les Gv */
4072 case 0xc5: /* lds Gv */
4077 case 0x1b2: /* lss Gv */
4080 case 0x1b4: /* lfs Gv */
4083 case 0x1b5: /* lgs Gv */
4086 ot = dflag ? OT_LONG : OT_WORD;
4087 modrm = ldub_code(s->pc++);
4088 reg = ((modrm >> 3) & 7) | rex_r;
4089 mod = (modrm >> 6) & 3;
4092 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4093 gen_op_ld_T1_A0[ot + s->mem_index]();
4094 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4095 /* load the segment first to handle exceptions properly */
4096 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4097 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4098 /* then put the data */
4099 gen_op_mov_reg_T1[ot][reg]();
4101 gen_jmp_im(s->pc - s->cs_base);
4106 /************************/
4117 ot = dflag + OT_WORD;
4119 modrm = ldub_code(s->pc++);
4120 mod = (modrm >> 6) & 3;
4121 op = (modrm >> 3) & 7;
4127 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4130 opreg = (modrm & 7) | REX_B(s);
4135 gen_shift(s, op, ot, opreg, OR_ECX);
4138 shift = ldub_code(s->pc++);
4140 gen_shifti(s, op, ot, opreg, shift);
4155 case 0x1a4: /* shld imm */
4159 case 0x1a5: /* shld cl */
4163 case 0x1ac: /* shrd imm */
4167 case 0x1ad: /* shrd cl */
4171 ot = dflag + OT_WORD;
4172 modrm = ldub_code(s->pc++);
4173 mod = (modrm >> 6) & 3;
4174 rm = (modrm & 7) | REX_B(s);
4175 reg = ((modrm >> 3) & 7) | rex_r;
4178 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4179 gen_op_ld_T0_A0[ot + s->mem_index]();
4181 gen_op_mov_TN_reg[ot][0][rm]();
4183 gen_op_mov_TN_reg[ot][1][reg]();
4186 val = ldub_code(s->pc++);
4193 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4195 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4196 if (op == 0 && ot != OT_WORD)
4197 s->cc_op = CC_OP_SHLB + ot;
4199 s->cc_op = CC_OP_SARB + ot;
4202 if (s->cc_op != CC_OP_DYNAMIC)
4203 gen_op_set_cc_op(s->cc_op);
4205 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4207 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4208 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4211 gen_op_mov_reg_T0[ot][rm]();
4215 /************************/
4218 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4219 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4220 /* XXX: what to do if illegal op ? */
4221 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4224 modrm = ldub_code(s->pc++);
4225 mod = (modrm >> 6) & 3;
4227 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4232 case 0x00 ... 0x07: /* fxxxs */
4233 case 0x10 ... 0x17: /* fixxxl */
4234 case 0x20 ... 0x27: /* fxxxl */
4235 case 0x30 ... 0x37: /* fixxx */
4242 gen_op_flds_FT0_A0();
4245 gen_op_fildl_FT0_A0();
4248 gen_op_fldl_FT0_A0();
4252 gen_op_fild_FT0_A0();
4256 gen_op_fp_arith_ST0_FT0[op1]();
4258 /* fcomp needs pop */
4263 case 0x08: /* flds */
4264 case 0x0a: /* fsts */
4265 case 0x0b: /* fstps */
4266 case 0x18: /* fildl */
4267 case 0x1a: /* fistl */
4268 case 0x1b: /* fistpl */
4269 case 0x28: /* fldl */
4270 case 0x2a: /* fstl */
4271 case 0x2b: /* fstpl */
4272 case 0x38: /* filds */
4273 case 0x3a: /* fists */
4274 case 0x3b: /* fistps */
4280 gen_op_flds_ST0_A0();
4283 gen_op_fildl_ST0_A0();
4286 gen_op_fldl_ST0_A0();
4290 gen_op_fild_ST0_A0();
4297 gen_op_fsts_ST0_A0();
4300 gen_op_fistl_ST0_A0();
4303 gen_op_fstl_ST0_A0();
4307 gen_op_fist_ST0_A0();
4315 case 0x0c: /* fldenv mem */
4316 gen_op_fldenv_A0(s->dflag);
4318 case 0x0d: /* fldcw mem */
4321 case 0x0e: /* fnstenv mem */
4322 gen_op_fnstenv_A0(s->dflag);
4324 case 0x0f: /* fnstcw mem */
4327 case 0x1d: /* fldt mem */
4328 gen_op_fldt_ST0_A0();
4330 case 0x1f: /* fstpt mem */
4331 gen_op_fstt_ST0_A0();
4334 case 0x2c: /* frstor mem */
4335 gen_op_frstor_A0(s->dflag);
4337 case 0x2e: /* fnsave mem */
4338 gen_op_fnsave_A0(s->dflag);
4340 case 0x2f: /* fnstsw mem */
4343 case 0x3c: /* fbld */
4344 gen_op_fbld_ST0_A0();
4346 case 0x3e: /* fbstp */
4347 gen_op_fbst_ST0_A0();
4350 case 0x3d: /* fildll */
4351 gen_op_fildll_ST0_A0();
4353 case 0x3f: /* fistpll */
4354 gen_op_fistll_ST0_A0();
4361 /* register float ops */
4365 case 0x08: /* fld sti */
4367 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4369 case 0x09: /* fxchg sti */
4370 case 0x29: /* fxchg4 sti, undocumented op */
4371 case 0x39: /* fxchg7 sti, undocumented op */
4372 gen_op_fxchg_ST0_STN(opreg);
4374 case 0x0a: /* grp d9/2 */
4377 /* check exceptions (FreeBSD FPU probe) */
4378 if (s->cc_op != CC_OP_DYNAMIC)
4379 gen_op_set_cc_op(s->cc_op);
4380 gen_jmp_im(pc_start - s->cs_base);
4387 case 0x0c: /* grp d9/4 */
4397 gen_op_fcom_ST0_FT0();
4406 case 0x0d: /* grp d9/5 */
4415 gen_op_fldl2t_ST0();
4419 gen_op_fldl2e_ST0();
4427 gen_op_fldlg2_ST0();
4431 gen_op_fldln2_ST0();
4442 case 0x0e: /* grp d9/6 */
4453 case 3: /* fpatan */
4456 case 4: /* fxtract */
4459 case 5: /* fprem1 */
4462 case 6: /* fdecstp */
4466 case 7: /* fincstp */
4471 case 0x0f: /* grp d9/7 */
4476 case 1: /* fyl2xp1 */
4482 case 3: /* fsincos */
4485 case 5: /* fscale */
4488 case 4: /* frndint */
4500 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4501 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4502 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4508 gen_op_fp_arith_STN_ST0[op1](opreg);
4512 gen_op_fmov_FT0_STN(opreg);
4513 gen_op_fp_arith_ST0_FT0[op1]();
4517 case 0x02: /* fcom */
4518 case 0x22: /* fcom2, undocumented op */
4519 gen_op_fmov_FT0_STN(opreg);
4520 gen_op_fcom_ST0_FT0();
4522 case 0x03: /* fcomp */
4523 case 0x23: /* fcomp3, undocumented op */
4524 case 0x32: /* fcomp5, undocumented op */
4525 gen_op_fmov_FT0_STN(opreg);
4526 gen_op_fcom_ST0_FT0();
4529 case 0x15: /* da/5 */
4531 case 1: /* fucompp */
4532 gen_op_fmov_FT0_STN(1);
4533 gen_op_fucom_ST0_FT0();
4543 case 0: /* feni (287 only, just do nop here) */
4545 case 1: /* fdisi (287 only, just do nop here) */
4550 case 3: /* fninit */
4553 case 4: /* fsetpm (287 only, just do nop here) */
4559 case 0x1d: /* fucomi */
4560 if (s->cc_op != CC_OP_DYNAMIC)
4561 gen_op_set_cc_op(s->cc_op);
4562 gen_op_fmov_FT0_STN(opreg);
4563 gen_op_fucomi_ST0_FT0();
4564 s->cc_op = CC_OP_EFLAGS;
4566 case 0x1e: /* fcomi */
4567 if (s->cc_op != CC_OP_DYNAMIC)
4568 gen_op_set_cc_op(s->cc_op);
4569 gen_op_fmov_FT0_STN(opreg);
4570 gen_op_fcomi_ST0_FT0();
4571 s->cc_op = CC_OP_EFLAGS;
4573 case 0x28: /* ffree sti */
4574 gen_op_ffree_STN(opreg);
4576 case 0x2a: /* fst sti */
4577 gen_op_fmov_STN_ST0(opreg);
4579 case 0x2b: /* fstp sti */
4580 case 0x0b: /* fstp1 sti, undocumented op */
4581 case 0x3a: /* fstp8 sti, undocumented op */
4582 case 0x3b: /* fstp9 sti, undocumented op */
4583 gen_op_fmov_STN_ST0(opreg);
4586 case 0x2c: /* fucom st(i) */
4587 gen_op_fmov_FT0_STN(opreg);
4588 gen_op_fucom_ST0_FT0();
4590 case 0x2d: /* fucomp st(i) */
4591 gen_op_fmov_FT0_STN(opreg);
4592 gen_op_fucom_ST0_FT0();
4595 case 0x33: /* de/3 */
4597 case 1: /* fcompp */
4598 gen_op_fmov_FT0_STN(1);
4599 gen_op_fcom_ST0_FT0();
4607 case 0x38: /* ffreep sti, undocumented op */
4608 gen_op_ffree_STN(opreg);
4611 case 0x3c: /* df/4 */
4614 gen_op_fnstsw_EAX();
4620 case 0x3d: /* fucomip */
4621 if (s->cc_op != CC_OP_DYNAMIC)
4622 gen_op_set_cc_op(s->cc_op);
4623 gen_op_fmov_FT0_STN(opreg);
4624 gen_op_fucomi_ST0_FT0();
4626 s->cc_op = CC_OP_EFLAGS;
4628 case 0x3e: /* fcomip */
4629 if (s->cc_op != CC_OP_DYNAMIC)
4630 gen_op_set_cc_op(s->cc_op);
4631 gen_op_fmov_FT0_STN(opreg);
4632 gen_op_fcomi_ST0_FT0();
4634 s->cc_op = CC_OP_EFLAGS;
4636 case 0x10 ... 0x13: /* fcmovxx */
4640 const static uint8_t fcmov_cc[8] = {
4646 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4648 gen_op_fcmov_ST0_STN_T0(opreg);
4655 #ifdef USE_CODE_COPY
4656 s->tb->cflags |= CF_TB_FP_USED;
4659 /************************/
4662 case 0xa4: /* movsS */
4667 ot = dflag + OT_WORD;
4669 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4670 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4676 case 0xaa: /* stosS */
4681 ot = dflag + OT_WORD;
4683 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4684 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4689 case 0xac: /* lodsS */
4694 ot = dflag + OT_WORD;
4695 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4696 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4701 case 0xae: /* scasS */
4706 ot = dflag + OT_WORD;
4707 if (prefixes & PREFIX_REPNZ) {
4708 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4709 } else if (prefixes & PREFIX_REPZ) {
4710 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4713 s->cc_op = CC_OP_SUBB + ot;
4717 case 0xa6: /* cmpsS */
4722 ot = dflag + OT_WORD;
4723 if (prefixes & PREFIX_REPNZ) {
4724 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4725 } else if (prefixes & PREFIX_REPZ) {
4726 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4729 s->cc_op = CC_OP_SUBB + ot;
4732 case 0x6c: /* insS */
4737 ot = dflag ? OT_LONG : OT_WORD;
4738 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4739 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4740 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4745 case 0x6e: /* outsS */
4750 ot = dflag ? OT_LONG : OT_WORD;
4751 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4752 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4753 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4759 /************************/
4766 ot = dflag ? OT_LONG : OT_WORD;
4767 val = ldub_code(s->pc++);
4768 gen_op_movl_T0_im(val);
4769 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4771 gen_op_mov_reg_T1[ot][R_EAX]();
4778 ot = dflag ? OT_LONG : OT_WORD;
4779 val = ldub_code(s->pc++);
4780 gen_op_movl_T0_im(val);
4781 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4782 gen_op_mov_TN_reg[ot][1][R_EAX]();
4790 ot = dflag ? OT_LONG : OT_WORD;
4791 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4792 gen_op_andl_T0_ffff();
4793 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4795 gen_op_mov_reg_T1[ot][R_EAX]();
4802 ot = dflag ? OT_LONG : OT_WORD;
4803 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4804 gen_op_andl_T0_ffff();
4805 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4806 gen_op_mov_TN_reg[ot][1][R_EAX]();
4810 /************************/
4812 case 0xc2: /* ret im */
4813 val = ldsw_code(s->pc);
4816 if (CODE64(s) && s->dflag)
4818 gen_stack_update(s, val + (2 << s->dflag));
4820 gen_op_andl_T0_ffff();
4824 case 0xc3: /* ret */
4828 gen_op_andl_T0_ffff();
4832 case 0xca: /* lret im */
4833 val = ldsw_code(s->pc);
4836 if (s->pe && !s->vm86) {
4837 if (s->cc_op != CC_OP_DYNAMIC)
4838 gen_op_set_cc_op(s->cc_op);
4839 gen_jmp_im(pc_start - s->cs_base);
4840 gen_op_lret_protected(s->dflag, val);
4844 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4846 gen_op_andl_T0_ffff();
4847 /* NOTE: keeping EIP updated is not a problem in case of
4851 gen_op_addl_A0_im(2 << s->dflag);
4852 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4853 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4854 /* add stack offset */
4855 gen_stack_update(s, val + (4 << s->dflag));
4859 case 0xcb: /* lret */
4862 case 0xcf: /* iret */
4865 gen_op_iret_real(s->dflag);
4866 s->cc_op = CC_OP_EFLAGS;
4867 } else if (s->vm86) {
4869 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4871 gen_op_iret_real(s->dflag);
4872 s->cc_op = CC_OP_EFLAGS;
4875 if (s->cc_op != CC_OP_DYNAMIC)
4876 gen_op_set_cc_op(s->cc_op);
4877 gen_jmp_im(pc_start - s->cs_base);
4878 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4879 s->cc_op = CC_OP_EFLAGS;
4883 case 0xe8: /* call im */
4886 tval = (int32_t)insn_get(s, OT_LONG);
4888 tval = (int16_t)insn_get(s, OT_WORD);
4889 next_eip = s->pc - s->cs_base;
4893 gen_movtl_T0_im(next_eip);
4898 case 0x9a: /* lcall im */
4900 unsigned int selector, offset;
4904 ot = dflag ? OT_LONG : OT_WORD;
4905 offset = insn_get(s, ot);
4906 selector = insn_get(s, OT_WORD);
4908 gen_op_movl_T0_im(selector);
4909 gen_op_movl_T1_imu(offset);
4912 case 0xe9: /* jmp */
4914 tval = (int32_t)insn_get(s, OT_LONG);
4916 tval = (int16_t)insn_get(s, OT_WORD);
4917 tval += s->pc - s->cs_base;
4922 case 0xea: /* ljmp im */
4924 unsigned int selector, offset;
4928 ot = dflag ? OT_LONG : OT_WORD;
4929 offset = insn_get(s, ot);
4930 selector = insn_get(s, OT_WORD);
4932 gen_op_movl_T0_im(selector);
4933 gen_op_movl_T1_imu(offset);
4936 case 0xeb: /* jmp Jb */
4937 tval = (int8_t)insn_get(s, OT_BYTE);
4938 tval += s->pc - s->cs_base;
4943 case 0x70 ... 0x7f: /* jcc Jb */
4944 tval = (int8_t)insn_get(s, OT_BYTE);
4946 case 0x180 ... 0x18f: /* jcc Jv */
4948 tval = (int32_t)insn_get(s, OT_LONG);
4950 tval = (int16_t)insn_get(s, OT_WORD);
4953 next_eip = s->pc - s->cs_base;
4957 gen_jcc(s, b, tval, next_eip);
4960 case 0x190 ... 0x19f: /* setcc Gv */
4961 modrm = ldub_code(s->pc++);
4963 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4965 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4966 ot = dflag + OT_WORD;
4967 modrm = ldub_code(s->pc++);
4968 reg = ((modrm >> 3) & 7) | rex_r;
4969 mod = (modrm >> 6) & 3;
4972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4973 gen_op_ld_T1_A0[ot + s->mem_index]();
4975 rm = (modrm & 7) | REX_B(s);
4976 gen_op_mov_TN_reg[ot][1][rm]();
4978 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4981 /************************/
4983 case 0x9c: /* pushf */
4984 if (s->vm86 && s->iopl != 3) {
4985 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4987 if (s->cc_op != CC_OP_DYNAMIC)
4988 gen_op_set_cc_op(s->cc_op);
4989 gen_op_movl_T0_eflags();
4993 case 0x9d: /* popf */
4994 if (s->vm86 && s->iopl != 3) {
4995 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5000 gen_op_movl_eflags_T0_cpl0();
5002 gen_op_movw_eflags_T0_cpl0();
5005 if (s->cpl <= s->iopl) {
5007 gen_op_movl_eflags_T0_io();
5009 gen_op_movw_eflags_T0_io();
5013 gen_op_movl_eflags_T0();
5015 gen_op_movw_eflags_T0();
5020 s->cc_op = CC_OP_EFLAGS;
5021 /* abort translation because TF flag may change */
5022 gen_jmp_im(s->pc - s->cs_base);
5026 case 0x9e: /* sahf */
5029 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5030 if (s->cc_op != CC_OP_DYNAMIC)
5031 gen_op_set_cc_op(s->cc_op);
5032 gen_op_movb_eflags_T0();
5033 s->cc_op = CC_OP_EFLAGS;
5035 case 0x9f: /* lahf */
5038 if (s->cc_op != CC_OP_DYNAMIC)
5039 gen_op_set_cc_op(s->cc_op);
5040 gen_op_movl_T0_eflags();
5041 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5043 case 0xf5: /* cmc */
5044 if (s->cc_op != CC_OP_DYNAMIC)
5045 gen_op_set_cc_op(s->cc_op);
5047 s->cc_op = CC_OP_EFLAGS;
5049 case 0xf8: /* clc */
5050 if (s->cc_op != CC_OP_DYNAMIC)
5051 gen_op_set_cc_op(s->cc_op);
5053 s->cc_op = CC_OP_EFLAGS;
5055 case 0xf9: /* stc */
5056 if (s->cc_op != CC_OP_DYNAMIC)
5057 gen_op_set_cc_op(s->cc_op);
5059 s->cc_op = CC_OP_EFLAGS;
5061 case 0xfc: /* cld */
5064 case 0xfd: /* std */
5068 /************************/
5069 /* bit operations */
5070 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5071 ot = dflag + OT_WORD;
5072 modrm = ldub_code(s->pc++);
5073 op = ((modrm >> 3) & 7) | rex_r;
5074 mod = (modrm >> 6) & 3;
5075 rm = (modrm & 7) | REX_B(s);
5078 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5079 gen_op_ld_T0_A0[ot + s->mem_index]();
5081 gen_op_mov_TN_reg[ot][0][rm]();
5084 val = ldub_code(s->pc++);
5085 gen_op_movl_T1_im(val);
5089 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5090 s->cc_op = CC_OP_SARB + ot;
5093 gen_op_st_T0_A0[ot + s->mem_index]();
5095 gen_op_mov_reg_T0[ot][rm]();
5096 gen_op_update_bt_cc();
5099 case 0x1a3: /* bt Gv, Ev */
5102 case 0x1ab: /* bts */
5105 case 0x1b3: /* btr */
5108 case 0x1bb: /* btc */
5111 ot = dflag + OT_WORD;
5112 modrm = ldub_code(s->pc++);
5113 reg = ((modrm >> 3) & 7) | rex_r;
5114 mod = (modrm >> 6) & 3;
5115 rm = (modrm & 7) | REX_B(s);
5116 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5118 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5119 /* specific case: we need to add a displacement */
5120 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5121 gen_op_ld_T0_A0[ot + s->mem_index]();
5123 gen_op_mov_TN_reg[ot][0][rm]();
5125 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5126 s->cc_op = CC_OP_SARB + ot;
5129 gen_op_st_T0_A0[ot + s->mem_index]();
5131 gen_op_mov_reg_T0[ot][rm]();
5132 gen_op_update_bt_cc();
5135 case 0x1bc: /* bsf */
5136 case 0x1bd: /* bsr */
5137 ot = dflag + OT_WORD;
5138 modrm = ldub_code(s->pc++);
5139 reg = ((modrm >> 3) & 7) | rex_r;
5140 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5141 /* NOTE: in order to handle the 0 case, we must load the
5142 result. It could be optimized with a generated jump */
5143 gen_op_mov_TN_reg[ot][1][reg]();
5144 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5145 gen_op_mov_reg_T1[ot][reg]();
5146 s->cc_op = CC_OP_LOGICB + ot;
5148 /************************/
5150 case 0x27: /* daa */
5153 if (s->cc_op != CC_OP_DYNAMIC)
5154 gen_op_set_cc_op(s->cc_op);
5156 s->cc_op = CC_OP_EFLAGS;
5158 case 0x2f: /* das */
5161 if (s->cc_op != CC_OP_DYNAMIC)
5162 gen_op_set_cc_op(s->cc_op);
5164 s->cc_op = CC_OP_EFLAGS;
5166 case 0x37: /* aaa */
5169 if (s->cc_op != CC_OP_DYNAMIC)
5170 gen_op_set_cc_op(s->cc_op);
5172 s->cc_op = CC_OP_EFLAGS;
5174 case 0x3f: /* aas */
5177 if (s->cc_op != CC_OP_DYNAMIC)
5178 gen_op_set_cc_op(s->cc_op);
5180 s->cc_op = CC_OP_EFLAGS;
5182 case 0xd4: /* aam */
5185 val = ldub_code(s->pc++);
5187 s->cc_op = CC_OP_LOGICB;
5189 case 0xd5: /* aad */
5192 val = ldub_code(s->pc++);
5194 s->cc_op = CC_OP_LOGICB;
5196 /************************/
5198 case 0x90: /* nop */
5199 /* XXX: xchg + rex handling */
5200 /* XXX: correct lock test for all insn */
5201 if (prefixes & PREFIX_LOCK)
5204 case 0x9b: /* fwait */
5205 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5206 (HF_MP_MASK | HF_TS_MASK)) {
5207 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5209 if (s->cc_op != CC_OP_DYNAMIC)
5210 gen_op_set_cc_op(s->cc_op);
5211 gen_jmp_im(pc_start - s->cs_base);
5215 case 0xcc: /* int3 */
5216 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5218 case 0xcd: /* int N */
5219 val = ldub_code(s->pc++);
5220 if (s->vm86 && s->iopl != 3) {
5221 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5223 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5226 case 0xce: /* into */
5229 if (s->cc_op != CC_OP_DYNAMIC)
5230 gen_op_set_cc_op(s->cc_op);
5231 gen_jmp_im(pc_start - s->cs_base);
5232 gen_op_into(s->pc - pc_start);
5234 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5236 gen_debug(s, pc_start - s->cs_base);
5239 tb_flush(cpu_single_env);
5240 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5243 case 0xfa: /* cli */
5245 if (s->cpl <= s->iopl) {
5248 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5254 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5258 case 0xfb: /* sti */
5260 if (s->cpl <= s->iopl) {
5263 /* interruptions are enabled only the first insn after sti */
5264 /* If several instructions disable interrupts, only the
5266 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5267 gen_op_set_inhibit_irq();
5268 /* give a chance to handle pending irqs */
5269 gen_jmp_im(s->pc - s->cs_base);
5272 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5278 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5282 case 0x62: /* bound */
5285 ot = dflag ? OT_LONG : OT_WORD;
5286 modrm = ldub_code(s->pc++);
5287 reg = (modrm >> 3) & 7;
5288 mod = (modrm >> 6) & 3;
5291 gen_op_mov_TN_reg[ot][0][reg]();
5292 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5293 gen_jmp_im(pc_start - s->cs_base);
5299 case 0x1c8 ... 0x1cf: /* bswap reg */
5300 reg = (b & 7) | REX_B(s);
5301 #ifdef TARGET_X86_64
5303 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5305 gen_op_mov_reg_T0[OT_QUAD][reg]();
5309 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5311 gen_op_mov_reg_T0[OT_LONG][reg]();
5314 case 0xd6: /* salc */
5317 if (s->cc_op != CC_OP_DYNAMIC)
5318 gen_op_set_cc_op(s->cc_op);
5321 case 0xe0: /* loopnz */
5322 case 0xe1: /* loopz */
5323 if (s->cc_op != CC_OP_DYNAMIC)
5324 gen_op_set_cc_op(s->cc_op);
5326 case 0xe2: /* loop */
5327 case 0xe3: /* jecxz */
5331 tval = (int8_t)insn_get(s, OT_BYTE);
5332 next_eip = s->pc - s->cs_base;
5337 l1 = gen_new_label();
5338 l2 = gen_new_label();
5341 gen_op_jz_ecx[s->aflag](l1);
5343 gen_op_dec_ECX[s->aflag]();
5346 gen_op_loop[s->aflag][b](l1);
5349 gen_jmp_im(next_eip);
5350 gen_op_jmp_label(l2);
5357 case 0x130: /* wrmsr */
5358 case 0x132: /* rdmsr */
5360 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5368 case 0x131: /* rdtsc */
5371 case 0x134: /* sysenter */
5375 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5377 if (s->cc_op != CC_OP_DYNAMIC) {
5378 gen_op_set_cc_op(s->cc_op);
5379 s->cc_op = CC_OP_DYNAMIC;
5381 gen_jmp_im(pc_start - s->cs_base);
5386 case 0x135: /* sysexit */
5390 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5392 if (s->cc_op != CC_OP_DYNAMIC) {
5393 gen_op_set_cc_op(s->cc_op);
5394 s->cc_op = CC_OP_DYNAMIC;
5396 gen_jmp_im(pc_start - s->cs_base);
5401 #ifdef TARGET_X86_64
5402 case 0x105: /* syscall */
5403 /* XXX: is it usable in real mode ? */
5404 if (s->cc_op != CC_OP_DYNAMIC) {
5405 gen_op_set_cc_op(s->cc_op);
5406 s->cc_op = CC_OP_DYNAMIC;
5408 gen_jmp_im(pc_start - s->cs_base);
5409 gen_op_syscall(s->pc - pc_start);
5412 case 0x107: /* sysret */
5414 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5416 if (s->cc_op != CC_OP_DYNAMIC) {
5417 gen_op_set_cc_op(s->cc_op);
5418 s->cc_op = CC_OP_DYNAMIC;
5420 gen_jmp_im(pc_start - s->cs_base);
5421 gen_op_sysret(s->dflag);
5422 /* condition codes are modified only in long mode */
5424 s->cc_op = CC_OP_EFLAGS;
5429 case 0x1a2: /* cpuid */
5432 case 0xf4: /* hlt */
5434 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5436 if (s->cc_op != CC_OP_DYNAMIC)
5437 gen_op_set_cc_op(s->cc_op);
5438 gen_jmp_im(s->pc - s->cs_base);
5444 modrm = ldub_code(s->pc++);
5445 mod = (modrm >> 6) & 3;
5446 op = (modrm >> 3) & 7;
5449 if (!s->pe || s->vm86)
5451 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5455 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5458 if (!s->pe || s->vm86)
5461 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5463 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5464 gen_jmp_im(pc_start - s->cs_base);
5469 if (!s->pe || s->vm86)
5471 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5475 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5478 if (!s->pe || s->vm86)
5481 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5483 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5484 gen_jmp_im(pc_start - s->cs_base);
5490 if (!s->pe || s->vm86)
5492 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5493 if (s->cc_op != CC_OP_DYNAMIC)
5494 gen_op_set_cc_op(s->cc_op);
5499 s->cc_op = CC_OP_EFLAGS;
5506 modrm = ldub_code(s->pc++);
5507 mod = (modrm >> 6) & 3;
5508 op = (modrm >> 3) & 7;
5514 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5516 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
5518 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
5519 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5520 gen_add_A0_im(s, 2);
5522 gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
5524 gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
5526 gen_op_andl_T0_im(0xffffff);
5527 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5534 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5536 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5537 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5538 gen_add_A0_im(s, 2);
5539 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5541 gen_op_andl_T0_im(0xffffff);
5543 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5544 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5546 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5547 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5552 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5553 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5557 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5559 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5561 gen_jmp_im(s->pc - s->cs_base);
5565 case 7: /* invlpg */
5567 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5570 #ifdef TARGET_X86_64
5571 if (CODE64(s) && (modrm & 7) == 0) {
5573 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5574 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5575 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5576 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5583 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5585 gen_jmp_im(s->pc - s->cs_base);
5594 case 0x108: /* invd */
5595 case 0x109: /* wbinvd */
5597 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5602 case 0x63: /* arpl or movslS (x86_64) */
5603 #ifdef TARGET_X86_64
5606 /* d_ot is the size of destination */
5607 d_ot = dflag + OT_WORD;
5609 modrm = ldub_code(s->pc++);
5610 reg = ((modrm >> 3) & 7) | rex_r;
5611 mod = (modrm >> 6) & 3;
5612 rm = (modrm & 7) | REX_B(s);
5615 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5617 if (d_ot == OT_QUAD)
5618 gen_op_movslq_T0_T0();
5619 gen_op_mov_reg_T0[d_ot][reg]();
5621 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5622 if (d_ot == OT_QUAD) {
5623 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5625 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5627 gen_op_mov_reg_T0[d_ot][reg]();
5632 if (!s->pe || s->vm86)
5634 ot = dflag ? OT_LONG : OT_WORD;
5635 modrm = ldub_code(s->pc++);
5636 reg = (modrm >> 3) & 7;
5637 mod = (modrm >> 6) & 3;
5640 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5641 gen_op_ld_T0_A0[ot + s->mem_index]();
5643 gen_op_mov_TN_reg[ot][0][rm]();
5645 if (s->cc_op != CC_OP_DYNAMIC)
5646 gen_op_set_cc_op(s->cc_op);
5648 s->cc_op = CC_OP_EFLAGS;
5650 gen_op_st_T0_A0[ot + s->mem_index]();
5652 gen_op_mov_reg_T0[ot][rm]();
5654 gen_op_arpl_update();
5657 case 0x102: /* lar */
5658 case 0x103: /* lsl */
5659 if (!s->pe || s->vm86)
5661 ot = dflag ? OT_LONG : OT_WORD;
5662 modrm = ldub_code(s->pc++);
5663 reg = ((modrm >> 3) & 7) | rex_r;
5664 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5665 gen_op_mov_TN_reg[ot][1][reg]();
5666 if (s->cc_op != CC_OP_DYNAMIC)
5667 gen_op_set_cc_op(s->cc_op);
5672 s->cc_op = CC_OP_EFLAGS;
5673 gen_op_mov_reg_T1[ot][reg]();
5676 modrm = ldub_code(s->pc++);
5677 mod = (modrm >> 6) & 3;
5678 op = (modrm >> 3) & 7;
5680 case 0: /* prefetchnta */
5681 case 1: /* prefetchnt0 */
5682 case 2: /* prefetchnt0 */
5683 case 3: /* prefetchnt0 */
5686 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5687 /* nothing more to do */
5693 case 0x120: /* mov reg, crN */
5694 case 0x122: /* mov crN, reg */
5696 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5698 modrm = ldub_code(s->pc++);
5699 if ((modrm & 0xc0) != 0xc0)
5701 rm = (modrm & 7) | REX_B(s);
5702 reg = ((modrm >> 3) & 7) | rex_r;
5714 gen_op_mov_TN_reg[ot][0][rm]();
5715 gen_op_movl_crN_T0(reg);
5716 gen_jmp_im(s->pc - s->cs_base);
5719 #if !defined(CONFIG_USER_ONLY)
5721 gen_op_movtl_T0_cr8();
5724 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5725 gen_op_mov_reg_T0[ot][rm]();
5733 case 0x121: /* mov reg, drN */
5734 case 0x123: /* mov drN, reg */
5736 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5738 modrm = ldub_code(s->pc++);
5739 if ((modrm & 0xc0) != 0xc0)
5741 rm = (modrm & 7) | REX_B(s);
5742 reg = ((modrm >> 3) & 7) | rex_r;
5747 /* XXX: do it dynamically with CR4.DE bit */
5748 if (reg == 4 || reg == 5 || reg >= 8)
5751 gen_op_mov_TN_reg[ot][0][rm]();
5752 gen_op_movl_drN_T0(reg);
5753 gen_jmp_im(s->pc - s->cs_base);
5756 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5757 gen_op_mov_reg_T0[ot][rm]();
5761 case 0x106: /* clts */
5763 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5766 /* abort block because static cpu state changed */
5767 gen_jmp_im(s->pc - s->cs_base);
5771 /* MMX/SSE/SSE2/PNI support */
5772 case 0x1c3: /* MOVNTI reg, mem */
5773 if (!(s->cpuid_features & CPUID_SSE2))
5775 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5776 modrm = ldub_code(s->pc++);
5777 mod = (modrm >> 6) & 3;
5780 reg = ((modrm >> 3) & 7) | rex_r;
5781 /* generate a generic store */
5782 gen_ldst_modrm(s, modrm, ot, reg, 1);
5785 modrm = ldub_code(s->pc++);
5786 mod = (modrm >> 6) & 3;
5787 op = (modrm >> 3) & 7;
5789 case 0: /* fxsave */
5790 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5792 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5793 gen_op_fxsave_A0((s->dflag == 2));
5795 case 1: /* fxrstor */
5796 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5798 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5799 gen_op_fxrstor_A0((s->dflag == 2));
5801 case 2: /* ldmxcsr */
5802 case 3: /* stmxcsr */
5803 if (s->flags & HF_TS_MASK) {
5804 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5807 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5810 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5812 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5813 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5815 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5816 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5819 case 5: /* lfence */
5820 case 6: /* mfence */
5821 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5824 case 7: /* sfence / clflush */
5825 if ((modrm & 0xc7) == 0xc0) {
5827 if (!(s->cpuid_features & CPUID_SSE))
5831 if (!(s->cpuid_features & CPUID_CLFLUSH))
5833 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5840 case 0x10d: /* prefetch */
5841 modrm = ldub_code(s->pc++);
5842 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5843 /* ignore for now */
5845 case 0x110 ... 0x117:
5846 case 0x128 ... 0x12f:
5847 case 0x150 ... 0x177:
5848 case 0x17c ... 0x17f:
5850 case 0x1c4 ... 0x1c6:
5851 case 0x1d0 ... 0x1fe:
5852 gen_sse(s, b, pc_start, rex_r);
5857 /* lock generation */
5858 if (s->prefix & PREFIX_LOCK)
5862 if (s->prefix & PREFIX_LOCK)
5864 /* XXX: ensure that no lock was generated */
5865 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
/* Convenience masks over the EFLAGS condition-code bits used by the
   read/write tables below: all six arithmetic flags (O,S,Z,A,P,C),
   and the same set minus carry. */
5869 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5870 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
/*
 * opc_read_flags[op]: mask of EFLAGS bits that micro-op 'op' READS.
 * optimize_flags() ORs this into its backward liveness set
 * (live_flags |= opc_read_flags[op]) while scanning the generated op
 * stream.  Designated initializers: any op not listed defaults to 0,
 * i.e. it reads no flags.
 */
5872 /* flags read by an operation */
5873 static uint16_t opc_read_flags[NB_OPS] = {
5874 [INDEX_op_aas] = CC_A,
5875 [INDEX_op_aaa] = CC_A,
5876 [INDEX_op_das] = CC_A | CC_C,
5877 [INDEX_op_daa] = CC_A | CC_C,
5879 /* subtle: due to the incl/decl implementation, C is used */
5880 [INDEX_op_update_inc_cc] = CC_C,
5882 [INDEX_op_into] = CC_O,
/* conditional-jump ops: each reads only the flags its condition tests
   (b=below reads C, z reads Z, be reads Z|C, s reads S, l reads O|S,
   le reads O|S|Z), per size suffix b/w/l. */
5884 [INDEX_op_jb_subb] = CC_C,
5885 [INDEX_op_jb_subw] = CC_C,
5886 [INDEX_op_jb_subl] = CC_C,
5888 [INDEX_op_jz_subb] = CC_Z,
5889 [INDEX_op_jz_subw] = CC_Z,
5890 [INDEX_op_jz_subl] = CC_Z,
5892 [INDEX_op_jbe_subb] = CC_Z | CC_C,
5893 [INDEX_op_jbe_subw] = CC_Z | CC_C,
5894 [INDEX_op_jbe_subl] = CC_Z | CC_C,
5896 [INDEX_op_js_subb] = CC_S,
5897 [INDEX_op_js_subw] = CC_S,
5898 [INDEX_op_js_subl] = CC_S,
5900 [INDEX_op_jl_subb] = CC_O | CC_S,
5901 [INDEX_op_jl_subw] = CC_O | CC_S,
5902 [INDEX_op_jl_subl] = CC_O | CC_S,
5904 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
5905 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
5906 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
/* loopnz/loopz test ZF in addition to the count register */
5908 [INDEX_op_loopnzw] = CC_Z,
5909 [INDEX_op_loopnzl] = CC_Z,
5910 [INDEX_op_loopzw] = CC_Z,
5911 [INDEX_op_loopzl] = CC_Z,
/* setcc ops mirror the jcc conditions above */
5913 [INDEX_op_seto_T0_cc] = CC_O,
5914 [INDEX_op_setb_T0_cc] = CC_C,
5915 [INDEX_op_setz_T0_cc] = CC_Z,
5916 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
5917 [INDEX_op_sets_T0_cc] = CC_S,
5918 [INDEX_op_setp_T0_cc] = CC_P,
5919 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
5920 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
5922 [INDEX_op_setb_T0_subb] = CC_C,
5923 [INDEX_op_setb_T0_subw] = CC_C,
5924 [INDEX_op_setb_T0_subl] = CC_C,
5926 [INDEX_op_setz_T0_subb] = CC_Z,
5927 [INDEX_op_setz_T0_subw] = CC_Z,
5928 [INDEX_op_setz_T0_subl] = CC_Z,
5930 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
5931 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
5932 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
5934 [INDEX_op_sets_T0_subb] = CC_S,
5935 [INDEX_op_sets_T0_subw] = CC_S,
5936 [INDEX_op_sets_T0_subl] = CC_S,
5938 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
5939 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
5940 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
5942 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
5943 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
5944 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
5946 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
5947 [INDEX_op_cmc] = CC_C,
5948 [INDEX_op_salc] = CC_C,
5950 /* needed for correct flag optimisation before string ops */
5951 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
5952 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
5953 [INDEX_op_jz_ecxw] = CC_OSZAPC,
5954 [INDEX_op_jz_ecxl] = CC_OSZAPC,
5956 #ifdef TARGET_X86_64
/* 64-bit (q suffix) variants of the entries above */
5957 [INDEX_op_jb_subq] = CC_C,
5958 [INDEX_op_jz_subq] = CC_Z,
5959 [INDEX_op_jbe_subq] = CC_Z | CC_C,
5960 [INDEX_op_js_subq] = CC_S,
5961 [INDEX_op_jl_subq] = CC_O | CC_S,
5962 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
5964 [INDEX_op_loopnzq] = CC_Z,
5965 [INDEX_op_loopzq] = CC_Z,
5967 [INDEX_op_setb_T0_subq] = CC_C,
5968 [INDEX_op_setz_T0_subq] = CC_Z,
5969 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
5970 [INDEX_op_sets_T0_subq] = CC_S,
5971 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
5972 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
5974 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
5975 [INDEX_op_jz_ecxq] = CC_OSZAPC,
/* DEF_READF(SUFFIX): template that stamps out opc_read_flags entries
   for the per-memory-access variants of the ops whose result depends
   on the incoming carry: adc, sbb, rcl, rcr (all read CC_C).
   NOTE(review): the invocation sites (expected suffixes such as
   _raw/_kernel/_user) are not visible in this extract — confirm. */
5978 #define DEF_READF(SUFFIX)\
5979 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5980 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5981 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5982 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5983 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5984 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5985 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5986 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5988 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5989 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5990 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
5991 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5992 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5993 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5994 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5995 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
5999 #ifndef CONFIG_USER_ONLY
/*
 * opc_write_flags[op]: mask of EFLAGS bits that micro-op 'op' WRITES.
 * optimize_flags() uses it two ways: if none of the written flags are
 * live, the op is replaced by opc_simpler[op]; otherwise the written
 * bits are cleared from the liveness set before adding the read bits.
 * Unlisted ops default to 0 (write no flags).
 */
6005 /* flags written by an operation */
6006 static uint16_t opc_write_flags[NB_OPS] = {
6007 [INDEX_op_update2_cc] = CC_OSZAPC,
6008 [INDEX_op_update1_cc] = CC_OSZAPC,
6009 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6010 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6011 /* subtle: due to the incl/decl implementation, C is used */
6012 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6013 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6015 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6016 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6017 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6018 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6019 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6020 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6021 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6022 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6023 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6024 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6025 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
/* SSE/SSE2 compares set the integer flags */
6028 [INDEX_op_ucomiss] = CC_OSZAPC,
6029 [INDEX_op_ucomisd] = CC_OSZAPC,
6030 [INDEX_op_comiss] = CC_OSZAPC,
6031 [INDEX_op_comisd] = CC_OSZAPC,
/* BCD adjust ops */
6034 [INDEX_op_aam] = CC_OSZAPC,
6035 [INDEX_op_aad] = CC_OSZAPC,
6036 [INDEX_op_aas] = CC_OSZAPC,
6037 [INDEX_op_aaa] = CC_OSZAPC,
6038 [INDEX_op_das] = CC_OSZAPC,
6039 [INDEX_op_daa] = CC_OSZAPC,
/* eflags loads: the byte form cannot touch OF */
6041 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6042 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6043 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6044 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6045 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6046 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6047 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6048 [INDEX_op_clc] = CC_C,
6049 [INDEX_op_stc] = CC_C,
6050 [INDEX_op_cmc] = CC_C,
/* bit-test family */
6052 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6053 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6054 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6055 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6056 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6057 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6058 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6059 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6060 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6061 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6062 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6063 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6065 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6066 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6067 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6068 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6069 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6070 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6072 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6073 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6074 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6075 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
/* ops that only set ZF (segment checks), or partial fcomi flags */
6077 [INDEX_op_cmpxchg8b] = CC_Z,
6078 [INDEX_op_lar] = CC_Z,
6079 [INDEX_op_lsl] = CC_Z,
6080 [INDEX_op_verr] = CC_Z,
6081 [INDEX_op_verw] = CC_Z,
6082 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6083 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
/* DEF_WRITEF(SUFFIX): template stamping out opc_write_flags entries
   for the per-memory-access variants of adc/sbb (all flags), the
   rotates rol/ror/rcl/rcr (O and C only), the shifts shl/shr/sar,
   the double shifts shld/shrd, and cmpxchg (all flags).
   NOTE(review): invocation suffixes are outside this extract — confirm. */
6085 #define DEF_WRITEF(SUFFIX)\
6086 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6087 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6088 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6089 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6090 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6091 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6092 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6093 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6095 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6096 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6097 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6098 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6099 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6100 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6101 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6102 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6104 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6105 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6106 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6107 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6108 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6109 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6110 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6111 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6113 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6114 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6115 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6116 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6118 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6119 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6120 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6121 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6123 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6124 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6125 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6126 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6128 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6129 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6130 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6131 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6132 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6133 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6135 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6136 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6137 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6138 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6139 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6140 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6142 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6143 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6144 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6145 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6150 #ifndef CONFIG_USER_ONLY
/*
 * opc_simpler[op]: cheaper replacement op to use when none of the
 * flags 'op' writes are live.  optimize_flags() performs the
 * substitution (*opc_ptr = opc_simpler[op]); optimize_flags_init()
 * fills in the remaining zero entries at startup.
 */
6156 /* simpler form of an operation if no flags need to be generated */
6157 static uint16_t opc_simpler[NB_OPS] = {
/* pure flag-update ops become nops when flags are dead */
6158 [INDEX_op_update2_cc] = INDEX_op_nop,
6159 [INDEX_op_update1_cc] = INDEX_op_nop,
6160 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6162 /* broken: CC_OP logic must be rewritten */
6163 [INDEX_op_update_inc_cc] = INDEX_op_nop,
/* shifts: map the _cc variant to the flagless variant */
6166 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6167 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6168 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6169 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6171 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6172 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6173 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6174 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6176 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6177 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6178 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6179 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* DEF_SIMPLER(SUFFIX): same mapping for per-access-variant rotates */
6181 #define DEF_SIMPLER(SUFFIX)\
6182 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6183 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6184 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6185 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6187 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6188 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6189 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6190 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6194 #ifndef CONFIG_USER_ONLY
6195 DEF_SIMPLER(_kernel)
/* One-time init: walk opc_simpler[] and give every entry still at 0 a
   default value (presumably opc_simpler[i] = i, i.e. "no simpler form:
   keep the op" — the assignment line is not visible in this extract;
   TODO confirm). */
6200 void optimize_flags_init(void)
6203 /* put default values in arrays */
6204 for(i = 0; i < NB_OPS; i++) {
6205 if (opc_simpler[i] == 0)
6210 /* CPU flags computation optimization: we move backward thru the
6211 generated code to see which flags are needed. The operation is
6212 modified if suitable */
6213 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6216 int live_flags, write_flags, op;
/* start at the end of the op buffer and scan backwards */
6218 opc_ptr = opc_buf + opc_buf_len;
6219 /* live_flags contains the flags needed by the next instructions
6220 in the code. At the end of the bloc, we consider that all the
6222 live_flags = CC_OSZAPC;
6223 while (opc_ptr > opc_buf) {
6225 /* if none of the flags written by the instruction is used,
6226 then we can try to find a simpler instruction */
6227 write_flags = opc_write_flags[op];
6228 if ((live_flags & write_flags) == 0) {
/* dead flag computation: substitute the flagless form */
6229 *opc_ptr = opc_simpler[op];
6231 /* compute the live flags before the instruction */
/* written flags are killed, read flags become live (backward dataflow) */
6232 live_flags &= ~write_flags;
6233 live_flags |= opc_read_flags[op];
6237 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6238 basic block 'tb'. If search_pc is TRUE, also generate PC
6239 information for each intermediate instruction. */
6240 static inline int gen_intermediate_code_internal(CPUState *env,
6241 TranslationBlock *tb,
6244 DisasContext dc1, *dc = &dc1;
6245 target_ulong pc_ptr;
6246 uint16_t *gen_opc_end;
6247 int flags, j, lj, cflags;
6248 target_ulong pc_start;
6249 target_ulong cs_base;
6251 /* generate intermediate code */
6253 cs_base = tb->cs_base;
6255 cflags = tb->cflags;
/* unpack the static CPU state recorded in tb->flags into the
   DisasContext fields the decoder consults */
6257 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6258 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6259 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6260 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6262 dc->vm86 = (flags >> VM_SHIFT) & 1;
6263 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6264 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6265 dc->tf = (flags >> TF_SHIFT) & 1;
6266 dc->singlestep_enabled = env->singlestep_enabled;
6267 dc->cc_op = CC_OP_DYNAMIC;
6268 dc->cs_base = cs_base;
6270 dc->popl_esp_hack = 0;
6271 /* select memory access functions */
6273 if (flags & HF_SOFTMMU_MASK) {
/* mem_index indexes the gen_op_ld/st function tables; 2*4 vs 1*4
   presumably selects kernel- vs user-mode softmmu accessors —
   NOTE(review): confirm against the op table layout */
6275 dc->mem_index = 2 * 4;
6277 dc->mem_index = 1 * 4;
6279 dc->cpuid_features = env->cpuid_features;
6280 #ifdef TARGET_X86_64
6281 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6282 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* direct block chaining is unsafe under single-step, pending IRQ
   inhibit, or (without softmmu) when softmmu accessors are in use */
6285 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6286 (flags & HF_INHIBIT_IRQ_MASK)
6287 #ifndef CONFIG_SOFTMMU
6288 || (flags & HF_SOFTMMU_MASK)
6292 /* check addseg logic */
6293 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6294 printf("ERROR addseg\n");
/* reset the global op/param output pointers for this block */
6297 gen_opc_ptr = gen_opc_buf;
6298 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6299 gen_opparam_ptr = gen_opparam_buf;
6302 dc->is_jmp = DISAS_NEXT;
/* main translation loop: one disas_insn() per guest instruction */
6307 if (env->nb_breakpoints > 0) {
6308 for(j = 0; j < env->nb_breakpoints; j++) {
6309 if (env->breakpoints[j] == pc_ptr) {
6310 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc bookkeeping: record per-op guest PC and cc_op so a host
   PC can later be mapped back to a guest PC */
6316 j = gen_opc_ptr - gen_opc_buf;
6320 gen_opc_instr_start[lj++] = 0;
6322 gen_opc_pc[lj] = pc_ptr;
6323 gen_opc_cc_op[lj] = dc->cc_op;
6324 gen_opc_instr_start[lj] = 1;
6326 pc_ptr = disas_insn(dc, pc_ptr);
6327 /* stop translation if indicated */
6330 /* if single step mode, we generate only one instruction and
6331 generate an exception */
6332 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6333 the flag and abort the translation to give the irqs a
6334 change to be happen */
6335 if (dc->tf || dc->singlestep_enabled ||
6336 (flags & HF_INHIBIT_IRQ_MASK) ||
6337 (cflags & CF_SINGLE_INSN)) {
6338 gen_jmp_im(pc_ptr - dc->cs_base);
6342 /* if too long translation, stop generation too */
6343 if (gen_opc_ptr >= gen_opc_end ||
6344 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6345 gen_jmp_im(pc_ptr - dc->cs_base);
/* terminate the op stream */
6350 *gen_opc_ptr = INDEX_op_end;
6351 /* we don't forget to fill the last values */
6353 j = gen_opc_ptr - gen_opc_buf;
6356 gen_opc_instr_start[lj++] = 0;
/* optional debug logging of CPU state, guest asm and generated ops */
6360 if (loglevel & CPU_LOG_TB_CPU) {
6361 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6363 if (loglevel & CPU_LOG_TB_IN_ASM) {
6365 fprintf(logfile, "----------------\n");
6366 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6367 #ifdef TARGET_X86_64
6372 disas_flags = !dc->code32;
6373 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6374 fprintf(logfile, "\n");
6375 if (loglevel & CPU_LOG_TB_OP) {
6376 fprintf(logfile, "OP:\n");
6377 dump_ops(gen_opc_buf, gen_opparam_buf);
6378 fprintf(logfile, "\n");
/* dead-flag elimination pass over the generated ops */
6383 /* optimize flag computations */
6384 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6387 if (loglevel & CPU_LOG_TB_OP_OPT) {
6388 fprintf(logfile, "AFTER FLAGS OPT:\n");
6389 dump_ops(gen_opc_buf, gen_opparam_buf);
6390 fprintf(logfile, "\n");
6394 tb->size = pc_ptr - pc_start;
/* Public entry point: translate one basic block without per-op PC
   search information (search_pc = 0). */
6398 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6400 return gen_intermediate_code_internal(env, tb, 0);
6403 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6405 return gen_intermediate_code_internal(env, tb, 1);