4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
/* NOTE(review): this dump embeds the original file's line numbers in every
   line and elides many lines (braces, #ifdef/#else/#endif, table rows);
   comments added below describe only what is visible here. */
/* Output cursors for the generated micro-op stream: opcodes and their
   parameter words are emitted through these pointers. */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
/* Instruction-prefix bits accumulated while decoding one x86 insn. */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* x86_64 build: 64-bit-only table entries and REX accessors are real. */
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
/* 32-bit-only build: 64-bit entries collapse to NULL / nothing.
   NOTE(review): the #ifdef TARGET_X86_64 / #else / #endif lines that
   select between the two definitions above are elided from this view. */
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
/* presumably non-zero when a REX prefix makes the uniform byte registers
   (spl..dil) addressable — defining code elided, confirm against full file */
61 static int x86_64_hregs;
64 #ifdef USE_DIRECT_JUMP
/* pass a TranslationBlock pointer as a micro-op parameter */
67 #define TBPARAM(x) (long)(x)
/* Per-translation-block decoder state: everything the instruction decoder
   needs about the current insn and the current CPU mode.
   NOTE(review): several fields and the closing "} DisasContext;" are
   elided from this view. */
70 typedef struct DisasContext {
71 /* current insn context */
72 int override; /* -1 if no override */
75 target_ulong pc; /* pc = eip + cs_base */
76 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base; /* base of CS segment */
80 int pe; /* protected mode */
81 int code32; /* 32 bit code segment */
83 int lma; /* long mode active */
84 int code64; /* 64 bit code segment */
87 int ss32; /* 32 bit stack segment */
88 int cc_op; /* current CC operation */
89 int addseg; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st; /* currently unused */
91 int vm86; /* vm86 mode */
94 int tf; /* TF cpu flag */
95 int singlestep_enabled; /* "hardware" single step enabled */
96 int jmp_opt; /* use direct block chaining for direct jumps */
97 int mem_index; /* select memory access functions */
98 int flags; /* all execution flags */
99 struct TranslationBlock *tb;
100 int popl_esp_hack; /* for correct popl with esp base handling */
101 int rip_offset; /* only used in x86_64, but left for simplicity */
/* forward declarations: end-of-block and jump emitters defined later */
105 static void gen_eob(DisasContext *s);
106 static void gen_jmp(DisasContext *s, target_ulong eip);
107 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109 /* i386 arith/logic operations */
/* OP_SHL1 is the undocumented SAL alias of SHL (shift-group index 6) */
129 OP_SHL1, /* undocumented */
134 #define DEF(s, n, copy_size) INDEX_op_ ## s,
151 /* I386 int registers */
152 OR_EAX, /* MUST be even numbered */
161 OR_TMP0 = 16, /* temporary operand register */
163 OR_A0, /* temporary register used when doing address evaluation */
/* operand sizes: byte, word, long, quad (x86_64 build) */
168 #define NB_OP_SIZES 4
/* Expand one table row per integer register, in CPU_NB_REGS order.
   This 16-register variant is for the x86_64 build. */
170 #define DEF_REGS(prefix, suffix) \
171 prefix ## EAX ## suffix,\
172 prefix ## ECX ## suffix,\
173 prefix ## EDX ## suffix,\
174 prefix ## EBX ## suffix,\
175 prefix ## ESP ## suffix,\
176 prefix ## EBP ## suffix,\
177 prefix ## ESI ## suffix,\
178 prefix ## EDI ## suffix,\
179 prefix ## R8 ## suffix,\
180 prefix ## R9 ## suffix,\
181 prefix ## R10 ## suffix,\
182 prefix ## R11 ## suffix,\
183 prefix ## R12 ## suffix,\
184 prefix ## R13 ## suffix,\
185 prefix ## R14 ## suffix,\
186 prefix ## R15 ## suffix,
/* Wrappers that pick the byte-register op vs the high-byte (AH/CH/DH/BH)
   op for ESP/EBP/ESI/EDI slots.
   NOTE(review): the wrapper bodies' braces and the x86_64_hregs test that
   presumably selects between the two calls are elided — confirm against
   the full file. */
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
193 prefixb ## ESP ## suffix (); \
195 prefixh ## EAX ## suffix (); \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
201 prefixb ## EBP ## suffix (); \
203 prefixh ## ECX ## suffix (); \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
209 prefixb ## ESI ## suffix (); \
211 prefixh ## EDX ## suffix (); \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
217 prefixb ## EDI ## suffix (); \
219 prefixh ## EBX ## suffix (); \
/* instantiate the wrappers for T0/T1 stores and loads */
222 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
227 #else /* !TARGET_X86_64 */
/* 32-bit build: only byte/word/long operand sizes, 8 registers */
229 #define NB_OP_SIZES 3
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,
241 #endif /* !TARGET_X86_64 */
/* ------------------------------------------------------------------ */
/* Micro-op dispatch tables. Code generation below is table-driven:
   tables are indexed by operand size (OT_BYTE..OT_QUAD), by register,
   and — for the "_mem" variants — by s->mem_index (raw/kernel/user
   access functions). 64-bit rows are wrapped in X86_64_ONLY().
   NOTE(review): many rows, braces and #ifdef/#endif lines of these
   initializers are elided from this view. */
/* ------------------------------------------------------------------ */
/* store T0 into a register, per size */
243 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
250 gen_op_movb_ESP_T0_wrapper,
251 gen_op_movb_EBP_T0_wrapper,
252 gen_op_movb_ESI_T0_wrapper,
253 gen_op_movb_EDI_T0_wrapper,
270 DEF_REGS(gen_op_movw_, _T0)
273 DEF_REGS(gen_op_movl_, _T0)
277 DEF_REGS(gen_op_movq_, _T0)
/* store T1 into a register, per size */
282 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
289 gen_op_movb_ESP_T1_wrapper,
290 gen_op_movb_EBP_T1_wrapper,
291 gen_op_movb_ESI_T1_wrapper,
292 gen_op_movb_EDI_T1_wrapper,
309 DEF_REGS(gen_op_movw_, _T1)
312 DEF_REGS(gen_op_movl_, _T1)
316 DEF_REGS(gen_op_movq_, _T1)
/* store A0 into a register (no byte size, hence NB_OP_SIZES - 1) */
321 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
323 DEF_REGS(gen_op_movw_, _A0)
326 DEF_REGS(gen_op_movl_, _A0)
330 DEF_REGS(gen_op_movq_, _A0)
/* load a register into T0 or T1 (second index selects T0/T1) */
335 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
344 gen_op_movl_T0_ESP_wrapper,
345 gen_op_movl_T0_EBP_wrapper,
346 gen_op_movl_T0_ESI_wrapper,
347 gen_op_movl_T0_EDI_wrapper,
369 gen_op_movl_T1_ESP_wrapper,
370 gen_op_movl_T1_EBP_wrapper,
371 gen_op_movl_T1_ESI_wrapper,
372 gen_op_movl_T1_EDI_wrapper,
391 DEF_REGS(gen_op_movl_T0_, )
394 DEF_REGS(gen_op_movl_T1_, )
399 DEF_REGS(gen_op_movl_T0_, )
402 DEF_REGS(gen_op_movl_T1_, )
408 DEF_REGS(gen_op_movl_T0_, )
411 DEF_REGS(gen_op_movl_T1_, )
/* A0 (address register) load / scaled-index add, 32-bit forms */
417 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418 DEF_REGS(gen_op_movl_A0_, )
/* A0 += reg << scale, scale = 0..3 (first index) */
421 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
423 DEF_REGS(gen_op_addl_A0_, )
426 DEF_REGS(gen_op_addl_A0_, _s1)
429 DEF_REGS(gen_op_addl_A0_, _s2)
432 DEF_REGS(gen_op_addl_A0_, _s3)
/* 64-bit forms of the two tables above */
437 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438 DEF_REGS(gen_op_movq_A0_, )
441 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
443 DEF_REGS(gen_op_addq_A0_, )
446 DEF_REGS(gen_op_addq_A0_, _s1)
449 DEF_REGS(gen_op_addq_A0_, _s2)
452 DEF_REGS(gen_op_addq_A0_, _s3)
/* conditional move of T1 into a register (no byte form) */
457 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
459 DEF_REGS(gen_op_cmovw_, _T1_T0)
462 DEF_REGS(gen_op_cmovl_, _T1_T0)
466 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* flag-independent arith ops, indexed by OP_ADDL..OP_CMPL */
471 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* adc/sbb need the incoming carry, hence separate per-size tables */
482 #define DEF_ARITHC(SUFFIX)\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
500 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
504 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
506 #ifndef CONFIG_USER_ONLY
512 static const int cc_op_arithb[8] = {
/* lock-capable cmpxchg variants, per size and memory index */
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
533 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
535 #ifndef CONFIG_USER_ONLY
/* shift group, indices rol/ror/rcl/rcr/shl/shr/shl1/sar; the repeated
   shl entry at index 6 is intentional: the undocumented SAL alias
   (OP_SHL1 in the op enum above) behaves exactly like SHL */
541 #define DEF_SHIFT(SUFFIX)\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
583 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
587 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
589 #ifndef CONFIG_USER_ONLY
/* double-precision shifts (shld/shrd) with imm or CL count */
595 #define DEF_SHIFTD(SUFFIX, op)\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
615 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
619 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
621 #ifndef CONFIG_USER_ONLY
622 DEF_SHIFTD(_kernel, im)
623 DEF_SHIFTD(_user, im)
627 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
628 DEF_SHIFTD(_raw, ECX)
629 #ifndef CONFIG_USER_ONLY
630 DEF_SHIFTD(_kernel, ECX)
631 DEF_SHIFTD(_user, ECX)
/* bit-test ops (bts/btr/btc), indexed by size then op */
635 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
638 gen_op_btsw_T0_T1_cc,
639 gen_op_btrw_T0_T1_cc,
640 gen_op_btcw_T0_T1_cc,
644 gen_op_btsl_T0_T1_cc,
645 gen_op_btrl_T0_T1_cc,
646 gen_op_btcl_T0_T1_cc,
651 gen_op_btsq_T0_T1_cc,
652 gen_op_btrq_T0_T1_cc,
653 gen_op_btcq_T0_T1_cc,
/* add the bit offset in T1 to the address in A0 (for bt* mem forms) */
658 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
659 gen_op_add_bitw_A0_T1,
660 gen_op_add_bitl_A0_T1,
661 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* bsf/bsr, indexed by size then direction */
664 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* ------------------------------------------------------------------ */
/* memory load/store tables: rows grouped raw / kernel / user, selected
   at runtime by adding s->mem_index to the size index */
/* ------------------------------------------------------------------ */
/* sign-extending loads into T0 */
681 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
682 gen_op_ldsb_raw_T0_A0,
683 gen_op_ldsw_raw_T0_A0,
684 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
686 #ifndef CONFIG_USER_ONLY
687 gen_op_ldsb_kernel_T0_A0,
688 gen_op_ldsw_kernel_T0_A0,
689 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
692 gen_op_ldsb_user_T0_A0,
693 gen_op_ldsw_user_T0_A0,
694 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
/* zero-extending loads into T0 */
699 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
700 gen_op_ldub_raw_T0_A0,
701 gen_op_lduw_raw_T0_A0,
705 #ifndef CONFIG_USER_ONLY
706 gen_op_ldub_kernel_T0_A0,
707 gen_op_lduw_kernel_T0_A0,
711 gen_op_ldub_user_T0_A0,
712 gen_op_lduw_user_T0_A0,
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
720 gen_op_ldub_raw_T0_A0,
721 gen_op_lduw_raw_T0_A0,
722 gen_op_ldl_raw_T0_A0,
723 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
725 #ifndef CONFIG_USER_ONLY
726 gen_op_ldub_kernel_T0_A0,
727 gen_op_lduw_kernel_T0_A0,
728 gen_op_ldl_kernel_T0_A0,
729 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
731 gen_op_ldub_user_T0_A0,
732 gen_op_lduw_user_T0_A0,
733 gen_op_ldl_user_T0_A0,
734 X86_64_ONLY(gen_op_ldq_user_T0_A0),
/* same, loading into T1 */
738 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
739 gen_op_ldub_raw_T1_A0,
740 gen_op_lduw_raw_T1_A0,
741 gen_op_ldl_raw_T1_A0,
742 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
744 #ifndef CONFIG_USER_ONLY
745 gen_op_ldub_kernel_T1_A0,
746 gen_op_lduw_kernel_T1_A0,
747 gen_op_ldl_kernel_T1_A0,
748 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
750 gen_op_ldub_user_T1_A0,
751 gen_op_lduw_user_T1_A0,
752 gen_op_ldl_user_T1_A0,
753 X86_64_ONLY(gen_op_ldq_user_T1_A0),
/* stores of T0 */
757 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
758 gen_op_stb_raw_T0_A0,
759 gen_op_stw_raw_T0_A0,
760 gen_op_stl_raw_T0_A0,
761 X86_64_ONLY(gen_op_stq_raw_T0_A0),
763 #ifndef CONFIG_USER_ONLY
764 gen_op_stb_kernel_T0_A0,
765 gen_op_stw_kernel_T0_A0,
766 gen_op_stl_kernel_T0_A0,
767 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
769 gen_op_stb_user_T0_A0,
770 gen_op_stw_user_T0_A0,
771 gen_op_stl_user_T0_A0,
772 X86_64_ONLY(gen_op_stq_user_T0_A0),
/* stores of T1 (no byte form visible here) */
776 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
778 gen_op_stw_raw_T1_A0,
779 gen_op_stl_raw_T1_A0,
780 X86_64_ONLY(gen_op_stq_raw_T1_A0),
782 #ifndef CONFIG_USER_ONLY
784 gen_op_stw_kernel_T1_A0,
785 gen_op_stl_kernel_T1_A0,
786 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
789 gen_op_stw_user_T1_A0,
790 gen_op_stl_user_T1_A0,
791 X86_64_ONLY(gen_op_stq_user_T1_A0),
/* Emit code that loads EIP with the immediate 'pc'. The visible branches
   pick the cheapest encoding: 32-bit zero-extended, 32-bit sign-extended,
   or full 64-bit split into two halves.
   NOTE(review): the #ifdef TARGET_X86_64 / #else / #endif framing and
   braces are elided — the trailing gen_op_movl_eip_im is presumably the
   32-bit-only build's body. */
795 static inline void gen_jmp_im(target_ulong pc)
798 if (pc == (uint32_t)pc) {
799 gen_op_movl_eip_im(pc);
800 } else if (pc == (int32_t)pc) {
801 gen_op_movq_eip_im(pc);
803 gen_op_movq_eip_im64(pc >> 32, pc);
806 gen_op_movl_eip_im(pc);
/* Compute the source address of a string op into A0: seg_base + (R)ESI,
   honouring a segment override. Three paths are visible: 64-bit, 32-bit
   (segment added only when addseg or an override requires it), and
   16-bit (address masked to 16 bits, segment always added).
   NOTE(review): the #ifdef/#else/#endif framing and several braces are
   elided from this view. */
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
814 override = s->override;
818 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
819 gen_op_addq_A0_reg_sN[0][R_ESI]();
821 gen_op_movq_A0_reg[R_ESI]();
827 if (s->addseg && override < 0)
830 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
831 gen_op_addl_A0_reg_sN[0][R_ESI]();
833 gen_op_movl_A0_reg[R_ESI]();
836 /* 16 address, always override */
839 gen_op_movl_A0_reg[R_ESI]();
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Destination address of a string op into A0: ES:(R)EDI. ES cannot be
   overridden, so no override handling here. */
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
849 gen_op_movq_A0_reg[R_EDI]();
854 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
855 gen_op_addl_A0_reg_sN[0][R_EDI]();
857 gen_op_movl_A0_reg[R_EDI]();
860 gen_op_movl_A0_reg[R_EDI]();
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* load the per-size increment/decrement (+-1/2/4/8, sign per DF) into T0 */
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* conditional branches on (E/R)CX, indexed by address size (aflag) */
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
876 X86_64_ONLY(gen_op_jnz_ecxq),
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
882 X86_64_ONLY(gen_op_jz_ecxq),
885 static GenOpFunc *gen_op_dec_ECX[3] = {
888 X86_64_ONLY(gen_op_decq_ECX),
/* repz/repnz termination tests on the last compare result */
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
896 X86_64_ONLY(gen_op_jnz_subq),
902 X86_64_ONLY(gen_op_jz_subq),
/* port I/O micro-ops, per operand size */
906 static GenOpFunc *gen_op_in_DX_T0[3] = {
912 static GenOpFunc *gen_op_out_DX_T0[3] = {
918 static GenOpFunc *gen_op_in[3] = {
924 static GenOpFunc *gen_op_out[3] = {
930 static GenOpFunc *gen_check_io_T0[3] = {
936 static GenOpFunc *gen_check_io_DX[3] = {
/* Emit an I/O-permission check (TSS bitmap) when the access may fault:
   protected mode with CPL > IOPL, or vm86 mode. cur_eip is presumably
   needed so the helper can raise a fault at the right EIP — the lines
   using it are elided, confirm against the full file. */
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
949 gen_check_io_DX[ot]();
951 gen_check_io_T0[ot]();
/* One generator per string instruction body (movs/stos/lods/scas/cmps/
   ins/outs). Common shape: compute address(es), do the access, load the
   DF-based delta into T0, then advance (R)ESI/(R)EDI with the add op for
   the current address size.
   NOTE(review): the braces and some #ifdef/#else/#endif lines of each
   function are elided from this view. */
955 static inline void gen_movs(DisasContext *s, int ot)
957 gen_string_movl_A0_ESI(s);
958 gen_op_ld_T0_A0[ot + s->mem_index]();
959 gen_string_movl_A0_EDI(s);
960 gen_op_st_T0_A0[ot + s->mem_index]();
961 gen_op_movl_T0_Dshift[ot]();
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked cc_op into generated code and mark it dynamic. */
977 static inline void gen_update_cc_op(DisasContext *s)
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
985 /* XXX: does not work with gdbstub "ice" single step - not a
/* Emit the "(E)CX == 0 -> skip to next insn" test for rep prefixes;
   returns a label (presumably l2 — return elided) for the loop exit. */
987 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
991 l1 = gen_new_label();
992 l2 = gen_new_label();
993 gen_op_jnz_ecx[s->aflag](l1);
995 gen_jmp_tb(s, next_eip, 1);
/* STOS: store AL/AX/EAX/RAX at ES:(R)EDI, advance EDI */
1000 static inline void gen_stos(DisasContext *s, int ot)
1002 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1003 gen_string_movl_A0_EDI(s);
1004 gen_op_st_T0_A0[ot + s->mem_index]();
1005 gen_op_movl_T0_Dshift[ot]();
1006 #ifdef TARGET_X86_64
1007 if (s->aflag == 2) {
1008 gen_op_addq_EDI_T0();
1012 gen_op_addl_EDI_T0();
1014 gen_op_addw_EDI_T0();
/* LODS: load from DS:(R)ESI into the accumulator, advance ESI */
1018 static inline void gen_lods(DisasContext *s, int ot)
1020 gen_string_movl_A0_ESI(s);
1021 gen_op_ld_T0_A0[ot + s->mem_index]();
1022 gen_op_mov_reg_T0[ot][R_EAX]();
1023 gen_op_movl_T0_Dshift[ot]();
1024 #ifdef TARGET_X86_64
1025 if (s->aflag == 2) {
1026 gen_op_addq_ESI_T0();
1030 gen_op_addl_ESI_T0();
1032 gen_op_addw_ESI_T0();
/* SCAS: compare accumulator with ES:(R)EDI, set flags, advance EDI */
1036 static inline void gen_scas(DisasContext *s, int ot)
1038 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1039 gen_string_movl_A0_EDI(s);
1040 gen_op_ld_T1_A0[ot + s->mem_index]();
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift[ot]();
1043 #ifdef TARGET_X86_64
1044 if (s->aflag == 2) {
1045 gen_op_addq_EDI_T0();
1049 gen_op_addl_EDI_T0();
1051 gen_op_addw_EDI_T0();
/* CMPS: compare [ESI] with [EDI], set flags, advance both */
1055 static inline void gen_cmps(DisasContext *s, int ot)
1057 gen_string_movl_A0_ESI(s);
1058 gen_op_ld_T0_A0[ot + s->mem_index]();
1059 gen_string_movl_A0_EDI(s);
1060 gen_op_ld_T1_A0[ot + s->mem_index]();
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift[ot]();
1063 #ifdef TARGET_X86_64
1064 if (s->aflag == 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
/* INS: read port DX into ES:(R)EDI, advance EDI. The extra store before
   the 'in' looks like a pre-touch of the destination page — elided
   context, confirm against the full file. */
1078 static inline void gen_ins(DisasContext *s, int ot)
1080 gen_string_movl_A0_EDI(s);
1082 gen_op_st_T0_A0[ot + s->mem_index]();
1083 gen_op_in_DX_T0[ot]();
1084 gen_op_st_T0_A0[ot + s->mem_index]();
1085 gen_op_movl_T0_Dshift[ot]();
1086 #ifdef TARGET_X86_64
1087 if (s->aflag == 2) {
1088 gen_op_addq_EDI_T0();
1092 gen_op_addl_EDI_T0();
1094 gen_op_addw_EDI_T0();
/* OUTS: write [ESI] to port DX, advance ESI */
1098 static inline void gen_outs(DisasContext *s, int ot)
1100 gen_string_movl_A0_ESI(s);
1101 gen_op_ld_T0_A0[ot + s->mem_index]();
1102 gen_op_out_DX_T0[ot]();
1103 gen_op_movl_T0_Dshift[ot]();
1104 #ifdef TARGET_X86_64
1105 if (s->aflag == 2) {
1106 gen_op_addq_ESI_T0();
1110 gen_op_addl_ESI_T0();
1112 gen_op_addw_ESI_T0();
1116 /* same method as Valgrind : we generate jumps to current or next
/* GEN_REPZ: wrap a string-op generator into its rep-prefixed form —
   test ECX, run one iteration, dec ECX, loop back via gen_jmp. GEN_REPZ2
   additionally tests the compare result (repz/repnz for scas/cmps). */
1118 #define GEN_REPZ(op) \
1119 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1120 target_ulong cur_eip, target_ulong next_eip) \
1123 gen_update_cc_op(s); \
1124 l2 = gen_jz_ecx_string(s, next_eip); \
1125 gen_ ## op(s, ot); \
1126 gen_op_dec_ECX[s->aflag](); \
1127 /* a loop would cause two single step exceptions if ECX = 1 \
1128 before rep string_insn */ \
1130 gen_op_jz_ecx[s->aflag](l2); \
1131 gen_jmp(s, cur_eip); \
1134 #define GEN_REPZ2(op) \
1135 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1136 target_ulong cur_eip, \
1137 target_ulong next_eip, \
1141 gen_update_cc_op(s); \
1142 l2 = gen_jz_ecx_string(s, next_eip); \
1143 gen_ ## op(s, ot); \
1144 gen_op_dec_ECX[s->aflag](); \
1145 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1146 gen_op_string_jnz_sub[nz][ot](l2);\
1148 gen_op_jz_ecx[s->aflag](l2); \
1149 gen_jmp(s, cur_eip); \
/* Fast conditional-jump ops usable when cc_op is a known subtraction:
   first index = operand size, second = jcc condition. BUGGY_64 entries
   fall back to the slow path (see the BUGGY_64 macro comment above). */
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1202 #ifdef TARGET_X86_64
1205 BUGGY_64(gen_op_jb_subq),
1207 BUGGY_64(gen_op_jbe_subq),
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
/* loop/loopz/loopnz/jcxz ops, indexed by address size then variant */
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1226 #ifdef TARGET_X86_64
/* setcc fallbacks that evaluate the condition from the full flags */
1235 static GenOpFunc *gen_setcc_slow[8] = {
/* setcc fast path when cc_op is a known subtraction, per size */
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1277 #ifdef TARGET_X86_64
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
/* x87 arith with ST0 and FT0, indexed by the insn's /reg field;
   fcom appears twice because /2 and /3 (fcom/fcomp) share the compare */
1291 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1292 gen_op_fadd_ST0_FT0,
1293 gen_op_fmul_ST0_FT0,
1294 gen_op_fcom_ST0_FT0,
1295 gen_op_fcom_ST0_FT0,
1296 gen_op_fsub_ST0_FT0,
1297 gen_op_fsubr_ST0_FT0,
1298 gen_op_fdiv_ST0_FT0,
1299 gen_op_fdivr_ST0_FT0,
1302 /* NOTE the exception in "r" op ordering */
1303 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1304 gen_op_fadd_STN_ST0,
1305 gen_op_fmul_STN_ST0,
/* sub/subr and div/divr are swapped relative to the table above — this
   matches the x86 encoding of the STN,ST0 direction (see NOTE) */
1308 gen_op_fsubr_STN_ST0,
1309 gen_op_fsub_STN_ST0,
1310 gen_op_fdivr_STN_ST0,
1311 gen_op_fdiv_STN_ST0,
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU op (op = OP_ADDL..OP_CMPL) of size ot on destination d.
   Loads the destination into T0 (register or memory), dispatches through
   the op tables, writes the result back, and updates the lazy cc_op.
   NOTE(review): switch/case labels, braces and the T1-loading lines are
   elided from this view. */
1315 static void gen_op(DisasContext *s1, int op, int ot, int d)
1317 GenOpFunc *gen_update_cc;
1320 gen_op_mov_TN_reg[ot][0][d]();
1322 gen_op_ld_T0_A0[ot + s1->mem_index]();
/* adc/sbb: need current carry, so flush cc_op first; the combined
   mem ops do load+op+store atomically w.r.t. precise exceptions */
1327 if (s1->cc_op != CC_OP_DYNAMIC)
1328 gen_op_set_cc_op(s1->cc_op);
1330 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331 gen_op_mov_reg_T0[ot][d]();
1333 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1335 s1->cc_op = CC_OP_DYNAMIC;
1338 gen_op_addl_T0_T1();
1339 s1->cc_op = CC_OP_ADDB + ot;
1340 gen_update_cc = gen_op_update2_cc;
1343 gen_op_subl_T0_T1();
1344 s1->cc_op = CC_OP_SUBB + ot;
1345 gen_update_cc = gen_op_update2_cc;
/* logical ops (and/or/xor): flags from result only */
1351 gen_op_arith_T0_T1_cc[op]();
1352 s1->cc_op = CC_OP_LOGICB + ot;
1353 gen_update_cc = gen_op_update1_cc;
/* cmp: flags only, no writeback */
1356 gen_op_cmpl_T0_T1_cc();
1357 s1->cc_op = CC_OP_SUBB + ot;
1358 gen_update_cc = NULL;
1361 if (op != OP_CMPL) {
1363 gen_op_mov_reg_T0[ot][d]();
1365 gen_op_st_T0_A0[ot + s1->mem_index]();
1367 /* the flags update must happen after the memory write (precise
1368 exception support) */
1374 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit inc (c > 0) or dec of size ot on destination d; carry flag is
   preserved per x86 semantics, so cc_op must be flushed first. */
1375 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1378 gen_op_mov_TN_reg[ot][0][d]();
1380 gen_op_ld_T0_A0[ot + s1->mem_index]();
1381 if (s1->cc_op != CC_OP_DYNAMIC)
1382 gen_op_set_cc_op(s1->cc_op);
1385 s1->cc_op = CC_OP_INCB + ot;
1388 s1->cc_op = CC_OP_DECB + ot;
1391 gen_op_mov_reg_T0[ot][d]();
1393 gen_op_st_T0_A0[ot + s1->mem_index]();
1394 gen_op_update_inc_cc();
/* Emit a shift/rotate of size ot on destination d with count from
   register s. Resulting flags are data-dependent (zero count leaves them
   unchanged), hence cc_op becomes dynamic. */
1397 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1400 gen_op_mov_TN_reg[ot][0][d]();
1402 gen_op_ld_T0_A0[ot + s1->mem_index]();
1404 gen_op_mov_TN_reg[ot][1][s]();
1405 /* for zero counts, flags are not updated, so must do it dynamically */
1406 if (s1->cc_op != CC_OP_DYNAMIC)
1407 gen_op_set_cc_op(s1->cc_op);
1410 gen_op_shift_T0_T1_cc[ot][op]();
1412 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1414 gen_op_mov_reg_T0[ot][d]();
1415 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Shift with an immediate count c: materialize c in T1 and reuse the
   register-count path above. */
1418 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1420 /* currently not optimized */
1421 gen_op_movl_T1_im(c);
1422 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode a ModR/M (+ optional SIB + displacement) memory operand and
   emit code leaving the effective address in A0. Handles 16/32/64-bit
   addressing, segment overrides, RIP-relative (CODE64 + no SIB), the
   popl-with-ESP-base quirk, and default-segment selection (SS when the
   base is EBP/ESP, DS otherwise).
   NOTE(review): many switch/case labels, braces, and *reg_ptr /
   *offset_ptr assignments are elided from this view. */
1425 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1433 int mod, rm, code, override, must_add_seg;
1435 override = s->override;
1436 must_add_seg = s->addseg;
1439 mod = (modrm >> 6) & 3;
/* SIB byte: scale/index/base, with REX.X extending the index */
1451 code = ldub_code(s->pc++);
1452 scale = (code >> 6) & 3;
1453 index = ((code >> 3) & 7) | REX_X(s);
/* mod == 0, base == 5: 32-bit absolute disp — or RIP-relative in
   64-bit mode when there is no SIB byte */
1460 if ((base & 7) == 5) {
1462 disp = (int32_t)ldl_code(s->pc);
1464 if (CODE64(s) && !havesib) {
1465 disp += s->pc + s->rip_offset;
1472 disp = (int8_t)ldub_code(s->pc++);
1476 disp = ldl_code(s->pc);
1482 /* for correct popl handling with esp */
1483 if (base == 4 && s->popl_esp_hack)
1484 disp += s->popl_esp_hack;
1485 #ifdef TARGET_X86_64
1486 if (s->aflag == 2) {
1487 gen_op_movq_A0_reg[base]();
1489 if ((int32_t)disp == disp)
1490 gen_op_addq_A0_im(disp);
1492 gen_op_addq_A0_im64(disp >> 32, disp);
1497 gen_op_movl_A0_reg[base]();
1499 gen_op_addl_A0_im(disp);
/* no base register: load the displacement directly */
1502 #ifdef TARGET_X86_64
1503 if (s->aflag == 2) {
1504 if ((int32_t)disp == disp)
1505 gen_op_movq_A0_im(disp);
1507 gen_op_movq_A0_im64(disp >> 32, disp);
1511 gen_op_movl_A0_im(disp);
1514 /* XXX: index == 4 is always invalid */
1515 if (havesib && (index != 4 || scale != 0)) {
1516 #ifdef TARGET_X86_64
1517 if (s->aflag == 2) {
1518 gen_op_addq_A0_reg_sN[scale][index]();
1522 gen_op_addl_A0_reg_sN[scale][index]();
/* default segment: SS for EBP/ESP bases, else DS */
1527 if (base == R_EBP || base == R_ESP)
1532 #ifdef TARGET_X86_64
1533 if (s->aflag == 2) {
1534 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1538 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* ---- 16-bit addressing forms below ---- */
1545 disp = lduw_code(s->pc);
1547 gen_op_movl_A0_im(disp);
1548 rm = 0; /* avoid SS override */
1555 disp = (int8_t)ldub_code(s->pc++);
1559 disp = lduw_code(s->pc);
/* the eight 16-bit base/index combinations (BX+SI .. BX) */
1565 gen_op_movl_A0_reg[R_EBX]();
1566 gen_op_addl_A0_reg_sN[0][R_ESI]();
1569 gen_op_movl_A0_reg[R_EBX]();
1570 gen_op_addl_A0_reg_sN[0][R_EDI]();
1573 gen_op_movl_A0_reg[R_EBP]();
1574 gen_op_addl_A0_reg_sN[0][R_ESI]();
1577 gen_op_movl_A0_reg[R_EBP]();
1578 gen_op_addl_A0_reg_sN[0][R_EDI]();
1581 gen_op_movl_A0_reg[R_ESI]();
1584 gen_op_movl_A0_reg[R_EDI]();
1587 gen_op_movl_A0_reg[R_EBP]();
1591 gen_op_movl_A0_reg[R_EBX]();
1595 gen_op_addl_A0_im(disp);
1596 gen_op_andl_A0_ffff();
/* BP-based forms (rm 2,3,6) default to SS rather than DS */
1600 if (rm == 2 || rm == 3 || rm == 6)
1605 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1615 /* used for LEA and MOV AX, mem */
/* Add the DS-default segment base (or the active override) to A0. */
1616 static void gen_add_A0_ds_seg(DisasContext *s)
1618 int override, must_add_seg;
1619 must_add_seg = s->addseg;
1621 if (s->override >= 0) {
1622 override = s->override;
1628 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1632 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* Move of size ot between register 'reg' and the ModR/M operand:
   register-to-register when mod == 3, otherwise through memory at the
   address computed by gen_lea_modrm. is_store selects direction. */
1634 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1636 int mod, rm, opreg, disp;
1638 mod = (modrm >> 6) & 3;
1639 rm = (modrm & 7) | REX_B(s);
1643 gen_op_mov_TN_reg[ot][0][reg]();
1644 gen_op_mov_reg_T0[ot][rm]();
1646 gen_op_mov_TN_reg[ot][0][rm]();
1648 gen_op_mov_reg_T0[ot][reg]();
1651 gen_lea_modrm(s, modrm, &opreg, &disp);
1654 gen_op_mov_TN_reg[ot][0][reg]();
1655 gen_op_st_T0_A0[ot + s->mem_index]();
1657 gen_op_ld_T0_A0[ot + s->mem_index]();
1659 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size ot from the instruction stream and advance
   s->pc. NOTE(review): switch framing and pc increments are elided. */
1664 static inline uint32_t insn_get(DisasContext *s, int ot)
1670 ret = ldub_code(s->pc);
1674 ret = lduw_code(s->pc);
1679 ret = ldl_code(s->pc);
/* byte count of an immediate for operand size ot (body elided) */
1686 static inline int insn_const_size(unsigned int ot)
/* Emit a conditional jump (jcc) to val, falling through to next_eip.
   Fast path: when cc_op is a known sub/cmp (or add, reusing the sub
   table modulo 4), jump directly on the stored operands; otherwise
   evaluate the condition via gen_setcc_slow and test T0. When direct
   block chaining is possible, both edges use goto_tb.
   NOTE(review): the jmp_opt test, b&1 condition-inversion handling,
   braces and several branches are elided from this view. */
1694 static inline void gen_jcc(DisasContext *s, int b,
1695 target_ulong val, target_ulong next_eip)
1697 TranslationBlock *tb;
1704 jcc_op = (b >> 1) & 7;
1708 /* we optimize the cmp/jcc case */
1713 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1716 /* some jumps are easy to compute */
/* add/inc/dec flag states reuse the sub jump table per size */
1758 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1761 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1773 if (s->cc_op != CC_OP_DYNAMIC)
1774 gen_op_set_cc_op(s->cc_op);
1777 gen_setcc_slow[jcc_op]();
1778 func = gen_op_jnz_T0_label;
/* chained case: emit goto_tb for both the taken and fallthrough edges */
1788 l1 = gen_new_label();
1791 gen_op_goto_tb0(TBPARAM(tb));
1792 gen_jmp_im(next_eip);
1793 gen_op_movl_T0_im((long)tb + 0);
1797 gen_op_goto_tb1(TBPARAM(tb));
1799 gen_op_movl_T0_im((long)tb + 1);
/* unchained case: compute the condition, then jump over the
   fallthrough EIP update */
1805 if (s->cc_op != CC_OP_DYNAMIC) {
1806 gen_op_set_cc_op(s->cc_op);
1807 s->cc_op = CC_OP_DYNAMIC;
1809 gen_setcc_slow[jcc_op]();
1815 l1 = gen_new_label();
1816 l2 = gen_new_label();
1817 gen_op_jnz_T0_label(l1);
1818 gen_jmp_im(next_eip);
1819 gen_op_jmp_label(l2);
/* Emit setcc: store the condition result in T0, using the same fast
   paths as gen_jcc (sub table, add reusing it) with gen_setcc_slow as
   the fallback. Elided lines presumably invert for odd b and store T0. */
1827 static void gen_setcc(DisasContext *s, int b)
1833 jcc_op = (b >> 1) & 7;
1835 /* we optimize the cmp/jcc case */
1840 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1845 /* some jumps are easy to compute */
1872 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1875 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1883 if (s->cc_op != CC_OP_DYNAMIC)
1884 gen_op_set_cc_op(s->cc_op);
1885 func = gen_setcc_slow[jcc_op];
1894 /* move T0 to seg_reg and compute if the CPU state may change. Never
1895 call this function with seg_reg == R_CS */
/* Load a segment register from T0. Protected mode goes through the full
   helper (can fault, so EIP must be up to date); real/vm86 mode just
   stores the selector. Translation is aborted afterwards where the new
   segment could change decoding state (addseg/ss32) or, for SS, to get
   the one-insn interrupt shadow right. */
1896 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1898 if (s->pe && !s->vm86) {
1899 /* XXX: optimize by finding processor state dynamically */
1900 if (s->cc_op != CC_OP_DYNAMIC)
1901 gen_op_set_cc_op(s->cc_op);
1902 gen_jmp_im(cur_eip);
1903 gen_op_movl_seg_T0(seg_reg);
1904 /* abort translation because the addseg value may change or
1905 because ss32 may change. For R_SS, translation must always
1906 stop as a special handling must be done to disable hardware
1907 interrupts for the next instruction */
1908 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1911 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1912 if (seg_reg == R_SS)
/* Adjust (E/R)SP by 'addend', using specialized micro-ops for the common
   small constants and the generic _im op otherwise.
   NOTE(review): the ss32/code64 dispatch and several branches are
   elided from this view. */
1917 static inline void gen_stack_update(DisasContext *s, int addend)
1919 #ifdef TARGET_X86_64
1922 gen_op_addq_ESP_8();
1924 gen_op_addq_ESP_im(addend);
1929 gen_op_addl_ESP_2();
1930 else if (addend == 4)
1931 gen_op_addl_ESP_4();
1933 gen_op_addl_ESP_im(addend);
1936 gen_op_addw_ESP_2();
1937 else if (addend == 4)
1938 gen_op_addw_ESP_4();
1940 gen_op_addw_ESP_im(addend);
1944 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0: compute the new stack address in A0, store T0 there, then
   commit the new (E/R)SP. T1 is used to stage the unsegmented SP value
   when a segment base must be added to A0.
   NOTE(review): braces, the pre-decrement of A0 and the #ifdef/#else
   framing are elided from this view. */
1945 static void gen_push_T0(DisasContext *s)
1947 #ifdef TARGET_X86_64
1949 /* XXX: check 16 bit behaviour */
1950 gen_op_movq_A0_reg[R_ESP]();
1952 gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
1953 gen_op_movq_ESP_A0();
1957 gen_op_movl_A0_reg[R_ESP]();
1964 gen_op_movl_T1_A0();
1965 gen_op_addl_A0_SS();
1968 gen_op_andl_A0_ffff();
1969 gen_op_movl_T1_A0();
1970 gen_op_addl_A0_SS();
1972 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1973 if (s->ss32 && !s->addseg)
1974 gen_op_movl_ESP_A0();
1976 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
1980 /* generate a push. It depends on ss32, addseg and dflag */
1981 /* slower version for T1, only used for call Ev */
/* Same as gen_push_T0 but pushes T1; ESP is updated via
   gen_stack_update instead of a staged T1 copy. */
1982 static void gen_push_T1(DisasContext *s)
1984 #ifdef TARGET_X86_64
1986 /* XXX: check 16 bit behaviour */
1987 gen_op_movq_A0_reg[R_ESP]();
1989 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
1990 gen_op_movq_ESP_A0();
1994 gen_op_movl_A0_reg[R_ESP]();
2001 gen_op_addl_A0_SS();
2004 gen_op_andl_A0_ffff();
2005 gen_op_addl_A0_SS();
2007 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2009 if (s->ss32 && !s->addseg)
2010 gen_op_movl_ESP_A0();
2012 gen_stack_update(s, (-2) << s->dflag);
2016 /* two step pop is necessary for precise exceptions */
/* Pop into T0 WITHOUT adjusting ESP; the caller invokes gen_pop_update
   after the value has been consumed, so a fault leaves ESP intact. */
2017 static void gen_pop_T0(DisasContext *s)
2019 #ifdef TARGET_X86_64
2021 /* XXX: check 16 bit behaviour */
2022 gen_op_movq_A0_reg[R_ESP]();
2023 gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
2027 gen_op_movl_A0_reg[R_ESP]();
2030 gen_op_addl_A0_SS();
2032 gen_op_andl_A0_ffff();
2033 gen_op_addl_A0_SS();
2035 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
/* second step of a pop: release the stack slot (2/4/8 bytes) */
2039 static void gen_pop_update(DisasContext *s)
2041 #ifdef TARGET_X86_64
2043 gen_stack_update(s, 8);
2047 gen_stack_update(s, 2 << s->dflag);
/* Leave the current stack top address in A0 (and the raw SP in T1),
   with the 16-bit mask and SS base applied as needed. */
2051 static void gen_stack_A0(DisasContext *s)
2053 gen_op_movl_A0_ESP();
2055 gen_op_andl_A0_ffff();
2056 gen_op_movl_T1_A0();
2058 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2061 /* NOTE: wrap around in 16 bit not fully handled */
2062 static void gen_pusha(DisasContext *s)
2065 gen_op_movl_A0_ESP();
2066 gen_op_addl_A0_im(-16 << s->dflag);
2068 gen_op_andl_A0_ffff();
2069 gen_op_movl_T1_A0();
2071 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2072 for(i = 0;i < 8; i++) {
2073 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2074 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2075 gen_op_addl_A0_im(2 << s->dflag);
2077 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
2080 /* NOTE: wrap around in 16 bit not fully handled */
/*
 * gen_popa: emit ops for POPA/POPAD -- reload the 8 general registers from
 * the stack (the on-stack image of ESP itself is skipped, see comment in
 * the loop) and finally set ESP to old ESP + 16<<dflag, prepared in T1.
 */
2081 static void gen_popa(DisasContext *s)
2084 gen_op_movl_A0_ESP();
2086 gen_op_andl_A0_ffff();
2087 gen_op_movl_T1_A0();
2088 gen_op_addl_T1_im(16 <<  s->dflag);
2090 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2091 for(i = 0;i < 8; i++) {
2092 /* ESP is not reloaded */
2094 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2095 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2097 gen_op_addl_A0_im(2 <<  s->dflag);
2099 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
/*
 * gen_enter: emit ops for the ENTER instruction -- push EBP, optionally
 * copy 'level' nested frame pointers (done by the gen_op_enter_level
 * helper), set EBP to the new frame base and reserve esp_addend bytes of
 * locals on the stack.
 * NOTE(review): the condition guarding gen_op_enter_level (level != 0 in
 * the original) is among the lines missing from this extraction.
 */
2102 static void gen_enter(DisasContext *s, int esp_addend, int level)
2106 ot = s->dflag + OT_WORD;
2108 opsize = 2 << s->dflag;
2110 gen_op_movl_A0_ESP();
2111 gen_op_addl_A0_im(-opsize);
2113 gen_op_andl_A0_ffff();
2114 gen_op_movl_T1_A0();
2116 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* push old EBP */
2118 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2119 gen_op_st_T0_A0[ot + s->mem_index]();
2121 gen_op_enter_level(level, s->dflag);
2123 gen_op_mov_reg_T1[ot][R_EBP]();
/* ESP = frame base - locals - copied frame pointers */
2124 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2125 gen_op_mov_reg_T1[ot][R_ESP]();
/*
 * gen_exception: end translation by raising CPU exception 'trapno' at
 * guest EIP 'cur_eip'.  The lazy condition-code state is flushed first so
 * the exception handler sees correct EFLAGS.
 */
2128 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2130 if (s->cc_op != CC_OP_DYNAMIC)
2131 gen_op_set_cc_op(s->cc_op);
2132 gen_jmp_im(cur_eip);
2133 gen_op_raise_exception(trapno);
2137 /* an interrupt is different from an exception because of the
2138 privilege checks */
/*
 * gen_interrupt: raise software interrupt 'intno' (e.g. from INT n).
 * cur_eip is stored as the faulting EIP; the distance to next_eip is
 * passed so the helper can compute the return address.  CC state is
 * flushed first, as for exceptions.
 */
2139 static void gen_interrupt(DisasContext *s, int intno,
2140 target_ulong cur_eip, target_ulong next_eip)
2142 if (s->cc_op != CC_OP_DYNAMIC)
2143 gen_op_set_cc_op(s->cc_op);
2144 gen_jmp_im(cur_eip);
2145 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
/*
 * gen_debug: flush CC state, set EIP to cur_eip and hand control to the
 * debug machinery (the raise op itself is on a line missing from this
 * extraction).
 */
2149 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2151 if (s->cc_op != CC_OP_DYNAMIC)
2152 gen_op_set_cc_op(s->cc_op);
2153 gen_jmp_im(cur_eip);
2158 /* generate a generic end of block. Trace exception is also generated if needed */
/*
 * gen_eob: terminate the current translation block.  Flushes lazy CC
 * state, clears the interrupt-shadow flag when this TB was translated
 * under it, and raises the single-step trap when the debugger requested
 * it.  NOTE(review): the final exit op and braces are missing from this
 * extraction.
 */
2160 static void gen_eob(DisasContext *s)
2162 if (s->cc_op != CC_OP_DYNAMIC)
2163 gen_op_set_cc_op(s->cc_op);
2164 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2165 gen_op_reset_inhibit_irq();
2167 if (s->singlestep_enabled) {
2170 gen_op_raise_exception(EXCP01_SSTP);
2178 /* generate a jump to eip. No segment change must happen before as a
2179 direct call to the next block may occur */
/*
 * gen_jmp_tb: emit a jump to guest 'eip', chaining directly to the next
 * TB via goto_tb0/goto_tb1 when possible (tb_num selects which of the
 * two chain slots is patched).  The conditions choosing between the two
 * goto ops and the slow path are missing from this extraction.
 */
2180 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2182 TranslationBlock *tb = s->tb;
2185 if (s->cc_op != CC_OP_DYNAMIC)
2186 gen_op_set_cc_op(s->cc_op);
2188 gen_op_goto_tb1(TBPARAM(tb));
2190 gen_op_goto_tb0(TBPARAM(tb));
/* T0 = tb pointer | chain slot index, consumed by the TB-exit op */
2192 gen_op_movl_T0_im((long)tb + tb_num);
/* gen_jmp: convenience wrapper -- jump to eip using chain slot 0 */
2201 static void gen_jmp(DisasContext *s, target_ulong eip)
2203 gen_jmp_tb(s, eip, 0);
/*
 * gen_movtl_T0_im: load a target_ulong immediate into T0.  On x86_64 a
 * value that fits in a sign-extended 32-bit immediate uses the short
 * movl form; otherwise the 64-bit op takes the value split into high and
 * low halves.  On 32-bit targets only the movl form exists.
 */
2206 static void gen_movtl_T0_im(target_ulong val)
2208 #ifdef TARGET_X86_64
2209 if ((int32_t)val == val) {
2210 gen_op_movl_T0_im(val);
2212 gen_op_movq_T0_im64(val >> 32, val);
2215 gen_op_movl_T0_im(val);
/*
 * Load a 64-bit quantity at A0 into the env field given as argument.
 * Indexed by s->mem_index >> 2: [0]=raw (user-only build), then
 * kernel/user MMU variants in softmmu builds.
 */
2219 static GenOpFunc1 *gen_ldq_env_A0[3] = {
2220 gen_op_ldq_raw_env_A0,
2221 #ifndef CONFIG_USER_ONLY
2222 gen_op_ldq_kernel_env_A0,
2223 gen_op_ldq_user_env_A0,
/* Store a 64-bit env field to [A0]; same raw/kernel/user indexing as gen_ldq_env_A0. */
2227 static GenOpFunc1 *gen_stq_env_A0[3] = {
2228 gen_op_stq_raw_env_A0,
2229 #ifndef CONFIG_USER_ONLY
2230 gen_op_stq_kernel_env_A0,
2231 gen_op_stq_user_env_A0,
/* Load a 128-bit (octa/XMM) quantity at A0 into an env field; raw/kernel/user. */
2235 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2236 gen_op_ldo_raw_env_A0,
2237 #ifndef CONFIG_USER_ONLY
2238 gen_op_ldo_kernel_env_A0,
2239 gen_op_ldo_user_env_A0,
/* Store a 128-bit (octa/XMM) env field to [A0]; raw/kernel/user. */
2243 static GenOpFunc1 *gen_sto_env_A0[3] = {
2244 gen_op_sto_raw_env_A0,
2245 #ifndef CONFIG_USER_ONLY
2246 gen_op_sto_kernel_env_A0,
2247 gen_op_sto_user_env_A0,
2251 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2253 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2254 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2255 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
/*
 * sse_op_table1: dispatch table for 0x0f-prefixed SSE/MMX opcodes.
 * First index: low opcode byte.  Second index: prefix variant --
 * [0] none (MMX / *ps), [1] 0x66 (*pd), [2] 0xf3 (*ss), [3] 0xf2 (*sd).
 * SSE_SPECIAL marks opcodes decoded by hand in gen_sse; NULL (implicit
 * for absent entries) means illegal for that prefix combination.
 */
2257 static GenOpFunc2 *sse_op_table1[256][4] = {
2258 /* pure SSE operations */
2259 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2260 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2261 [0x12] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2262 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2263 [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2264 [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2265 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2266 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2268 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2269 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2270 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2271 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2272 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2273 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2274 [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2275 [0x2f] = { gen_op_comiss, gen_op_comisd },
2276 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2277 [0x51] = SSE_FOP(sqrt),
2278 [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2279 [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2280 [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2281 [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2282 [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2283 [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2284 [0x58] = SSE_FOP(add),
2285 [0x59] = SSE_FOP(mul),
2286 [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
2287 gen_op_cvtss2sd, gen_op_cvtsd2ss },
2288 [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2289 [0x5c] = SSE_FOP(sub),
2290 [0x5d] = SSE_FOP(min),
2291 [0x5e] = SSE_FOP(div),
2292 [0x5f] = SSE_FOP(max),
2294 [0xc2] = SSE_FOP(cmpeq),
2295 [0xc6] = { (GenOpFunc2 *)gen_op_pshufd_xmm, (GenOpFunc2 *)gen_op_shufpd },
2297 /* MMX ops and their SSE extensions */
2298 [0x60] = MMX_OP2(punpcklbw),
2299 [0x61] = MMX_OP2(punpcklwd),
2300 [0x62] = MMX_OP2(punpckldq),
2301 [0x63] = MMX_OP2(packsswb),
2302 [0x64] = MMX_OP2(pcmpgtb),
2303 [0x65] = MMX_OP2(pcmpgtw),
2304 [0x66] = MMX_OP2(pcmpgtl),
2305 [0x67] = MMX_OP2(packuswb),
2306 [0x68] = MMX_OP2(punpckhbw),
2307 [0x69] = MMX_OP2(punpckhwd),
2308 [0x6a] = MMX_OP2(punpckhdq),
2309 [0x6b] = MMX_OP2(packssdw),
2310 [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2311 [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2312 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2313 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2314 [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
2315 (GenOpFunc2 *)gen_op_pshufd_xmm,
2316 (GenOpFunc2 *)gen_op_pshufhw_xmm,
2317 (GenOpFunc2 *)gen_op_pshuflw_xmm },
2318 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2319 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2320 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2321 [0x74] = MMX_OP2(pcmpeqb),
2322 [0x75] = MMX_OP2(pcmpeqw),
2323 [0x76] = MMX_OP2(pcmpeql),
2324 [0x77] = { SSE_SPECIAL }, /* emms */
2325 [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2326 [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2327 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2328 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2329 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2330 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2331 [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2332 [0xd1] = MMX_OP2(psrlw),
2333 [0xd2] = MMX_OP2(psrld),
2334 [0xd3] = MMX_OP2(psrlq),
2335 [0xd4] = MMX_OP2(paddq),
2336 [0xd5] = MMX_OP2(pmullw),
2337 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2338 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2339 [0xd8] = MMX_OP2(psubusb),
2340 [0xd9] = MMX_OP2(psubusw),
2341 [0xda] = MMX_OP2(pminub),
2342 [0xdb] = MMX_OP2(pand),
2343 [0xdc] = MMX_OP2(paddusb),
2344 [0xdd] = MMX_OP2(paddusw),
2345 [0xde] = MMX_OP2(pmaxub),
2346 [0xdf] = MMX_OP2(pandn),
2347 [0xe0] = MMX_OP2(pavgb),
2348 [0xe1] = MMX_OP2(psraw),
2349 [0xe2] = MMX_OP2(psrad),
2350 [0xe3] = MMX_OP2(pavgw),
2351 [0xe4] = MMX_OP2(pmulhuw),
2352 [0xe5] = MMX_OP2(pmulhw),
2353 [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2354 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
2355 [0xe8] = MMX_OP2(psubsb),
2356 [0xe9] = MMX_OP2(psubsw),
2357 [0xea] = MMX_OP2(pminsw),
2358 [0xeb] = MMX_OP2(por),
2359 [0xec] = MMX_OP2(paddsb),
2360 [0xed] = MMX_OP2(paddsw),
2361 [0xee] = MMX_OP2(pmaxsw),
2362 [0xef] = MMX_OP2(pxor),
2363 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
2364 [0xf1] = MMX_OP2(psllw),
2365 [0xf2] = MMX_OP2(pslld),
2366 [0xf3] = MMX_OP2(psllq),
2367 [0xf4] = MMX_OP2(pmuludq),
2368 [0xf5] = MMX_OP2(pmaddwd),
2369 [0xf6] = MMX_OP2(psadbw),
2370 [0xf7] = MMX_OP2(maskmov),
2371 [0xf8] = MMX_OP2(psubb),
2372 [0xf9] = MMX_OP2(psubw),
2373 [0xfa] = MMX_OP2(psubl),
2374 [0xfb] = MMX_OP2(psubq),
2375 [0xfc] = MMX_OP2(paddb),
2376 [0xfd] = MMX_OP2(paddw),
2377 [0xfe] = MMX_OP2(paddl),
/*
 * sse_op_table2: immediate-form shift group (opcodes 0x71/0x72/0x73,
 * /2 /4 /6 plus the XMM-only /3 psrldq and /7 pslldq).  First index is
 * (group * 8) + the modrm reg field; second index [0]=MMX, [1]=XMM.
 */
2380 static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2381 [0 + 2] = MMX_OP2(psrlw),
2382 [0 + 4] = MMX_OP2(psraw),
2383 [0 + 6] = MMX_OP2(psllw),
2384 [8 + 2] = MMX_OP2(psrld),
2385 [8 + 4] = MMX_OP2(psrad),
2386 [8 + 6] = MMX_OP2(pslld),
2387 [16 + 2] = MMX_OP2(psrlq),
2388 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2389 [16 + 6] = MMX_OP2(psllq),
2390 [16 + 7] = { NULL, gen_op_pslldq_xmm },
/*
 * sse_op_table3: scalar int<->float conversion helpers (cvtsi2ss/sd,
 * cvttss/sd2si, cvtss/sd2si), with the 64-bit (*sq / *2sq) variants
 * available only on x86_64 via X86_64_ONLY.
 * NOTE(review): the 32-bit entries of each group are on lines missing
 * from this extraction; only the 64-bit entries are visible.
 */
2393 static GenOpFunc1 *sse_op_table3[4 * 3] = {
2396 X86_64_ONLY(gen_op_cvtsq2ss),
2397 X86_64_ONLY(gen_op_cvtsq2sd),
2401 X86_64_ONLY(gen_op_cvttss2sq),
2402 X86_64_ONLY(gen_op_cvttsd2sq),
2406 X86_64_ONLY(gen_op_cvtss2sq),
2407 X86_64_ONLY(gen_op_cvtsd2sq),
2410 static GenOpFunc2 *sse_op_table4[8][4] = {
/*
 * gen_sse: decode and translate one MMX/SSE/SSE2/SSE3 instruction.
 * 'b' is the opcode byte (|0x100 for the 0x66-prefixed 2-byte map in the
 * caller's encoding); b1 derived from the 0x66/0xf3/0xf2 prefix selects
 * the column of sse_op_table1.  Most data movement opcodes are marked
 * SSE_SPECIAL and handled by the large switch; everything else falls
 * through to the generic two-operand dispatch at the bottom.
 *
 * NOTE(review): this chunk is a sparse extraction -- break statements,
 * braces, #else/#endif and several condition lines are missing, and the
 * byte sequence "®_addr" is mojibake for "&reg_addr" (encoding damage);
 * left byte-identical here since this edit only adds comments.
 */
2421 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2423 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2424 int modrm, mod, rm, reg, reg_addr, offset_addr;
2425 GenOpFunc2 *sse_op2;
2426 GenOpFunc3 *sse_op3;
/* b1: 0=no prefix, 1=0x66, 2=0xf3, 3=0xf2 (assignments missing here) */
2429 if (s->prefix & PREFIX_DATA)
2431 else if (s->prefix & PREFIX_REPZ)
2433 else if (s->prefix & PREFIX_REPNZ)
2437 sse_op2 = sse_op_table1[b][b1];
2440 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2450 /* simple MMX/SSE operation */
/* #NM if CR0.TS is set: lazy FPU/SSE context switch */
2451 if (s->flags & HF_TS_MASK) {
2452 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
/* #UD if CR0.EM set, or SSE used without CR4.OSFXSR */
2455 if (s->flags & HF_EM_MASK) {
2457 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2460 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2467 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2468 the static cpu state) */
2473 modrm = ldub_code(s->pc++);
2474 reg = ((modrm >> 3) & 7);
2477 mod = (modrm >> 6) & 3;
/* hand-decoded opcodes: mostly loads/stores between regs and memory */
2478 if (sse_op2 == SSE_SPECIAL) {
2481 case 0x0e7: /* movntq */
2484 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2485 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2487 case 0x1e7: /* movntdq */
2488 case 0x02b: /* movntps */
2489 case 0x12b: /* movntpd */
2490 case 0x2f0: /* lddqu */
2493 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2494 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2496 case 0x6e: /* movd mm, ea */
2497 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2498 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2500 case 0x16e: /* movd xmm, ea */
2501 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2502 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2504 case 0x6f: /* movq mm, ea */
2506 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2507 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2510 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2511 offsetof(CPUX86State,fpregs[rm].mmx));
2514 case 0x010: /* movups */
2515 case 0x110: /* movupd */
2516 case 0x028: /* movaps */
2517 case 0x128: /* movapd */
2518 case 0x16f: /* movdqa xmm, ea */
2519 case 0x26f: /* movdqu xmm, ea */
2521 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2522 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2524 rm = (modrm & 7) | REX_B(s);
2525 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2526 offsetof(CPUX86State,xmm_regs[rm]));
2529 case 0x210: /* movss xmm, ea */
2531 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2532 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2533 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
/* memory form of movss zeroes the upper three dwords */
2535 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2536 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2537 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2539 rm = (modrm & 7) | REX_B(s);
2540 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2541 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2544 case 0x310: /* movsd xmm, ea */
2546 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2547 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2549 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2550 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2552 rm = (modrm & 7) | REX_B(s);
2553 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2554 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2557 case 0x012: /* movlps */
2558 case 0x112: /* movlpd */
2560 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2561 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
/* register-register form is movhlps: low qword <- high qword of source */
2564 rm = (modrm & 7) | REX_B(s);
2565 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2566 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2569 case 0x016: /* movhps */
2570 case 0x116: /* movhpd */
2572 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2573 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
/* register-register form is movlhps: high qword <- low qword of source */
2576 rm = (modrm & 7) | REX_B(s);
2577 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2578 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2581 case 0x216: /* movshdup */
2583 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2584 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2586 rm = (modrm & 7) | REX_B(s);
2587 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2588 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2589 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2590 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
/* duplicate odd dwords into the even slots */
2592 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2593 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2594 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2595 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2597 case 0x7e: /* movd ea, mm */
2598 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2599 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2601 case 0x17e: /* movd ea, xmm */
2602 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2603 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2605 case 0x27e: /* movq xmm, ea */
2607 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2608 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2610 rm = (modrm & 7) | REX_B(s);
2611 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2612 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
/* movq always clears the destination's high qword */
2614 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2616 case 0x7f: /* movq ea, mm */
2618 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2619 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2622 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2623 offsetof(CPUX86State,fpregs[reg].mmx));
2626 case 0x011: /* movups */
2627 case 0x111: /* movupd */
2628 case 0x029: /* movaps */
2629 case 0x129: /* movapd */
2630 case 0x17f: /* movdqa ea, xmm */
2631 case 0x27f: /* movdqu ea, xmm */
2633 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2634 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2636 rm = (modrm & 7) | REX_B(s);
2637 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2638 offsetof(CPUX86State,xmm_regs[reg]));
2641 case 0x211: /* movss ea, xmm */
2643 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2644 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2645 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2647 rm = (modrm & 7) | REX_B(s);
2648 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2649 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2652 case 0x311: /* movsd ea, xmm */
2654 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2655 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2657 rm = (modrm & 7) | REX_B(s);
2658 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2659 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2662 case 0x013: /* movlps */
2663 case 0x113: /* movlpd */
2665 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2666 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2671 case 0x017: /* movhps */
2672 case 0x117: /* movhpd */
2674 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2675 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2680 case 0x71: /* shift mm, im */
2683 case 0x171: /* shift xmm, im */
/* immediate-count shifts: stash the count in xmm_t0/mmx_t0 and use it
   as the second operand of the table2 helper */
2686 val = ldub_code(s->pc++);
2688 gen_op_movl_T0_im(val);
2689 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2691 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2692 op1_offset = offsetof(CPUX86State,xmm_t0);
2694 gen_op_movl_T0_im(val);
2695 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2697 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2698 op1_offset = offsetof(CPUX86State,mmx_t0);
2700 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2704 rm = (modrm & 7) | REX_B(s);
2705 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2708 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2710 sse_op2(op2_offset, op1_offset);
2712 case 0x050: /* movmskps */
2713 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[reg]));
2714 rm = (modrm & 7) | REX_B(s);
2715 gen_op_mov_reg_T0[OT_LONG][rm]();
2717 case 0x150: /* movmskpd */
2718 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[reg]));
2719 rm = (modrm & 7) | REX_B(s);
2720 gen_op_mov_reg_T0[OT_LONG][rm]();
2722 case 0x02a: /* cvtpi2ps */
2723 case 0x12a: /* cvtpi2pd */
2726 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2727 op2_offset = offsetof(CPUX86State,mmx_t0);
2728 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2731 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2733 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2736 gen_op_cvtpi2ps(op1_offset, op2_offset);
2740 gen_op_cvtpi2pd(op1_offset, op2_offset);
2744 case 0x22a: /* cvtsi2ss */
2745 case 0x32a: /* cvtsi2sd */
2746 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2747 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2748 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2749 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2751 case 0x02c: /* cvttps2pi */
2752 case 0x12c: /* cvttpd2pi */
2753 case 0x02d: /* cvtps2pi */
2754 case 0x12d: /* cvtpd2pi */
2757 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2758 op2_offset = offsetof(CPUX86State,xmm_t0);
2759 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2761 rm = (modrm & 7) | REX_B(s);
2762 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2764 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2767 gen_op_cvttps2pi(op1_offset, op2_offset);
2770 gen_op_cvttpd2pi(op1_offset, op2_offset);
2773 gen_op_cvtps2pi(op1_offset, op2_offset);
2776 gen_op_cvtpd2pi(op1_offset, op2_offset);
2780 case 0x22c: /* cvttss2si */
2781 case 0x32c: /* cvttsd2si */
2782 case 0x22d: /* cvtss2si */
2783 case 0x32d: /* cvtsd2si */
2784 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2785 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2786 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2787 (b & 1) * 4](op1_offset);
2788 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2790 case 0xc4: /* pinsrw */
2792 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2793 val = ldub_code(s->pc++);
2796 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2799 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2802 case 0xc5: /* pextrw */
2806 val = ldub_code(s->pc++);
2809 rm = (modrm & 7) | REX_B(s);
2810 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2814 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2816 reg = ((modrm >> 3) & 7) | rex_r;
2817 gen_op_mov_reg_T0[OT_LONG][reg]();
2819 case 0x1d6: /* movq ea, xmm */
2821 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2822 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2824 rm = (modrm & 7) | REX_B(s);
2825 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2826 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2827 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2830 case 0x2d6: /* movq2dq */
2832 rm = (modrm & 7) | REX_B(s);
2833 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2834 offsetof(CPUX86State,fpregs[reg & 7].mmx));
2835 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2837 case 0x3d6: /* movdq2q */
2840 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2841 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2843 case 0xd7: /* pmovmskb */
2848 rm = (modrm & 7) | REX_B(s);
2849 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2852 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2854 reg = ((modrm >> 3) & 7) | rex_r;
2855 gen_op_mov_reg_T0[OT_LONG][reg]();
2861 /* generic MMX or SSE operation */
2863 /* maskmov : we must prepare A0 */
2866 #ifdef TARGET_X86_64
2868 gen_op_movq_A0_reg[R_EDI]();
2872 gen_op_movl_A0_reg[R_EDI]();
2874 gen_op_andl_A0_ffff();
2876 gen_add_A0_ds_seg(s);
2879 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2881 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2882 op2_offset = offsetof(CPUX86State,xmm_t0);
2883 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2885 /* specific case for SSE single instructions */
/* scalar ss form reads only 32 bits from memory */
2888 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2889 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
/* scalar sd form reads 64 bits */
2892 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2895 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2898 rm = (modrm & 7) | REX_B(s);
2899 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2902 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2904 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2905 op2_offset = offsetof(CPUX86State,mmx_t0);
2906 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2909 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
/* pshuf* and shufps/pd carry an extra immediate: call as 3-arg op */
2913 case 0x70: /* pshufx insn */
2914 case 0xc6: /* pshufx insn */
2915 val = ldub_code(s->pc++);
2916 sse_op3 = (GenOpFunc3 *)sse_op2;
2917 sse_op3(op1_offset, op2_offset, val);
2921 val = ldub_code(s->pc++);
2924 sse_op2 = sse_op_table4[val][b1];
2925 sse_op2(op1_offset, op2_offset);
2928 sse_op2(op1_offset, op2_offset);
/* (u)comiss/(u)comisd write EFLAGS directly */
2931 if (b == 0x2e || b == 0x2f) {
2932 s->cc_op = CC_OP_EFLAGS;
2938 /* convert one instruction. s->is_jmp is set if the translation must
2939 be stopped. Return the next pc value */
2940 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2942 int b, prefixes, aflag, dflag;
2944 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2945 target_ulong next_eip, tval;
2955 #ifdef TARGET_X86_64
2960 s->rip_offset = 0; /* for relative ip address */
2962 b = ldub_code(s->pc);
2964 /* check prefixes */
2965 #ifdef TARGET_X86_64
2969 prefixes |= PREFIX_REPZ;
2972 prefixes |= PREFIX_REPNZ;
2975 prefixes |= PREFIX_LOCK;
2996 prefixes |= PREFIX_DATA;
2999 prefixes |= PREFIX_ADR;
3003 rex_w = (b >> 3) & 1;
3004 rex_r = (b & 0x4) << 1;
3005 s->rex_x = (b & 0x2) << 2;
3006 REX_B(s) = (b & 0x1) << 3;
3007 x86_64_hregs = 1; /* select uniform byte register addressing */
3011 /* 0x66 is ignored if rex.w is set */
3014 if (prefixes & PREFIX_DATA)
3017 if (!(prefixes & PREFIX_ADR))
3024 prefixes |= PREFIX_REPZ;
3027 prefixes |= PREFIX_REPNZ;
3030 prefixes |= PREFIX_LOCK;
3051 prefixes |= PREFIX_DATA;
3054 prefixes |= PREFIX_ADR;
3057 if (prefixes & PREFIX_DATA)
3059 if (prefixes & PREFIX_ADR)
3063 s->prefix = prefixes;
3067 /* lock generation */
3068 if (prefixes & PREFIX_LOCK)
3071 /* now check op code */
3075 /**************************/
3076 /* extended op code */
3077 b = ldub_code(s->pc++) | 0x100;
3080 /**************************/
3098 ot = dflag + OT_WORD;
3101 case 0: /* OP Ev, Gv */
3102 modrm = ldub_code(s->pc++);
3103 reg = ((modrm >> 3) & 7) | rex_r;
3104 mod = (modrm >> 6) & 3;
3105 rm = (modrm & 7) | REX_B(s);
3107 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3109 } else if (op == OP_XORL && rm == reg) {
3111 /* xor reg, reg optimisation */
3113 s->cc_op = CC_OP_LOGICB + ot;
3114 gen_op_mov_reg_T0[ot][reg]();
3115 gen_op_update1_cc();
3120 gen_op_mov_TN_reg[ot][1][reg]();
3121 gen_op(s, op, ot, opreg);
3123 case 1: /* OP Gv, Ev */
3124 modrm = ldub_code(s->pc++);
3125 mod = (modrm >> 6) & 3;
3126 reg = ((modrm >> 3) & 7) | rex_r;
3127 rm = (modrm & 7) | REX_B(s);
3129 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3130 gen_op_ld_T1_A0[ot + s->mem_index]();
3131 } else if (op == OP_XORL && rm == reg) {
3134 gen_op_mov_TN_reg[ot][1][rm]();
3136 gen_op(s, op, ot, reg);
3138 case 2: /* OP A, Iv */
3139 val = insn_get(s, ot);
3140 gen_op_movl_T1_im(val);
3141 gen_op(s, op, ot, OR_EAX);
3147 case 0x80: /* GRP1 */
3157 ot = dflag + OT_WORD;
3159 modrm = ldub_code(s->pc++);
3160 mod = (modrm >> 6) & 3;
3161 rm = (modrm & 7) | REX_B(s);
3162 op = (modrm >> 3) & 7;
3168 s->rip_offset = insn_const_size(ot);
3169 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3180 val = insn_get(s, ot);
3183 val = (int8_t)insn_get(s, OT_BYTE);
3186 gen_op_movl_T1_im(val);
3187 gen_op(s, op, ot, opreg);
3191 /**************************/
3192 /* inc, dec, and other misc arith */
3193 case 0x40 ... 0x47: /* inc Gv */
3194 ot = dflag ? OT_LONG : OT_WORD;
3195 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3197 case 0x48 ... 0x4f: /* dec Gv */
3198 ot = dflag ? OT_LONG : OT_WORD;
3199 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3201 case 0xf6: /* GRP3 */
3206 ot = dflag + OT_WORD;
3208 modrm = ldub_code(s->pc++);
3209 mod = (modrm >> 6) & 3;
3210 rm = (modrm & 7) | REX_B(s);
3211 op = (modrm >> 3) & 7;
3214 s->rip_offset = insn_const_size(ot);
3215 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3216 gen_op_ld_T0_A0[ot + s->mem_index]();
3218 gen_op_mov_TN_reg[ot][0][rm]();
3223 val = insn_get(s, ot);
3224 gen_op_movl_T1_im(val);
3225 gen_op_testl_T0_T1_cc();
3226 s->cc_op = CC_OP_LOGICB + ot;
3231 gen_op_st_T0_A0[ot + s->mem_index]();
3233 gen_op_mov_reg_T0[ot][rm]();
3239 gen_op_st_T0_A0[ot + s->mem_index]();
3241 gen_op_mov_reg_T0[ot][rm]();
3243 gen_op_update_neg_cc();
3244 s->cc_op = CC_OP_SUBB + ot;
3249 gen_op_mulb_AL_T0();
3250 s->cc_op = CC_OP_MULB;
3253 gen_op_mulw_AX_T0();
3254 s->cc_op = CC_OP_MULW;
3258 gen_op_mull_EAX_T0();
3259 s->cc_op = CC_OP_MULL;
3261 #ifdef TARGET_X86_64
3263 gen_op_mulq_EAX_T0();
3264 s->cc_op = CC_OP_MULQ;
3272 gen_op_imulb_AL_T0();
3273 s->cc_op = CC_OP_MULB;
3276 gen_op_imulw_AX_T0();
3277 s->cc_op = CC_OP_MULW;
3281 gen_op_imull_EAX_T0();
3282 s->cc_op = CC_OP_MULL;
3284 #ifdef TARGET_X86_64
3286 gen_op_imulq_EAX_T0();
3287 s->cc_op = CC_OP_MULQ;
3295 gen_jmp_im(pc_start - s->cs_base);
3296 gen_op_divb_AL_T0();
3299 gen_jmp_im(pc_start - s->cs_base);
3300 gen_op_divw_AX_T0();
3304 gen_jmp_im(pc_start - s->cs_base);
3305 gen_op_divl_EAX_T0();
3307 #ifdef TARGET_X86_64
3309 gen_jmp_im(pc_start - s->cs_base);
3310 gen_op_divq_EAX_T0();
3318 gen_jmp_im(pc_start - s->cs_base);
3319 gen_op_idivb_AL_T0();
3322 gen_jmp_im(pc_start - s->cs_base);
3323 gen_op_idivw_AX_T0();
3327 gen_jmp_im(pc_start - s->cs_base);
3328 gen_op_idivl_EAX_T0();
3330 #ifdef TARGET_X86_64
3332 gen_jmp_im(pc_start - s->cs_base);
3333 gen_op_idivq_EAX_T0();
3343 case 0xfe: /* GRP4 */
3344 case 0xff: /* GRP5 */
3348 ot = dflag + OT_WORD;
3350 modrm = ldub_code(s->pc++);
3351 mod = (modrm >> 6) & 3;
3352 rm = (modrm & 7) | REX_B(s);
3353 op = (modrm >> 3) & 7;
3354 if (op >= 2 && b == 0xfe) {
3358 if (op >= 2 && op <= 5) {
3359 /* operand size for jumps is 64 bit */
3361 } else if (op == 6) {
3362 /* default push size is 64 bit */
3363 ot = dflag ? OT_QUAD : OT_WORD;
3367 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3368 if (op >= 2 && op != 3 && op != 5)
3369 gen_op_ld_T0_A0[ot + s->mem_index]();
3371 gen_op_mov_TN_reg[ot][0][rm]();
3375 case 0: /* inc Ev */
3380 gen_inc(s, ot, opreg, 1);
3382 case 1: /* dec Ev */
3387 gen_inc(s, ot, opreg, -1);
3389 case 2: /* call Ev */
3390 /* XXX: optimize if memory (no 'and' is necessary) */
3392 gen_op_andl_T0_ffff();
3393 next_eip = s->pc - s->cs_base;
3394 gen_op_movl_T1_im(next_eip);
3399 case 3: /* lcall Ev */
3400 gen_op_ld_T1_A0[ot + s->mem_index]();
3401 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3402 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3404 if (s->pe && !s->vm86) {
3405 if (s->cc_op != CC_OP_DYNAMIC)
3406 gen_op_set_cc_op(s->cc_op);
3407 gen_jmp_im(pc_start - s->cs_base);
3408 gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
3410 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3414 case 4: /* jmp Ev */
3416 gen_op_andl_T0_ffff();
3420 case 5: /* ljmp Ev */
3421 gen_op_ld_T1_A0[ot + s->mem_index]();
3422 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3423 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3425 if (s->pe && !s->vm86) {
3426 if (s->cc_op != CC_OP_DYNAMIC)
3427 gen_op_set_cc_op(s->cc_op);
3428 gen_jmp_im(pc_start - s->cs_base);
3429 gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
3431 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3432 gen_op_movl_T0_T1();
3437 case 6: /* push Ev */
3445 case 0x84: /* test Ev, Gv */
3450 ot = dflag + OT_WORD;
3452 modrm = ldub_code(s->pc++);
3453 mod = (modrm >> 6) & 3;
3454 rm = (modrm & 7) | REX_B(s);
3455 reg = ((modrm >> 3) & 7) | rex_r;
3457 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3458 gen_op_mov_TN_reg[ot][1][reg]();
3459 gen_op_testl_T0_T1_cc();
3460 s->cc_op = CC_OP_LOGICB + ot;
3463 case 0xa8: /* test eAX, Iv */
3468 ot = dflag + OT_WORD;
3469 val = insn_get(s, ot);
3471 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3472 gen_op_movl_T1_im(val);
3473 gen_op_testl_T0_T1_cc();
3474 s->cc_op = CC_OP_LOGICB + ot;
3477 case 0x98: /* CWDE/CBW */
3478 #ifdef TARGET_X86_64
3480 gen_op_movslq_RAX_EAX();
3484 gen_op_movswl_EAX_AX();
3486 gen_op_movsbw_AX_AL();
3488 case 0x99: /* CDQ/CWD */
3489 #ifdef TARGET_X86_64
3491 gen_op_movsqo_RDX_RAX();
3495 gen_op_movslq_EDX_EAX();
3497 gen_op_movswl_DX_AX();
3499 case 0x1af: /* imul Gv, Ev */
3500 case 0x69: /* imul Gv, Ev, I */
3502 ot = dflag + OT_WORD;
3503 modrm = ldub_code(s->pc++);
3504 reg = ((modrm >> 3) & 7) | rex_r;
3506 s->rip_offset = insn_const_size(ot);
3509 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3511 val = insn_get(s, ot);
3512 gen_op_movl_T1_im(val);
3513 } else if (b == 0x6b) {
3514 val = (int8_t)insn_get(s, OT_BYTE);
3515 gen_op_movl_T1_im(val);
3517 gen_op_mov_TN_reg[ot][1][reg]();
3520 #ifdef TARGET_X86_64
3521 if (ot == OT_QUAD) {
3522 gen_op_imulq_T0_T1();
3525 if (ot == OT_LONG) {
3526 gen_op_imull_T0_T1();
3528 gen_op_imulw_T0_T1();
3530 gen_op_mov_reg_T0[ot][reg]();
3531 s->cc_op = CC_OP_MULB + ot;
3534 case 0x1c1: /* xadd Ev, Gv */
3538 ot = dflag + OT_WORD;
3539 modrm = ldub_code(s->pc++);
3540 reg = ((modrm >> 3) & 7) | rex_r;
3541 mod = (modrm >> 6) & 3;
3543 rm = (modrm & 7) | REX_B(s);
3544 gen_op_mov_TN_reg[ot][0][reg]();
3545 gen_op_mov_TN_reg[ot][1][rm]();
3546 gen_op_addl_T0_T1();
3547 gen_op_mov_reg_T1[ot][reg]();
3548 gen_op_mov_reg_T0[ot][rm]();
3550 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3551 gen_op_mov_TN_reg[ot][0][reg]();
3552 gen_op_ld_T1_A0[ot + s->mem_index]();
3553 gen_op_addl_T0_T1();
3554 gen_op_st_T0_A0[ot + s->mem_index]();
3555 gen_op_mov_reg_T1[ot][reg]();
3557 gen_op_update2_cc();
3558 s->cc_op = CC_OP_ADDB + ot;
3561 case 0x1b1: /* cmpxchg Ev, Gv */
3565 ot = dflag + OT_WORD;
3566 modrm = ldub_code(s->pc++);
3567 reg = ((modrm >> 3) & 7) | rex_r;
3568 mod = (modrm >> 6) & 3;
3569 gen_op_mov_TN_reg[ot][1][reg]();
3571 rm = (modrm & 7) | REX_B(s);
3572 gen_op_mov_TN_reg[ot][0][rm]();
3573 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3574 gen_op_mov_reg_T0[ot][rm]();
3576 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3577 gen_op_ld_T0_A0[ot + s->mem_index]();
3578 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3580 s->cc_op = CC_OP_SUBB + ot;
3582 case 0x1c7: /* cmpxchg8b */
3583 modrm = ldub_code(s->pc++);
3584 mod = (modrm >> 6) & 3;
3587 if (s->cc_op != CC_OP_DYNAMIC)
3588 gen_op_set_cc_op(s->cc_op);
3589 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3591 s->cc_op = CC_OP_EFLAGS;
3594 /**************************/
3596 case 0x50 ... 0x57: /* push */
3597 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3600 case 0x58 ... 0x5f: /* pop */
3602 ot = dflag ? OT_QUAD : OT_WORD;
3604 ot = dflag + OT_WORD;
3607 /* NOTE: order is important for pop %sp */
3609 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3611 case 0x60: /* pusha */
3616 case 0x61: /* popa */
3621 case 0x68: /* push Iv */
3624 ot = dflag ? OT_QUAD : OT_WORD;
3626 ot = dflag + OT_WORD;
3629 val = insn_get(s, ot);
3631 val = (int8_t)insn_get(s, OT_BYTE);
3632 gen_op_movl_T0_im(val);
3635 case 0x8f: /* pop Ev */
3637 ot = dflag ? OT_QUAD : OT_WORD;
3639 ot = dflag + OT_WORD;
3641 modrm = ldub_code(s->pc++);
3642 mod = (modrm >> 6) & 3;
3645 /* NOTE: order is important for pop %sp */
3647 rm = (modrm & 7) | REX_B(s);
3648 gen_op_mov_reg_T0[ot][rm]();
3650 /* NOTE: order is important too for MMU exceptions */
3651 s->popl_esp_hack = 1 << ot;
3652 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3653 s->popl_esp_hack = 0;
3657 case 0xc8: /* enter */
3659 /* XXX: long mode support */
3661 val = lduw_code(s->pc);
3663 level = ldub_code(s->pc++);
3664 gen_enter(s, val, level);
3667 case 0xc9: /* leave */
3668 /* XXX: exception not precise (ESP is updated before potential exception) */
3669 /* XXX: may be invalid for 16 bit in long mode */
3671 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3672 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3673 } else if (s->ss32) {
3674 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3675 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3677 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3678 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3682 ot = dflag ? OT_QUAD : OT_WORD;
3684 ot = dflag + OT_WORD;
3686 gen_op_mov_reg_T0[ot][R_EBP]();
3689 case 0x06: /* push es */
3690 case 0x0e: /* push cs */
3691 case 0x16: /* push ss */
3692 case 0x1e: /* push ds */
3695 gen_op_movl_T0_seg(b >> 3);
3698 case 0x1a0: /* push fs */
3699 case 0x1a8: /* push gs */
3700 gen_op_movl_T0_seg((b >> 3) & 7);
3703 case 0x07: /* pop es */
3704 case 0x17: /* pop ss */
3705 case 0x1f: /* pop ds */
3710 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3713 /* if reg == SS, inhibit interrupts/trace. */
3714 /* If several instructions disable interrupts, only the
3716 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3717 gen_op_set_inhibit_irq();
3721 gen_jmp_im(s->pc - s->cs_base);
3725 case 0x1a1: /* pop fs */
3726 case 0x1a9: /* pop gs */
3728 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3731 gen_jmp_im(s->pc - s->cs_base);
3736 /**************************/
3739 case 0x89: /* mov Gv, Ev */
3743 ot = dflag + OT_WORD;
3744 modrm = ldub_code(s->pc++);
3745 reg = ((modrm >> 3) & 7) | rex_r;
3747 /* generate a generic store */
3748 gen_ldst_modrm(s, modrm, ot, reg, 1);
3751 case 0xc7: /* mov Ev, Iv */
3755 ot = dflag + OT_WORD;
3756 modrm = ldub_code(s->pc++);
3757 mod = (modrm >> 6) & 3;
3759 s->rip_offset = insn_const_size(ot);
3760 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3762 val = insn_get(s, ot);
3763 gen_op_movl_T0_im(val);
3765 gen_op_st_T0_A0[ot + s->mem_index]();
3767 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3770 case 0x8b: /* mov Ev, Gv */
3774 ot = OT_WORD + dflag;
3775 modrm = ldub_code(s->pc++);
3776 reg = ((modrm >> 3) & 7) | rex_r;
3778 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3779 gen_op_mov_reg_T0[ot][reg]();
3781 case 0x8e: /* mov seg, Gv */
3782 modrm = ldub_code(s->pc++);
3783 reg = (modrm >> 3) & 7;
3784 if (reg >= 6 || reg == R_CS)
3786 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3787 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3789 /* if reg == SS, inhibit interrupts/trace */
3790 /* If several instructions disable interrupts, only the
3792 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3793 gen_op_set_inhibit_irq();
3797 gen_jmp_im(s->pc - s->cs_base);
3801 case 0x8c: /* mov Gv, seg */
3802 modrm = ldub_code(s->pc++);
3803 reg = (modrm >> 3) & 7;
3804 mod = (modrm >> 6) & 3;
3807 gen_op_movl_T0_seg(reg);
3809 ot = OT_WORD + dflag;
3812 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3815 case 0x1b6: /* movzbS Gv, Eb */
3816 case 0x1b7: /* movzwS Gv, Eb */
3817 case 0x1be: /* movsbS Gv, Eb */
3818 case 0x1bf: /* movswS Gv, Eb */
3821 /* d_ot is the size of destination */
3822 d_ot = dflag + OT_WORD;
3823 /* ot is the size of source */
3824 ot = (b & 1) + OT_BYTE;
3825 modrm = ldub_code(s->pc++);
3826 reg = ((modrm >> 3) & 7) | rex_r;
3827 mod = (modrm >> 6) & 3;
3828 rm = (modrm & 7) | REX_B(s);
3831 gen_op_mov_TN_reg[ot][0][rm]();
3832 switch(ot | (b & 8)) {
3834 gen_op_movzbl_T0_T0();
3837 gen_op_movsbl_T0_T0();
3840 gen_op_movzwl_T0_T0();
3844 gen_op_movswl_T0_T0();
3847 gen_op_mov_reg_T0[d_ot][reg]();
3849 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3851 gen_op_lds_T0_A0[ot + s->mem_index]();
3853 gen_op_ldu_T0_A0[ot + s->mem_index]();
3855 gen_op_mov_reg_T0[d_ot][reg]();
3860 case 0x8d: /* lea */
3861 ot = dflag + OT_WORD;
3862 modrm = ldub_code(s->pc++);
3863 mod = (modrm >> 6) & 3;
3866 reg = ((modrm >> 3) & 7) | rex_r;
3867 /* we must ensure that no segment is added */
3871 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3873 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3876 case 0xa0: /* mov EAX, Ov */
3878 case 0xa2: /* mov Ov, EAX */
3881 target_ulong offset_addr;
3886 ot = dflag + OT_WORD;
3887 #ifdef TARGET_X86_64
3889 offset_addr = ldq_code(s->pc);
3891 if (offset_addr == (int32_t)offset_addr)
3892 gen_op_movq_A0_im(offset_addr);
3894 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3899 offset_addr = insn_get(s, OT_LONG);
3901 offset_addr = insn_get(s, OT_WORD);
3903 gen_op_movl_A0_im(offset_addr);
3905 gen_add_A0_ds_seg(s);
3907 gen_op_ld_T0_A0[ot + s->mem_index]();
3908 gen_op_mov_reg_T0[ot][R_EAX]();
3910 gen_op_mov_TN_reg[ot][0][R_EAX]();
3911 gen_op_st_T0_A0[ot + s->mem_index]();
3915 case 0xd7: /* xlat */
3916 #ifdef TARGET_X86_64
3918 gen_op_movq_A0_reg[R_EBX]();
3919 gen_op_addq_A0_AL();
3923 gen_op_movl_A0_reg[R_EBX]();
3924 gen_op_addl_A0_AL();
3926 gen_op_andl_A0_ffff();
3928 gen_add_A0_ds_seg(s);
3929 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3930 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3932 case 0xb0 ... 0xb7: /* mov R, Ib */
3933 val = insn_get(s, OT_BYTE);
3934 gen_op_movl_T0_im(val);
3935 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3937 case 0xb8 ... 0xbf: /* mov R, Iv */
3938 #ifdef TARGET_X86_64
3942 tmp = ldq_code(s->pc);
3944 reg = (b & 7) | REX_B(s);
3945 gen_movtl_T0_im(tmp);
3946 gen_op_mov_reg_T0[OT_QUAD][reg]();
3950 ot = dflag ? OT_LONG : OT_WORD;
3951 val = insn_get(s, ot);
3952 reg = (b & 7) | REX_B(s);
3953 gen_op_movl_T0_im(val);
3954 gen_op_mov_reg_T0[ot][reg]();
3958 case 0x91 ... 0x97: /* xchg R, EAX */
3959 ot = dflag + OT_WORD;
3960 reg = (b & 7) | REX_B(s);
3964 case 0x87: /* xchg Ev, Gv */
3968 ot = dflag + OT_WORD;
3969 modrm = ldub_code(s->pc++);
3970 reg = ((modrm >> 3) & 7) | rex_r;
3971 mod = (modrm >> 6) & 3;
3973 rm = (modrm & 7) | REX_B(s);
3975 gen_op_mov_TN_reg[ot][0][reg]();
3976 gen_op_mov_TN_reg[ot][1][rm]();
3977 gen_op_mov_reg_T0[ot][rm]();
3978 gen_op_mov_reg_T1[ot][reg]();
3980 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3981 gen_op_mov_TN_reg[ot][0][reg]();
3982 /* for xchg, lock is implicit */
3983 if (!(prefixes & PREFIX_LOCK))
3985 gen_op_ld_T1_A0[ot + s->mem_index]();
3986 gen_op_st_T0_A0[ot + s->mem_index]();
3987 if (!(prefixes & PREFIX_LOCK))
3989 gen_op_mov_reg_T1[ot][reg]();
3992 case 0xc4: /* les Gv */
3997 case 0xc5: /* lds Gv */
4002 case 0x1b2: /* lss Gv */
4005 case 0x1b4: /* lfs Gv */
4008 case 0x1b5: /* lgs Gv */
4011 ot = dflag ? OT_LONG : OT_WORD;
4012 modrm = ldub_code(s->pc++);
4013 reg = ((modrm >> 3) & 7) | rex_r;
4014 mod = (modrm >> 6) & 3;
4017 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4018 gen_op_ld_T1_A0[ot + s->mem_index]();
4019 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
4020 /* load the segment first to handle exceptions properly */
4021 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4022 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4023 /* then put the data */
4024 gen_op_mov_reg_T1[ot][reg]();
4026 gen_jmp_im(s->pc - s->cs_base);
4031 /************************/
4042 ot = dflag + OT_WORD;
4044 modrm = ldub_code(s->pc++);
4045 mod = (modrm >> 6) & 3;
4046 op = (modrm >> 3) & 7;
4052 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4055 opreg = (modrm & 7) | REX_B(s);
4060 gen_shift(s, op, ot, opreg, OR_ECX);
4063 shift = ldub_code(s->pc++);
4065 gen_shifti(s, op, ot, opreg, shift);
4080 case 0x1a4: /* shld imm */
4084 case 0x1a5: /* shld cl */
4088 case 0x1ac: /* shrd imm */
4092 case 0x1ad: /* shrd cl */
4096 ot = dflag + OT_WORD;
4097 modrm = ldub_code(s->pc++);
4098 mod = (modrm >> 6) & 3;
4099 rm = (modrm & 7) | REX_B(s);
4100 reg = ((modrm >> 3) & 7) | rex_r;
4103 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4104 gen_op_ld_T0_A0[ot + s->mem_index]();
4106 gen_op_mov_TN_reg[ot][0][rm]();
4108 gen_op_mov_TN_reg[ot][1][reg]();
4111 val = ldub_code(s->pc++);
4118 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4120 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4121 if (op == 0 && ot != OT_WORD)
4122 s->cc_op = CC_OP_SHLB + ot;
4124 s->cc_op = CC_OP_SARB + ot;
4127 if (s->cc_op != CC_OP_DYNAMIC)
4128 gen_op_set_cc_op(s->cc_op);
4130 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4132 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4133 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4136 gen_op_mov_reg_T0[ot][rm]();
4140 /************************/
4143 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4144 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4145 /* XXX: what to do if illegal op ? */
4146 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4149 modrm = ldub_code(s->pc++);
4150 mod = (modrm >> 6) & 3;
4152 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4155 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4157 case 0x00 ... 0x07: /* fxxxs */
4158 case 0x10 ... 0x17: /* fixxxl */
4159 case 0x20 ... 0x27: /* fxxxl */
4160 case 0x30 ... 0x37: /* fixxx */
4167 gen_op_flds_FT0_A0();
4170 gen_op_fildl_FT0_A0();
4173 gen_op_fldl_FT0_A0();
4177 gen_op_fild_FT0_A0();
4181 gen_op_fp_arith_ST0_FT0[op1]();
4183 /* fcomp needs pop */
4188 case 0x08: /* flds */
4189 case 0x0a: /* fsts */
4190 case 0x0b: /* fstps */
4191 case 0x18: /* fildl */
4192 case 0x1a: /* fistl */
4193 case 0x1b: /* fistpl */
4194 case 0x28: /* fldl */
4195 case 0x2a: /* fstl */
4196 case 0x2b: /* fstpl */
4197 case 0x38: /* filds */
4198 case 0x3a: /* fists */
4199 case 0x3b: /* fistps */
4205 gen_op_flds_ST0_A0();
4208 gen_op_fildl_ST0_A0();
4211 gen_op_fldl_ST0_A0();
4215 gen_op_fild_ST0_A0();
4222 gen_op_fsts_ST0_A0();
4225 gen_op_fistl_ST0_A0();
4228 gen_op_fstl_ST0_A0();
4232 gen_op_fist_ST0_A0();
4240 case 0x0c: /* fldenv mem */
4241 gen_op_fldenv_A0(s->dflag);
4243 case 0x0d: /* fldcw mem */
4246 case 0x0e: /* fnstenv mem */
4247 gen_op_fnstenv_A0(s->dflag);
4249 case 0x0f: /* fnstcw mem */
4252 case 0x1d: /* fldt mem */
4253 gen_op_fldt_ST0_A0();
4255 case 0x1f: /* fstpt mem */
4256 gen_op_fstt_ST0_A0();
4259 case 0x2c: /* frstor mem */
4260 gen_op_frstor_A0(s->dflag);
4262 case 0x2e: /* fnsave mem */
4263 gen_op_fnsave_A0(s->dflag);
4265 case 0x2f: /* fnstsw mem */
4268 case 0x3c: /* fbld */
4269 gen_op_fbld_ST0_A0();
4271 case 0x3e: /* fbstp */
4272 gen_op_fbst_ST0_A0();
4275 case 0x3d: /* fildll */
4276 gen_op_fildll_ST0_A0();
4278 case 0x3f: /* fistpll */
4279 gen_op_fistll_ST0_A0();
4286 /* register float ops */
4290 case 0x08: /* fld sti */
4292 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4294 case 0x09: /* fxchg sti */
4295 case 0x29: /* fxchg4 sti, undocumented op */
4296 case 0x39: /* fxchg7 sti, undocumented op */
4297 gen_op_fxchg_ST0_STN(opreg);
4299 case 0x0a: /* grp d9/2 */
4302 /* check exceptions (FreeBSD FPU probe) */
4303 if (s->cc_op != CC_OP_DYNAMIC)
4304 gen_op_set_cc_op(s->cc_op);
4305 gen_jmp_im(pc_start - s->cs_base);
4312 case 0x0c: /* grp d9/4 */
4322 gen_op_fcom_ST0_FT0();
4331 case 0x0d: /* grp d9/5 */
4340 gen_op_fldl2t_ST0();
4344 gen_op_fldl2e_ST0();
4352 gen_op_fldlg2_ST0();
4356 gen_op_fldln2_ST0();
4367 case 0x0e: /* grp d9/6 */
4378 case 3: /* fpatan */
4381 case 4: /* fxtract */
4384 case 5: /* fprem1 */
4387 case 6: /* fdecstp */
4391 case 7: /* fincstp */
4396 case 0x0f: /* grp d9/7 */
4401 case 1: /* fyl2xp1 */
4407 case 3: /* fsincos */
4410 case 5: /* fscale */
4413 case 4: /* frndint */
4425 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4426 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4427 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4433 gen_op_fp_arith_STN_ST0[op1](opreg);
4437 gen_op_fmov_FT0_STN(opreg);
4438 gen_op_fp_arith_ST0_FT0[op1]();
4442 case 0x02: /* fcom */
4443 case 0x22: /* fcom2, undocumented op */
4444 gen_op_fmov_FT0_STN(opreg);
4445 gen_op_fcom_ST0_FT0();
4447 case 0x03: /* fcomp */
4448 case 0x23: /* fcomp3, undocumented op */
4449 case 0x32: /* fcomp5, undocumented op */
4450 gen_op_fmov_FT0_STN(opreg);
4451 gen_op_fcom_ST0_FT0();
4454 case 0x15: /* da/5 */
4456 case 1: /* fucompp */
4457 gen_op_fmov_FT0_STN(1);
4458 gen_op_fucom_ST0_FT0();
4468 case 0: /* feni (287 only, just do nop here) */
4470 case 1: /* fdisi (287 only, just do nop here) */
4475 case 3: /* fninit */
4478 case 4: /* fsetpm (287 only, just do nop here) */
4484 case 0x1d: /* fucomi */
4485 if (s->cc_op != CC_OP_DYNAMIC)
4486 gen_op_set_cc_op(s->cc_op);
4487 gen_op_fmov_FT0_STN(opreg);
4488 gen_op_fucomi_ST0_FT0();
4489 s->cc_op = CC_OP_EFLAGS;
4491 case 0x1e: /* fcomi */
4492 if (s->cc_op != CC_OP_DYNAMIC)
4493 gen_op_set_cc_op(s->cc_op);
4494 gen_op_fmov_FT0_STN(opreg);
4495 gen_op_fcomi_ST0_FT0();
4496 s->cc_op = CC_OP_EFLAGS;
4498 case 0x28: /* ffree sti */
4499 gen_op_ffree_STN(opreg);
4501 case 0x2a: /* fst sti */
4502 gen_op_fmov_STN_ST0(opreg);
4504 case 0x2b: /* fstp sti */
4505 case 0x0b: /* fstp1 sti, undocumented op */
4506 case 0x3a: /* fstp8 sti, undocumented op */
4507 case 0x3b: /* fstp9 sti, undocumented op */
4508 gen_op_fmov_STN_ST0(opreg);
4511 case 0x2c: /* fucom st(i) */
4512 gen_op_fmov_FT0_STN(opreg);
4513 gen_op_fucom_ST0_FT0();
4515 case 0x2d: /* fucomp st(i) */
4516 gen_op_fmov_FT0_STN(opreg);
4517 gen_op_fucom_ST0_FT0();
4520 case 0x33: /* de/3 */
4522 case 1: /* fcompp */
4523 gen_op_fmov_FT0_STN(1);
4524 gen_op_fcom_ST0_FT0();
4532 case 0x38: /* ffreep sti, undocumented op */
4533 gen_op_ffree_STN(opreg);
4536 case 0x3c: /* df/4 */
4539 gen_op_fnstsw_EAX();
4545 case 0x3d: /* fucomip */
4546 if (s->cc_op != CC_OP_DYNAMIC)
4547 gen_op_set_cc_op(s->cc_op);
4548 gen_op_fmov_FT0_STN(opreg);
4549 gen_op_fucomi_ST0_FT0();
4551 s->cc_op = CC_OP_EFLAGS;
4553 case 0x3e: /* fcomip */
4554 if (s->cc_op != CC_OP_DYNAMIC)
4555 gen_op_set_cc_op(s->cc_op);
4556 gen_op_fmov_FT0_STN(opreg);
4557 gen_op_fcomi_ST0_FT0();
4559 s->cc_op = CC_OP_EFLAGS;
4561 case 0x10 ... 0x13: /* fcmovxx */
4565 const static uint8_t fcmov_cc[8] = {
4571 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4573 gen_op_fcmov_ST0_STN_T0(opreg);
4580 #ifdef USE_CODE_COPY
4581 s->tb->cflags |= CF_TB_FP_USED;
4584 /************************/
4587 case 0xa4: /* movsS */
4592 ot = dflag + OT_WORD;
4594 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4595 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4601 case 0xaa: /* stosS */
4606 ot = dflag + OT_WORD;
4608 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4609 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4614 case 0xac: /* lodsS */
4619 ot = dflag + OT_WORD;
4620 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4621 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4626 case 0xae: /* scasS */
4631 ot = dflag + OT_WORD;
4632 if (prefixes & PREFIX_REPNZ) {
4633 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4634 } else if (prefixes & PREFIX_REPZ) {
4635 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4638 s->cc_op = CC_OP_SUBB + ot;
4642 case 0xa6: /* cmpsS */
4647 ot = dflag + OT_WORD;
4648 if (prefixes & PREFIX_REPNZ) {
4649 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4650 } else if (prefixes & PREFIX_REPZ) {
4651 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4654 s->cc_op = CC_OP_SUBB + ot;
4657 case 0x6c: /* insS */
4662 ot = dflag ? OT_LONG : OT_WORD;
4663 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4664 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4665 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4670 case 0x6e: /* outsS */
4675 ot = dflag ? OT_LONG : OT_WORD;
4676 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4677 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4678 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4684 /************************/
4691 ot = dflag ? OT_LONG : OT_WORD;
4692 val = ldub_code(s->pc++);
4693 gen_op_movl_T0_im(val);
4694 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4696 gen_op_mov_reg_T1[ot][R_EAX]();
4703 ot = dflag ? OT_LONG : OT_WORD;
4704 val = ldub_code(s->pc++);
4705 gen_op_movl_T0_im(val);
4706 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4707 gen_op_mov_TN_reg[ot][1][R_EAX]();
4715 ot = dflag ? OT_LONG : OT_WORD;
4716 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4717 gen_op_andl_T0_ffff();
4718 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4720 gen_op_mov_reg_T1[ot][R_EAX]();
4727 ot = dflag ? OT_LONG : OT_WORD;
4728 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4729 gen_op_andl_T0_ffff();
4730 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4731 gen_op_mov_TN_reg[ot][1][R_EAX]();
4735 /************************/
4737 case 0xc2: /* ret im */
4738 val = ldsw_code(s->pc);
4741 gen_stack_update(s, val + (2 << s->dflag));
4743 gen_op_andl_T0_ffff();
4747 case 0xc3: /* ret */
4751 gen_op_andl_T0_ffff();
4755 case 0xca: /* lret im */
4756 val = ldsw_code(s->pc);
4759 if (s->pe && !s->vm86) {
4760 if (s->cc_op != CC_OP_DYNAMIC)
4761 gen_op_set_cc_op(s->cc_op);
4762 gen_jmp_im(pc_start - s->cs_base);
4763 gen_op_lret_protected(s->dflag, val);
4767 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4769 gen_op_andl_T0_ffff();
4770 /* NOTE: keeping EIP updated is not a problem in case of
4774 gen_op_addl_A0_im(2 << s->dflag);
4775 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4776 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4777 /* add stack offset */
4778 gen_stack_update(s, val + (4 << s->dflag));
4782 case 0xcb: /* lret */
4785 case 0xcf: /* iret */
4788 gen_op_iret_real(s->dflag);
4789 s->cc_op = CC_OP_EFLAGS;
4790 } else if (s->vm86) {
4792 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4794 gen_op_iret_real(s->dflag);
4795 s->cc_op = CC_OP_EFLAGS;
4798 if (s->cc_op != CC_OP_DYNAMIC)
4799 gen_op_set_cc_op(s->cc_op);
4800 gen_jmp_im(pc_start - s->cs_base);
4801 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4802 s->cc_op = CC_OP_EFLAGS;
4806 case 0xe8: /* call im */
4809 tval = (int32_t)insn_get(s, OT_LONG);
4811 tval = (int16_t)insn_get(s, OT_WORD);
4812 next_eip = s->pc - s->cs_base;
4816 gen_movtl_T0_im(next_eip);
4821 case 0x9a: /* lcall im */
4823 unsigned int selector, offset;
4827 ot = dflag ? OT_LONG : OT_WORD;
4828 offset = insn_get(s, ot);
4829 selector = insn_get(s, OT_WORD);
4831 gen_op_movl_T0_im(selector);
4832 gen_op_movl_T1_imu(offset);
4835 case 0xe9: /* jmp */
4837 tval = (int32_t)insn_get(s, OT_LONG);
4839 tval = (int16_t)insn_get(s, OT_WORD);
4840 tval += s->pc - s->cs_base;
4845 case 0xea: /* ljmp im */
4847 unsigned int selector, offset;
4851 ot = dflag ? OT_LONG : OT_WORD;
4852 offset = insn_get(s, ot);
4853 selector = insn_get(s, OT_WORD);
4855 gen_op_movl_T0_im(selector);
4856 gen_op_movl_T1_imu(offset);
4859 case 0xeb: /* jmp Jb */
4860 tval = (int8_t)insn_get(s, OT_BYTE);
4861 tval += s->pc - s->cs_base;
4866 case 0x70 ... 0x7f: /* jcc Jb */
4867 tval = (int8_t)insn_get(s, OT_BYTE);
4869 case 0x180 ... 0x18f: /* jcc Jv */
4871 tval = (int32_t)insn_get(s, OT_LONG);
4873 tval = (int16_t)insn_get(s, OT_WORD);
4876 next_eip = s->pc - s->cs_base;
4880 gen_jcc(s, b, tval, next_eip);
4883 case 0x190 ... 0x19f: /* setcc Gv */
4884 modrm = ldub_code(s->pc++);
4886 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4888 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4889 ot = dflag + OT_WORD;
4890 modrm = ldub_code(s->pc++);
4891 reg = ((modrm >> 3) & 7) | rex_r;
4892 mod = (modrm >> 6) & 3;
4895 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4896 gen_op_ld_T1_A0[ot + s->mem_index]();
4898 rm = (modrm & 7) | REX_B(s);
4899 gen_op_mov_TN_reg[ot][1][rm]();
4901 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4904 /************************/
4906 case 0x9c: /* pushf */
4907 if (s->vm86 && s->iopl != 3) {
4908 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4910 if (s->cc_op != CC_OP_DYNAMIC)
4911 gen_op_set_cc_op(s->cc_op);
4912 gen_op_movl_T0_eflags();
4916 case 0x9d: /* popf */
4917 if (s->vm86 && s->iopl != 3) {
4918 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4923 gen_op_movl_eflags_T0_cpl0();
4925 gen_op_movw_eflags_T0_cpl0();
4928 if (s->cpl <= s->iopl) {
4930 gen_op_movl_eflags_T0_io();
4932 gen_op_movw_eflags_T0_io();
4936 gen_op_movl_eflags_T0();
4938 gen_op_movw_eflags_T0();
4943 s->cc_op = CC_OP_EFLAGS;
4944 /* abort translation because TF flag may change */
4945 gen_jmp_im(s->pc - s->cs_base);
4949 case 0x9e: /* sahf */
4952 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4953 if (s->cc_op != CC_OP_DYNAMIC)
4954 gen_op_set_cc_op(s->cc_op);
4955 gen_op_movb_eflags_T0();
4956 s->cc_op = CC_OP_EFLAGS;
4958 case 0x9f: /* lahf */
4961 if (s->cc_op != CC_OP_DYNAMIC)
4962 gen_op_set_cc_op(s->cc_op);
4963 gen_op_movl_T0_eflags();
4964 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
4966 case 0xf5: /* cmc */
4967 if (s->cc_op != CC_OP_DYNAMIC)
4968 gen_op_set_cc_op(s->cc_op);
4970 s->cc_op = CC_OP_EFLAGS;
4972 case 0xf8: /* clc */
4973 if (s->cc_op != CC_OP_DYNAMIC)
4974 gen_op_set_cc_op(s->cc_op);
4976 s->cc_op = CC_OP_EFLAGS;
4978 case 0xf9: /* stc */
4979 if (s->cc_op != CC_OP_DYNAMIC)
4980 gen_op_set_cc_op(s->cc_op);
4982 s->cc_op = CC_OP_EFLAGS;
4984 case 0xfc: /* cld */
4987 case 0xfd: /* std */
4991 /************************/
4992 /* bit operations */
4993 case 0x1ba: /* bt/bts/btr/btc Gv, im */
4994 ot = dflag + OT_WORD;
4995 modrm = ldub_code(s->pc++);
4996 op = ((modrm >> 3) & 7) | rex_r;
4997 mod = (modrm >> 6) & 3;
4998 rm = (modrm & 7) | REX_B(s);
5001 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5002 gen_op_ld_T0_A0[ot + s->mem_index]();
5004 gen_op_mov_TN_reg[ot][0][rm]();
5007 val = ldub_code(s->pc++);
5008 gen_op_movl_T1_im(val);
5012 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5013 s->cc_op = CC_OP_SARB + ot;
5016 gen_op_st_T0_A0[ot + s->mem_index]();
5018 gen_op_mov_reg_T0[ot][rm]();
5019 gen_op_update_bt_cc();
5022 case 0x1a3: /* bt Gv, Ev */
5025 case 0x1ab: /* bts */
5028 case 0x1b3: /* btr */
5031 case 0x1bb: /* btc */
5034 ot = dflag + OT_WORD;
5035 modrm = ldub_code(s->pc++);
5036 reg = ((modrm >> 3) & 7) | rex_r;
5037 mod = (modrm >> 6) & 3;
5038 rm = (modrm & 7) | REX_B(s);
5039 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5041 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5042 /* specific case: we need to add a displacement */
5043 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5044 gen_op_ld_T0_A0[ot + s->mem_index]();
5046 gen_op_mov_TN_reg[ot][0][rm]();
5048 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5049 s->cc_op = CC_OP_SARB + ot;
5052 gen_op_st_T0_A0[ot + s->mem_index]();
5054 gen_op_mov_reg_T0[ot][rm]();
5055 gen_op_update_bt_cc();
5058 case 0x1bc: /* bsf */
5059 case 0x1bd: /* bsr */
5060 ot = dflag + OT_WORD;
5061 modrm = ldub_code(s->pc++);
5062 reg = ((modrm >> 3) & 7) | rex_r;
5063 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5064 /* NOTE: in order to handle the 0 case, we must load the
5065 result. It could be optimized with a generated jump */
5066 gen_op_mov_TN_reg[ot][1][reg]();
5067 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5068 gen_op_mov_reg_T1[ot][reg]();
5069 s->cc_op = CC_OP_LOGICB + ot;
5071 /************************/
5073 case 0x27: /* daa */
5076 if (s->cc_op != CC_OP_DYNAMIC)
5077 gen_op_set_cc_op(s->cc_op);
5079 s->cc_op = CC_OP_EFLAGS;
5081 case 0x2f: /* das */
5084 if (s->cc_op != CC_OP_DYNAMIC)
5085 gen_op_set_cc_op(s->cc_op);
5087 s->cc_op = CC_OP_EFLAGS;
5089 case 0x37: /* aaa */
5092 if (s->cc_op != CC_OP_DYNAMIC)
5093 gen_op_set_cc_op(s->cc_op);
5095 s->cc_op = CC_OP_EFLAGS;
5097 case 0x3f: /* aas */
5100 if (s->cc_op != CC_OP_DYNAMIC)
5101 gen_op_set_cc_op(s->cc_op);
5103 s->cc_op = CC_OP_EFLAGS;
5105 case 0xd4: /* aam */
5108 val = ldub_code(s->pc++);
5110 s->cc_op = CC_OP_LOGICB;
5112 case 0xd5: /* aad */
5115 val = ldub_code(s->pc++);
5117 s->cc_op = CC_OP_LOGICB;
5119 /************************/
5121 case 0x90: /* nop */
5122 /* XXX: xchg + rex handling */
5123 /* XXX: correct lock test for all insn */
5124 if (prefixes & PREFIX_LOCK)
5127 case 0x9b: /* fwait */
5128 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5129 (HF_MP_MASK | HF_TS_MASK)) {
5130 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5132 if (s->cc_op != CC_OP_DYNAMIC)
5133 gen_op_set_cc_op(s->cc_op);
5134 gen_jmp_im(pc_start - s->cs_base);
5138 case 0xcc: /* int3 */
5139 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5141 case 0xcd: /* int N */
5142 val = ldub_code(s->pc++);
5143 if (s->vm86 && s->iopl != 3) {
5144 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5146 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5149 case 0xce: /* into */
5152 if (s->cc_op != CC_OP_DYNAMIC)
5153 gen_op_set_cc_op(s->cc_op);
5154 gen_jmp_im(pc_start - s->cs_base);
5155 gen_op_into(s->pc - pc_start);
5157 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5159 gen_debug(s, pc_start - s->cs_base);
5162 cpu_set_log(CPU_LOG_TB_IN_ASM | CPU_LOG_PCALL);
5165 case 0xfa: /* cli */
5167 if (s->cpl <= s->iopl) {
5170 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5176 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5180 case 0xfb: /* sti */
5182 if (s->cpl <= s->iopl) {
5185 /* interruptions are enabled only the first insn after sti */
5186 /* If several instructions disable interrupts, only the
5188 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5189 gen_op_set_inhibit_irq();
5190 /* give a chance to handle pending irqs */
5191 gen_jmp_im(s->pc - s->cs_base);
5194 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5200 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5204 case 0x62: /* bound */
5207 ot = dflag ? OT_LONG : OT_WORD;
5208 modrm = ldub_code(s->pc++);
5209 reg = (modrm >> 3) & 7;
5210 mod = (modrm >> 6) & 3;
5213 gen_op_mov_TN_reg[ot][0][reg]();
5214 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5215 gen_jmp_im(pc_start - s->cs_base);
5221 case 0x1c8 ... 0x1cf: /* bswap reg */
5222 reg = (b & 7) | REX_B(s);
5223 #ifdef TARGET_X86_64
5225 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5227 gen_op_mov_reg_T0[OT_QUAD][reg]();
5231 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5233 gen_op_mov_reg_T0[OT_LONG][reg]();
5236 case 0xd6: /* salc */
5239 if (s->cc_op != CC_OP_DYNAMIC)
5240 gen_op_set_cc_op(s->cc_op);
5243 case 0xe0: /* loopnz */
5244 case 0xe1: /* loopz */
5245 if (s->cc_op != CC_OP_DYNAMIC)
5246 gen_op_set_cc_op(s->cc_op);
5248 case 0xe2: /* loop */
5249 case 0xe3: /* jecxz */
5253 tval = (int8_t)insn_get(s, OT_BYTE);
5254 next_eip = s->pc - s->cs_base;
5259 l1 = gen_new_label();
5260 l2 = gen_new_label();
5263 gen_op_jz_ecx[s->aflag](l1);
5265 gen_op_dec_ECX[s->aflag]();
5266 gen_op_loop[s->aflag][b](l1);
5269 gen_jmp_im(next_eip);
5270 gen_op_jmp_label(l2);
5277 case 0x130: /* wrmsr */
5278 case 0x132: /* rdmsr */
5280 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5288 case 0x131: /* rdtsc */
5291 case 0x134: /* sysenter */
5295 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5297 if (s->cc_op != CC_OP_DYNAMIC) {
5298 gen_op_set_cc_op(s->cc_op);
5299 s->cc_op = CC_OP_DYNAMIC;
5301 gen_jmp_im(pc_start - s->cs_base);
5306 case 0x135: /* sysexit */
5310 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5312 if (s->cc_op != CC_OP_DYNAMIC) {
5313 gen_op_set_cc_op(s->cc_op);
5314 s->cc_op = CC_OP_DYNAMIC;
5316 gen_jmp_im(pc_start - s->cs_base);
5321 #ifdef TARGET_X86_64
5322 case 0x105: /* syscall */
5323 /* XXX: is it usable in real mode ? */
5324 if (s->cc_op != CC_OP_DYNAMIC) {
5325 gen_op_set_cc_op(s->cc_op);
5326 s->cc_op = CC_OP_DYNAMIC;
5328 gen_jmp_im(pc_start - s->cs_base);
5329 gen_op_syscall(s->pc - pc_start);
5332 case 0x107: /* sysret */
5334 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5336 if (s->cc_op != CC_OP_DYNAMIC) {
5337 gen_op_set_cc_op(s->cc_op);
5338 s->cc_op = CC_OP_DYNAMIC;
5340 gen_jmp_im(pc_start - s->cs_base);
5341 gen_op_sysret(s->dflag);
5346 case 0x1a2: /* cpuid */
5349 case 0xf4: /* hlt */
5351 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5353 if (s->cc_op != CC_OP_DYNAMIC)
5354 gen_op_set_cc_op(s->cc_op);
5355 gen_jmp_im(s->pc - s->cs_base);
5361 modrm = ldub_code(s->pc++);
5362 mod = (modrm >> 6) & 3;
5363 op = (modrm >> 3) & 7;
5366 if (!s->pe || s->vm86)
5368 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5372 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5375 if (!s->pe || s->vm86)
5378 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5380 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5381 gen_jmp_im(pc_start - s->cs_base);
5386 if (!s->pe || s->vm86)
5388 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5392 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5395 if (!s->pe || s->vm86)
5398 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5400 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5401 gen_jmp_im(pc_start - s->cs_base);
5407 if (!s->pe || s->vm86)
5409 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5410 if (s->cc_op != CC_OP_DYNAMIC)
5411 gen_op_set_cc_op(s->cc_op);
5416 s->cc_op = CC_OP_EFLAGS;
5423 modrm = ldub_code(s->pc++);
5424 mod = (modrm >> 6) & 3;
5425 op = (modrm >> 3) & 7;
5431 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5433 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
5435 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
5436 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5437 #ifdef TARGET_X86_64
5439 gen_op_addq_A0_im(2);
5442 gen_op_addl_A0_im(2);
5444 gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
5446 gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
5448 gen_op_andl_T0_im(0xffffff);
5449 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5456 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5458 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5459 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5460 #ifdef TARGET_X86_64
5462 gen_op_addq_A0_im(2);
5465 gen_op_addl_A0_im(2);
5466 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5468 gen_op_andl_T0_im(0xffffff);
5470 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5471 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5473 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5474 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5479 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5480 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5484 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5486 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5488 gen_jmp_im(s->pc - s->cs_base);
5492 case 7: /* invlpg */
5494 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5497 #ifdef TARGET_X86_64
5498 if (CODE64(s) && (modrm & 7) == 0) {
5500 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5501 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5502 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5503 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5510 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5512 gen_jmp_im(s->pc - s->cs_base);
5521 case 0x108: /* invd */
5522 case 0x109: /* wbinvd */
5524 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5529 case 0x63: /* arpl or movslS (x86_64) */
5530 #ifdef TARGET_X86_64
5533 /* d_ot is the size of destination */
5534 d_ot = dflag + OT_WORD;
5536 modrm = ldub_code(s->pc++);
5537 reg = ((modrm >> 3) & 7) | rex_r;
5538 mod = (modrm >> 6) & 3;
5539 rm = (modrm & 7) | REX_B(s);
5542 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5544 if (d_ot == OT_QUAD)
5545 gen_op_movslq_T0_T0();
5546 gen_op_mov_reg_T0[d_ot][reg]();
5548 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5549 if (d_ot == OT_QUAD) {
5550 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5552 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5554 gen_op_mov_reg_T0[d_ot][reg]();
5559 if (!s->pe || s->vm86)
5561 ot = dflag ? OT_LONG : OT_WORD;
5562 modrm = ldub_code(s->pc++);
5563 reg = (modrm >> 3) & 7;
5564 mod = (modrm >> 6) & 3;
5567 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5568 gen_op_ld_T0_A0[ot + s->mem_index]();
5570 gen_op_mov_TN_reg[ot][0][rm]();
5572 if (s->cc_op != CC_OP_DYNAMIC)
5573 gen_op_set_cc_op(s->cc_op);
5575 s->cc_op = CC_OP_EFLAGS;
5577 gen_op_st_T0_A0[ot + s->mem_index]();
5579 gen_op_mov_reg_T0[ot][rm]();
5581 gen_op_arpl_update();
5584 case 0x102: /* lar */
5585 case 0x103: /* lsl */
5586 if (!s->pe || s->vm86)
5588 ot = dflag ? OT_LONG : OT_WORD;
5589 modrm = ldub_code(s->pc++);
5590 reg = ((modrm >> 3) & 7) | rex_r;
5591 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5592 gen_op_mov_TN_reg[ot][1][reg]();
5593 if (s->cc_op != CC_OP_DYNAMIC)
5594 gen_op_set_cc_op(s->cc_op);
5599 s->cc_op = CC_OP_EFLAGS;
5600 gen_op_mov_reg_T1[ot][reg]();
5603 modrm = ldub_code(s->pc++);
5604 mod = (modrm >> 6) & 3;
5605 op = (modrm >> 3) & 7;
5607 case 0: /* prefetchnta */
5608 case 1: /* prefetchnt0 */
5609 case 2: /* prefetchnt0 */
5610 case 3: /* prefetchnt0 */
5613 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5614 /* nothing more to do */
5620 case 0x120: /* mov reg, crN */
5621 case 0x122: /* mov crN, reg */
5623 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5625 modrm = ldub_code(s->pc++);
5626 if ((modrm & 0xc0) != 0xc0)
5628 rm = (modrm & 7) | REX_B(s);
5629 reg = ((modrm >> 3) & 7) | rex_r;
5640 gen_op_mov_TN_reg[ot][0][rm]();
5641 gen_op_movl_crN_T0(reg);
5642 gen_jmp_im(s->pc - s->cs_base);
5645 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5646 gen_op_mov_reg_T0[ot][rm]();
5649 /* XXX: add CR8 for x86_64 */
5655 case 0x121: /* mov reg, drN */
5656 case 0x123: /* mov drN, reg */
5658 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5660 modrm = ldub_code(s->pc++);
5661 if ((modrm & 0xc0) != 0xc0)
5663 rm = (modrm & 7) | REX_B(s);
5664 reg = ((modrm >> 3) & 7) | rex_r;
5669 /* XXX: do it dynamically with CR4.DE bit */
5670 if (reg == 4 || reg == 5 || reg >= 8)
5673 gen_op_mov_TN_reg[ot][0][rm]();
5674 gen_op_movl_drN_T0(reg);
5675 gen_jmp_im(s->pc - s->cs_base);
5678 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5679 gen_op_mov_reg_T0[ot][rm]();
5683 case 0x106: /* clts */
5685 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5688 /* abort block because static cpu state changed */
5689 gen_jmp_im(s->pc - s->cs_base);
5693 /* MMX/SSE/SSE2/PNI support */
5694 case 0x1c3: /* MOVNTI reg, mem */
5695 if (!(s->cpuid_features & CPUID_SSE2))
5697 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5698 modrm = ldub_code(s->pc++);
5699 mod = (modrm >> 6) & 3;
5702 reg = ((modrm >> 3) & 7) | rex_r;
5703 /* generate a generic store */
5704 gen_ldst_modrm(s, modrm, ot, reg, 1);
5707 modrm = ldub_code(s->pc++);
5708 mod = (modrm >> 6) & 3;
5709 op = (modrm >> 3) & 7;
5711 case 0: /* fxsave */
5712 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5714 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5715 gen_op_fxsave_A0((s->dflag == 2));
5717 case 1: /* fxrstor */
5718 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5720 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5721 gen_op_fxrstor_A0((s->dflag == 2));
5723 case 2: /* ldmxcsr */
5724 case 3: /* stmxcsr */
5725 if (s->flags & HF_TS_MASK) {
5726 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5729 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5732 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5734 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5735 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5737 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5738 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5741 case 5: /* lfence */
5742 case 6: /* mfence */
5743 case 7: /* sfence */
5744 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5751 case 0x110 ... 0x117:
5752 case 0x128 ... 0x12f:
5753 case 0x150 ... 0x177:
5754 case 0x17c ... 0x17f:
5756 case 0x1c4 ... 0x1c6:
5757 case 0x1d0 ... 0x1fe:
5758 gen_sse(s, b, pc_start, rex_r);
5763 /* lock generation */
5764 if (s->prefix & PREFIX_LOCK)
5768 if (s->prefix & PREFIX_LOCK)
5770 /* XXX: ensure that no lock was generated */
5771 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
/* Convenience masks combining the individual x86 status-flag bits
   (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C, declared elsewhere): all six arithmetic
   flags, and the same set minus carry. Used by the flag-liveness tables below. */
5775 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5776 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
5778 /* flags read by an operation */
/* Per-opcode table, indexed by micro-op index, giving the set of EFLAGS bits
   each micro-op READS. optimize_flags() walks the op stream backwards and uses
   this (with opc_write_flags) to compute flag liveness. Entries not listed
   default to 0 (reads no flags).
   NOTE(review): this chunk appears sampled — several original lines (including
   the table's closing brace) are not visible here; bytes below are unchanged. */
5779 static uint16_t opc_read_flags[NB_OPS] = {
5780 [INDEX_op_aas] = CC_A,
5781 [INDEX_op_aaa] = CC_A,
5782 [INDEX_op_das] = CC_A | CC_C,
5783 [INDEX_op_daa] = CC_A | CC_C,
5785 /* subtle: due to the incl/decl implementation, C is used */
5786 [INDEX_op_update_inc_cc] = CC_C,
5788 [INDEX_op_into] = CC_O,
/* conditional-jump ops on sub results: each reads only the flags its
   condition tests (b=carry, z=zero, be=zero|carry, s=sign, l/le=overflow+sign) */
5790 [INDEX_op_jb_subb] = CC_C,
5791 [INDEX_op_jb_subw] = CC_C,
5792 [INDEX_op_jb_subl] = CC_C,
5794 [INDEX_op_jz_subb] = CC_Z,
5795 [INDEX_op_jz_subw] = CC_Z,
5796 [INDEX_op_jz_subl] = CC_Z,
5798 [INDEX_op_jbe_subb] = CC_Z | CC_C,
5799 [INDEX_op_jbe_subw] = CC_Z | CC_C,
5800 [INDEX_op_jbe_subl] = CC_Z | CC_C,
5802 [INDEX_op_js_subb] = CC_S,
5803 [INDEX_op_js_subw] = CC_S,
5804 [INDEX_op_js_subl] = CC_S,
5806 [INDEX_op_jl_subb] = CC_O | CC_S,
5807 [INDEX_op_jl_subw] = CC_O | CC_S,
5808 [INDEX_op_jl_subl] = CC_O | CC_S,
5810 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
5811 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
5812 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
/* loopz/loopnz test ZF in addition to (E)CX */
5814 [INDEX_op_loopnzw] = CC_Z,
5815 [INDEX_op_loopnzl] = CC_Z,
5816 [INDEX_op_loopzw] = CC_Z,
5817 [INDEX_op_loopzl] = CC_Z,
/* setcc from the dynamically-computed flags */
5819 [INDEX_op_seto_T0_cc] = CC_O,
5820 [INDEX_op_setb_T0_cc] = CC_C,
5821 [INDEX_op_setz_T0_cc] = CC_Z,
5822 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
5823 [INDEX_op_sets_T0_cc] = CC_S,
5824 [INDEX_op_setp_T0_cc] = CC_P,
5825 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
5826 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
/* setcc specialised on sub results, per operand size */
5828 [INDEX_op_setb_T0_subb] = CC_C,
5829 [INDEX_op_setb_T0_subw] = CC_C,
5830 [INDEX_op_setb_T0_subl] = CC_C,
5832 [INDEX_op_setz_T0_subb] = CC_Z,
5833 [INDEX_op_setz_T0_subw] = CC_Z,
5834 [INDEX_op_setz_T0_subl] = CC_Z,
5836 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
5837 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
5838 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
5840 [INDEX_op_sets_T0_subb] = CC_S,
5841 [INDEX_op_sets_T0_subw] = CC_S,
5842 [INDEX_op_sets_T0_subl] = CC_S,
5844 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
5845 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
5846 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
5848 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
5849 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
5850 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
5852 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
5853 [INDEX_op_cmc] = CC_C,
5854 [INDEX_op_salc] = CC_C,
5856 /* needed for correct flag optimisation before string ops */
5857 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
5858 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
5859 [INDEX_op_jz_ecxw] = CC_OSZAPC,
5860 [INDEX_op_jz_ecxl] = CC_OSZAPC,
5862 #ifdef TARGET_X86_64
/* 64-bit (quad) variants of the entries above */
5863 [INDEX_op_jb_subq] = CC_C,
5864 [INDEX_op_jz_subq] = CC_Z,
5865 [INDEX_op_jbe_subq] = CC_Z | CC_C,
5866 [INDEX_op_js_subq] = CC_S,
5867 [INDEX_op_jl_subq] = CC_O | CC_S,
5868 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
5870 [INDEX_op_loopnzq] = CC_Z,
5871 [INDEX_op_loopzq] = CC_Z,
5873 [INDEX_op_setb_T0_subq] = CC_C,
5874 [INDEX_op_setz_T0_subq] = CC_Z,
5875 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
5876 [INDEX_op_sets_T0_subq] = CC_S,
5877 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
5878 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
5880 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
5881 [INDEX_op_jz_ecxq] = CC_OSZAPC,
/* DEF_READF(SUFFIX): expands to the flag-READ entries for the memory-variant
   micro-ops (raw/kernel/user address spaces, selected by SUFFIX). adc/sbb and
   rcl/rcr all consume the incoming carry. Presumably invoked inside the
   opc_read_flags initializer with each access-mode suffix — the invocation
   lines are not visible in this chunk; TODO confirm against the full file. */
5884 #define DEF_READF(SUFFIX)\
5885 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5886 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5887 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5888 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5889 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5890 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5891 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5892 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5894 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5895 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5896 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
5897 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5898 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5899 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5900 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5901 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
5905 #ifndef CONFIG_USER_ONLY
5911 /* flags written by an operation */
/* Companion table to opc_read_flags: the set of EFLAGS bits each micro-op
   WRITES. optimize_flags() kills these bits from the live set when scanning
   backwards, and may replace an op whose written flags are all dead with the
   simpler variant from opc_simpler[].
   NOTE(review): chunk appears sampled — the table's closing brace is not
   visible here; code bytes below are unchanged. */
5912 static uint16_t opc_write_flags[NB_OPS] = {
5913 [INDEX_op_update2_cc] = CC_OSZAPC,
5914 [INDEX_op_update1_cc] = CC_OSZAPC,
5915 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
5916 [INDEX_op_update_neg_cc] = CC_OSZAPC,
5917 /* subtle: due to the incl/decl implementation, C is used */
5918 [INDEX_op_update_inc_cc] = CC_OSZAPC,
5919 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
/* multiplies clobber all arithmetic flags */
5921 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
5922 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
5923 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
5924 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
5925 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
5926 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
5927 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
5928 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
5929 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
5930 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
5931 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
/* SSE/SSE2 compares write the integer flags */
5934 [INDEX_op_ucomiss] = CC_OSZAPC,
5935 [INDEX_op_ucomisd] = CC_OSZAPC,
5936 [INDEX_op_comiss] = CC_OSZAPC,
5937 [INDEX_op_comisd] = CC_OSZAPC,
/* BCD adjust instructions */
5940 [INDEX_op_aam] = CC_OSZAPC,
5941 [INDEX_op_aad] = CC_OSZAPC,
5942 [INDEX_op_aas] = CC_OSZAPC,
5943 [INDEX_op_aaa] = CC_OSZAPC,
5944 [INDEX_op_das] = CC_OSZAPC,
5945 [INDEX_op_daa] = CC_OSZAPC,
/* flag loads from T0 (sahf/popf style); byte form cannot touch OF */
5947 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
5948 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
5949 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
5950 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
5951 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
5952 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
5953 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
5954 [INDEX_op_clc] = CC_C,
5955 [INDEX_op_stc] = CC_C,
5956 [INDEX_op_cmc] = CC_C,
/* bit test / bit scan family */
5958 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
5959 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
5960 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
5961 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
5962 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
5963 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
5964 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
5965 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
5966 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
5967 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
5968 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
5969 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
5971 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
5972 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
5973 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
5974 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
5975 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
5976 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
5978 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
5979 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
5980 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
5981 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
5983 [INDEX_op_cmpxchg8b] = CC_Z,
5984 [INDEX_op_lar] = CC_Z,
5985 [INDEX_op_lsl] = CC_Z,
5986 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
5987 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
/* DEF_WRITEF(SUFFIX): flag-WRITE entries for the memory-variant micro-ops,
   mirroring DEF_READF. adc/sbb, shifts, double shifts and cmpxchg clobber all
   arithmetic flags; rotates only write OF and CF. Presumably invoked inside
   the opc_write_flags initializer with each access-mode suffix — invocation
   lines are not visible in this sampled chunk; TODO confirm. */
5989 #define DEF_WRITEF(SUFFIX)\
5990 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5991 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5992 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5993 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5994 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5995 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5996 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5997 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5999 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6000 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6001 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6002 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6003 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6004 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6005 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6006 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6008 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6009 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6010 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6011 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6012 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6013 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6014 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6015 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6017 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6018 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6019 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6020 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6022 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6023 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6024 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6025 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6027 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6028 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6029 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6030 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6032 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6033 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6034 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6035 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6036 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6037 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6039 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6040 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6041 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6042 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6043 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6044 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6046 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6047 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6048 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6049 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6054 #ifndef CONFIG_USER_ONLY
6060 /* simpler form of an operation if no flags need to be generated */
/* opc_simpler[op] gives a cheaper replacement micro-op (e.g. the non-_cc
   variant, or a nop) that optimize_flags() substitutes when none of the flags
   the op writes are live. A zero entry means "no simpler form"; per
   optimize_flags_init() below, zero entries are presumably filled with the op
   itself at startup. NOTE(review): chunk appears sampled — the closing brace
   of the initializer is not visible; code bytes below are unchanged. */
6061 static uint16_t opc_simpler[NB_OPS] = {
6062 [INDEX_op_update2_cc] = INDEX_op_nop,
6063 [INDEX_op_update1_cc] = INDEX_op_nop,
6064 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6066 /* broken: CC_OP logic must be rewritten */
6067 [INDEX_op_update_inc_cc] = INDEX_op_nop,
/* shifts: drop the flag-computing suffix when flags are dead */
6070 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6071 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6072 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6073 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6075 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6076 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6077 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6078 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6080 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6081 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6082 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6083 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* DEF_SIMPLER(SUFFIX): same _cc -> plain mapping for the rotate
   memory-variant ops, parameterised by access-mode suffix */
6085 #define DEF_SIMPLER(SUFFIX)\
6086 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6087 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6088 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6089 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6091 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6092 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6093 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6094 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6098 #ifndef CONFIG_USER_ONLY
6099 DEF_SIMPLER(_kernel)
/* One-time setup for the flag optimizer: any opc_simpler[] entry left at 0
   ("no simpler form") is filled in so the table can be indexed unconditionally.
   NOTE(review): chunk appears sampled — the loop body assignment and the
   function's braces are not visible here; bytes below are unchanged. */
6104 void optimize_flags_init(void)
6107 /* put default values in arrays */
6108 for(i = 0; i < NB_OPS; i++) {
6109 if (opc_simpler[i] == 0)
6114 /* CPU flags computation optimization: we move backward thru the
6115 generated code to see which flags are needed. The operation is
6116 modified if suitable */
/* Classic backward liveness pass over the micro-op buffer:
   opc_buf/opc_buf_len delimit the generated ops; starting from "all flags
   live" at block end, each op whose written flags are all dead is replaced by
   its opc_simpler[] form, then the live set is updated from the read/write
   tables above. NOTE(review): chunk appears sampled — the op fetch
   (decrementing opc_ptr) and closing braces are not visible; bytes unchanged. */
6117 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6120 int live_flags, write_flags, op;
6122 opc_ptr = opc_buf + opc_buf_len;
6123 /* live_flags contains the flags needed by the next instructions
6124 in the code. At the end of the bloc, we consider that all the
6126 live_flags = CC_OSZAPC;
6127 while (opc_ptr > opc_buf) {
6129 /* if none of the flags written by the instruction is used,
6130 then we can try to find a simpler instruction */
6131 write_flags = opc_write_flags[op];
6132 if ((live_flags & write_flags) == 0) {
6133 *opc_ptr = opc_simpler[op];
6135 /* compute the live flags before the instruction */
6136 live_flags &= ~write_flags;
6137 live_flags |= opc_read_flags[op];
6141 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6142 basic block 'tb'. If search_pc is TRUE, also generate PC
6143 information for each intermediate instruction. */
/* Core translation driver: fills a DisasContext from the TB's cached CPU
   flags, then repeatedly calls disas_insn() until a jump, an exception, a
   single-step condition, or the size limits stop the block; finally runs the
   flag-liveness optimizer and records the block size. NOTE(review): this
   chunk is sampled — many original lines (braces, else-arms, #endif/#else,
   several statements) are missing between the numbered lines below; the
   visible bytes are unchanged. */
6144 static inline int gen_intermediate_code_internal(CPUState *env,
6145 TranslationBlock *tb,
6148 DisasContext dc1, *dc = &dc1;
6149 target_ulong pc_ptr;
6150 uint16_t *gen_opc_end;
6151 int flags, j, lj, cflags;
6152 target_ulong pc_start;
6153 target_ulong cs_base;
6155 /* generate intermediate code */
6157 cs_base = tb->cs_base;
6159 cflags = tb->cflags;
/* decode the per-TB hidden-flag word into DisasContext fields */
6161 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6162 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6163 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6164 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6166 dc->vm86 = (flags >> VM_SHIFT) & 1;
6167 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6168 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6169 dc->tf = (flags >> TF_SHIFT) & 1;
6170 dc->singlestep_enabled = env->singlestep_enabled;
6171 dc->cc_op = CC_OP_DYNAMIC;
6172 dc->cs_base = cs_base;
6174 dc->popl_esp_hack = 0;
6175 /* select memory access functions */
6177 if (flags & HF_SOFTMMU_MASK) {
6179 dc->mem_index = 2 * 4;
6181 dc->mem_index = 1 * 4;
6183 dc->cpuid_features = env->cpuid_features;
6184 #ifdef TARGET_X86_64
6185 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6186 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* direct block chaining is only safe when not single-stepping and
   no IRQ-inhibit window is pending */
6189 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6190 (flags & HF_INHIBIT_IRQ_MASK)
6191 #ifndef CONFIG_SOFTMMU
6192 || (flags & HF_SOFTMMU_MASK)
6196 /* check addseg logic */
6197 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6198 printf("ERROR addseg\n");
/* reset the global micro-op output pointers for this block */
6201 gen_opc_ptr = gen_opc_buf;
6202 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6203 gen_opparam_ptr = gen_opparam_buf;
6206 dc->is_jmp = DISAS_NEXT;
/* main translation loop (enclosing for(;;) not visible in this chunk) */
6211 if (env->nb_breakpoints > 0) {
6212 for(j = 0; j < env->nb_breakpoints; j++) {
6213 if (env->breakpoints[j] == pc_ptr) {
6214 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc bookkeeping: record pc/cc_op per generated op index */
6220 j = gen_opc_ptr - gen_opc_buf;
6224 gen_opc_instr_start[lj++] = 0;
6226 gen_opc_pc[lj] = pc_ptr;
6227 gen_opc_cc_op[lj] = dc->cc_op;
6228 gen_opc_instr_start[lj] = 1;
6230 pc_ptr = disas_insn(dc, pc_ptr);
6231 /* stop translation if indicated */
6234 /* if single step mode, we generate only one instruction and
6235 generate an exception */
6236 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6237 the flag and abort the translation to give the irqs a
6238 change to be happen */
6239 if (dc->tf || dc->singlestep_enabled ||
6240 (flags & HF_INHIBIT_IRQ_MASK) ||
6241 (cflags & CF_SINGLE_INSN)) {
6242 gen_jmp_im(pc_ptr - dc->cs_base);
6246 /* if too long translation, stop generation too */
6247 if (gen_opc_ptr >= gen_opc_end ||
6248 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6249 gen_jmp_im(pc_ptr - dc->cs_base);
6254 *gen_opc_ptr = INDEX_op_end;
6255 /* we don't forget to fill the last values */
6257 j = gen_opc_ptr - gen_opc_buf;
6260 gen_opc_instr_start[lj++] = 0;
/* optional tracing of CPU state, guest asm and micro-ops to the log */
6264 if (loglevel & CPU_LOG_TB_CPU) {
6265 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6267 if (loglevel & CPU_LOG_TB_IN_ASM) {
6269 fprintf(logfile, "----------------\n");
6270 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6271 #ifdef TARGET_X86_64
6276 disas_flags = !dc->code32;
6277 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6278 fprintf(logfile, "\n");
6279 if (loglevel & CPU_LOG_TB_OP) {
6280 fprintf(logfile, "OP:\n");
6281 dump_ops(gen_opc_buf, gen_opparam_buf);
6282 fprintf(logfile, "\n");
6287 /* optimize flag computations */
6288 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6291 if (loglevel & CPU_LOG_TB_OP_OPT) {
6292 fprintf(logfile, "AFTER FLAGS OPT:\n");
6293 dump_ops(gen_opc_buf, gen_opparam_buf);
6294 fprintf(logfile, "\n");
6298 tb->size = pc_ptr - pc_start;
/* Public entry points: thin wrappers selecting whether per-op PC information
   is recorded (search_pc = 0 for normal translation, 1 for PC lookup after a
   fault). NOTE(review): sampled chunk — function braces not visible here. */
6302 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6304 return gen_intermediate_code_internal(env, tb, 0);
6307 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6309 return gen_intermediate_code_internal(env, tb, 1);