4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
/* Output cursors for generated micro-ops: opcode stream and its
   32-bit parameter stream (dyngen-style code generation). */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
/* x86 instruction-prefix bits accumulated while decoding one insn. */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* x86-64 build: the X86_64_* helpers expand to their argument
   (presumably under an #ifdef TARGET_X86_64 elided here — confirm). */
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
/* NOTE(review): second set of definitions — presumably the
   !TARGET_X86_64 branch of the same conditional; the #else/#endif
   lines are not visible in this excerpt. */
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
/* NOTE(review): presumably non-zero when a REX prefix selects the
   extended byte registers — confirm against the decoder. */
61 static int x86_64_hregs;
64 #ifdef USE_DIRECT_JUMP
67 #define TBPARAM(x) (long)(x)
/* Per-instruction and per-translation-block state for the x86
   front end.  "current insn context" fields are reset each insn;
   "current block context" fields are fixed for the whole TB. */
70 typedef struct DisasContext {
71 /* current insn context */
72 int override; /* -1 if no override */
75 target_ulong pc; /* pc = eip + cs_base */
76 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base; /* base of CS segment */
80 int pe; /* protected mode */
81 int code32; /* 32 bit code segment */
83 int lma; /* long mode active */
84 int code64; /* 64 bit code segment */
87 int ss32; /* 32 bit stack segment */
88 int cc_op; /* current CC operation */
89 int addseg; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st; /* currently unused */
91 int vm86; /* vm86 mode */
94 int tf; /* TF cpu flag */
95 int singlestep_enabled; /* "hardware" single step enabled */
96 int jmp_opt; /* use direct block chaining for direct jumps */
97 int mem_index; /* select memory access functions */
98 int flags; /* all execution flags */
99 struct TranslationBlock *tb;
100 int popl_esp_hack; /* for correct popl with esp base handling */
101 int rip_offset; /* only used in x86_64, but left for simplicity */
103 int cpuid_ext_features;
/* Forward declarations: end-of-block and jump emission helpers
   defined later in the file. */
106 static void gen_eob(DisasContext *s);
107 static void gen_jmp(DisasContext *s, target_ulong eip);
108 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
110 /* i386 arith/logic operations */
130 OP_SHL1, /* undocumented */
135 #define DEF(s, n, copy_size) INDEX_op_ ## s,
152 /* I386 int registers */
153 OR_EAX, /* MUST be even numbered */
162 OR_TMP0 = 16, /* temporary operand register */
164 OR_A0, /* temporary register used when doing address evaluation */
/* 4 operand sizes on x86-64: byte, word, long, quad. */
169 #define NB_OP_SIZES 4
/* Expand one table row per integer register (16 regs on x86-64).
   Order matches the hardware register encoding. */
171 #define DEF_REGS(prefix, suffix) \
172 prefix ## EAX ## suffix,\
173 prefix ## ECX ## suffix,\
174 prefix ## EDX ## suffix,\
175 prefix ## EBX ## suffix,\
176 prefix ## ESP ## suffix,\
177 prefix ## EBP ## suffix,\
178 prefix ## ESI ## suffix,\
179 prefix ## EDI ## suffix,\
180 prefix ## R8 ## suffix,\
181 prefix ## R9 ## suffix,\
182 prefix ## R10 ## suffix,\
183 prefix ## R11 ## suffix,\
184 prefix ## R12 ## suffix,\
185 prefix ## R13 ## suffix,\
186 prefix ## R14 ## suffix,\
187 prefix ## R15 ## suffix,
/* Wrappers that select between low-byte and high-byte register ops
   at run time (dispatch visible in the elided wrapper bodies). */
189 #define DEF_BREGS(prefixb, prefixh, suffix) \
191 static void prefixb ## ESP ## suffix ## _wrapper(void) \
194 prefixb ## ESP ## suffix (); \
196 prefixh ## EAX ## suffix (); \
199 static void prefixb ## EBP ## suffix ## _wrapper(void) \
202 prefixb ## EBP ## suffix (); \
204 prefixh ## ECX ## suffix (); \
207 static void prefixb ## ESI ## suffix ## _wrapper(void) \
210 prefixb ## ESI ## suffix (); \
212 prefixh ## EDX ## suffix (); \
215 static void prefixb ## EDI ## suffix ## _wrapper(void) \
218 prefixb ## EDI ## suffix (); \
220 prefixh ## EBX ## suffix (); \
/* Instantiate the byte/high-byte wrappers for T0/T1 moves. */
223 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
224 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
225 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
226 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
228 #else /* !TARGET_X86_64 */
/* 3 operand sizes on 32-bit targets: byte, word, long. */
230 #define NB_OP_SIZES 3
/* 8-register variant of DEF_REGS for 32-bit builds. */
232 #define DEF_REGS(prefix, suffix) \
233 prefix ## EAX ## suffix,\
234 prefix ## ECX ## suffix,\
235 prefix ## EDX ## suffix,\
236 prefix ## EBX ## suffix,\
237 prefix ## ESP ## suffix,\
238 prefix ## EBP ## suffix,\
239 prefix ## ESI ## suffix,\
240 prefix ## EDI ## suffix,
242 #endif /* !TARGET_X86_64 */
/* Dispatch tables mapping [operand size][register] to the generated
   micro-op that moves between T0/T1/A0 and a CPU register.  The byte
   rows use the *_wrapper functions so ESP/EBP/ESI/EDI resolve to the
   correct byte (or high-byte) register. */
244 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
251 gen_op_movb_ESP_T0_wrapper,
252 gen_op_movb_EBP_T0_wrapper,
253 gen_op_movb_ESI_T0_wrapper,
254 gen_op_movb_EDI_T0_wrapper,
271 DEF_REGS(gen_op_movw_, _T0)
274 DEF_REGS(gen_op_movl_, _T0)
278 DEF_REGS(gen_op_movq_, _T0)
/* Same as above, but the source operand is T1. */
283 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
290 gen_op_movb_ESP_T1_wrapper,
291 gen_op_movb_EBP_T1_wrapper,
292 gen_op_movb_ESI_T1_wrapper,
293 gen_op_movb_EDI_T1_wrapper,
310 DEF_REGS(gen_op_movw_, _T1)
313 DEF_REGS(gen_op_movl_, _T1)
317 DEF_REGS(gen_op_movq_, _T1)
/* A0 moves: no byte row, hence NB_OP_SIZES - 1. */
322 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
324 DEF_REGS(gen_op_movw_, _A0)
327 DEF_REGS(gen_op_movl_, _A0)
331 DEF_REGS(gen_op_movq_, _A0)
/* Register -> TN loads, indexed [size][T0|T1][register]. */
336 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
345 gen_op_movl_T0_ESP_wrapper,
346 gen_op_movl_T0_EBP_wrapper,
347 gen_op_movl_T0_ESI_wrapper,
348 gen_op_movl_T0_EDI_wrapper,
370 gen_op_movl_T1_ESP_wrapper,
371 gen_op_movl_T1_EBP_wrapper,
372 gen_op_movl_T1_ESI_wrapper,
373 gen_op_movl_T1_EDI_wrapper,
392 DEF_REGS(gen_op_movl_T0_, )
395 DEF_REGS(gen_op_movl_T1_, )
400 DEF_REGS(gen_op_movl_T0_, )
403 DEF_REGS(gen_op_movl_T1_, )
409 DEF_REGS(gen_op_movl_T0_, )
412 DEF_REGS(gen_op_movl_T1_, )
/* Address-generation helpers: load A0 from a register, and
   A0 += reg << scale for scale 0..3 (SIB-style scaling). */
418 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
419 DEF_REGS(gen_op_movl_A0_, )
422 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
424 DEF_REGS(gen_op_addl_A0_, )
427 DEF_REGS(gen_op_addl_A0_, _s1)
430 DEF_REGS(gen_op_addl_A0_, _s2)
433 DEF_REGS(gen_op_addl_A0_, _s3)
/* 64-bit variants of the A0 helpers. */
438 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
439 DEF_REGS(gen_op_movq_A0_, )
442 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
444 DEF_REGS(gen_op_addq_A0_, )
447 DEF_REGS(gen_op_addq_A0_, _s1)
450 DEF_REGS(gen_op_addq_A0_, _s2)
453 DEF_REGS(gen_op_addq_A0_, _s3)
/* CMOVcc: word/long/quad only (no byte form in the ISA). */
458 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
460 DEF_REGS(gen_op_cmovw_, _T1_T0)
463 DEF_REGS(gen_op_cmovl_, _T1_T0)
467 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* ALU micro-op table indexed by the 3-bit /reg opcode extension. */
472 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* ADC/SBB rows per operand size; SUFFIX selects the memory-access
   flavour (_raw/_kernel/_user) for the *_mem_* tables below. */
483 #define DEF_ARITHC(SUFFIX)\
485 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
489 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
493 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
497 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
501 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
/* Memory forms: 3 access modes (raw/kernel/user) x 4 sizes. */
505 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
507 #ifndef CONFIG_USER_ONLY
513 static const int cc_op_arithb[8] = {
/* CMPXCHG rows, one per operand size. */
524 #define DEF_CMPXCHG(SUFFIX)\
525 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
530 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
534 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
536 #ifndef CONFIG_USER_ONLY
/* Rotate/shift rows indexed by the /reg field (rol,ror,rcl,rcr,
   shl,shr,shl,sar).  Index 6 deliberately repeats shl: opcode /6 is
   the undocumented SAL alias of SHL (see OP_SHL1 above). */
542 #define DEF_SHIFT(SUFFIX)\
544 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
564 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
574 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
584 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
588 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
590 #ifndef CONFIG_USER_ONLY
/* SHLD/SHRD rows; 'op' is the count operand (immediate or ECX). */
596 #define DEF_SHIFTD(SUFFIX, op)\
602 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
614 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
618 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
622 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
624 #ifndef CONFIG_USER_ONLY
625 DEF_SHIFTD(_kernel, im)
626 DEF_SHIFTD(_user, im)
630 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
631 DEF_SHIFTD(_raw, ECX)
632 #ifndef CONFIG_USER_ONLY
633 DEF_SHIFTD(_kernel, ECX)
634 DEF_SHIFTD(_user, ECX)
/* BT/BTS/BTR/BTC indexed [size - OT_WORD][op]. */
638 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
641 gen_op_btsw_T0_T1_cc,
642 gen_op_btrw_T0_T1_cc,
643 gen_op_btcw_T0_T1_cc,
647 gen_op_btsl_T0_T1_cc,
648 gen_op_btrl_T0_T1_cc,
649 gen_op_btcl_T0_T1_cc,
654 gen_op_btsq_T0_T1_cc,
655 gen_op_btrq_T0_T1_cc,
656 gen_op_btcq_T0_T1_cc,
/* A0 += bit-offset adjustment for bit-test memory operands. */
661 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
662 gen_op_add_bitw_A0_T1,
663 gen_op_add_bitl_A0_T1,
664 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* BSF/BSR dispatch. */
667 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* Memory access dispatch tables: 3 access modes (raw = user-only
   build, kernel, user) x 4 operand sizes, indexed ot + mem_index.
   _kernel/_user rows exist only when softmmu is compiled in. */
684 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
685 gen_op_ldsb_raw_T0_A0,
686 gen_op_ldsw_raw_T0_A0,
687 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
689 #ifndef CONFIG_USER_ONLY
690 gen_op_ldsb_kernel_T0_A0,
691 gen_op_ldsw_kernel_T0_A0,
692 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
695 gen_op_ldsb_user_T0_A0,
696 gen_op_ldsw_user_T0_A0,
697 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
/* Zero-extending loads into T0. */
702 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
703 gen_op_ldub_raw_T0_A0,
704 gen_op_lduw_raw_T0_A0,
708 #ifndef CONFIG_USER_ONLY
709 gen_op_ldub_kernel_T0_A0,
710 gen_op_lduw_kernel_T0_A0,
714 gen_op_ldub_user_T0_A0,
715 gen_op_lduw_user_T0_A0,
721 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
723 gen_op_ldub_raw_T0_A0,
724 gen_op_lduw_raw_T0_A0,
725 gen_op_ldl_raw_T0_A0,
726 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
728 #ifndef CONFIG_USER_ONLY
729 gen_op_ldub_kernel_T0_A0,
730 gen_op_lduw_kernel_T0_A0,
731 gen_op_ldl_kernel_T0_A0,
732 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
734 gen_op_ldub_user_T0_A0,
735 gen_op_lduw_user_T0_A0,
736 gen_op_ldl_user_T0_A0,
737 X86_64_ONLY(gen_op_ldq_user_T0_A0),
/* Loads into T1. */
741 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
742 gen_op_ldub_raw_T1_A0,
743 gen_op_lduw_raw_T1_A0,
744 gen_op_ldl_raw_T1_A0,
745 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
747 #ifndef CONFIG_USER_ONLY
748 gen_op_ldub_kernel_T1_A0,
749 gen_op_lduw_kernel_T1_A0,
750 gen_op_ldl_kernel_T1_A0,
751 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
753 gen_op_ldub_user_T1_A0,
754 gen_op_lduw_user_T1_A0,
755 gen_op_ldl_user_T1_A0,
756 X86_64_ONLY(gen_op_ldq_user_T1_A0),
/* Stores from T0. */
760 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
761 gen_op_stb_raw_T0_A0,
762 gen_op_stw_raw_T0_A0,
763 gen_op_stl_raw_T0_A0,
764 X86_64_ONLY(gen_op_stq_raw_T0_A0),
766 #ifndef CONFIG_USER_ONLY
767 gen_op_stb_kernel_T0_A0,
768 gen_op_stw_kernel_T0_A0,
769 gen_op_stl_kernel_T0_A0,
770 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
772 gen_op_stb_user_T0_A0,
773 gen_op_stw_user_T0_A0,
774 gen_op_stl_user_T0_A0,
775 X86_64_ONLY(gen_op_stq_user_T0_A0),
/* Stores from T1 (no byte row visible in this excerpt). */
779 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
781 gen_op_stw_raw_T1_A0,
782 gen_op_stl_raw_T1_A0,
783 X86_64_ONLY(gen_op_stq_raw_T1_A0),
785 #ifndef CONFIG_USER_ONLY
787 gen_op_stw_kernel_T1_A0,
788 gen_op_stl_kernel_T1_A0,
789 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
792 gen_op_stw_user_T1_A0,
793 gen_op_stl_user_T1_A0,
794 X86_64_ONLY(gen_op_stq_user_T1_A0),
/* Store an immediate EIP/RIP into the CPU state, picking the
   narrowest encoding: 32-bit zero-extendable, 32-bit sign-extendable,
   or a full 64-bit immediate split into two halves. */
798 static inline void gen_jmp_im(target_ulong pc)
801 if (pc == (uint32_t)pc) {
802 gen_op_movl_eip_im(pc);
803 } else if (pc == (int32_t)pc) {
804 gen_op_movq_eip_im(pc);
/* full 64-bit immediate: pass high and low halves separately */
806 gen_op_movq_eip_im64(pc >> 32, pc);
/* 32-bit-only path (conditional branch elided in this excerpt) */
809 gen_op_movl_eip_im(pc);
/* Compute the source address of a string instruction into A0:
   ESI plus the (possibly overridden) segment base.  Handles 64-bit,
   32-bit and 16-bit address sizes; 16-bit masks A0 to 0xffff and
   always adds the segment base. */
813 static inline void gen_string_movl_A0_ESI(DisasContext *s)
817 override = s->override;
821 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
822 gen_op_addq_A0_reg_sN[0][R_ESI]();
824 gen_op_movq_A0_reg[R_ESI]();
/* default to DS when addseg is set and no explicit override */
830 if (s->addseg && override < 0)
833 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
834 gen_op_addl_A0_reg_sN[0][R_ESI]();
836 gen_op_movl_A0_reg[R_ESI]();
839 /* 16 address, always override */
842 gen_op_movl_A0_reg[R_ESI]();
843 gen_op_andl_A0_ffff();
844 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Compute the destination address of a string instruction into A0:
   EDI plus the ES base (the destination segment of string ops is
   always ES and cannot be overridden). */
848 static inline void gen_string_movl_A0_EDI(DisasContext *s)
852 gen_op_movq_A0_reg[R_EDI]();
857 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
858 gen_op_addl_A0_reg_sN[0][R_EDI]();
860 gen_op_movl_A0_reg[R_EDI]();
/* 16-bit address size: wrap offset and add ES base */
863 gen_op_movl_A0_reg[R_EDI]();
864 gen_op_andl_A0_ffff();
865 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* T0 = +/- element size depending on DF, per operand size. */
869 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
870 gen_op_movl_T0_Dshiftb,
871 gen_op_movl_T0_Dshiftw,
872 gen_op_movl_T0_Dshiftl,
873 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* Conditional jumps on (E/R)CX, indexed by address size (aflag). */
876 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
879 X86_64_ONLY(gen_op_jnz_ecxq),
882 static GenOpFunc1 *gen_op_jz_ecx[3] = {
885 X86_64_ONLY(gen_op_jz_ecxq),
/* (E/R)CX decrement for REP loops, indexed by address size. */
888 static GenOpFunc *gen_op_dec_ECX[3] = {
891 X86_64_ONLY(gen_op_decq_ECX),
/* REPZ/REPNZ termination tests, indexed [nz][operand size]. */
894 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
899 X86_64_ONLY(gen_op_jnz_subq),
905 X86_64_ONLY(gen_op_jz_subq),
/* Port I/O micro-op tables, per operand size. */
909 static GenOpFunc *gen_op_in_DX_T0[3] = {
915 static GenOpFunc *gen_op_out_DX_T0[3] = {
921 static GenOpFunc *gen_op_in[3] = {
927 static GenOpFunc *gen_op_out[3] = {
/* TSS I/O-permission-bitmap checks, per operand size. */
933 static GenOpFunc *gen_check_io_T0[3] = {
939 static GenOpFunc *gen_check_io_DX[3] = {
/* Emit an I/O-permission check before IN/OUT when required:
   protected mode with CPL > IOPL, or vm86 mode.  use_dx selects the
   DX-port variant versus the T0-port variant. */
945 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
947 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
/* flush lazy condition codes before the helper may fault */
948 if (s->cc_op != CC_OP_DYNAMIC)
949 gen_op_set_cc_op(s->cc_op);
952 gen_check_io_DX[ot]();
954 gen_check_io_T0[ot]();
/* Emit one MOVS iteration: load from DS:ESI, store to ES:EDI, then
   advance both index registers by the DF-signed element size. */
958 static inline void gen_movs(DisasContext *s, int ot)
960 gen_string_movl_A0_ESI(s);
961 gen_op_ld_T0_A0[ot + s->mem_index]();
962 gen_string_movl_A0_EDI(s);
963 gen_op_st_T0_A0[ot + s->mem_index]();
/* T0 = +/- element size (direction flag) */
964 gen_op_movl_T0_Dshift[ot]();
967 gen_op_addq_ESI_T0();
968 gen_op_addq_EDI_T0();
972 gen_op_addl_ESI_T0();
973 gen_op_addl_EDI_T0();
975 gen_op_addw_ESI_T0();
976 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code operation into the CPU
   state and mark it dynamic from here on. */
980 static inline void gen_update_cc_op(DisasContext *s)
982 if (s->cc_op != CC_OP_DYNAMIC) {
983 gen_op_set_cc_op(s->cc_op);
984 s->cc_op = CC_OP_DYNAMIC;
988 /* XXX: does not work with gdbstub "ice" single step - not a
/* Emit the "skip REP body when (E/R)CX == 0" test; returns the label
   placed after the string instruction body. */
990 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
994 l1 = gen_new_label();
995 l2 = gen_new_label();
996 gen_op_jnz_ecx[s->aflag](l1);
998 gen_jmp_tb(s, next_eip, 1);
/* Emit one STOS iteration: store EAX to ES:EDI, advance EDI. */
1003 static inline void gen_stos(DisasContext *s, int ot)
1005 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1006 gen_string_movl_A0_EDI(s);
1007 gen_op_st_T0_A0[ot + s->mem_index]();
1008 gen_op_movl_T0_Dshift[ot]();
1009 #ifdef TARGET_X86_64
1010 if (s->aflag == 2) {
1011 gen_op_addq_EDI_T0();
1015 gen_op_addl_EDI_T0();
1017 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load DS:ESI into EAX, advance ESI. */
1021 static inline void gen_lods(DisasContext *s, int ot)
1023 gen_string_movl_A0_ESI(s);
1024 gen_op_ld_T0_A0[ot + s->mem_index]();
1025 gen_op_mov_reg_T0[ot][R_EAX]();
1026 gen_op_movl_T0_Dshift[ot]();
1027 #ifdef TARGET_X86_64
1028 if (s->aflag == 2) {
1029 gen_op_addq_ESI_T0();
1033 gen_op_addl_ESI_T0();
1035 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: compare EAX with ES:EDI, set flags via
   the SUB comparison, advance EDI. */
1039 static inline void gen_scas(DisasContext *s, int ot)
1041 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1042 gen_string_movl_A0_EDI(s);
1043 gen_op_ld_T1_A0[ot + s->mem_index]();
1044 gen_op_cmpl_T0_T1_cc();
1045 gen_op_movl_T0_Dshift[ot]();
1046 #ifdef TARGET_X86_64
1047 if (s->aflag == 2) {
1048 gen_op_addq_EDI_T0();
1052 gen_op_addl_EDI_T0();
1054 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: compare DS:ESI with ES:EDI, advance both. */
1058 static inline void gen_cmps(DisasContext *s, int ot)
1060 gen_string_movl_A0_ESI(s);
1061 gen_op_ld_T0_A0[ot + s->mem_index]();
1062 gen_string_movl_A0_EDI(s);
1063 gen_op_ld_T1_A0[ot + s->mem_index]();
1064 gen_op_cmpl_T0_T1_cc();
1065 gen_op_movl_T0_Dshift[ot]();
1066 #ifdef TARGET_X86_64
1067 if (s->aflag == 2) {
1068 gen_op_addq_ESI_T0();
1069 gen_op_addq_EDI_T0();
1073 gen_op_addl_ESI_T0();
1074 gen_op_addl_EDI_T0();
1076 gen_op_addw_ESI_T0();
1077 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read port DX into ES:EDI, advance EDI.
   Note the store is emitted twice around the port read (the first
   pre-touches the destination so a fault happens before the I/O). */
1081 static inline void gen_ins(DisasContext *s, int ot)
1083 gen_string_movl_A0_EDI(s);
1085 gen_op_st_T0_A0[ot + s->mem_index]();
1086 gen_op_in_DX_T0[ot]();
1087 gen_op_st_T0_A0[ot + s->mem_index]();
1088 gen_op_movl_T0_Dshift[ot]();
1089 #ifdef TARGET_X86_64
1090 if (s->aflag == 2) {
1091 gen_op_addq_EDI_T0();
1095 gen_op_addl_EDI_T0();
1097 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: write DS:ESI to port DX, advance ESI. */
1101 static inline void gen_outs(DisasContext *s, int ot)
1103 gen_string_movl_A0_ESI(s);
1104 gen_op_ld_T0_A0[ot + s->mem_index]();
1105 gen_op_out_DX_T0[ot]();
1106 gen_op_movl_T0_Dshift[ot]();
1107 #ifdef TARGET_X86_64
1108 if (s->aflag == 2) {
1109 gen_op_addq_ESI_T0();
1113 gen_op_addl_ESI_T0();
1115 gen_op_addw_ESI_T0();
/* REP-prefix expansion: generate one iteration of the string op plus
   the CX decrement and loop-back jump, so each iteration is its own
   translated step (needed for interrupts/single-step, same method as
   Valgrind).  GEN_REPZ2 additionally tests ZF for REPZ/REPNZ on
   SCAS/CMPS via gen_op_string_jnz_sub. */
1119 /* same method as Valgrind : we generate jumps to current or next
1121 #define GEN_REPZ(op) \
1122 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1123 target_ulong cur_eip, target_ulong next_eip) \
1126 gen_update_cc_op(s); \
1127 l2 = gen_jz_ecx_string(s, next_eip); \
1128 gen_ ## op(s, ot); \
1129 gen_op_dec_ECX[s->aflag](); \
1130 /* a loop would cause two single step exceptions if ECX = 1 \
1131 before rep string_insn */ \
1133 gen_op_jz_ecx[s->aflag](l2); \
1134 gen_jmp(s, cur_eip); \
1137 #define GEN_REPZ2(op) \
1138 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1139 target_ulong cur_eip, \
1140 target_ulong next_eip, \
1144 gen_update_cc_op(s); \
1145 l2 = gen_jz_ecx_string(s, next_eip); \
1146 gen_ ## op(s, ot); \
1147 gen_op_dec_ECX[s->aflag](); \
1148 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1149 gen_op_string_jnz_sub[nz][ot](l2);\
1151 gen_op_jz_ecx[s->aflag](l2); \
1152 gen_jmp(s, cur_eip); \
/* Fast conditional-jump ops usable when cc_op is a SUB of the
   matching size; BUGGY_64 entries fall back to the slow path. */
1174 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1205 #ifdef TARGET_X86_64
1208 BUGGY_64(gen_op_jb_subq),
1210 BUGGY_64(gen_op_jbe_subq),
1213 BUGGY_64(gen_op_jl_subq),
1214 BUGGY_64(gen_op_jle_subq),
/* LOOP/LOOPZ/LOOPNZ/JCXZ ops, indexed [address size][variant]. */
1218 static GenOpFunc1 *gen_op_loop[3][4] = {
1229 #ifdef TARGET_X86_64
/* SETcc via full EFLAGS computation (slow path), per jcc_op. */
1238 static GenOpFunc *gen_setcc_slow[8] = {
/* SETcc fast path when cc_op is a SUB of the matching size. */
1249 static GenOpFunc *gen_setcc_sub[4][8] = {
1252 gen_op_setb_T0_subb,
1253 gen_op_setz_T0_subb,
1254 gen_op_setbe_T0_subb,
1255 gen_op_sets_T0_subb,
1257 gen_op_setl_T0_subb,
1258 gen_op_setle_T0_subb,
1262 gen_op_setb_T0_subw,
1263 gen_op_setz_T0_subw,
1264 gen_op_setbe_T0_subw,
1265 gen_op_sets_T0_subw,
1267 gen_op_setl_T0_subw,
1268 gen_op_setle_T0_subw,
1272 gen_op_setb_T0_subl,
1273 gen_op_setz_T0_subl,
1274 gen_op_setbe_T0_subl,
1275 gen_op_sets_T0_subl,
1277 gen_op_setl_T0_subl,
1278 gen_op_setle_T0_subl,
1280 #ifdef TARGET_X86_64
1283 gen_op_setb_T0_subq,
1284 gen_op_setz_T0_subq,
1285 gen_op_setbe_T0_subq,
1286 gen_op_sets_T0_subq,
1288 gen_op_setl_T0_subq,
1289 gen_op_setle_T0_subq,
/* x87 arithmetic on ST0 and FT0, indexed by the 3-bit opcode /reg
   field (FADD, FMUL, FCOM, FCOMP, FSUB, FSUBR, FDIV, FDIVR). */
1294 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1295 gen_op_fadd_ST0_FT0,
1296 gen_op_fmul_ST0_FT0,
1297 gen_op_fcom_ST0_FT0,
1298 gen_op_fcom_ST0_FT0,
1299 gen_op_fsub_ST0_FT0,
1300 gen_op_fsubr_ST0_FT0,
1301 gen_op_fdiv_ST0_FT0,
1302 gen_op_fdivr_ST0_FT0,
1305 /* NOTE the exception in "r" op ordering */
/* ST(i) op ST0 forms: SUB/SUBR and DIV/DIVR are swapped relative to
   the table above, per the x87 encoding. */
1306 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1307 gen_op_fadd_STN_ST0,
1308 gen_op_fmul_STN_ST0,
1311 gen_op_fsubr_STN_ST0,
1312 gen_op_fsub_STN_ST0,
1313 gen_op_fdivr_STN_ST0,
1314 gen_op_fdiv_STN_ST0,
1317 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit an ALU operation 'op' of size 'ot' between T0 (destination
   register d, or memory at A0) and T1, updating the lazy cc state. */
1318 static void gen_op(DisasContext *s1, int op, int ot, int d)
1320 GenOpFunc *gen_update_cc;
1323 gen_op_mov_TN_reg[ot][0][d]();
1325 gen_op_ld_T0_A0[ot + s1->mem_index]();
/* ADC/SBB consume the current carry: flush cc state first */
1330 if (s1->cc_op != CC_OP_DYNAMIC)
1331 gen_op_set_cc_op(s1->cc_op);
1333 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1334 gen_op_mov_reg_T0[ot][d]();
1336 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1338 s1->cc_op = CC_OP_DYNAMIC;
1341 gen_op_addl_T0_T1();
1342 s1->cc_op = CC_OP_ADDB + ot;
1343 gen_update_cc = gen_op_update2_cc;
1346 gen_op_subl_T0_T1();
1347 s1->cc_op = CC_OP_SUBB + ot;
1348 gen_update_cc = gen_op_update2_cc;
/* AND/OR/XOR share the generic dispatch table */
1354 gen_op_arith_T0_T1_cc[op]();
1355 s1->cc_op = CC_OP_LOGICB + ot;
1356 gen_update_cc = gen_op_update1_cc;
1359 gen_op_cmpl_T0_T1_cc();
1360 s1->cc_op = CC_OP_SUBB + ot;
1361 gen_update_cc = NULL;
/* CMP does not write back its result */
1364 if (op != OP_CMPL) {
1366 gen_op_mov_reg_T0[ot][d]();
1368 gen_op_st_T0_A0[ot + s1->mem_index]();
1370 /* the flags update must happen after the memory write (precise
1371 exception support) */
1377 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC of size 'ot' on register d or memory at
   A0.  Carry is preserved, so the old cc state is flushed first. */
1378 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1381 gen_op_mov_TN_reg[ot][0][d]();
1383 gen_op_ld_T0_A0[ot + s1->mem_index]();
1384 if (s1->cc_op != CC_OP_DYNAMIC)
1385 gen_op_set_cc_op(s1->cc_op);
1388 s1->cc_op = CC_OP_INCB + ot;
1391 s1->cc_op = CC_OP_DECB + ot;
1394 gen_op_mov_reg_T0[ot][d]();
1396 gen_op_st_T0_A0[ot + s1->mem_index]();
1397 gen_op_update_inc_cc();
/* Emit a rotate/shift 'op' of size 'ot' on destination d (register
   or memory at A0) by the count in register s.  cc_op becomes
   dynamic because a zero count leaves the flags untouched. */
1400 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1403 gen_op_mov_TN_reg[ot][0][d]();
1405 gen_op_ld_T0_A0[ot + s1->mem_index]();
1407 gen_op_mov_TN_reg[ot][1][s]();
1408 /* for zero counts, flags are not updated, so must do it dynamically */
1409 if (s1->cc_op != CC_OP_DYNAMIC)
1410 gen_op_set_cc_op(s1->cc_op);
1413 gen_op_shift_T0_T1_cc[ot][op]();
1415 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1417 gen_op_mov_reg_T0[ot][d]();
1418 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Shift by an immediate count c: load c into T1 and reuse the
   variable-count path. */
1421 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1423 /* currently not optimized */
1424 gen_op_movl_T1_im(c);
1425 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode a ModRM memory operand and emit the address computation
   into A0, including SIB, displacement, RIP-relative (64-bit) and
   segment-base addition.  Handles 64/32/16-bit address sizes. */
1428 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1436 int mod, rm, code, override, must_add_seg;
1438 override = s->override;
1439 must_add_seg = s->addseg;
1442 mod = (modrm >> 6) & 3;
/* SIB byte: scale / index / base fields */
1454 code = ldub_code(s->pc++);
1455 scale = (code >> 6) & 3;
1456 index = ((code >> 3) & 7) | REX_X(s);
/* mod == 0 with base 101b: 32-bit displacement, no base reg */
1463 if ((base & 7) == 5) {
1465 disp = (int32_t)ldl_code(s->pc);
/* 64-bit mode without SIB: RIP-relative addressing */
1467 if (CODE64(s) && !havesib) {
1468 disp += s->pc + s->rip_offset;
1475 disp = (int8_t)ldub_code(s->pc++);
1479 disp = ldl_code(s->pc);
1485 /* for correct popl handling with esp */
1486 if (base == 4 && s->popl_esp_hack)
1487 disp += s->popl_esp_hack;
1488 #ifdef TARGET_X86_64
1489 if (s->aflag == 2) {
1490 gen_op_movq_A0_reg[base]();
1492 if ((int32_t)disp == disp)
1493 gen_op_addq_A0_im(disp);
1495 gen_op_addq_A0_im64(disp >> 32, disp);
1500 gen_op_movl_A0_reg[base]();
1502 gen_op_addl_A0_im(disp);
/* no base register: absolute displacement */
1505 #ifdef TARGET_X86_64
1506 if (s->aflag == 2) {
1507 if ((int32_t)disp == disp)
1508 gen_op_movq_A0_im(disp);
1510 gen_op_movq_A0_im64(disp >> 32, disp);
1514 gen_op_movl_A0_im(disp);
1517 /* XXX: index == 4 is always invalid */
1518 if (havesib && (index != 4 || scale != 0)) {
1519 #ifdef TARGET_X86_64
1520 if (s->aflag == 2) {
1521 gen_op_addq_A0_reg_sN[scale][index]();
1525 gen_op_addl_A0_reg_sN[scale][index]();
/* EBP/ESP-based addressing defaults to the SS segment */
1530 if (base == R_EBP || base == R_ESP)
1535 #ifdef TARGET_X86_64
1536 if (s->aflag == 2) {
1537 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1541 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* 16-bit address size: classic base/index combinations */
1548 disp = lduw_code(s->pc);
1550 gen_op_movl_A0_im(disp);
1551 rm = 0; /* avoid SS override */
1558 disp = (int8_t)ldub_code(s->pc++);
1562 disp = lduw_code(s->pc);
1568 gen_op_movl_A0_reg[R_EBX]();
1569 gen_op_addl_A0_reg_sN[0][R_ESI]();
1572 gen_op_movl_A0_reg[R_EBX]();
1573 gen_op_addl_A0_reg_sN[0][R_EDI]();
1576 gen_op_movl_A0_reg[R_EBP]();
1577 gen_op_addl_A0_reg_sN[0][R_ESI]();
1580 gen_op_movl_A0_reg[R_EBP]();
1581 gen_op_addl_A0_reg_sN[0][R_EDI]();
1584 gen_op_movl_A0_reg[R_ESI]();
1587 gen_op_movl_A0_reg[R_EDI]();
1590 gen_op_movl_A0_reg[R_EBP]();
1594 gen_op_movl_A0_reg[R_EBX]();
1598 gen_op_addl_A0_im(disp);
1599 gen_op_andl_A0_ffff();
/* BP-based forms (rm 2,3,6) default to SS */
1603 if (rm == 2 || rm == 3 || rm == 6)
1608 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Skip over a ModRM operand without generating any code (used for
   multi-byte NOP/hint encodings); body largely elided in this excerpt. */
1618 static void gen_nop_modrm(DisasContext *s, int modrm)
1620 int mod, rm, base, code;
1622 mod = (modrm >> 6) & 3;
1632 code = ldub_code(s->pc++);
1668 /* used for LEA and MOV AX, mem */
/* Add the DS (or overridden) segment base to A0 when segmented
   addressing is in effect. */
1669 static void gen_add_A0_ds_seg(DisasContext *s)
1671 int override, must_add_seg;
1672 must_add_seg = s->addseg;
1674 if (s->override >= 0) {
1675 override = s->override;
1681 #ifdef TARGET_X86_64
1683 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1687 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1692 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* mod == 3 means register-to-register; otherwise compute the
   effective address and load/store through A0. */
1694 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1696 int mod, rm, opreg, disp;
1698 mod = (modrm >> 6) & 3;
1699 rm = (modrm & 7) | REX_B(s);
1703 gen_op_mov_TN_reg[ot][0][reg]();
1704 gen_op_mov_reg_T0[ot][rm]();
1706 gen_op_mov_TN_reg[ot][0][rm]();
1708 gen_op_mov_reg_T0[ot][reg]();
1711 gen_lea_modrm(s, modrm, &opreg, &disp);
1714 gen_op_mov_TN_reg[ot][0][reg]();
1715 gen_op_st_T0_A0[ot + s->mem_index]();
1717 gen_op_ld_T0_A0[ot + s->mem_index]();
1719 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size 'ot' from the instruction stream. */
1724 static inline uint32_t insn_get(DisasContext *s, int ot)
1730 ret = ldub_code(s->pc);
1734 ret = lduw_code(s->pc);
1739 ret = ldl_code(s->pc);
/* Byte length of an immediate of size 'ot' (body elided here). */
1746 static inline int insn_const_size(unsigned int ot)
/* Jump to 'eip': use direct TB chaining when the target lies in the
   same guest page as this TB (or the current insn), otherwise fall
   back to an indirect jump. */
1754 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1756 TranslationBlock *tb;
1759 pc = s->cs_base + eip;
1761 /* NOTE: we handle the case where the TB spans two pages here */
1762 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1763 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1764 /* jump to same page: we can use a direct jump */
1766 gen_op_goto_tb0(TBPARAM(tb));
1768 gen_op_goto_tb1(TBPARAM(tb));
1770 gen_op_movl_T0_im((long)tb + tb_num);
1773 /* jump to another page: currently not optimized */
/* Emit a conditional jump for condition 'b': taken -> val, not
   taken -> next_eip.  Uses the fast cmp/jcc tables when cc_op
   matches a SUB/ADD form, otherwise computes the condition slowly
   into T0 and branches on it. */
1779 static inline void gen_jcc(DisasContext *s, int b,
1780 target_ulong val, target_ulong next_eip)
1782 TranslationBlock *tb;
1789 jcc_op = (b >> 1) & 7;
1793 /* we optimize the cmp/jcc case */
1798 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1801 /* some jumps are easy to compute */
1843 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1846 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
/* slow path: materialize the condition into T0 */
1858 if (s->cc_op != CC_OP_DYNAMIC) {
1859 gen_op_set_cc_op(s->cc_op);
1860 s->cc_op = CC_OP_DYNAMIC;
1864 gen_setcc_slow[jcc_op]();
1865 func = gen_op_jnz_T0_label;
/* direct-chaining variant: two goto_tb exits */
1875 l1 = gen_new_label();
1878 gen_goto_tb(s, 0, next_eip);
1881 gen_goto_tb(s, 1, val);
1886 if (s->cc_op != CC_OP_DYNAMIC) {
1887 gen_op_set_cc_op(s->cc_op);
1888 s->cc_op = CC_OP_DYNAMIC;
1890 gen_setcc_slow[jcc_op]();
/* non-chained variant: explicit labels and EIP stores */
1896 l1 = gen_new_label();
1897 l2 = gen_new_label();
1898 gen_op_jnz_T0_label(l1);
1899 gen_jmp_im(next_eip);
1900 gen_op_jmp_label(l2);
/* Emit SETcc for condition 'b' into T0, using the fast sub/add
   tables when possible, else the slow EFLAGS-based path. */
1908 static void gen_setcc(DisasContext *s, int b)
1914 jcc_op = (b >> 1) & 7;
1916 /* we optimize the cmp/jcc case */
1921 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1926 /* some jumps are easy to compute */
1953 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1956 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1964 if (s->cc_op != CC_OP_DYNAMIC)
1965 gen_op_set_cc_op(s->cc_op);
1966 func = gen_setcc_slow[jcc_op];
1975 /* move T0 to seg_reg and compute if the CPU state may change. Never
1976 call this function with seg_reg == R_CS */
1977 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
/* protected mode: segment load may fault, go through the helper */
1979 if (s->pe && !s->vm86) {
1980 /* XXX: optimize by finding processor state dynamically */
1981 if (s->cc_op != CC_OP_DYNAMIC)
1982 gen_op_set_cc_op(s->cc_op);
1983 gen_jmp_im(cur_eip);
1984 gen_op_movl_seg_T0(seg_reg);
1985 /* abort translation because the addseg value may change or
1986 because ss32 may change. For R_SS, translation must always
1987 stop as a special handling must be done to disable hardware
1988 interrupts for the next instruction */
1989 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
/* real/vm86 mode: direct selector load, no checks */
1992 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1993 if (seg_reg == R_SS)
/* Adjust (E/R)SP by 'addend', using specialized micro-ops for the
   common +2/+4/+8 cases and width-appropriate arithmetic otherwise. */
1998 static inline void gen_stack_update(DisasContext *s, int addend)
2000 #ifdef TARGET_X86_64
2003 gen_op_addq_ESP_8();
2005 gen_op_addq_ESP_im(addend);
/* 32-bit stack segment */
2010 gen_op_addl_ESP_2();
2011 else if (addend == 4)
2012 gen_op_addl_ESP_4();
2014 gen_op_addl_ESP_im(addend);
/* 16-bit stack segment */
2017 gen_op_addw_ESP_2();
2018 else if (addend == 4)
2019 gen_op_addw_ESP_4();
2021 gen_op_addw_ESP_im(addend);
2025 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0: decrement the stack pointer, store T0 at SS:ESP, then
   write back the new ESP.  T1 holds the new offset when the SS base
   must be added separately (addseg / 16-bit stacks). */
2026 static void gen_push_T0(DisasContext *s)
2028 #ifdef TARGET_X86_64
2030 gen_op_movq_A0_reg[R_ESP]();
2033 gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
2036 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2038 gen_op_movq_ESP_A0();
2042 gen_op_movl_A0_reg[R_ESP]();
/* keep the raw offset in T1 before adding the SS base to A0 */
2049 gen_op_movl_T1_A0();
2050 gen_op_addl_A0_SS();
/* 16-bit stack: wrap the offset to 64K first */
2053 gen_op_andl_A0_ffff();
2054 gen_op_movl_T1_A0();
2055 gen_op_addl_A0_SS();
2057 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
2058 if (s->ss32 && !s->addseg)
2059 gen_op_movl_ESP_A0();
2061 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
2065 /* generate a push. It depends on ss32, addseg and dflag */
2066 /* slower version for T1, only used for call Ev */
/* Same stack-push sequence as gen_push_T0 but the pushed value is
   in T1 (T0 stays free for the call target). */
2067 static void gen_push_T1(DisasContext *s)
2069 #ifdef TARGET_X86_64
2071 gen_op_movq_A0_reg[R_ESP]();
2074 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
/* NOTE(review): this 16-bit-operand path in 64-bit mode stores T0,
   not T1, although this helper pushes T1 — looks like a copy-paste
   from gen_push_T0 and should probably be gen_op_st_T1_A0; confirm
   against upstream before changing. */
2077 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2079 gen_op_movq_ESP_A0();
2083 gen_op_movl_A0_reg[R_ESP]();
2090 gen_op_addl_A0_SS();
2093 gen_op_andl_A0_ffff();
2094 gen_op_addl_A0_SS();
2096 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2098 if (s->ss32 && !s->addseg)
2099 gen_op_movl_ESP_A0();
2101 gen_stack_update(s, (-2) << s->dflag);
2105 /* two step pop is necessary for precise exceptions */
/* Load the value at SS:ESP into T0 without moving ESP; the caller
   emits gen_pop_update afterwards so a fault leaves ESP intact. */
2106 static void gen_pop_T0(DisasContext *s)
2108 #ifdef TARGET_X86_64
2110 gen_op_movq_A0_reg[R_ESP]();
2111 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2115 gen_op_movl_A0_reg[R_ESP]();
2118 gen_op_addl_A0_SS();
2120 gen_op_andl_A0_ffff();
2121 gen_op_addl_A0_SS();
2123 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
/* Second half of the pop: advance ESP by the operand size. */
2127 static void gen_pop_update(DisasContext *s)
2129 #ifdef TARGET_X86_64
2130 if (CODE64(s) && s->dflag) {
2131 gen_stack_update(s, 8);
2135 gen_stack_update(s, 2 << s->dflag);
/* Compute the current stack top address into A0 (and the raw offset
   into T1), adding the SS base when segmented addressing applies. */
2139 static void gen_stack_A0(DisasContext *s)
2141 gen_op_movl_A0_ESP();
2143 gen_op_andl_A0_ffff();
2144 gen_op_movl_T1_A0();
2146 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2149 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA: store EAX..EDI (in reverse register order, 7-i) below the
 * current stack pointer, then commit the new ESP kept in T1. */
2150 static void gen_pusha(DisasContext *s)
2153 gen_op_movl_A0_ESP();
/* pre-decrement by 8 registers * operand size */
2154 gen_op_addl_A0_im(-16 << s->dflag);
2156 gen_op_andl_A0_ffff();
2157 gen_op_movl_T1_A0();
2159 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2160 for(i = 0;i < 8; i++) {
2161 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2162 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2163 gen_op_addl_A0_im(2 << s->dflag);
2165 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2168 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA: reload EDI..EAX from the stack (ESP itself is skipped) and
 * commit the adjusted stack pointer kept in T1. */
2169 static void gen_popa(DisasContext *s)
2172 gen_op_movl_A0_ESP();
2174 gen_op_andl_A0_ffff();
2175 gen_op_movl_T1_A0();
/* final ESP = old ESP + 8 registers * operand size */
2176 gen_op_addl_T1_im(16 << s->dflag);
2178 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2179 for(i = 0;i < 8; i++) {
2180 /* ESP is not reloaded */
2182 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2183 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2185 gen_op_addl_A0_im(2 << s->dflag);
2187 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* ENTER: push EBP, optionally copy 'level' frame pointers (helper op),
 * set EBP to the new frame and reserve esp_addend bytes of locals.
 * Separate 64-bit and legacy paths; #else/#endif lines are elided here. */
2190 static void gen_enter(DisasContext *s, int esp_addend, int level)
2195 #ifdef TARGET_X86_64
2197 ot = s->dflag ? OT_QUAD : OT_WORD;
2200 gen_op_movl_A0_ESP();
2201 gen_op_addq_A0_im(-opsize);
2202 gen_op_movl_T1_A0();
/* push old EBP */
2205 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2206 gen_op_st_T0_A0[ot + s->mem_index]();
/* nested-frame copies are done by a helper when level != 0 */
2208 gen_op_enter64_level(level, (ot == OT_QUAD));
2210 gen_op_mov_reg_T1[ot][R_EBP]();
2211 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2212 gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
/* legacy (16/32-bit) path */
2216 ot = s->dflag + OT_WORD;
2217 opsize = 2 << s->dflag;
2219 gen_op_movl_A0_ESP();
2220 gen_op_addl_A0_im(-opsize);
2222 gen_op_andl_A0_ffff();
2223 gen_op_movl_T1_A0();
2225 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2227 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2228 gen_op_st_T0_A0[ot + s->mem_index]();
2230 gen_op_enter_level(level, s->dflag);
2232 gen_op_mov_reg_T1[ot][R_EBP]();
2233 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2234 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* Raise guest exception 'trapno' at cur_eip: flush lazy condition codes,
 * set EIP, then emit the raise op (which longjmps out at run time). */
2238 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2240 if (s->cc_op != CC_OP_DYNAMIC)
2241 gen_op_set_cc_op(s->cc_op);
2242 gen_jmp_im(cur_eip);
2243 gen_op_raise_exception(trapno);
2247 /* an interrupt is different from an exception because of the
2248 privilege checks */
2249 static void gen_interrupt(DisasContext *s, int intno,
2250 target_ulong cur_eip, target_ulong next_eip)
2252 if (s->cc_op != CC_OP_DYNAMIC)
/* flush lazily-computed condition codes before leaving the TB */
2253 gen_op_set_cc_op(s->cc_op);
2254 gen_jmp_im(cur_eip);
/* the next-EIP delta lets the handler compute the return address */
2255 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
/* Emit ops to stop at a debug point: sync condition codes and EIP.
 * (The raise op itself is on a line elided from this listing.) */
2259 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2261 if (s->cc_op != CC_OP_DYNAMIC)
2262 gen_op_set_cc_op(s->cc_op)
2263 gen_jmp_im(cur_eip);
2268 /* generate a generic end of block. Trace exception is also generated
2270 static void gen_eob(DisasContext *s)
2272 if (s->cc_op != CC_OP_DYNAMIC)
2273 gen_op_set_cc_op(s->cc_op);
/* clear the one-instruction interrupt-inhibit state (MOV SS / STI) */
2274 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2275 gen_op_reset_inhibit_irq();
2277 if (s->singlestep_enabled) {
/* single-step: raise #DB instead of chaining to the next TB */
2280 gen_op_raise_exception(EXCP01_SSTP);
2288 /* generate a jump to eip. No segment change must happen before as a
2289 direct call to the next block may occur */
2290 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2293 if (s->cc_op != CC_OP_DYNAMIC) {
2294 gen_op_set_cc_op(s->cc_op);
/* cc state was flushed; mark it dynamic so it is not flushed twice */
2295 s->cc_op = CC_OP_DYNAMIC;
2297 gen_goto_tb(s, tb_num, eip);
/* Convenience wrapper: direct jump using TB-chaining slot 0. */
2305 static void gen_jmp(DisasContext *s, target_ulong eip)
2307 gen_jmp_tb(s, eip, 0);
/* Load a target_ulong immediate into T0; on x86_64 a 64-bit move op is
 * only emitted when the value does not fit in a sign-extended 32 bits. */
2310 static void gen_movtl_T0_im(target_ulong val)
2312 #ifdef TARGET_X86_64
2313 if ((int32_t)val == val) {
2314 gen_op_movl_T0_im(val);
2316 gen_op_movq_T0_im64(val >> 32, val);
2319 gen_op_movl_T0_im(val);
/* Same as gen_movtl_T0_im() but targets T1. */
2323 static void gen_movtl_T1_im(target_ulong val)
2325 #ifdef TARGET_X86_64
2326 if ((int32_t)val == val) {
2327 gen_op_movl_T1_im(val);
2329 gen_op_movq_T1_im64(val >> 32, val);
2332 gen_op_movl_T1_im(val);
/* Add an immediate to A0, using 64-bit arithmetic in long mode. */
2336 static void gen_add_A0_im(DisasContext *s, int val)
2338 #ifdef TARGET_X86_64
2340 gen_op_addq_A0_im(val);
2343 gen_op_addl_A0_im(val);
/* 64-bit load from guest address A0 into an env field, indexed by MMU
 * mode (raw / kernel / user — the latter two only in softmmu builds). */
2346 static GenOpFunc1 *gen_ldq_env_A0[3] = {
2347 gen_op_ldq_raw_env_A0,
2348 #ifndef CONFIG_USER_ONLY
2349 gen_op_ldq_kernel_env_A0,
2350 gen_op_ldq_user_env_A0,
/* 64-bit store of an env field to guest address A0, by MMU mode. */
2354 static GenOpFunc1 *gen_stq_env_A0[3] = {
2355 gen_op_stq_raw_env_A0,
2356 #ifndef CONFIG_USER_ONLY
2357 gen_op_stq_kernel_env_A0,
2358 gen_op_stq_user_env_A0,
/* 128-bit (octa) load from guest address A0 into an env field, by MMU mode. */
2362 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2363 gen_op_ldo_raw_env_A0,
2364 #ifndef CONFIG_USER_ONLY
2365 gen_op_ldo_kernel_env_A0,
2366 gen_op_ldo_user_env_A0,
/* 128-bit (octa) store of an env field to guest address A0, by MMU mode. */
2370 static GenOpFunc1 *gen_sto_env_A0[3] = {
2371 gen_op_sto_raw_env_A0,
2372 #ifndef CONFIG_USER_ONLY
2373 gen_op_sto_kernel_env_A0,
2374 gen_op_sto_user_env_A0,
/* Sentinel (not a callable pointer!) marking table entries that need
 * hand-written handling in gen_sse() instead of a generic op call. */
2378 #define SSE_SPECIAL ((GenOpFunc2 *)1)
/* Pair of generated ops: index 0 = MMX form, index 1 = SSE (xmm) form. */
2380 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Four FP variants selected by prefix: ps (none), pd (66), ss (F3), sd (F2). */
2381 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2382 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
/* Main 0x0F-opcode dispatch table for MMX/SSE, indexed by [opcode][b1]
 * where b1 encodes the mandatory prefix: 0 = none, 1 = 0x66, 2 = 0xF3,
 * 3 = 0xF2. NULL = invalid combination; SSE_SPECIAL = handled inline. */
2384 static GenOpFunc2 *sse_op_table1[256][4] = {
2385 /* pure SSE operations */
2386 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2387 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2388 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2389 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2390 [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2391 [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2392 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2393 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2395 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2396 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2397 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2398 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2399 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2400 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2401 [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2402 [0x2f] = { gen_op_comiss, gen_op_comisd },
2403 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2404 [0x51] = SSE_FOP(sqrt),
2405 [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2406 [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2407 [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2408 [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2409 [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2410 [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2411 [0x58] = SSE_FOP(add),
2412 [0x59] = SSE_FOP(mul),
2413 [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
2414 gen_op_cvtss2sd, gen_op_cvtsd2ss },
2415 [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2416 [0x5c] = SSE_FOP(sub),
2417 [0x5d] = SSE_FOP(min),
2418 [0x5e] = SSE_FOP(div),
2419 [0x5f] = SSE_FOP(max),
2421 [0xc2] = SSE_FOP(cmpeq),
2422 [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2424 /* MMX ops and their SSE extensions */
2425 [0x60] = MMX_OP2(punpcklbw),
2426 [0x61] = MMX_OP2(punpcklwd),
2427 [0x62] = MMX_OP2(punpckldq),
2428 [0x63] = MMX_OP2(packsswb),
2429 [0x64] = MMX_OP2(pcmpgtb),
2430 [0x65] = MMX_OP2(pcmpgtw),
2431 [0x66] = MMX_OP2(pcmpgtl),
2432 [0x67] = MMX_OP2(packuswb),
2433 [0x68] = MMX_OP2(punpckhbw),
2434 [0x69] = MMX_OP2(punpckhwd),
2435 [0x6a] = MMX_OP2(punpckhdq),
2436 [0x6b] = MMX_OP2(packssdw),
2437 [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2438 [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2439 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2440 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2441 [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
2442 (GenOpFunc2 *)gen_op_pshufd_xmm,
2443 (GenOpFunc2 *)gen_op_pshufhw_xmm,
2444 (GenOpFunc2 *)gen_op_pshuflw_xmm },
2445 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2446 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2447 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2448 [0x74] = MMX_OP2(pcmpeqb),
2449 [0x75] = MMX_OP2(pcmpeqw),
2450 [0x76] = MMX_OP2(pcmpeql),
2451 [0x77] = { SSE_SPECIAL }, /* emms */
2452 [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2453 [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2454 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2455 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2456 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2457 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2458 [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2459 [0xd1] = MMX_OP2(psrlw),
2460 [0xd2] = MMX_OP2(psrld),
2461 [0xd3] = MMX_OP2(psrlq),
2462 [0xd4] = MMX_OP2(paddq),
2463 [0xd5] = MMX_OP2(pmullw),
2464 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2465 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2466 [0xd8] = MMX_OP2(psubusb),
2467 [0xd9] = MMX_OP2(psubusw),
2468 [0xda] = MMX_OP2(pminub),
2469 [0xdb] = MMX_OP2(pand),
2470 [0xdc] = MMX_OP2(paddusb),
2471 [0xdd] = MMX_OP2(paddusw),
2472 [0xde] = MMX_OP2(pmaxub),
2473 [0xdf] = MMX_OP2(pandn),
2474 [0xe0] = MMX_OP2(pavgb),
2475 [0xe1] = MMX_OP2(psraw),
2476 [0xe2] = MMX_OP2(psrad),
2477 [0xe3] = MMX_OP2(pavgw),
2478 [0xe4] = MMX_OP2(pmulhuw),
2479 [0xe5] = MMX_OP2(pmulhw),
2480 [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2481 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */
2482 [0xe8] = MMX_OP2(psubsb),
2483 [0xe9] = MMX_OP2(psubsw),
2484 [0xea] = MMX_OP2(pminsw),
2485 [0xeb] = MMX_OP2(por),
2486 [0xec] = MMX_OP2(paddsb),
2487 [0xed] = MMX_OP2(paddsw),
2488 [0xee] = MMX_OP2(pmaxsw),
2489 [0xef] = MMX_OP2(pxor),
2490 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2491 [0xf1] = MMX_OP2(psllw),
2492 [0xf2] = MMX_OP2(pslld),
2493 [0xf3] = MMX_OP2(psllq),
2494 [0xf4] = MMX_OP2(pmuludq),
2495 [0xf5] = MMX_OP2(pmaddwd),
2496 [0xf6] = MMX_OP2(psadbw),
2497 [0xf7] = MMX_OP2(maskmov),
2498 [0xf8] = MMX_OP2(psubb),
2499 [0xf9] = MMX_OP2(psubw),
2500 [0xfa] = MMX_OP2(psubl),
2501 [0xfb] = MMX_OP2(psubq),
2502 [0xfc] = MMX_OP2(paddb),
2503 [0xfd] = MMX_OP2(paddw),
2504 [0xfe] = MMX_OP2(paddl),
/* Shift-by-immediate group (0x71/0x72/0x73): index is
 * (group * 8) + modrm.reg, second index 0 = MMX, 1 = XMM.
 * psrldq/pslldq (byte shifts) exist only in the XMM column. */
2507 static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2508 [0 + 2] = MMX_OP2(psrlw),
2509 [0 + 4] = MMX_OP2(psraw),
2510 [0 + 6] = MMX_OP2(psllw),
2511 [8 + 2] = MMX_OP2(psrld),
2512 [8 + 4] = MMX_OP2(psrad),
2513 [8 + 6] = MMX_OP2(pslld),
2514 [16 + 2] = MMX_OP2(psrlq),
2515 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2516 [16 + 6] = MMX_OP2(psllq),
2517 [16 + 7] = { NULL, gen_op_pslldq_xmm },
/* Scalar int<->float conversions (cvtsi2ss/sd, cvttss/sd2si, cvtss/sd2si);
 * the 64-bit (quad) variants exist only on x86_64 hosts (X86_64_ONLY).
 * 32-bit entries are on lines elided from this listing. */
2520 static GenOpFunc1 *sse_op_table3[4 * 3] = {
2523 X86_64_ONLY(gen_op_cvtsq2ss),
2524 X86_64_ONLY(gen_op_cvtsq2sd),
2528 X86_64_ONLY(gen_op_cvttss2sq),
2529 X86_64_ONLY(gen_op_cvttsd2sq),
2533 X86_64_ONLY(gen_op_cvtss2sq),
2534 X86_64_ONLY(gen_op_cvtsd2sq),
2537 static GenOpFunc2 *sse_op_table4[8][4] = {
/* Decode and translate one MMX/SSE/SSE2/SSE3 instruction (opcode byte 'b',
 * already past the 0x0F escape). b1 selects the mandatory-prefix column of
 * sse_op_table1 (0=none, 1=66, 2=F3, 3=F2); SSE_SPECIAL entries are handled
 * by the big switch below, everything else goes through the generic
 * register/memory operand setup at the end.
 *
 * FIX(review): every gen_lea_modrm() call had its third argument corrupted
 * by HTML-entity decoding ("&reg" -> the registered-trademark sign), i.e.
 * "®_addr" — restored to "&reg_addr" throughout. Also corrected the
 * case-0x12b comment: with the 0x66 prefix this is movntpd, not movntps
 * (consistent with the sse_op_table1 entry for 0x2b). */
2548 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2550 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2551 int modrm, mod, rm, reg, reg_addr, offset_addr;
2552 GenOpFunc2 *sse_op2;
2553 GenOpFunc3 *sse_op3;
/* map mandatory prefix to table column b1 */
2556 if (s->prefix & PREFIX_DATA)
2558 else if (s->prefix & PREFIX_REPZ)
2560 else if (s->prefix & PREFIX_REPNZ)
2564 sse_op2 = sse_op_table1[b][b1];
2567 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2577 /* simple MMX/SSE operation */
/* CR0.TS set: device-not-available fault before touching FP state */
2578 if (s->flags & HF_TS_MASK) {
2579 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2582 if (s->flags & HF_EM_MASK) {
2584 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
/* SSE requires CR4.OSFXSR to be set */
2587 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2594 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2595 the static cpu state) */
2600 modrm = ldub_code(s->pc++);
2601 reg = ((modrm >> 3) & 7);
2604 mod = (modrm >> 6) & 3;
2605 if (sse_op2 == SSE_SPECIAL) {
/* hand-written handling; case labels are prefix-column << 8 | opcode */
2608 case 0x0e7: /* movntq */
2611 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2612 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2614 case 0x1e7: /* movntdq */
2615 case 0x02b: /* movntps */
2616 case 0x12b: /* movntpd */
2617 case 0x3f0: /* lddqu */
2620 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2621 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2623 case 0x6e: /* movd mm, ea */
2624 #ifdef TARGET_X86_64
2625 if (s->dflag == 2) {
2626 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2627 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2631 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2632 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2635 case 0x16e: /* movd xmm, ea */
2636 #ifdef TARGET_X86_64
2637 if (s->dflag == 2) {
2638 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2639 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2643 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2644 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2647 case 0x6f: /* movq mm, ea */
2649 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2650 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2653 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2654 offsetof(CPUX86State,fpregs[rm].mmx));
2657 case 0x010: /* movups */
2658 case 0x110: /* movupd */
2659 case 0x028: /* movaps */
2660 case 0x128: /* movapd */
2661 case 0x16f: /* movdqa xmm, ea */
2662 case 0x26f: /* movdqu xmm, ea */
2664 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2665 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2667 rm = (modrm & 7) | REX_B(s);
2668 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2669 offsetof(CPUX86State,xmm_regs[rm]));
2672 case 0x210: /* movss xmm, ea */
/* from memory: load low dword, zero the upper three lanes */
2674 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2675 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2676 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2678 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2679 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2680 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2682 rm = (modrm & 7) | REX_B(s);
2683 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2684 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2687 case 0x310: /* movsd xmm, ea */
2689 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2690 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2692 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2693 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2695 rm = (modrm & 7) | REX_B(s);
2696 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2697 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2700 case 0x012: /* movlps */
2701 case 0x112: /* movlpd */
2703 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2704 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
/* register form of 0x12 is movhlps: high qword of rm -> low of reg */
2707 rm = (modrm & 7) | REX_B(s);
2708 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2709 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2712 case 0x212: /* movsldup */
2714 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2715 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2717 rm = (modrm & 7) | REX_B(s);
2718 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2719 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2720 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2721 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
/* duplicate even lanes into the odd lanes */
2723 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2724 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2725 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2726 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2728 case 0x312: /* movddup */
2730 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2731 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2733 rm = (modrm & 7) | REX_B(s);
2734 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2735 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2737 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2738 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2740 case 0x016: /* movhps */
2741 case 0x116: /* movhpd */
2743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2744 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
/* register form of 0x16 is movlhps: low qword of rm -> high of reg */
2747 rm = (modrm & 7) | REX_B(s);
2748 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2749 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2752 case 0x216: /* movshdup */
2754 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2755 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2757 rm = (modrm & 7) | REX_B(s);
2758 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2759 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2760 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2761 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
/* duplicate odd lanes into the even lanes */
2763 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2764 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2765 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2766 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2768 case 0x7e: /* movd ea, mm */
2769 #ifdef TARGET_X86_64
2770 if (s->dflag == 2) {
2771 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2772 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2776 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2777 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2780 case 0x17e: /* movd ea, xmm */
2781 #ifdef TARGET_X86_64
2782 if (s->dflag == 2) {
2783 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2784 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2788 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2789 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2792 case 0x27e: /* movq xmm, ea */
2794 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2795 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2797 rm = (modrm & 7) | REX_B(s);
2798 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2799 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
/* movq always zeroes the destination's high qword */
2801 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2803 case 0x7f: /* movq ea, mm */
2805 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2806 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2809 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2810 offsetof(CPUX86State,fpregs[reg].mmx));
2813 case 0x011: /* movups */
2814 case 0x111: /* movupd */
2815 case 0x029: /* movaps */
2816 case 0x129: /* movapd */
2817 case 0x17f: /* movdqa ea, xmm */
2818 case 0x27f: /* movdqu ea, xmm */
2820 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2821 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2823 rm = (modrm & 7) | REX_B(s);
2824 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2825 offsetof(CPUX86State,xmm_regs[reg]));
2828 case 0x211: /* movss ea, xmm */
2830 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2831 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2832 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2834 rm = (modrm & 7) | REX_B(s);
2835 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2836 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2839 case 0x311: /* movsd ea, xmm */
2841 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2842 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2844 rm = (modrm & 7) | REX_B(s);
2845 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2846 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2849 case 0x013: /* movlps */
2850 case 0x113: /* movlpd */
2852 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2853 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2858 case 0x017: /* movhps */
2859 case 0x117: /* movhpd */
2861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2862 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2867 case 0x71: /* shift mm, im */
2870 case 0x171: /* shift xmm, im */
/* immediate-count shifts: materialize the count in xmm_t0/mmx_t0 */
2873 val = ldub_code(s->pc++);
2875 gen_op_movl_T0_im(val);
2876 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2878 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2879 op1_offset = offsetof(CPUX86State,xmm_t0);
2881 gen_op_movl_T0_im(val);
2882 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2884 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2885 op1_offset = offsetof(CPUX86State,mmx_t0);
2887 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2891 rm = (modrm & 7) | REX_B(s);
2892 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2895 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2897 sse_op2(op2_offset, op1_offset);
2899 case 0x050: /* movmskps */
2900 rm = (modrm & 7) | REX_B(s);
2901 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2902 gen_op_mov_reg_T0[OT_LONG][reg]();
2904 case 0x150: /* movmskpd */
2905 rm = (modrm & 7) | REX_B(s);
2906 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2907 gen_op_mov_reg_T0[OT_LONG][reg]();
2909 case 0x02a: /* cvtpi2ps */
2910 case 0x12a: /* cvtpi2pd */
2913 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2914 op2_offset = offsetof(CPUX86State,mmx_t0);
2915 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2918 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2920 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2923 gen_op_cvtpi2ps(op1_offset, op2_offset);
2927 gen_op_cvtpi2pd(op1_offset, op2_offset);
2931 case 0x22a: /* cvtsi2ss */
2932 case 0x32a: /* cvtsi2sd */
2933 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2934 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2935 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2936 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2938 case 0x02c: /* cvttps2pi */
2939 case 0x12c: /* cvttpd2pi */
2940 case 0x02d: /* cvtps2pi */
2941 case 0x12d: /* cvtpd2pi */
2944 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2945 op2_offset = offsetof(CPUX86State,xmm_t0);
2946 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2948 rm = (modrm & 7) | REX_B(s);
2949 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2951 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2954 gen_op_cvttps2pi(op1_offset, op2_offset);
2957 gen_op_cvttpd2pi(op1_offset, op2_offset);
2960 gen_op_cvtps2pi(op1_offset, op2_offset);
2963 gen_op_cvtpd2pi(op1_offset, op2_offset);
2967 case 0x22c: /* cvttss2si */
2968 case 0x32c: /* cvttsd2si */
2969 case 0x22d: /* cvtss2si */
2970 case 0x32d: /* cvtsd2si */
2971 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2973 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2975 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2977 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2978 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2980 op2_offset = offsetof(CPUX86State,xmm_t0);
2982 rm = (modrm & 7) | REX_B(s);
2983 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2985 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2986 (b & 1) * 4](op2_offset);
2987 gen_op_mov_reg_T0[ot][reg]();
2989 case 0xc4: /* pinsrw */
2992 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2993 val = ldub_code(s->pc++);
2996 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2999 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3002 case 0xc5: /* pextrw */
3006 val = ldub_code(s->pc++);
3009 rm = (modrm & 7) | REX_B(s);
3010 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3014 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3016 reg = ((modrm >> 3) & 7) | rex_r;
3017 gen_op_mov_reg_T0[OT_LONG][reg]();
3019 case 0x1d6: /* movq ea, xmm */
3021 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3022 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3024 rm = (modrm & 7) | REX_B(s);
3025 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3026 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3027 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3030 case 0x2d6: /* movq2dq */
3033 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3034 offsetof(CPUX86State,fpregs[rm].mmx));
3035 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3037 case 0x3d6: /* movdq2q */
3039 rm = (modrm & 7) | REX_B(s);
3040 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3041 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3043 case 0xd7: /* pmovmskb */
3048 rm = (modrm & 7) | REX_B(s);
3049 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3052 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3054 reg = ((modrm >> 3) & 7) | rex_r;
3055 gen_op_mov_reg_T0[OT_LONG][reg]();
3061 /* generic MMX or SSE operation */
3064 /* maskmov : we must prepare A0 */
/* maskmovq/maskmovdqu implicitly address [EDI]/[RDI] */
3067 #ifdef TARGET_X86_64
3068 if (s->aflag == 2) {
3069 gen_op_movq_A0_reg[R_EDI]();
3073 gen_op_movl_A0_reg[R_EDI]();
3075 gen_op_andl_A0_ffff();
3077 gen_add_A0_ds_seg(s);
3079 case 0x70: /* pshufx insn */
3080 case 0xc6: /* pshufx insn */
3081 case 0xc2: /* compare insns */
3088 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3090 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3091 op2_offset = offsetof(CPUX86State,xmm_t0);
3092 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3094 /* specific case for SSE single instructions */
/* scalar ss/sd insns only read 32/64 bits from memory */
3097 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3098 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3101 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3104 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3107 rm = (modrm & 7) | REX_B(s);
3108 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3111 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3113 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3114 op2_offset = offsetof(CPUX86State,mmx_t0);
3115 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3118 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3122 case 0x70: /* pshufx insn */
3123 case 0xc6: /* pshufx insn */
/* shuffles take an extra immediate: use the 3-operand op form */
3124 val = ldub_code(s->pc++);
3125 sse_op3 = (GenOpFunc3 *)sse_op2;
3126 sse_op3(op1_offset, op2_offset, val);
3130 val = ldub_code(s->pc++);
3133 sse_op2 = sse_op_table4[val][b1];
3134 sse_op2(op1_offset, op2_offset);
3137 sse_op2(op1_offset, op2_offset);
3140 if (b == 0x2e || b == 0x2f) {
/* (u)comiss/(u)comisd set EFLAGS directly */
3141 s->cc_op = CC_OP_EFLAGS;
3147 /* convert one instruction. s->is_jmp is set if the translation must
3148 be stopped. Return the next pc value */
3149 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3151 int b, prefixes, aflag, dflag;
3153 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3154 target_ulong next_eip, tval;
3164 #ifdef TARGET_X86_64
3169 s->rip_offset = 0; /* for relative ip address */
3171 b = ldub_code(s->pc);
3173 /* check prefixes */
3174 #ifdef TARGET_X86_64
3178 prefixes |= PREFIX_REPZ;
3181 prefixes |= PREFIX_REPNZ;
3184 prefixes |= PREFIX_LOCK;
3205 prefixes |= PREFIX_DATA;
3208 prefixes |= PREFIX_ADR;
3212 rex_w = (b >> 3) & 1;
3213 rex_r = (b & 0x4) << 1;
3214 s->rex_x = (b & 0x2) << 2;
3215 REX_B(s) = (b & 0x1) << 3;
3216 x86_64_hregs = 1; /* select uniform byte register addressing */
3220 /* 0x66 is ignored if rex.w is set */
3223 if (prefixes & PREFIX_DATA)
3226 if (!(prefixes & PREFIX_ADR))
3233 prefixes |= PREFIX_REPZ;
3236 prefixes |= PREFIX_REPNZ;
3239 prefixes |= PREFIX_LOCK;
3260 prefixes |= PREFIX_DATA;
3263 prefixes |= PREFIX_ADR;
3266 if (prefixes & PREFIX_DATA)
3268 if (prefixes & PREFIX_ADR)
3272 s->prefix = prefixes;
3276 /* lock generation */
3277 if (prefixes & PREFIX_LOCK)
3280 /* now check op code */
3284 /**************************/
3285 /* extended op code */
3286 b = ldub_code(s->pc++) | 0x100;
3289 /**************************/
3307 ot = dflag + OT_WORD;
3310 case 0: /* OP Ev, Gv */
3311 modrm = ldub_code(s->pc++);
3312 reg = ((modrm >> 3) & 7) | rex_r;
3313 mod = (modrm >> 6) & 3;
3314 rm = (modrm & 7) | REX_B(s);
3316 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3318 } else if (op == OP_XORL && rm == reg) {
3320 /* xor reg, reg optimisation */
3322 s->cc_op = CC_OP_LOGICB + ot;
3323 gen_op_mov_reg_T0[ot][reg]();
3324 gen_op_update1_cc();
3329 gen_op_mov_TN_reg[ot][1][reg]();
3330 gen_op(s, op, ot, opreg);
3332 case 1: /* OP Gv, Ev */
3333 modrm = ldub_code(s->pc++);
3334 mod = (modrm >> 6) & 3;
3335 reg = ((modrm >> 3) & 7) | rex_r;
3336 rm = (modrm & 7) | REX_B(s);
3338 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3339 gen_op_ld_T1_A0[ot + s->mem_index]();
3340 } else if (op == OP_XORL && rm == reg) {
3343 gen_op_mov_TN_reg[ot][1][rm]();
3345 gen_op(s, op, ot, reg);
3347 case 2: /* OP A, Iv */
3348 val = insn_get(s, ot);
3349 gen_op_movl_T1_im(val);
3350 gen_op(s, op, ot, OR_EAX);
3356 case 0x80: /* GRP1 */
3366 ot = dflag + OT_WORD;
3368 modrm = ldub_code(s->pc++);
3369 mod = (modrm >> 6) & 3;
3370 rm = (modrm & 7) | REX_B(s);
3371 op = (modrm >> 3) & 7;
3377 s->rip_offset = insn_const_size(ot);
3378 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3389 val = insn_get(s, ot);
3392 val = (int8_t)insn_get(s, OT_BYTE);
3395 gen_op_movl_T1_im(val);
3396 gen_op(s, op, ot, opreg);
3400 /**************************/
3401 /* inc, dec, and other misc arith */
3402 case 0x40 ... 0x47: /* inc Gv */
3403 ot = dflag ? OT_LONG : OT_WORD;
3404 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3406 case 0x48 ... 0x4f: /* dec Gv */
3407 ot = dflag ? OT_LONG : OT_WORD;
3408 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3410 case 0xf6: /* GRP3 */
3415 ot = dflag + OT_WORD;
3417 modrm = ldub_code(s->pc++);
3418 mod = (modrm >> 6) & 3;
3419 rm = (modrm & 7) | REX_B(s);
3420 op = (modrm >> 3) & 7;
3423 s->rip_offset = insn_const_size(ot);
3424 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3425 gen_op_ld_T0_A0[ot + s->mem_index]();
3427 gen_op_mov_TN_reg[ot][0][rm]();
3432 val = insn_get(s, ot);
3433 gen_op_movl_T1_im(val);
3434 gen_op_testl_T0_T1_cc();
3435 s->cc_op = CC_OP_LOGICB + ot;
3440 gen_op_st_T0_A0[ot + s->mem_index]();
3442 gen_op_mov_reg_T0[ot][rm]();
3448 gen_op_st_T0_A0[ot + s->mem_index]();
3450 gen_op_mov_reg_T0[ot][rm]();
3452 gen_op_update_neg_cc();
3453 s->cc_op = CC_OP_SUBB + ot;
3458 gen_op_mulb_AL_T0();
3459 s->cc_op = CC_OP_MULB;
3462 gen_op_mulw_AX_T0();
3463 s->cc_op = CC_OP_MULW;
3467 gen_op_mull_EAX_T0();
3468 s->cc_op = CC_OP_MULL;
3470 #ifdef TARGET_X86_64
3472 gen_op_mulq_EAX_T0();
3473 s->cc_op = CC_OP_MULQ;
3481 gen_op_imulb_AL_T0();
3482 s->cc_op = CC_OP_MULB;
3485 gen_op_imulw_AX_T0();
3486 s->cc_op = CC_OP_MULW;
3490 gen_op_imull_EAX_T0();
3491 s->cc_op = CC_OP_MULL;
3493 #ifdef TARGET_X86_64
3495 gen_op_imulq_EAX_T0();
3496 s->cc_op = CC_OP_MULQ;
3504 gen_jmp_im(pc_start - s->cs_base);
3505 gen_op_divb_AL_T0();
3508 gen_jmp_im(pc_start - s->cs_base);
3509 gen_op_divw_AX_T0();
3513 gen_jmp_im(pc_start - s->cs_base);
3514 gen_op_divl_EAX_T0();
3516 #ifdef TARGET_X86_64
3518 gen_jmp_im(pc_start - s->cs_base);
3519 gen_op_divq_EAX_T0();
3527 gen_jmp_im(pc_start - s->cs_base);
3528 gen_op_idivb_AL_T0();
3531 gen_jmp_im(pc_start - s->cs_base);
3532 gen_op_idivw_AX_T0();
3536 gen_jmp_im(pc_start - s->cs_base);
3537 gen_op_idivl_EAX_T0();
3539 #ifdef TARGET_X86_64
3541 gen_jmp_im(pc_start - s->cs_base);
3542 gen_op_idivq_EAX_T0();
3552 case 0xfe: /* GRP4 */
3553 case 0xff: /* GRP5 */
3557 ot = dflag + OT_WORD;
3559 modrm = ldub_code(s->pc++);
3560 mod = (modrm >> 6) & 3;
3561 rm = (modrm & 7) | REX_B(s);
3562 op = (modrm >> 3) & 7;
3563 if (op >= 2 && b == 0xfe) {
3567 if (op == 2 || op == 4) {
3568 /* operand size for jumps is 64 bit */
3570 } else if (op == 3 || op == 5) {
3571 /* for call calls, the operand is 16 or 32 bit, even
3573 ot = dflag ? OT_LONG : OT_WORD;
3574 } else if (op == 6) {
3575 /* default push size is 64 bit */
3576 ot = dflag ? OT_QUAD : OT_WORD;
3580 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3581 if (op >= 2 && op != 3 && op != 5)
3582 gen_op_ld_T0_A0[ot + s->mem_index]();
3584 gen_op_mov_TN_reg[ot][0][rm]();
3588 case 0: /* inc Ev */
3593 gen_inc(s, ot, opreg, 1);
3595 case 1: /* dec Ev */
3600 gen_inc(s, ot, opreg, -1);
3602 case 2: /* call Ev */
3603 /* XXX: optimize if memory (no 'and' is necessary) */
3605 gen_op_andl_T0_ffff();
3606 next_eip = s->pc - s->cs_base;
3607 gen_movtl_T1_im(next_eip);
3612 case 3: /* lcall Ev */
3613 gen_op_ld_T1_A0[ot + s->mem_index]();
3614 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3615 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3617 if (s->pe && !s->vm86) {
3618 if (s->cc_op != CC_OP_DYNAMIC)
3619 gen_op_set_cc_op(s->cc_op);
3620 gen_jmp_im(pc_start - s->cs_base);
3621 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3623 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3627 case 4: /* jmp Ev */
3629 gen_op_andl_T0_ffff();
3633 case 5: /* ljmp Ev */
3634 gen_op_ld_T1_A0[ot + s->mem_index]();
3635 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3636 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3638 if (s->pe && !s->vm86) {
3639 if (s->cc_op != CC_OP_DYNAMIC)
3640 gen_op_set_cc_op(s->cc_op);
3641 gen_jmp_im(pc_start - s->cs_base);
3642 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3644 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3645 gen_op_movl_T0_T1();
3650 case 6: /* push Ev */
3658 case 0x84: /* test Ev, Gv */
3663 ot = dflag + OT_WORD;
3665 modrm = ldub_code(s->pc++);
3666 mod = (modrm >> 6) & 3;
3667 rm = (modrm & 7) | REX_B(s);
3668 reg = ((modrm >> 3) & 7) | rex_r;
3670 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3671 gen_op_mov_TN_reg[ot][1][reg]();
3672 gen_op_testl_T0_T1_cc();
3673 s->cc_op = CC_OP_LOGICB + ot;
3676 case 0xa8: /* test eAX, Iv */
3681 ot = dflag + OT_WORD;
3682 val = insn_get(s, ot);
3684 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3685 gen_op_movl_T1_im(val);
3686 gen_op_testl_T0_T1_cc();
3687 s->cc_op = CC_OP_LOGICB + ot;
3690 case 0x98: /* CWDE/CBW */
3691 #ifdef TARGET_X86_64
3693 gen_op_movslq_RAX_EAX();
3697 gen_op_movswl_EAX_AX();
3699 gen_op_movsbw_AX_AL();
3701 case 0x99: /* CDQ/CWD */
3702 #ifdef TARGET_X86_64
3704 gen_op_movsqo_RDX_RAX();
3708 gen_op_movslq_EDX_EAX();
3710 gen_op_movswl_DX_AX();
3712 case 0x1af: /* imul Gv, Ev */
3713 case 0x69: /* imul Gv, Ev, I */
3715 ot = dflag + OT_WORD;
3716 modrm = ldub_code(s->pc++);
3717 reg = ((modrm >> 3) & 7) | rex_r;
3719 s->rip_offset = insn_const_size(ot);
3722 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3724 val = insn_get(s, ot);
3725 gen_op_movl_T1_im(val);
3726 } else if (b == 0x6b) {
3727 val = (int8_t)insn_get(s, OT_BYTE);
3728 gen_op_movl_T1_im(val);
3730 gen_op_mov_TN_reg[ot][1][reg]();
3733 #ifdef TARGET_X86_64
3734 if (ot == OT_QUAD) {
3735 gen_op_imulq_T0_T1();
3738 if (ot == OT_LONG) {
3739 gen_op_imull_T0_T1();
3741 gen_op_imulw_T0_T1();
3743 gen_op_mov_reg_T0[ot][reg]();
3744 s->cc_op = CC_OP_MULB + ot;
3747 case 0x1c1: /* xadd Ev, Gv */
3751 ot = dflag + OT_WORD;
3752 modrm = ldub_code(s->pc++);
3753 reg = ((modrm >> 3) & 7) | rex_r;
3754 mod = (modrm >> 6) & 3;
3756 rm = (modrm & 7) | REX_B(s);
3757 gen_op_mov_TN_reg[ot][0][reg]();
3758 gen_op_mov_TN_reg[ot][1][rm]();
3759 gen_op_addl_T0_T1();
3760 gen_op_mov_reg_T1[ot][reg]();
3761 gen_op_mov_reg_T0[ot][rm]();
3763 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3764 gen_op_mov_TN_reg[ot][0][reg]();
3765 gen_op_ld_T1_A0[ot + s->mem_index]();
3766 gen_op_addl_T0_T1();
3767 gen_op_st_T0_A0[ot + s->mem_index]();
3768 gen_op_mov_reg_T1[ot][reg]();
3770 gen_op_update2_cc();
3771 s->cc_op = CC_OP_ADDB + ot;
3774 case 0x1b1: /* cmpxchg Ev, Gv */
3778 ot = dflag + OT_WORD;
3779 modrm = ldub_code(s->pc++);
3780 reg = ((modrm >> 3) & 7) | rex_r;
3781 mod = (modrm >> 6) & 3;
3782 gen_op_mov_TN_reg[ot][1][reg]();
3784 rm = (modrm & 7) | REX_B(s);
3785 gen_op_mov_TN_reg[ot][0][rm]();
3786 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3787 gen_op_mov_reg_T0[ot][rm]();
3789 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3790 gen_op_ld_T0_A0[ot + s->mem_index]();
3791 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3793 s->cc_op = CC_OP_SUBB + ot;
3795 case 0x1c7: /* cmpxchg8b */
3796 modrm = ldub_code(s->pc++);
3797 mod = (modrm >> 6) & 3;
3800 if (s->cc_op != CC_OP_DYNAMIC)
3801 gen_op_set_cc_op(s->cc_op);
3802 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3804 s->cc_op = CC_OP_EFLAGS;
3807 /**************************/
3809 case 0x50 ... 0x57: /* push */
3810 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3813 case 0x58 ... 0x5f: /* pop */
3815 ot = dflag ? OT_QUAD : OT_WORD;
3817 ot = dflag + OT_WORD;
3820 /* NOTE: order is important for pop %sp */
3822 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3824 case 0x60: /* pusha */
3829 case 0x61: /* popa */
3834 case 0x68: /* push Iv */
3837 ot = dflag ? OT_QUAD : OT_WORD;
3839 ot = dflag + OT_WORD;
3842 val = insn_get(s, ot);
3844 val = (int8_t)insn_get(s, OT_BYTE);
3845 gen_op_movl_T0_im(val);
3848 case 0x8f: /* pop Ev */
3850 ot = dflag ? OT_QUAD : OT_WORD;
3852 ot = dflag + OT_WORD;
3854 modrm = ldub_code(s->pc++);
3855 mod = (modrm >> 6) & 3;
3858 /* NOTE: order is important for pop %sp */
3860 rm = (modrm & 7) | REX_B(s);
3861 gen_op_mov_reg_T0[ot][rm]();
3863 /* NOTE: order is important too for MMU exceptions */
3864 s->popl_esp_hack = 1 << ot;
3865 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3866 s->popl_esp_hack = 0;
3870 case 0xc8: /* enter */
3873 val = lduw_code(s->pc);
3875 level = ldub_code(s->pc++);
3876 gen_enter(s, val, level);
3879 case 0xc9: /* leave */
3880 /* XXX: exception not precise (ESP is updated before potential exception) */
3882 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3883 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3884 } else if (s->ss32) {
3885 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3886 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3888 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3889 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3893 ot = dflag ? OT_QUAD : OT_WORD;
3895 ot = dflag + OT_WORD;
3897 gen_op_mov_reg_T0[ot][R_EBP]();
3900 case 0x06: /* push es */
3901 case 0x0e: /* push cs */
3902 case 0x16: /* push ss */
3903 case 0x1e: /* push ds */
3906 gen_op_movl_T0_seg(b >> 3);
3909 case 0x1a0: /* push fs */
3910 case 0x1a8: /* push gs */
3911 gen_op_movl_T0_seg((b >> 3) & 7);
3914 case 0x07: /* pop es */
3915 case 0x17: /* pop ss */
3916 case 0x1f: /* pop ds */
3921 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3924 /* if reg == SS, inhibit interrupts/trace. */
3925 /* If several instructions disable interrupts, only the
3927 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3928 gen_op_set_inhibit_irq();
3932 gen_jmp_im(s->pc - s->cs_base);
3936 case 0x1a1: /* pop fs */
3937 case 0x1a9: /* pop gs */
3939 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3942 gen_jmp_im(s->pc - s->cs_base);
3947 /**************************/
3950 case 0x89: /* mov Gv, Ev */
3954 ot = dflag + OT_WORD;
3955 modrm = ldub_code(s->pc++);
3956 reg = ((modrm >> 3) & 7) | rex_r;
3958 /* generate a generic store */
3959 gen_ldst_modrm(s, modrm, ot, reg, 1);
3962 case 0xc7: /* mov Ev, Iv */
3966 ot = dflag + OT_WORD;
3967 modrm = ldub_code(s->pc++);
3968 mod = (modrm >> 6) & 3;
3970 s->rip_offset = insn_const_size(ot);
3971 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3973 val = insn_get(s, ot);
3974 gen_op_movl_T0_im(val);
3976 gen_op_st_T0_A0[ot + s->mem_index]();
3978 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3981 case 0x8b: /* mov Ev, Gv */
3985 ot = OT_WORD + dflag;
3986 modrm = ldub_code(s->pc++);
3987 reg = ((modrm >> 3) & 7) | rex_r;
3989 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3990 gen_op_mov_reg_T0[ot][reg]();
3992 case 0x8e: /* mov seg, Gv */
3993 modrm = ldub_code(s->pc++);
3994 reg = (modrm >> 3) & 7;
3995 if (reg >= 6 || reg == R_CS)
3997 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3998 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4000 /* if reg == SS, inhibit interrupts/trace */
4001 /* If several instructions disable interrupts, only the
4003 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4004 gen_op_set_inhibit_irq();
4008 gen_jmp_im(s->pc - s->cs_base);
4012 case 0x8c: /* mov Gv, seg */
4013 modrm = ldub_code(s->pc++);
4014 reg = (modrm >> 3) & 7;
4015 mod = (modrm >> 6) & 3;
4018 gen_op_movl_T0_seg(reg);
4020 ot = OT_WORD + dflag;
4023 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4026 case 0x1b6: /* movzbS Gv, Eb */
4027 case 0x1b7: /* movzwS Gv, Eb */
4028 case 0x1be: /* movsbS Gv, Eb */
4029 case 0x1bf: /* movswS Gv, Eb */
4032 /* d_ot is the size of destination */
4033 d_ot = dflag + OT_WORD;
4034 /* ot is the size of source */
4035 ot = (b & 1) + OT_BYTE;
4036 modrm = ldub_code(s->pc++);
4037 reg = ((modrm >> 3) & 7) | rex_r;
4038 mod = (modrm >> 6) & 3;
4039 rm = (modrm & 7) | REX_B(s);
4042 gen_op_mov_TN_reg[ot][0][rm]();
4043 switch(ot | (b & 8)) {
4045 gen_op_movzbl_T0_T0();
4048 gen_op_movsbl_T0_T0();
4051 gen_op_movzwl_T0_T0();
4055 gen_op_movswl_T0_T0();
4058 gen_op_mov_reg_T0[d_ot][reg]();
4060 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4062 gen_op_lds_T0_A0[ot + s->mem_index]();
4064 gen_op_ldu_T0_A0[ot + s->mem_index]();
4066 gen_op_mov_reg_T0[d_ot][reg]();
4071 case 0x8d: /* lea */
4072 ot = dflag + OT_WORD;
4073 modrm = ldub_code(s->pc++);
4074 mod = (modrm >> 6) & 3;
4077 reg = ((modrm >> 3) & 7) | rex_r;
4078 /* we must ensure that no segment is added */
4082 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4084 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4087 case 0xa0: /* mov EAX, Ov */
4089 case 0xa2: /* mov Ov, EAX */
4092 target_ulong offset_addr;
4097 ot = dflag + OT_WORD;
4098 #ifdef TARGET_X86_64
4099 if (s->aflag == 2) {
4100 offset_addr = ldq_code(s->pc);
4102 if (offset_addr == (int32_t)offset_addr)
4103 gen_op_movq_A0_im(offset_addr);
4105 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4110 offset_addr = insn_get(s, OT_LONG);
4112 offset_addr = insn_get(s, OT_WORD);
4114 gen_op_movl_A0_im(offset_addr);
4116 gen_add_A0_ds_seg(s);
4118 gen_op_ld_T0_A0[ot + s->mem_index]();
4119 gen_op_mov_reg_T0[ot][R_EAX]();
4121 gen_op_mov_TN_reg[ot][0][R_EAX]();
4122 gen_op_st_T0_A0[ot + s->mem_index]();
4126 case 0xd7: /* xlat */
4127 #ifdef TARGET_X86_64
4128 if (s->aflag == 2) {
4129 gen_op_movq_A0_reg[R_EBX]();
4130 gen_op_addq_A0_AL();
4134 gen_op_movl_A0_reg[R_EBX]();
4135 gen_op_addl_A0_AL();
4137 gen_op_andl_A0_ffff();
4139 gen_add_A0_ds_seg(s);
4140 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4141 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4143 case 0xb0 ... 0xb7: /* mov R, Ib */
4144 val = insn_get(s, OT_BYTE);
4145 gen_op_movl_T0_im(val);
4146 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4148 case 0xb8 ... 0xbf: /* mov R, Iv */
4149 #ifdef TARGET_X86_64
4153 tmp = ldq_code(s->pc);
4155 reg = (b & 7) | REX_B(s);
4156 gen_movtl_T0_im(tmp);
4157 gen_op_mov_reg_T0[OT_QUAD][reg]();
4161 ot = dflag ? OT_LONG : OT_WORD;
4162 val = insn_get(s, ot);
4163 reg = (b & 7) | REX_B(s);
4164 gen_op_movl_T0_im(val);
4165 gen_op_mov_reg_T0[ot][reg]();
4169 case 0x91 ... 0x97: /* xchg R, EAX */
4170 ot = dflag + OT_WORD;
4171 reg = (b & 7) | REX_B(s);
4175 case 0x87: /* xchg Ev, Gv */
4179 ot = dflag + OT_WORD;
4180 modrm = ldub_code(s->pc++);
4181 reg = ((modrm >> 3) & 7) | rex_r;
4182 mod = (modrm >> 6) & 3;
4184 rm = (modrm & 7) | REX_B(s);
4186 gen_op_mov_TN_reg[ot][0][reg]();
4187 gen_op_mov_TN_reg[ot][1][rm]();
4188 gen_op_mov_reg_T0[ot][rm]();
4189 gen_op_mov_reg_T1[ot][reg]();
4191 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4192 gen_op_mov_TN_reg[ot][0][reg]();
4193 /* for xchg, lock is implicit */
4194 if (!(prefixes & PREFIX_LOCK))
4196 gen_op_ld_T1_A0[ot + s->mem_index]();
4197 gen_op_st_T0_A0[ot + s->mem_index]();
4198 if (!(prefixes & PREFIX_LOCK))
4200 gen_op_mov_reg_T1[ot][reg]();
4203 case 0xc4: /* les Gv */
4208 case 0xc5: /* lds Gv */
4213 case 0x1b2: /* lss Gv */
4216 case 0x1b4: /* lfs Gv */
4219 case 0x1b5: /* lgs Gv */
4222 ot = dflag ? OT_LONG : OT_WORD;
4223 modrm = ldub_code(s->pc++);
4224 reg = ((modrm >> 3) & 7) | rex_r;
4225 mod = (modrm >> 6) & 3;
4228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4229 gen_op_ld_T1_A0[ot + s->mem_index]();
4230 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4231 /* load the segment first to handle exceptions properly */
4232 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4233 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4234 /* then put the data */
4235 gen_op_mov_reg_T1[ot][reg]();
4237 gen_jmp_im(s->pc - s->cs_base);
4242 /************************/
4253 ot = dflag + OT_WORD;
4255 modrm = ldub_code(s->pc++);
4256 mod = (modrm >> 6) & 3;
4257 op = (modrm >> 3) & 7;
4263 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4266 opreg = (modrm & 7) | REX_B(s);
4271 gen_shift(s, op, ot, opreg, OR_ECX);
4274 shift = ldub_code(s->pc++);
4276 gen_shifti(s, op, ot, opreg, shift);
4291 case 0x1a4: /* shld imm */
4295 case 0x1a5: /* shld cl */
4299 case 0x1ac: /* shrd imm */
4303 case 0x1ad: /* shrd cl */
4307 ot = dflag + OT_WORD;
4308 modrm = ldub_code(s->pc++);
4309 mod = (modrm >> 6) & 3;
4310 rm = (modrm & 7) | REX_B(s);
4311 reg = ((modrm >> 3) & 7) | rex_r;
4314 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4315 gen_op_ld_T0_A0[ot + s->mem_index]();
4317 gen_op_mov_TN_reg[ot][0][rm]();
4319 gen_op_mov_TN_reg[ot][1][reg]();
4322 val = ldub_code(s->pc++);
4329 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4331 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4332 if (op == 0 && ot != OT_WORD)
4333 s->cc_op = CC_OP_SHLB + ot;
4335 s->cc_op = CC_OP_SARB + ot;
4338 if (s->cc_op != CC_OP_DYNAMIC)
4339 gen_op_set_cc_op(s->cc_op);
4341 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4343 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4344 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4347 gen_op_mov_reg_T0[ot][rm]();
4351 /************************/
4354 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4355 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4356 /* XXX: what to do if illegal op ? */
4357 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4360 modrm = ldub_code(s->pc++);
4361 mod = (modrm >> 6) & 3;
4363 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4366 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4368 case 0x00 ... 0x07: /* fxxxs */
4369 case 0x10 ... 0x17: /* fixxxl */
4370 case 0x20 ... 0x27: /* fxxxl */
4371 case 0x30 ... 0x37: /* fixxx */
4378 gen_op_flds_FT0_A0();
4381 gen_op_fildl_FT0_A0();
4384 gen_op_fldl_FT0_A0();
4388 gen_op_fild_FT0_A0();
4392 gen_op_fp_arith_ST0_FT0[op1]();
4394 /* fcomp needs pop */
4399 case 0x08: /* flds */
4400 case 0x0a: /* fsts */
4401 case 0x0b: /* fstps */
4402 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4403 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4404 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4409 gen_op_flds_ST0_A0();
4412 gen_op_fildl_ST0_A0();
4415 gen_op_fldl_ST0_A0();
4419 gen_op_fild_ST0_A0();
4426 gen_op_fisttl_ST0_A0();
4429 gen_op_fisttll_ST0_A0();
4433 gen_op_fistt_ST0_A0();
4440 gen_op_fsts_ST0_A0();
4443 gen_op_fistl_ST0_A0();
4446 gen_op_fstl_ST0_A0();
4450 gen_op_fist_ST0_A0();
4458 case 0x0c: /* fldenv mem */
4459 gen_op_fldenv_A0(s->dflag);
4461 case 0x0d: /* fldcw mem */
4464 case 0x0e: /* fnstenv mem */
4465 gen_op_fnstenv_A0(s->dflag);
4467 case 0x0f: /* fnstcw mem */
4470 case 0x1d: /* fldt mem */
4471 gen_op_fldt_ST0_A0();
4473 case 0x1f: /* fstpt mem */
4474 gen_op_fstt_ST0_A0();
4477 case 0x2c: /* frstor mem */
4478 gen_op_frstor_A0(s->dflag);
4480 case 0x2e: /* fnsave mem */
4481 gen_op_fnsave_A0(s->dflag);
4483 case 0x2f: /* fnstsw mem */
4486 case 0x3c: /* fbld */
4487 gen_op_fbld_ST0_A0();
4489 case 0x3e: /* fbstp */
4490 gen_op_fbst_ST0_A0();
4493 case 0x3d: /* fildll */
4494 gen_op_fildll_ST0_A0();
4496 case 0x3f: /* fistpll */
4497 gen_op_fistll_ST0_A0();
4504 /* register float ops */
4508 case 0x08: /* fld sti */
4510 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4512 case 0x09: /* fxchg sti */
4513 case 0x29: /* fxchg4 sti, undocumented op */
4514 case 0x39: /* fxchg7 sti, undocumented op */
4515 gen_op_fxchg_ST0_STN(opreg);
4517 case 0x0a: /* grp d9/2 */
4520 /* check exceptions (FreeBSD FPU probe) */
4521 if (s->cc_op != CC_OP_DYNAMIC)
4522 gen_op_set_cc_op(s->cc_op);
4523 gen_jmp_im(pc_start - s->cs_base);
4530 case 0x0c: /* grp d9/4 */
4540 gen_op_fcom_ST0_FT0();
4549 case 0x0d: /* grp d9/5 */
4558 gen_op_fldl2t_ST0();
4562 gen_op_fldl2e_ST0();
4570 gen_op_fldlg2_ST0();
4574 gen_op_fldln2_ST0();
4585 case 0x0e: /* grp d9/6 */
4596 case 3: /* fpatan */
4599 case 4: /* fxtract */
4602 case 5: /* fprem1 */
4605 case 6: /* fdecstp */
4609 case 7: /* fincstp */
4614 case 0x0f: /* grp d9/7 */
4619 case 1: /* fyl2xp1 */
4625 case 3: /* fsincos */
4628 case 5: /* fscale */
4631 case 4: /* frndint */
4643 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4644 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4645 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4651 gen_op_fp_arith_STN_ST0[op1](opreg);
4655 gen_op_fmov_FT0_STN(opreg);
4656 gen_op_fp_arith_ST0_FT0[op1]();
4660 case 0x02: /* fcom */
4661 case 0x22: /* fcom2, undocumented op */
4662 gen_op_fmov_FT0_STN(opreg);
4663 gen_op_fcom_ST0_FT0();
4665 case 0x03: /* fcomp */
4666 case 0x23: /* fcomp3, undocumented op */
4667 case 0x32: /* fcomp5, undocumented op */
4668 gen_op_fmov_FT0_STN(opreg);
4669 gen_op_fcom_ST0_FT0();
4672 case 0x15: /* da/5 */
4674 case 1: /* fucompp */
4675 gen_op_fmov_FT0_STN(1);
4676 gen_op_fucom_ST0_FT0();
4686 case 0: /* feni (287 only, just do nop here) */
4688 case 1: /* fdisi (287 only, just do nop here) */
4693 case 3: /* fninit */
4696 case 4: /* fsetpm (287 only, just do nop here) */
4702 case 0x1d: /* fucomi */
4703 if (s->cc_op != CC_OP_DYNAMIC)
4704 gen_op_set_cc_op(s->cc_op);
4705 gen_op_fmov_FT0_STN(opreg);
4706 gen_op_fucomi_ST0_FT0();
4707 s->cc_op = CC_OP_EFLAGS;
4709 case 0x1e: /* fcomi */
4710 if (s->cc_op != CC_OP_DYNAMIC)
4711 gen_op_set_cc_op(s->cc_op);
4712 gen_op_fmov_FT0_STN(opreg);
4713 gen_op_fcomi_ST0_FT0();
4714 s->cc_op = CC_OP_EFLAGS;
4716 case 0x28: /* ffree sti */
4717 gen_op_ffree_STN(opreg);
4719 case 0x2a: /* fst sti */
4720 gen_op_fmov_STN_ST0(opreg);
4722 case 0x2b: /* fstp sti */
4723 case 0x0b: /* fstp1 sti, undocumented op */
4724 case 0x3a: /* fstp8 sti, undocumented op */
4725 case 0x3b: /* fstp9 sti, undocumented op */
4726 gen_op_fmov_STN_ST0(opreg);
4729 case 0x2c: /* fucom st(i) */
4730 gen_op_fmov_FT0_STN(opreg);
4731 gen_op_fucom_ST0_FT0();
4733 case 0x2d: /* fucomp st(i) */
4734 gen_op_fmov_FT0_STN(opreg);
4735 gen_op_fucom_ST0_FT0();
4738 case 0x33: /* de/3 */
4740 case 1: /* fcompp */
4741 gen_op_fmov_FT0_STN(1);
4742 gen_op_fcom_ST0_FT0();
4750 case 0x38: /* ffreep sti, undocumented op */
4751 gen_op_ffree_STN(opreg);
4754 case 0x3c: /* df/4 */
4757 gen_op_fnstsw_EAX();
4763 case 0x3d: /* fucomip */
4764 if (s->cc_op != CC_OP_DYNAMIC)
4765 gen_op_set_cc_op(s->cc_op);
4766 gen_op_fmov_FT0_STN(opreg);
4767 gen_op_fucomi_ST0_FT0();
4769 s->cc_op = CC_OP_EFLAGS;
4771 case 0x3e: /* fcomip */
4772 if (s->cc_op != CC_OP_DYNAMIC)
4773 gen_op_set_cc_op(s->cc_op);
4774 gen_op_fmov_FT0_STN(opreg);
4775 gen_op_fcomi_ST0_FT0();
4777 s->cc_op = CC_OP_EFLAGS;
4779 case 0x10 ... 0x13: /* fcmovxx */
4783 const static uint8_t fcmov_cc[8] = {
4789 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4791 gen_op_fcmov_ST0_STN_T0(opreg);
4798 #ifdef USE_CODE_COPY
4799 s->tb->cflags |= CF_TB_FP_USED;
4802 /************************/
4805 case 0xa4: /* movsS */
4810 ot = dflag + OT_WORD;
4812 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4813 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4819 case 0xaa: /* stosS */
4824 ot = dflag + OT_WORD;
4826 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4827 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4832 case 0xac: /* lodsS */
4837 ot = dflag + OT_WORD;
4838 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4839 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4844 case 0xae: /* scasS */
4849 ot = dflag + OT_WORD;
4850 if (prefixes & PREFIX_REPNZ) {
4851 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4852 } else if (prefixes & PREFIX_REPZ) {
4853 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4856 s->cc_op = CC_OP_SUBB + ot;
4860 case 0xa6: /* cmpsS */
4865 ot = dflag + OT_WORD;
4866 if (prefixes & PREFIX_REPNZ) {
4867 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4868 } else if (prefixes & PREFIX_REPZ) {
4869 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4872 s->cc_op = CC_OP_SUBB + ot;
4875 case 0x6c: /* insS */
4880 ot = dflag ? OT_LONG : OT_WORD;
4881 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4882 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4883 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4888 case 0x6e: /* outsS */
4893 ot = dflag ? OT_LONG : OT_WORD;
4894 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4895 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4896 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4902 /************************/
4909 ot = dflag ? OT_LONG : OT_WORD;
4910 val = ldub_code(s->pc++);
4911 gen_op_movl_T0_im(val);
4912 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4914 gen_op_mov_reg_T1[ot][R_EAX]();
4921 ot = dflag ? OT_LONG : OT_WORD;
4922 val = ldub_code(s->pc++);
4923 gen_op_movl_T0_im(val);
4924 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4925 gen_op_mov_TN_reg[ot][1][R_EAX]();
4933 ot = dflag ? OT_LONG : OT_WORD;
4934 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4935 gen_op_andl_T0_ffff();
4936 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4938 gen_op_mov_reg_T1[ot][R_EAX]();
4945 ot = dflag ? OT_LONG : OT_WORD;
4946 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4947 gen_op_andl_T0_ffff();
4948 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4949 gen_op_mov_TN_reg[ot][1][R_EAX]();
4953 /************************/
4955 case 0xc2: /* ret im */
4956 val = ldsw_code(s->pc);
4959 if (CODE64(s) && s->dflag)
4961 gen_stack_update(s, val + (2 << s->dflag));
4963 gen_op_andl_T0_ffff();
4967 case 0xc3: /* ret */
4971 gen_op_andl_T0_ffff();
4975 case 0xca: /* lret im */
4976 val = ldsw_code(s->pc);
4979 if (s->pe && !s->vm86) {
4980 if (s->cc_op != CC_OP_DYNAMIC)
4981 gen_op_set_cc_op(s->cc_op);
4982 gen_jmp_im(pc_start - s->cs_base);
4983 gen_op_lret_protected(s->dflag, val);
4987 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4989 gen_op_andl_T0_ffff();
4990 /* NOTE: keeping EIP updated is not a problem in case of
4994 gen_op_addl_A0_im(2 << s->dflag);
4995 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4996 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4997 /* add stack offset */
4998 gen_stack_update(s, val + (4 << s->dflag));
5002 case 0xcb: /* lret */
5005 case 0xcf: /* iret */
5008 gen_op_iret_real(s->dflag);
5009 s->cc_op = CC_OP_EFLAGS;
5010 } else if (s->vm86) {
5012 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5014 gen_op_iret_real(s->dflag);
5015 s->cc_op = CC_OP_EFLAGS;
5018 if (s->cc_op != CC_OP_DYNAMIC)
5019 gen_op_set_cc_op(s->cc_op);
5020 gen_jmp_im(pc_start - s->cs_base);
5021 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5022 s->cc_op = CC_OP_EFLAGS;
5026 case 0xe8: /* call im */
5029 tval = (int32_t)insn_get(s, OT_LONG);
5031 tval = (int16_t)insn_get(s, OT_WORD);
5032 next_eip = s->pc - s->cs_base;
5036 gen_movtl_T0_im(next_eip);
5041 case 0x9a: /* lcall im */
5043 unsigned int selector, offset;
5047 ot = dflag ? OT_LONG : OT_WORD;
5048 offset = insn_get(s, ot);
5049 selector = insn_get(s, OT_WORD);
5051 gen_op_movl_T0_im(selector);
5052 gen_op_movl_T1_imu(offset);
5055 case 0xe9: /* jmp im */
5057 tval = (int32_t)insn_get(s, OT_LONG);
5059 tval = (int16_t)insn_get(s, OT_WORD);
5060 tval += s->pc - s->cs_base;
5065 case 0xea: /* ljmp im */
5067 unsigned int selector, offset;
5071 ot = dflag ? OT_LONG : OT_WORD;
5072 offset = insn_get(s, ot);
5073 selector = insn_get(s, OT_WORD);
5075 gen_op_movl_T0_im(selector);
5076 gen_op_movl_T1_imu(offset);
5079 case 0xeb: /* jmp Jb */
5080 tval = (int8_t)insn_get(s, OT_BYTE);
5081 tval += s->pc - s->cs_base;
5086 case 0x70 ... 0x7f: /* jcc Jb */
5087 tval = (int8_t)insn_get(s, OT_BYTE);
5089 case 0x180 ... 0x18f: /* jcc Jv */
5091 tval = (int32_t)insn_get(s, OT_LONG);
5093 tval = (int16_t)insn_get(s, OT_WORD);
5096 next_eip = s->pc - s->cs_base;
5100 gen_jcc(s, b, tval, next_eip);
5103 case 0x190 ... 0x19f: /* setcc Gv */
5104 modrm = ldub_code(s->pc++);
5106 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5108 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5109 ot = dflag + OT_WORD;
5110 modrm = ldub_code(s->pc++);
5111 reg = ((modrm >> 3) & 7) | rex_r;
5112 mod = (modrm >> 6) & 3;
5115 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5116 gen_op_ld_T1_A0[ot + s->mem_index]();
5118 rm = (modrm & 7) | REX_B(s);
5119 gen_op_mov_TN_reg[ot][1][rm]();
5121 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5124 /************************/
5126 case 0x9c: /* pushf */
5127 if (s->vm86 && s->iopl != 3) {
5128 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5130 if (s->cc_op != CC_OP_DYNAMIC)
5131 gen_op_set_cc_op(s->cc_op);
5132 gen_op_movl_T0_eflags();
5136 case 0x9d: /* popf */
5137 if (s->vm86 && s->iopl != 3) {
5138 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5143 gen_op_movl_eflags_T0_cpl0();
5145 gen_op_movw_eflags_T0_cpl0();
5148 if (s->cpl <= s->iopl) {
5150 gen_op_movl_eflags_T0_io();
5152 gen_op_movw_eflags_T0_io();
5156 gen_op_movl_eflags_T0();
5158 gen_op_movw_eflags_T0();
5163 s->cc_op = CC_OP_EFLAGS;
5164 /* abort translation because TF flag may change */
5165 gen_jmp_im(s->pc - s->cs_base);
5169 case 0x9e: /* sahf */
5172 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5173 if (s->cc_op != CC_OP_DYNAMIC)
5174 gen_op_set_cc_op(s->cc_op);
5175 gen_op_movb_eflags_T0();
5176 s->cc_op = CC_OP_EFLAGS;
5178 case 0x9f: /* lahf */
5181 if (s->cc_op != CC_OP_DYNAMIC)
5182 gen_op_set_cc_op(s->cc_op);
5183 gen_op_movl_T0_eflags();
5184 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5186 case 0xf5: /* cmc */
5187 if (s->cc_op != CC_OP_DYNAMIC)
5188 gen_op_set_cc_op(s->cc_op);
5190 s->cc_op = CC_OP_EFLAGS;
5192 case 0xf8: /* clc */
5193 if (s->cc_op != CC_OP_DYNAMIC)
5194 gen_op_set_cc_op(s->cc_op);
5196 s->cc_op = CC_OP_EFLAGS;
5198 case 0xf9: /* stc */
5199 if (s->cc_op != CC_OP_DYNAMIC)
5200 gen_op_set_cc_op(s->cc_op);
5202 s->cc_op = CC_OP_EFLAGS;
5204 case 0xfc: /* cld */
5207 case 0xfd: /* std */
5211 /************************/
5212 /* bit operations */
5213 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5214 ot = dflag + OT_WORD;
5215 modrm = ldub_code(s->pc++);
5216 op = (modrm >> 3) & 7;
5217 mod = (modrm >> 6) & 3;
5218 rm = (modrm & 7) | REX_B(s);
5221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5222 gen_op_ld_T0_A0[ot + s->mem_index]();
5224 gen_op_mov_TN_reg[ot][0][rm]();
5227 val = ldub_code(s->pc++);
5228 gen_op_movl_T1_im(val);
5232 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5233 s->cc_op = CC_OP_SARB + ot;
5236 gen_op_st_T0_A0[ot + s->mem_index]();
5238 gen_op_mov_reg_T0[ot][rm]();
5239 gen_op_update_bt_cc();
5242 case 0x1a3: /* bt Gv, Ev */
5245 case 0x1ab: /* bts */
5248 case 0x1b3: /* btr */
5251 case 0x1bb: /* btc */
5254 ot = dflag + OT_WORD;
5255 modrm = ldub_code(s->pc++);
5256 reg = ((modrm >> 3) & 7) | rex_r;
5257 mod = (modrm >> 6) & 3;
5258 rm = (modrm & 7) | REX_B(s);
5259 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5261 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5262 /* specific case: we need to add a displacement */
5263 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5264 gen_op_ld_T0_A0[ot + s->mem_index]();
5266 gen_op_mov_TN_reg[ot][0][rm]();
5268 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5269 s->cc_op = CC_OP_SARB + ot;
5272 gen_op_st_T0_A0[ot + s->mem_index]();
5274 gen_op_mov_reg_T0[ot][rm]();
5275 gen_op_update_bt_cc();
5278 case 0x1bc: /* bsf */
5279 case 0x1bd: /* bsr */
5280 ot = dflag + OT_WORD;
5281 modrm = ldub_code(s->pc++);
5282 reg = ((modrm >> 3) & 7) | rex_r;
5283 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5284 /* NOTE: in order to handle the 0 case, we must load the
5285 result. It could be optimized with a generated jump */
5286 gen_op_mov_TN_reg[ot][1][reg]();
5287 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5288 gen_op_mov_reg_T1[ot][reg]();
5289 s->cc_op = CC_OP_LOGICB + ot;
5291 /************************/
5293 case 0x27: /* daa */
5296 if (s->cc_op != CC_OP_DYNAMIC)
5297 gen_op_set_cc_op(s->cc_op);
5299 s->cc_op = CC_OP_EFLAGS;
5301 case 0x2f: /* das */
5304 if (s->cc_op != CC_OP_DYNAMIC)
5305 gen_op_set_cc_op(s->cc_op);
5307 s->cc_op = CC_OP_EFLAGS;
5309 case 0x37: /* aaa */
5312 if (s->cc_op != CC_OP_DYNAMIC)
5313 gen_op_set_cc_op(s->cc_op);
5315 s->cc_op = CC_OP_EFLAGS;
5317 case 0x3f: /* aas */
5320 if (s->cc_op != CC_OP_DYNAMIC)
5321 gen_op_set_cc_op(s->cc_op);
5323 s->cc_op = CC_OP_EFLAGS;
5325 case 0xd4: /* aam */
5328 val = ldub_code(s->pc++);
5330 s->cc_op = CC_OP_LOGICB;
5332 case 0xd5: /* aad */
5335 val = ldub_code(s->pc++);
5337 s->cc_op = CC_OP_LOGICB;
5339 /************************/
5341 case 0x90: /* nop */
5342 /* XXX: xchg + rex handling */
5343 /* XXX: correct lock test for all insn */
5344 if (prefixes & PREFIX_LOCK)
5347 case 0x9b: /* fwait */
5348 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5349 (HF_MP_MASK | HF_TS_MASK)) {
5350 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5352 if (s->cc_op != CC_OP_DYNAMIC)
5353 gen_op_set_cc_op(s->cc_op);
5354 gen_jmp_im(pc_start - s->cs_base);
5358 case 0xcc: /* int3 */
5359 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5361 case 0xcd: /* int N */
5362 val = ldub_code(s->pc++);
5363 if (s->vm86 && s->iopl != 3) {
5364 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5366 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5369 case 0xce: /* into */
5372 if (s->cc_op != CC_OP_DYNAMIC)
5373 gen_op_set_cc_op(s->cc_op);
5374 gen_jmp_im(pc_start - s->cs_base);
5375 gen_op_into(s->pc - pc_start);
5377 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5379 gen_debug(s, pc_start - s->cs_base);
5382 tb_flush(cpu_single_env);
5383 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5386 case 0xfa: /* cli */
5388 if (s->cpl <= s->iopl) {
5391 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5397 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5401 case 0xfb: /* sti */
5403 if (s->cpl <= s->iopl) {
5406 /* interruptions are enabled only the first insn after sti */
5407 /* If several instructions disable interrupts, only the
5409 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5410 gen_op_set_inhibit_irq();
5411 /* give a chance to handle pending irqs */
5412 gen_jmp_im(s->pc - s->cs_base);
5415 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5421 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5425 case 0x62: /* bound */
5428 ot = dflag ? OT_LONG : OT_WORD;
5429 modrm = ldub_code(s->pc++);
5430 reg = (modrm >> 3) & 7;
5431 mod = (modrm >> 6) & 3;
5434 gen_op_mov_TN_reg[ot][0][reg]();
5435 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5436 gen_jmp_im(pc_start - s->cs_base);
5442 case 0x1c8 ... 0x1cf: /* bswap reg */
5443 reg = (b & 7) | REX_B(s);
5444 #ifdef TARGET_X86_64
5446 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5448 gen_op_mov_reg_T0[OT_QUAD][reg]();
5452 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5454 gen_op_mov_reg_T0[OT_LONG][reg]();
5457 case 0xd6: /* salc */
5460 if (s->cc_op != CC_OP_DYNAMIC)
5461 gen_op_set_cc_op(s->cc_op);
5464 case 0xe0: /* loopnz */
5465 case 0xe1: /* loopz */
5466 if (s->cc_op != CC_OP_DYNAMIC)
5467 gen_op_set_cc_op(s->cc_op);
5469 case 0xe2: /* loop */
5470 case 0xe3: /* jecxz */
5474 tval = (int8_t)insn_get(s, OT_BYTE);
5475 next_eip = s->pc - s->cs_base;
5480 l1 = gen_new_label();
5481 l2 = gen_new_label();
5484 gen_op_jz_ecx[s->aflag](l1);
5486 gen_op_dec_ECX[s->aflag]();
5489 gen_op_loop[s->aflag][b](l1);
5492 gen_jmp_im(next_eip);
5493 gen_op_jmp_label(l2);
5500 case 0x130: /* wrmsr */
5501 case 0x132: /* rdmsr */
5503 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5511 case 0x131: /* rdtsc */
5512 gen_jmp_im(pc_start - s->cs_base);
5515 case 0x134: /* sysenter */
5519 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5521 if (s->cc_op != CC_OP_DYNAMIC) {
5522 gen_op_set_cc_op(s->cc_op);
5523 s->cc_op = CC_OP_DYNAMIC;
5525 gen_jmp_im(pc_start - s->cs_base);
5530 case 0x135: /* sysexit */
5534 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5536 if (s->cc_op != CC_OP_DYNAMIC) {
5537 gen_op_set_cc_op(s->cc_op);
5538 s->cc_op = CC_OP_DYNAMIC;
5540 gen_jmp_im(pc_start - s->cs_base);
5545 #ifdef TARGET_X86_64
5546 case 0x105: /* syscall */
5547 /* XXX: is it usable in real mode ? */
5548 if (s->cc_op != CC_OP_DYNAMIC) {
5549 gen_op_set_cc_op(s->cc_op);
5550 s->cc_op = CC_OP_DYNAMIC;
5552 gen_jmp_im(pc_start - s->cs_base);
5553 gen_op_syscall(s->pc - pc_start);
5556 case 0x107: /* sysret */
5558 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5560 if (s->cc_op != CC_OP_DYNAMIC) {
5561 gen_op_set_cc_op(s->cc_op);
5562 s->cc_op = CC_OP_DYNAMIC;
5564 gen_jmp_im(pc_start - s->cs_base);
5565 gen_op_sysret(s->dflag);
5566 /* condition codes are modified only in long mode */
5568 s->cc_op = CC_OP_EFLAGS;
5573 case 0x1a2: /* cpuid */
5576 case 0xf4: /* hlt */
5578 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5580 if (s->cc_op != CC_OP_DYNAMIC)
5581 gen_op_set_cc_op(s->cc_op);
5582 gen_jmp_im(s->pc - s->cs_base);
5588 modrm = ldub_code(s->pc++);
5589 mod = (modrm >> 6) & 3;
5590 op = (modrm >> 3) & 7;
5593 if (!s->pe || s->vm86)
5595 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5599 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5602 if (!s->pe || s->vm86)
5605 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5607 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5608 gen_jmp_im(pc_start - s->cs_base);
5613 if (!s->pe || s->vm86)
5615 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5619 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5622 if (!s->pe || s->vm86)
5625 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5627 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5628 gen_jmp_im(pc_start - s->cs_base);
5634 if (!s->pe || s->vm86)
5636 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5637 if (s->cc_op != CC_OP_DYNAMIC)
5638 gen_op_set_cc_op(s->cc_op);
5643 s->cc_op = CC_OP_EFLAGS;
5650 modrm = ldub_code(s->pc++);
5651 mod = (modrm >> 6) & 3;
5652 op = (modrm >> 3) & 7;
5658 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5659 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5660 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5661 gen_add_A0_im(s, 2);
5662 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5664 gen_op_andl_T0_im(0xffffff);
5665 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5670 case 0: /* monitor */
5671 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5674 gen_jmp_im(pc_start - s->cs_base);
5675 #ifdef TARGET_X86_64
5676 if (s->aflag == 2) {
5677 gen_op_movq_A0_reg[R_EBX]();
5678 gen_op_addq_A0_AL();
5682 gen_op_movl_A0_reg[R_EBX]();
5683 gen_op_addl_A0_AL();
5685 gen_op_andl_A0_ffff();
5687 gen_add_A0_ds_seg(s);
5691 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5694 if (s->cc_op != CC_OP_DYNAMIC) {
5695 gen_op_set_cc_op(s->cc_op);
5696 s->cc_op = CC_OP_DYNAMIC;
5698 gen_jmp_im(s->pc - s->cs_base);
5706 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5707 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5708 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5709 gen_add_A0_im(s, 2);
5710 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5712 gen_op_andl_T0_im(0xffffff);
5713 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5721 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5723 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5724 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5725 gen_add_A0_im(s, 2);
5726 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5728 gen_op_andl_T0_im(0xffffff);
5730 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5731 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5733 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5734 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5739 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5740 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5744 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5746 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5748 gen_jmp_im(s->pc - s->cs_base);
5752 case 7: /* invlpg */
5754 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5757 #ifdef TARGET_X86_64
5758 if (CODE64(s) && rm == 0) {
5760 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5761 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5762 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5763 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5770 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5772 gen_jmp_im(s->pc - s->cs_base);
5781 case 0x108: /* invd */
5782 case 0x109: /* wbinvd */
5784 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5789 case 0x63: /* arpl or movslS (x86_64) */
5790 #ifdef TARGET_X86_64
5793 /* d_ot is the size of destination */
5794 d_ot = dflag + OT_WORD;
5796 modrm = ldub_code(s->pc++);
5797 reg = ((modrm >> 3) & 7) | rex_r;
5798 mod = (modrm >> 6) & 3;
5799 rm = (modrm & 7) | REX_B(s);
5802 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5804 if (d_ot == OT_QUAD)
5805 gen_op_movslq_T0_T0();
5806 gen_op_mov_reg_T0[d_ot][reg]();
5808 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5809 if (d_ot == OT_QUAD) {
5810 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5812 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5814 gen_op_mov_reg_T0[d_ot][reg]();
5819 if (!s->pe || s->vm86)
5821 ot = dflag ? OT_LONG : OT_WORD;
5822 modrm = ldub_code(s->pc++);
5823 reg = (modrm >> 3) & 7;
5824 mod = (modrm >> 6) & 3;
5827 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5828 gen_op_ld_T0_A0[ot + s->mem_index]();
5830 gen_op_mov_TN_reg[ot][0][rm]();
5832 if (s->cc_op != CC_OP_DYNAMIC)
5833 gen_op_set_cc_op(s->cc_op);
5835 s->cc_op = CC_OP_EFLAGS;
5837 gen_op_st_T0_A0[ot + s->mem_index]();
5839 gen_op_mov_reg_T0[ot][rm]();
5841 gen_op_arpl_update();
5844 case 0x102: /* lar */
5845 case 0x103: /* lsl */
5846 if (!s->pe || s->vm86)
5848 ot = dflag ? OT_LONG : OT_WORD;
5849 modrm = ldub_code(s->pc++);
5850 reg = ((modrm >> 3) & 7) | rex_r;
5851 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5852 gen_op_mov_TN_reg[ot][1][reg]();
5853 if (s->cc_op != CC_OP_DYNAMIC)
5854 gen_op_set_cc_op(s->cc_op);
5859 s->cc_op = CC_OP_EFLAGS;
5860 gen_op_mov_reg_T1[ot][reg]();
5863 modrm = ldub_code(s->pc++);
5864 mod = (modrm >> 6) & 3;
5865 op = (modrm >> 3) & 7;
5867 case 0: /* prefetchnta */
5868 case 1: /* prefetchnt0 */
5869 case 2: /* prefetchnt0 */
5870 case 3: /* prefetchnt0 */
5873 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5874 /* nothing more to do */
5876 default: /* nop (multi byte) */
5877 gen_nop_modrm(s, modrm);
5881 case 0x119 ... 0x11f: /* nop (multi byte) */
5882 modrm = ldub_code(s->pc++);
5883 gen_nop_modrm(s, modrm);
5885 case 0x120: /* mov reg, crN */
5886 case 0x122: /* mov crN, reg */
5888 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5890 modrm = ldub_code(s->pc++);
5891 if ((modrm & 0xc0) != 0xc0)
5893 rm = (modrm & 7) | REX_B(s);
5894 reg = ((modrm >> 3) & 7) | rex_r;
5906 gen_op_mov_TN_reg[ot][0][rm]();
5907 gen_op_movl_crN_T0(reg);
5908 gen_jmp_im(s->pc - s->cs_base);
5911 #if !defined(CONFIG_USER_ONLY)
5913 gen_op_movtl_T0_cr8();
5916 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5917 gen_op_mov_reg_T0[ot][rm]();
5925 case 0x121: /* mov reg, drN */
5926 case 0x123: /* mov drN, reg */
5928 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5930 modrm = ldub_code(s->pc++);
5931 if ((modrm & 0xc0) != 0xc0)
5933 rm = (modrm & 7) | REX_B(s);
5934 reg = ((modrm >> 3) & 7) | rex_r;
5939 /* XXX: do it dynamically with CR4.DE bit */
5940 if (reg == 4 || reg == 5 || reg >= 8)
5943 gen_op_mov_TN_reg[ot][0][rm]();
5944 gen_op_movl_drN_T0(reg);
5945 gen_jmp_im(s->pc - s->cs_base);
5948 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5949 gen_op_mov_reg_T0[ot][rm]();
5953 case 0x106: /* clts */
5955 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5958 /* abort block because static cpu state changed */
5959 gen_jmp_im(s->pc - s->cs_base);
5963 /* MMX/SSE/SSE2/PNI support */
5964 case 0x1c3: /* MOVNTI reg, mem */
5965 if (!(s->cpuid_features & CPUID_SSE2))
5967 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5968 modrm = ldub_code(s->pc++);
5969 mod = (modrm >> 6) & 3;
5972 reg = ((modrm >> 3) & 7) | rex_r;
5973 /* generate a generic store */
5974 gen_ldst_modrm(s, modrm, ot, reg, 1);
5977 modrm = ldub_code(s->pc++);
5978 mod = (modrm >> 6) & 3;
5979 op = (modrm >> 3) & 7;
5981 case 0: /* fxsave */
5982 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5983 (s->flags & HF_EM_MASK))
5985 if (s->flags & HF_TS_MASK) {
5986 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5989 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5990 gen_op_fxsave_A0((s->dflag == 2));
5992 case 1: /* fxrstor */
5993 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5994 (s->flags & HF_EM_MASK))
5996 if (s->flags & HF_TS_MASK) {
5997 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6000 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6001 gen_op_fxrstor_A0((s->dflag == 2));
6003 case 2: /* ldmxcsr */
6004 case 3: /* stmxcsr */
6005 if (s->flags & HF_TS_MASK) {
6006 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6009 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6012 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6014 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6015 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6017 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6018 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6021 case 5: /* lfence */
6022 case 6: /* mfence */
6023 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6026 case 7: /* sfence / clflush */
6027 if ((modrm & 0xc7) == 0xc0) {
6029 if (!(s->cpuid_features & CPUID_SSE))
6033 if (!(s->cpuid_features & CPUID_CLFLUSH))
6035 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6042 case 0x10d: /* prefetch */
6043 modrm = ldub_code(s->pc++);
6044 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6045 /* ignore for now */
6047 case 0x1aa: /* rsm */
6048 if (!(s->flags & HF_SMM_MASK))
6050 if (s->cc_op != CC_OP_DYNAMIC) {
6051 gen_op_set_cc_op(s->cc_op);
6052 s->cc_op = CC_OP_DYNAMIC;
6054 gen_jmp_im(s->pc - s->cs_base);
6058 case 0x110 ... 0x117:
6059 case 0x128 ... 0x12f:
6060 case 0x150 ... 0x177:
6061 case 0x17c ... 0x17f:
6063 case 0x1c4 ... 0x1c6:
6064 case 0x1d0 ... 0x1fe:
6065 gen_sse(s, b, pc_start, rex_r);
6070 /* lock generation */
6071 if (s->prefix & PREFIX_LOCK)
6075 if (s->prefix & PREFIX_LOCK)
6077 /* XXX: ensure that no lock was generated */
6078 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6082 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6083 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6085 /* flags read by an operation */
6086 static uint16_t opc_read_flags[NB_OPS] = {
6087 [INDEX_op_aas] = CC_A,
6088 [INDEX_op_aaa] = CC_A,
6089 [INDEX_op_das] = CC_A | CC_C,
6090 [INDEX_op_daa] = CC_A | CC_C,
6092 /* subtle: due to the incl/decl implementation, C is used */
6093 [INDEX_op_update_inc_cc] = CC_C,
6095 [INDEX_op_into] = CC_O,
6097 [INDEX_op_jb_subb] = CC_C,
6098 [INDEX_op_jb_subw] = CC_C,
6099 [INDEX_op_jb_subl] = CC_C,
6101 [INDEX_op_jz_subb] = CC_Z,
6102 [INDEX_op_jz_subw] = CC_Z,
6103 [INDEX_op_jz_subl] = CC_Z,
6105 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6106 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6107 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6109 [INDEX_op_js_subb] = CC_S,
6110 [INDEX_op_js_subw] = CC_S,
6111 [INDEX_op_js_subl] = CC_S,
6113 [INDEX_op_jl_subb] = CC_O | CC_S,
6114 [INDEX_op_jl_subw] = CC_O | CC_S,
6115 [INDEX_op_jl_subl] = CC_O | CC_S,
6117 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6118 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6119 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6121 [INDEX_op_loopnzw] = CC_Z,
6122 [INDEX_op_loopnzl] = CC_Z,
6123 [INDEX_op_loopzw] = CC_Z,
6124 [INDEX_op_loopzl] = CC_Z,
6126 [INDEX_op_seto_T0_cc] = CC_O,
6127 [INDEX_op_setb_T0_cc] = CC_C,
6128 [INDEX_op_setz_T0_cc] = CC_Z,
6129 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6130 [INDEX_op_sets_T0_cc] = CC_S,
6131 [INDEX_op_setp_T0_cc] = CC_P,
6132 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6133 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6135 [INDEX_op_setb_T0_subb] = CC_C,
6136 [INDEX_op_setb_T0_subw] = CC_C,
6137 [INDEX_op_setb_T0_subl] = CC_C,
6139 [INDEX_op_setz_T0_subb] = CC_Z,
6140 [INDEX_op_setz_T0_subw] = CC_Z,
6141 [INDEX_op_setz_T0_subl] = CC_Z,
6143 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6144 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6145 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6147 [INDEX_op_sets_T0_subb] = CC_S,
6148 [INDEX_op_sets_T0_subw] = CC_S,
6149 [INDEX_op_sets_T0_subl] = CC_S,
6151 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6152 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6153 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6155 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6156 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6157 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6159 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6160 [INDEX_op_cmc] = CC_C,
6161 [INDEX_op_salc] = CC_C,
6163 /* needed for correct flag optimisation before string ops */
6164 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6165 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6166 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6167 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6169 #ifdef TARGET_X86_64
6170 [INDEX_op_jb_subq] = CC_C,
6171 [INDEX_op_jz_subq] = CC_Z,
6172 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6173 [INDEX_op_js_subq] = CC_S,
6174 [INDEX_op_jl_subq] = CC_O | CC_S,
6175 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6177 [INDEX_op_loopnzq] = CC_Z,
6178 [INDEX_op_loopzq] = CC_Z,
6180 [INDEX_op_setb_T0_subq] = CC_C,
6181 [INDEX_op_setz_T0_subq] = CC_Z,
6182 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6183 [INDEX_op_sets_T0_subq] = CC_S,
6184 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6185 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6187 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6188 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6191 #define DEF_READF(SUFFIX)\
6192 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6193 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6194 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6195 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6196 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6197 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6198 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6199 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6201 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6202 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6203 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6204 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6205 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6206 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6207 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6208 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6212 #ifndef CONFIG_USER_ONLY
6218 /* flags written by an operation */
6219 static uint16_t opc_write_flags[NB_OPS] = {
6220 [INDEX_op_update2_cc] = CC_OSZAPC,
6221 [INDEX_op_update1_cc] = CC_OSZAPC,
6222 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6223 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6224 /* subtle: due to the incl/decl implementation, C is used */
6225 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6226 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6228 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6229 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6230 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6231 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6232 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6233 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6234 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6235 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6236 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6237 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6238 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6241 [INDEX_op_ucomiss] = CC_OSZAPC,
6242 [INDEX_op_ucomisd] = CC_OSZAPC,
6243 [INDEX_op_comiss] = CC_OSZAPC,
6244 [INDEX_op_comisd] = CC_OSZAPC,
6247 [INDEX_op_aam] = CC_OSZAPC,
6248 [INDEX_op_aad] = CC_OSZAPC,
6249 [INDEX_op_aas] = CC_OSZAPC,
6250 [INDEX_op_aaa] = CC_OSZAPC,
6251 [INDEX_op_das] = CC_OSZAPC,
6252 [INDEX_op_daa] = CC_OSZAPC,
6254 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6255 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6256 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6257 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6258 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6259 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6260 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6261 [INDEX_op_clc] = CC_C,
6262 [INDEX_op_stc] = CC_C,
6263 [INDEX_op_cmc] = CC_C,
6265 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6266 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6267 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6268 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6269 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6270 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6271 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6272 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6273 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6274 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6275 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6276 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6278 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6279 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6280 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6281 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6282 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6283 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6285 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6286 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6287 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6288 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6290 [INDEX_op_cmpxchg8b] = CC_Z,
6291 [INDEX_op_lar] = CC_Z,
6292 [INDEX_op_lsl] = CC_Z,
6293 [INDEX_op_verr] = CC_Z,
6294 [INDEX_op_verw] = CC_Z,
6295 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6296 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6298 #define DEF_WRITEF(SUFFIX)\
6299 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6300 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6301 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6302 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6303 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6304 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6305 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6306 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6308 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6309 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6310 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6311 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6312 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6313 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6314 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6315 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6317 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6318 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6319 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6320 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6321 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6322 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6323 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6324 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6326 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6327 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6328 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6329 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6331 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6332 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6333 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6334 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6336 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6337 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6338 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6339 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6341 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6342 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6343 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6344 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6345 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6346 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6348 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6349 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6350 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6351 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6352 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6353 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6355 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6356 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6357 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6358 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6363 #ifndef CONFIG_USER_ONLY
6369 /* simpler form of an operation if no flags need to be generated */
6370 static uint16_t opc_simpler[NB_OPS] = {
6371 [INDEX_op_update2_cc] = INDEX_op_nop,
6372 [INDEX_op_update1_cc] = INDEX_op_nop,
6373 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6375 /* broken: CC_OP logic must be rewritten */
6376 [INDEX_op_update_inc_cc] = INDEX_op_nop,
6379 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6380 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6381 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6382 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6384 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6385 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6386 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6387 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6389 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6390 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6391 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6392 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6394 #define DEF_SIMPLER(SUFFIX)\
6395 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6396 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6397 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6398 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6400 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6401 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6402 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6403 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6407 #ifndef CONFIG_USER_ONLY
6408 DEF_SIMPLER(_kernel)
6413 void optimize_flags_init(void)
6416 /* put default values in arrays */
6417 for(i = 0; i < NB_OPS; i++) {
6418 if (opc_simpler[i] == 0)
6423 /* CPU flags computation optimization: we move backward thru the
6424 generated code to see which flags are needed. The operation is
6425 modified if suitable */
6426 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6429 int live_flags, write_flags, op;
6431 opc_ptr = opc_buf + opc_buf_len;
6432 /* live_flags contains the flags needed by the next instructions
6433 in the code. At the end of the bloc, we consider that all the
6435 live_flags = CC_OSZAPC;
6436 while (opc_ptr > opc_buf) {
6438 /* if none of the flags written by the instruction is used,
6439 then we can try to find a simpler instruction */
6440 write_flags = opc_write_flags[op];
6441 if ((live_flags & write_flags) == 0) {
6442 *opc_ptr = opc_simpler[op];
6444 /* compute the live flags before the instruction */
6445 live_flags &= ~write_flags;
6446 live_flags |= opc_read_flags[op];
6450 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6451 basic block 'tb'. If search_pc is TRUE, also generate PC
6452 information for each intermediate instruction. */
6453 static inline int gen_intermediate_code_internal(CPUState *env,
6454 TranslationBlock *tb,
6457 DisasContext dc1, *dc = &dc1;
6458 target_ulong pc_ptr;
6459 uint16_t *gen_opc_end;
6460 int flags, j, lj, cflags;
6461 target_ulong pc_start;
6462 target_ulong cs_base;
6464 /* generate intermediate code */
6466 cs_base = tb->cs_base;
6468 cflags = tb->cflags;
6470 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6471 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6472 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6473 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6475 dc->vm86 = (flags >> VM_SHIFT) & 1;
6476 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6477 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6478 dc->tf = (flags >> TF_SHIFT) & 1;
6479 dc->singlestep_enabled = env->singlestep_enabled;
6480 dc->cc_op = CC_OP_DYNAMIC;
6481 dc->cs_base = cs_base;
6483 dc->popl_esp_hack = 0;
6484 /* select memory access functions */
6486 if (flags & HF_SOFTMMU_MASK) {
6488 dc->mem_index = 2 * 4;
6490 dc->mem_index = 1 * 4;
6492 dc->cpuid_features = env->cpuid_features;
6493 dc->cpuid_ext_features = env->cpuid_ext_features;
6494 #ifdef TARGET_X86_64
6495 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6496 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6499 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6500 (flags & HF_INHIBIT_IRQ_MASK)
6501 #ifndef CONFIG_SOFTMMU
6502 || (flags & HF_SOFTMMU_MASK)
6506 /* check addseg logic */
6507 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6508 printf("ERROR addseg\n");
6511 gen_opc_ptr = gen_opc_buf;
6512 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6513 gen_opparam_ptr = gen_opparam_buf;
6516 dc->is_jmp = DISAS_NEXT;
6521 if (env->nb_breakpoints > 0) {
6522 for(j = 0; j < env->nb_breakpoints; j++) {
6523 if (env->breakpoints[j] == pc_ptr) {
6524 gen_debug(dc, pc_ptr - dc->cs_base);
6530 j = gen_opc_ptr - gen_opc_buf;
6534 gen_opc_instr_start[lj++] = 0;
6536 gen_opc_pc[lj] = pc_ptr;
6537 gen_opc_cc_op[lj] = dc->cc_op;
6538 gen_opc_instr_start[lj] = 1;
6540 pc_ptr = disas_insn(dc, pc_ptr);
6541 /* stop translation if indicated */
6544 /* if single step mode, we generate only one instruction and
6545 generate an exception */
6546 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6547 the flag and abort the translation to give the irqs a
6548 change to be happen */
6549 if (dc->tf || dc->singlestep_enabled ||
6550 (flags & HF_INHIBIT_IRQ_MASK) ||
6551 (cflags & CF_SINGLE_INSN)) {
6552 gen_jmp_im(pc_ptr - dc->cs_base);
6556 /* if too long translation, stop generation too */
6557 if (gen_opc_ptr >= gen_opc_end ||
6558 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6559 gen_jmp_im(pc_ptr - dc->cs_base);
6564 *gen_opc_ptr = INDEX_op_end;
6565 /* we don't forget to fill the last values */
6567 j = gen_opc_ptr - gen_opc_buf;
6570 gen_opc_instr_start[lj++] = 0;
6574 if (loglevel & CPU_LOG_TB_CPU) {
6575 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6577 if (loglevel & CPU_LOG_TB_IN_ASM) {
6579 fprintf(logfile, "----------------\n");
6580 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6581 #ifdef TARGET_X86_64
6586 disas_flags = !dc->code32;
6587 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6588 fprintf(logfile, "\n");
6589 if (loglevel & CPU_LOG_TB_OP) {
6590 fprintf(logfile, "OP:\n");
6591 dump_ops(gen_opc_buf, gen_opparam_buf);
6592 fprintf(logfile, "\n");
6597 /* optimize flag computations */
6598 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6601 if (loglevel & CPU_LOG_TB_OP_OPT) {
6602 fprintf(logfile, "AFTER FLAGS OPT:\n");
6603 dump_ops(gen_opc_buf, gen_opparam_buf);
6604 fprintf(logfile, "\n");
6608 tb->size = pc_ptr - pc_start;
6612 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6614 return gen_intermediate_code_internal(env, tb, 0);
6617 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6619 return gen_intermediate_code_internal(env, tb, 1);