4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Global micro-op output pointers used while generating code for one
   translation block. */
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
/* Instruction prefix bits, OR-ed together while decoding one insn. */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* x86-64 build: the helpers below pass through their argument.
   NOTE(review): the surrounding #ifdef TARGET_X86_64 / #else lines are not
   visible in this fragment — both branches of the conditional appear here. */
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
/* non-x86-64 build: x86-64-only entries collapse to NULL / nothing. */
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
/* non-zero when REX-extended byte registers (SPL/BPL/SIL/DIL) are usable */
61 static int x86_64_hregs;
/* Per-instruction disassembly/translation state.  One DisasContext is live
   while translating a single translation block; the "current block context"
   fields are fixed for the whole block, the others change per instruction. */
64 typedef struct DisasContext {
65 /* current insn context */
66 int override; /* -1 if no override */
69 target_ulong pc; /* pc = eip + cs_base */
70 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
71 static state change (stop translation) */
72 /* current block context */
73 target_ulong cs_base; /* base of CS segment */
74 int pe; /* protected mode */
75 int code32; /* 32 bit code segment */
77 int lma; /* long mode active */
78 int code64; /* 64 bit code segment */
81 int ss32; /* 32 bit stack segment */
82 int cc_op; /* current CC operation */
83 int addseg; /* non zero if either DS/ES/SS have a non zero base */
84 int f_st; /* currently unused */
85 int vm86; /* vm86 mode */
88 int tf; /* TF cpu flag */
89 int singlestep_enabled; /* "hardware" single step enabled */
90 int jmp_opt; /* use direct block chaining for direct jumps */
91 int mem_index; /* select memory access functions */
92 int flags; /* all execution flags */
93 struct TranslationBlock *tb;
94 int popl_esp_hack; /* for correct popl with esp base handling */
95 int rip_offset; /* only used in x86_64, but left for simplicity */
/* Forward declarations for the end-of-block / jump emitters defined below. */
99 static void gen_eob(DisasContext *s);
100 static void gen_jmp(DisasContext *s, target_ulong eip);
101 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
103 /* i386 arith/logic operations */
/* NOTE(review): the enum bodies for the arith ops, micro-op indexes and
   operand registers are only partially visible here. */
123 OP_SHL1, /* undocumented */
128 #define DEF(s, n, copy_size) INDEX_op_ ## s,
145 /* I386 int registers */
146 OR_EAX, /* MUST be even numbered */
155 OR_TMP0 = 16, /* temporary operand register */
157 OR_A0, /* temporary register used when doing address evaluation */
/* number of operand sizes: byte/word/long/quad on x86-64 */
162 #define NB_OP_SIZES 4
/* Expand one dispatch-table row per CPU register: prefix##REG##suffix.
   This variant covers the 16 x86-64 integer registers. */
164 #define DEF_REGS(prefix, suffix) \
165 prefix ## EAX ## suffix,\
166 prefix ## ECX ## suffix,\
167 prefix ## EDX ## suffix,\
168 prefix ## EBX ## suffix,\
169 prefix ## ESP ## suffix,\
170 prefix ## EBP ## suffix,\
171 prefix ## ESI ## suffix,\
172 prefix ## EDI ## suffix,\
173 prefix ## R8 ## suffix,\
174 prefix ## R9 ## suffix,\
175 prefix ## R10 ## suffix,\
176 prefix ## R11 ## suffix,\
177 prefix ## R12 ## suffix,\
178 prefix ## R13 ## suffix,\
179 prefix ## R14 ## suffix,\
180 prefix ## R15 ## suffix,
/* For the byte-register slots of ESP/EBP/ESI/EDI the generated wrapper
   dispatches at runtime between the REX byte register (SPL/BPL/SIL/DIL)
   and the legacy high-byte register (AH/CH/DH/BH) — which one is meant
   depends on x86_64_hregs.  NOTE(review): the wrapper bodies' control flow
   lines are partially elided in this fragment. */
182 #define DEF_BREGS(prefixb, prefixh, suffix) \
184 static void prefixb ## ESP ## suffix ## _wrapper(void) \
187 prefixb ## ESP ## suffix (); \
189 prefixh ## EAX ## suffix (); \
192 static void prefixb ## EBP ## suffix ## _wrapper(void) \
195 prefixb ## EBP ## suffix (); \
197 prefixh ## ECX ## suffix (); \
200 static void prefixb ## ESI ## suffix ## _wrapper(void) \
203 prefixb ## ESI ## suffix (); \
205 prefixh ## EDX ## suffix (); \
208 static void prefixb ## EDI ## suffix ## _wrapper(void) \
211 prefixb ## EDI ## suffix (); \
213 prefixh ## EBX ## suffix (); \
/* Instantiate the byte-register wrappers for T0/T1 moves in both
   directions. */
216 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
217 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
218 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
219 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
221 #else /* !TARGET_X86_64 */
/* 32-bit-only build: no quad size, 8 registers. */
223 #define NB_OP_SIZES 3
225 #define DEF_REGS(prefix, suffix) \
226 prefix ## EAX ## suffix,\
227 prefix ## ECX ## suffix,\
228 prefix ## EDX ## suffix,\
229 prefix ## EBX ## suffix,\
230 prefix ## ESP ## suffix,\
231 prefix ## EBP ## suffix,\
232 prefix ## ESI ## suffix,\
233 prefix ## EDI ## suffix,
235 #endif /* !TARGET_X86_64 */
/* Dispatch tables mapping [operand size][register] to the micro-op that
   moves between the temporary T0/T1/A0 and a CPU register.  For the byte
   size, the ESP/EBP/ESI/EDI slots use the _wrapper functions generated by
   DEF_BREGS above.  NOTE(review): the array row delimiters and several
   entries are elided in this fragment. */
237 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
244 gen_op_movb_ESP_T0_wrapper,
245 gen_op_movb_EBP_T0_wrapper,
246 gen_op_movb_ESI_T0_wrapper,
247 gen_op_movb_EDI_T0_wrapper,
264 DEF_REGS(gen_op_movw_, _T0)
267 DEF_REGS(gen_op_movl_, _T0)
271 DEF_REGS(gen_op_movq_, _T0)
/* register <- T1 */
276 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
283 gen_op_movb_ESP_T1_wrapper,
284 gen_op_movb_EBP_T1_wrapper,
285 gen_op_movb_ESI_T1_wrapper,
286 gen_op_movb_EDI_T1_wrapper,
303 DEF_REGS(gen_op_movw_, _T1)
306 DEF_REGS(gen_op_movl_, _T1)
310 DEF_REGS(gen_op_movq_, _T1)
/* register <- A0; no byte row, hence NB_OP_SIZES - 1 */
315 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
317 DEF_REGS(gen_op_movw_, _A0)
320 DEF_REGS(gen_op_movl_, _A0)
324 DEF_REGS(gen_op_movq_, _A0)
/* TN <- register, indexed [size][T0/T1][reg] */
329 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
338 gen_op_movl_T0_ESP_wrapper,
339 gen_op_movl_T0_EBP_wrapper,
340 gen_op_movl_T0_ESI_wrapper,
341 gen_op_movl_T0_EDI_wrapper,
363 gen_op_movl_T1_ESP_wrapper,
364 gen_op_movl_T1_EBP_wrapper,
365 gen_op_movl_T1_ESI_wrapper,
366 gen_op_movl_T1_EDI_wrapper,
385 DEF_REGS(gen_op_movl_T0_, )
388 DEF_REGS(gen_op_movl_T1_, )
393 DEF_REGS(gen_op_movl_T0_, )
396 DEF_REGS(gen_op_movl_T1_, )
402 DEF_REGS(gen_op_movl_T0_, )
405 DEF_REGS(gen_op_movl_T1_, )
/* A0 <- 32-bit register */
411 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
412 DEF_REGS(gen_op_movl_A0_, )
/* A0 += reg << scale, 32-bit; first index is the SIB scale (0..3) */
415 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
417 DEF_REGS(gen_op_addl_A0_, )
420 DEF_REGS(gen_op_addl_A0_, _s1)
423 DEF_REGS(gen_op_addl_A0_, _s2)
426 DEF_REGS(gen_op_addl_A0_, _s3)
/* 64-bit variants of the A0 address helpers */
431 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
432 DEF_REGS(gen_op_movq_A0_, )
435 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
437 DEF_REGS(gen_op_addq_A0_, )
440 DEF_REGS(gen_op_addq_A0_, _s1)
443 DEF_REGS(gen_op_addq_A0_, _s2)
446 DEF_REGS(gen_op_addq_A0_, _s3)
/* conditional move reg <- T1 (CMOV), no byte size */
451 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
453 DEF_REGS(gen_op_cmovw_, _T1_T0)
456 DEF_REGS(gen_op_cmovl_, _T1_T0)
460 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* ALU micro-op dispatch tables.  Memory variants come in three rows
   (raw / kernel / user) selected by s->mem_index; the kernel/user rows are
   compiled only when !CONFIG_USER_ONLY.  X86_64_ONLY() entries are NULL on
   32-bit builds.  NOTE(review): many row delimiters and entries are elided
   in this fragment. */
465 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
/* adc/sbb need the input carry, so they get their own table, indexed
   [size][adc=0/sbb=1] */
476 #define DEF_ARITHC(SUFFIX)\
478 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
479 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
482 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
483 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
486 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
487 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
490 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
491 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
494 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
498 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
500 #ifndef CONFIG_USER_ONLY
/* CC_OP codes matching the byte-size arith ops */
506 static const int cc_op_arithb[8] = {
/* cmpxchg, one entry per size */
517 #define DEF_CMPXCHG(SUFFIX)\
518 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
519 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
520 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
521 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
523 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
527 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
529 #ifndef CONFIG_USER_ONLY
/* rotate/shift table: rol, ror, rcl, rcr, shl, shr, shl (undocumented
   duplicate encoding, see OP_SHL1), sar — 8 entries per size */
535 #define DEF_SHIFT(SUFFIX)\
537 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
538 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
539 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
540 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
541 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
542 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
543 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
548 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
549 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
550 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
551 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
552 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
553 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
558 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
559 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
560 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
561 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
562 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
563 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
567 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
568 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
569 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
570 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
571 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
572 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
573 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
577 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
581 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
583 #ifndef CONFIG_USER_ONLY
/* double-width shifts (shld/shrd), count either immediate or ECX */
589 #define DEF_SHIFTD(SUFFIX, op)\
595 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
596 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
599 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
600 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
609 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
613 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
615 #ifndef CONFIG_USER_ONLY
616 DEF_SHIFTD(_kernel, im)
617 DEF_SHIFTD(_user, im)
621 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
622 DEF_SHIFTD(_raw, ECX)
623 #ifndef CONFIG_USER_ONLY
624 DEF_SHIFTD(_kernel, ECX)
625 DEF_SHIFTD(_user, ECX)
/* bit-test ops (bts/btr/btc), indexed [size-1][op] */
629 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
632 gen_op_btsw_T0_T1_cc,
633 gen_op_btrw_T0_T1_cc,
634 gen_op_btcw_T0_T1_cc,
638 gen_op_btsl_T0_T1_cc,
639 gen_op_btrl_T0_T1_cc,
640 gen_op_btcl_T0_T1_cc,
645 gen_op_btsq_T0_T1_cc,
646 gen_op_btrq_T0_T1_cc,
647 gen_op_btcq_T0_T1_cc,
/* A0 += bit offset in T1, for memory bit-test addressing */
652 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
653 gen_op_add_bitw_A0_T1,
654 gen_op_add_bitl_A0_T1,
655 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* bsf/bsr */
658 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* Memory load/store dispatch tables, indexed by ot + s->mem_index:
   4 sizes per access-mode row, rows = raw / kernel / user (kernel and user
   rows only when !CONFIG_USER_ONLY).  "lds" = sign-extending load,
   "ldu" = zero-extending load. */
675 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
676 gen_op_ldsb_raw_T0_A0,
677 gen_op_ldsw_raw_T0_A0,
678 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
680 #ifndef CONFIG_USER_ONLY
681 gen_op_ldsb_kernel_T0_A0,
682 gen_op_ldsw_kernel_T0_A0,
683 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
686 gen_op_ldsb_user_T0_A0,
687 gen_op_ldsw_user_T0_A0,
688 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
/* zero-extending loads into T0 */
693 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
694 gen_op_ldub_raw_T0_A0,
695 gen_op_lduw_raw_T0_A0,
699 #ifndef CONFIG_USER_ONLY
700 gen_op_ldub_kernel_T0_A0,
701 gen_op_lduw_kernel_T0_A0,
705 gen_op_ldub_user_T0_A0,
706 gen_op_lduw_user_T0_A0,
712 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
713 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
714 gen_op_ldub_raw_T0_A0,
715 gen_op_lduw_raw_T0_A0,
716 gen_op_ldl_raw_T0_A0,
717 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
719 #ifndef CONFIG_USER_ONLY
720 gen_op_ldub_kernel_T0_A0,
721 gen_op_lduw_kernel_T0_A0,
722 gen_op_ldl_kernel_T0_A0,
723 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
725 gen_op_ldub_user_T0_A0,
726 gen_op_lduw_user_T0_A0,
727 gen_op_ldl_user_T0_A0,
728 X86_64_ONLY(gen_op_ldq_user_T0_A0),
/* same as above, loading into T1 */
732 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
733 gen_op_ldub_raw_T1_A0,
734 gen_op_lduw_raw_T1_A0,
735 gen_op_ldl_raw_T1_A0,
736 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
738 #ifndef CONFIG_USER_ONLY
739 gen_op_ldub_kernel_T1_A0,
740 gen_op_lduw_kernel_T1_A0,
741 gen_op_ldl_kernel_T1_A0,
742 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
744 gen_op_ldub_user_T1_A0,
745 gen_op_lduw_user_T1_A0,
746 gen_op_ldl_user_T1_A0,
747 X86_64_ONLY(gen_op_ldq_user_T1_A0),
/* stores from T0 */
751 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
752 gen_op_stb_raw_T0_A0,
753 gen_op_stw_raw_T0_A0,
754 gen_op_stl_raw_T0_A0,
755 X86_64_ONLY(gen_op_stq_raw_T0_A0),
757 #ifndef CONFIG_USER_ONLY
758 gen_op_stb_kernel_T0_A0,
759 gen_op_stw_kernel_T0_A0,
760 gen_op_stl_kernel_T0_A0,
761 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
763 gen_op_stb_user_T0_A0,
764 gen_op_stw_user_T0_A0,
765 gen_op_stl_user_T0_A0,
766 X86_64_ONLY(gen_op_stq_user_T0_A0),
/* stores from T1 — byte slots are absent in this fragment;
   NOTE(review): confirm against full source (no stb_*_T1 ops exist). */
770 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
772 gen_op_stw_raw_T1_A0,
773 gen_op_stl_raw_T1_A0,
774 X86_64_ONLY(gen_op_stq_raw_T1_A0),
776 #ifndef CONFIG_USER_ONLY
778 gen_op_stw_kernel_T1_A0,
779 gen_op_stl_kernel_T1_A0,
780 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
783 gen_op_stw_user_T1_A0,
784 gen_op_stl_user_T1_A0,
785 X86_64_ONLY(gen_op_stq_user_T1_A0),
/* Emit code to load the immediate 'pc' into EIP, picking the narrowest
   encoding: 32-bit move when pc fits in uint32_t, sign-extended 32-bit
   move when it fits in int32_t, else a full 64-bit immediate.
   NOTE(review): the TARGET_X86_64 conditional lines are elided here —
   the final gen_op_movl_eip_im is the 32-bit-only branch. */
789 static inline void gen_jmp_im(target_ulong pc)
792 if (pc == (uint32_t)pc) {
793 gen_op_movl_eip_im(pc);
794 } else if (pc == (int32_t)pc) {
795 gen_op_movq_eip_im(pc);
797 gen_op_movq_eip_im64(pc >> 32, pc);
800 gen_op_movl_eip_im(pc);
/* Compute the source address for string instructions into A0:
   (segment base, default DS unless overridden) + ESI, honouring the
   current address size (64/32/16 bit).  In 16-bit mode the offset is
   masked to 16 bits and a segment base is always added. */
804 static inline void gen_string_movl_A0_ESI(DisasContext *s)
808 override = s->override;
/* 64-bit path */
812 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
813 gen_op_addq_A0_reg_sN[0][R_ESI]();
815 gen_op_movq_A0_reg[R_ESI]();
/* 32-bit path: only add a segment base when addseg is set or an explicit
   override prefix was seen */
821 if (s->addseg && override < 0)
824 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
825 gen_op_addl_A0_reg_sN[0][R_ESI]();
827 gen_op_movl_A0_reg[R_ESI]();
830 /* 16 address, always override */
833 gen_op_movl_A0_reg[R_ESI]();
834 gen_op_andl_A0_ffff();
835 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Compute the destination address for string instructions into A0:
   ES base + EDI.  Unlike the source side, the ES segment cannot be
   overridden by a prefix.  64/32/16-bit address size paths as above. */
839 static inline void gen_string_movl_A0_EDI(DisasContext *s)
843 gen_op_movq_A0_reg[R_EDI]();
848 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
849 gen_op_addl_A0_reg_sN[0][R_EDI]();
851 gen_op_movl_A0_reg[R_EDI]();
/* 16-bit: mask offset and always add ES base */
854 gen_op_movl_A0_reg[R_EDI]();
855 gen_op_andl_A0_ffff();
856 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* Load T0 with +size or -size depending on EFLAGS.DF, per operand size —
   the per-iteration increment for string instructions. */
860 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
861 gen_op_movl_T0_Dshiftb,
862 gen_op_movl_T0_Dshiftw,
863 gen_op_movl_T0_Dshiftl,
864 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* conditional branches on (E/R)CX for rep loops, indexed by aflag */
867 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
870 X86_64_ONLY(gen_op_jnz_ecxq),
873 static GenOpFunc1 *gen_op_jz_ecx[3] = {
876 X86_64_ONLY(gen_op_jz_ecxq),
/* decrement (E/R)CX, indexed by aflag */
879 static GenOpFunc *gen_op_dec_ECX[3] = {
882 X86_64_ONLY(gen_op_decq_ECX),
/* repz/repnz termination tests on the last compare result,
   indexed [nz][size] */
885 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
890 X86_64_ONLY(gen_op_jnz_subq),
896 X86_64_ONLY(gen_op_jz_subq),
/* port I/O micro-ops, one per size */
900 static GenOpFunc *gen_op_in_DX_T0[3] = {
906 static GenOpFunc *gen_op_out_DX_T0[3] = {
912 static GenOpFunc *gen_op_in[3] = {
918 static GenOpFunc *gen_op_out[3] = {
/* I/O permission-bitmap checks (protected mode) */
924 static GenOpFunc *gen_check_io_T0[3] = {
930 static GenOpFunc *gen_check_io_DX[3] = {
/* Emit an I/O permission check before in/out when it can fault: in
   protected mode with CPL > IOPL, or in vm86 mode, the TSS I/O bitmap
   must be consulted.  The condition codes are flushed first because the
   check may raise an exception.  'use_dx' selects port-in-DX vs
   port-in-T0 form. */
936 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
938 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
939 if (s->cc_op != CC_OP_DYNAMIC)
940 gen_op_set_cc_op(s->cc_op);
943 gen_check_io_DX[ot]();
945 gen_check_io_T0[ot]();
/* Emit one MOVS iteration: load from [seg:ESI] into T0, store to
   [ES:EDI], then advance ESI and EDI by the direction-flag-adjusted
   element size (T0 after gen_op_movl_T0_Dshift).  Width of the index
   update (q/l/w) follows the address size. */
951 static inline void gen_movs(DisasContext *s, int ot)
953 gen_string_movl_A0_ESI(s);
954 gen_op_ld_T0_A0[ot + s->mem_index]();
955 gen_string_movl_A0_EDI(s);
956 gen_op_st_T0_A0[ot + s->mem_index]();
957 gen_op_movl_T0_Dshift[ot]();
960 gen_op_addq_ESI_T0();
961 gen_op_addq_EDI_T0();
965 gen_op_addl_ESI_T0();
966 gen_op_addl_EDI_T0();
968 gen_op_addw_ESI_T0();
969 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code state into the CPU and mark it
   dynamic, so subsequently generated code cannot assume a static cc_op. */
973 static inline void gen_update_cc_op(DisasContext *s)
975 if (s->cc_op != CC_OP_DYNAMIC) {
976 gen_op_set_cc_op(s->cc_op);
977 s->cc_op = CC_OP_DYNAMIC;
/* Emit the "ECX == 0" fast exit for rep-prefixed string instructions:
   if (E/R)CX is non-zero, fall through (to l1); otherwise jump straight
   to the next instruction.  Returns the label (l2 presumably — body is
   partially elided here, confirm against full source) used as the loop
   exit target. */
981 /* XXX: does not work with gdbstub "ice" single step - not a
983 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
987 l1 = gen_new_label();
988 l2 = gen_new_label();
989 gen_op_jnz_ecx[s->aflag](l1);
991 gen_jmp_tb(s, next_eip, 1);
/* Emit one STOS iteration: store EAX (loaded as a long into T0) to
   [ES:EDI], then advance EDI by the direction-adjusted element size. */
996 static inline void gen_stos(DisasContext *s, int ot)
998 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
999 gen_string_movl_A0_EDI(s);
1000 gen_op_st_T0_A0[ot + s->mem_index]();
1001 gen_op_movl_T0_Dshift[ot]();
1002 #ifdef TARGET_X86_64
1003 if (s->aflag == 2) {
1004 gen_op_addq_EDI_T0();
1008 gen_op_addl_EDI_T0();
1010 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load from [seg:ESI] into EAX, then advance
   ESI by the direction-adjusted element size. */
1014 static inline void gen_lods(DisasContext *s, int ot)
1016 gen_string_movl_A0_ESI(s);
1017 gen_op_ld_T0_A0[ot + s->mem_index]();
1018 gen_op_mov_reg_T0[ot][R_EAX]();
1019 gen_op_movl_T0_Dshift[ot]();
1020 #ifdef TARGET_X86_64
1021 if (s->aflag == 2) {
1022 gen_op_addq_ESI_T0();
1026 gen_op_addl_ESI_T0();
1028 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: compare EAX (T0) with [ES:EDI] (T1), setting
   the flags via the subtract compare, then advance EDI. */
1032 static inline void gen_scas(DisasContext *s, int ot)
1034 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1035 gen_string_movl_A0_EDI(s);
1036 gen_op_ld_T1_A0[ot + s->mem_index]();
1037 gen_op_cmpl_T0_T1_cc();
1038 gen_op_movl_T0_Dshift[ot]();
1039 #ifdef TARGET_X86_64
1040 if (s->aflag == 2) {
1041 gen_op_addq_EDI_T0();
1045 gen_op_addl_EDI_T0();
1047 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: compare [seg:ESI] (T0) with [ES:EDI] (T1),
   setting flags, then advance both ESI and EDI. */
1051 static inline void gen_cmps(DisasContext *s, int ot)
1053 gen_string_movl_A0_ESI(s);
1054 gen_op_ld_T0_A0[ot + s->mem_index]();
1055 gen_string_movl_A0_EDI(s);
1056 gen_op_ld_T1_A0[ot + s->mem_index]();
1057 gen_op_cmpl_T0_T1_cc();
1058 gen_op_movl_T0_Dshift[ot]();
1059 #ifdef TARGET_X86_64
1060 if (s->aflag == 2) {
1061 gen_op_addq_ESI_T0();
1062 gen_op_addq_EDI_T0();
1066 gen_op_addl_ESI_T0();
1067 gen_op_addl_EDI_T0();
1069 gen_op_addw_ESI_T0();
1070 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read a value from port DX into T0 and store it
   at [ES:EDI], then advance EDI.  The first store (before the port read,
   with the not-yet-read T0) pre-faults the destination page so the port
   read is not performed if the store would fault — NOTE(review): an
   intervening line is elided here; confirm against full source. */
1074 static inline void gen_ins(DisasContext *s, int ot)
1076 gen_string_movl_A0_EDI(s);
1078 gen_op_st_T0_A0[ot + s->mem_index]();
1079 gen_op_in_DX_T0[ot]();
1080 gen_op_st_T0_A0[ot + s->mem_index]();
1081 gen_op_movl_T0_Dshift[ot]();
1082 #ifdef TARGET_X86_64
1083 if (s->aflag == 2) {
1084 gen_op_addq_EDI_T0();
1088 gen_op_addl_EDI_T0();
1090 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load from [seg:ESI] into T0, write it to port
   DX, then advance ESI. */
1094 static inline void gen_outs(DisasContext *s, int ot)
1096 gen_string_movl_A0_ESI(s);
1097 gen_op_ld_T0_A0[ot + s->mem_index]();
1098 gen_op_out_DX_T0[ot]();
1099 gen_op_movl_T0_Dshift[ot]();
1100 #ifdef TARGET_X86_64
1101 if (s->aflag == 2) {
1102 gen_op_addq_ESI_T0();
1106 gen_op_addl_ESI_T0();
1108 gen_op_addw_ESI_T0();
/* Generators for rep-prefixed string instruction bodies.  Rather than a
   host-side loop, the emitted code jumps back to the current instruction
   (gen_jmp(s, cur_eip)) for each iteration and exits to next_eip when
   (E/R)CX reaches zero — so single-stepping sees one iteration per step.
   GEN_REPZ handles plain REP (movs/stos/lods/ins/outs); GEN_REPZ2 also
   tests ZF after the operation for REPZ/REPNZ (cmps/scas), with 'nz'
   selecting the termination sense. */
1112 /* same method as Valgrind : we generate jumps to current or next
1114 #define GEN_REPZ(op) \
1115 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1116 target_ulong cur_eip, target_ulong next_eip) \
1119 gen_update_cc_op(s); \
1120 l2 = gen_jz_ecx_string(s, next_eip); \
1121 gen_ ## op(s, ot); \
1122 gen_op_dec_ECX[s->aflag](); \
1123 /* a loop would cause two single step exceptions if ECX = 1 \
1124 before rep string_insn */ \
1126 gen_op_jz_ecx[s->aflag](l2); \
1127 gen_jmp(s, cur_eip); \
1130 #define GEN_REPZ2(op) \
1131 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1132 target_ulong cur_eip, \
1133 target_ulong next_eip, \
1137 gen_update_cc_op(s); \
1138 l2 = gen_jz_ecx_string(s, next_eip); \
1139 gen_ ## op(s, ot); \
1140 gen_op_dec_ECX[s->aflag](); \
/* flags from the string compare decide whether to stop */ \
1141 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1142 gen_op_string_jnz_sub[nz][ot](l2);\
1144 gen_op_jz_ecx[s->aflag](l2); \
1145 gen_jmp(s, cur_eip); \
/* Optimized conditional-jump ops for the cmp/jcc fusion, indexed
   [size][jcc_op].  64-bit compare-based jumps marked BUGGY_64 are
   disabled (NULL) — see the BUGGY_64 comment near the top of the file. */
1165 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1196 #ifdef TARGET_X86_64
1199 BUGGY_64(gen_op_jb_subq),
1201 BUGGY_64(gen_op_jbe_subq),
1204 BUGGY_64(gen_op_jl_subq),
1205 BUGGY_64(gen_op_jle_subq),
/* loop/loopz/loopnz/jcxz helpers, indexed [aflag][variant] */
1209 static GenOpFunc1 *gen_op_loop[3][4] = {
1220 #ifdef TARGET_X86_64
/* generic (slow) setcc ops computing the condition from full flags */
1229 static GenOpFunc *gen_setcc_slow[8] = {
/* optimized setcc for the cmp/setcc fusion, indexed [size][jcc_op] */
1240 static GenOpFunc *gen_setcc_sub[4][8] = {
1243 gen_op_setb_T0_subb,
1244 gen_op_setz_T0_subb,
1245 gen_op_setbe_T0_subb,
1246 gen_op_sets_T0_subb,
1248 gen_op_setl_T0_subb,
1249 gen_op_setle_T0_subb,
1253 gen_op_setb_T0_subw,
1254 gen_op_setz_T0_subw,
1255 gen_op_setbe_T0_subw,
1256 gen_op_sets_T0_subw,
1258 gen_op_setl_T0_subw,
1259 gen_op_setle_T0_subw,
1263 gen_op_setb_T0_subl,
1264 gen_op_setz_T0_subl,
1265 gen_op_setbe_T0_subl,
1266 gen_op_sets_T0_subl,
1268 gen_op_setl_T0_subl,
1269 gen_op_setle_T0_subl,
1271 #ifdef TARGET_X86_64
1274 gen_op_setb_T0_subq,
1275 gen_op_setz_T0_subq,
1276 gen_op_setbe_T0_subq,
1277 gen_op_sets_T0_subq,
1279 gen_op_setl_T0_subq,
1280 gen_op_setle_T0_subq,
/* x87 arithmetic with ST0 and FT0, indexed by the 3-bit op field.
   Slots 2 and 3 are both fcom (fcom/fcomp share the operation). */
1285 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1286 gen_op_fadd_ST0_FT0,
1287 gen_op_fmul_ST0_FT0,
1288 gen_op_fcom_ST0_FT0,
1289 gen_op_fcom_ST0_FT0,
1290 gen_op_fsub_ST0_FT0,
1291 gen_op_fsubr_ST0_FT0,
1292 gen_op_fdiv_ST0_FT0,
1293 gen_op_fdivr_ST0_FT0,
1296 /* NOTE the exception in "r" op ordering */
1297 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1298 gen_op_fadd_STN_ST0,
1299 gen_op_fmul_STN_ST0,
/* sub/subr and div/divr are swapped relative to the ST0_FT0 table */
1302 gen_op_fsubr_STN_ST0,
1303 gen_op_fsub_STN_ST0,
1304 gen_op_fdivr_STN_ST0,
1305 gen_op_fdiv_STN_ST0,
/* Emit one two-operand ALU operation (op in the OP_* enum) of size 'ot'
   on destination 'd' (register index, or OR_TMP0 for a memory operand
   whose address is already in A0) with source in T1.  Tracks the lazy
   condition-code state in s1->cc_op; adc/sbb need the incoming carry so
   they flush flags first and leave cc_op dynamic. */
1309 /* if d == OR_TMP0, it means memory operand (address in A0) */
1310 static void gen_op(DisasContext *s1, int op, int ot, int d)
1312 GenOpFunc *gen_update_cc;
/* fetch the destination operand into T0 */
1315 gen_op_mov_TN_reg[ot][0][d]();
1317 gen_op_ld_T0_A0[ot + s1->mem_index]();
/* adc/sbb: consume current flags, use combined op+writeback for memory */
1322 if (s1->cc_op != CC_OP_DYNAMIC)
1323 gen_op_set_cc_op(s1->cc_op);
1325 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1326 gen_op_mov_reg_T0[ot][d]();
1328 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1330 s1->cc_op = CC_OP_DYNAMIC;
/* add: flags computable later from operands */
1333 gen_op_addl_T0_T1();
1334 s1->cc_op = CC_OP_ADDB + ot;
1335 gen_update_cc = gen_op_update2_cc;
/* sub */
1338 gen_op_subl_T0_T1();
1339 s1->cc_op = CC_OP_SUBB + ot;
1340 gen_update_cc = gen_op_update2_cc;
/* logical ops (and/or/xor): dispatched through the arith table */
1346 gen_op_arith_T0_T1_cc[op]();
1347 s1->cc_op = CC_OP_LOGICB + ot;
1348 gen_update_cc = gen_op_update1_cc;
/* cmp: flags only, no writeback */
1351 gen_op_cmpl_T0_T1_cc();
1352 s1->cc_op = CC_OP_SUBB + ot;
1353 gen_update_cc = NULL;
1356 if (op != OP_CMPL) {
1358 gen_op_mov_reg_T0[ot][d]();
1360 gen_op_st_T0_A0[ot + s1->mem_index]();
1362 /* the flags update must happen after the memory write (precise
1363 exception support) */
/* Emit INC (c > 0) or DEC of size 'ot' on register 'd' or, when
   d == OR_TMP0, the memory operand addressed by A0.  The carry flag is
   preserved, so the incoming flags are flushed and CC_OP_INC/DEC lazy
   states are used. */
1369 /* if d == OR_TMP0, it means memory operand (address in A0) */
1370 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1373 gen_op_mov_TN_reg[ot][0][d]();
1375 gen_op_ld_T0_A0[ot + s1->mem_index]();
1376 if (s1->cc_op != CC_OP_DYNAMIC)
1377 gen_op_set_cc_op(s1->cc_op);
1380 s1->cc_op = CC_OP_INCB + ot;
1383 s1->cc_op = CC_OP_DECB + ot;
/* write back the result, then update flag operands (after a possible
   faulting store, for precise exceptions) */
1386 gen_op_mov_reg_T0[ot][d]();
1388 gen_op_st_T0_A0[ot + s1->mem_index]();
1389 gen_op_update_inc_cc();
/* Emit a shift/rotate of size 'ot' on destination 'd' (register or
   OR_TMP0 memory) with the count taken from register 's'.  Because a
   zero count leaves the flags untouched, the previous flag state is
   flushed first and cc_op becomes dynamic afterwards. */
1393 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1396 gen_op_mov_TN_reg[ot][0][d]();
1398 gen_op_ld_T0_A0[ot + s1->mem_index]();
1400 gen_op_mov_TN_reg[ot][1][s]();
1401 /* for zero counts, flags are not updated, so must do it dynamically */
1402 if (s1->cc_op != CC_OP_DYNAMIC)
1403 gen_op_set_cc_op(s1->cc_op);
1406 gen_op_shift_T0_T1_cc[ot][op]();
1408 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1410 gen_op_mov_reg_T0[ot][d]();
1411 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Shift by an immediate count 'c': load it into T1 (OR_TMP1) and reuse
   the variable-count path. */
1414 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1416 /* currently not optimized */
1417 gen_op_movl_T1_im(c);
1418 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the ModRM/SIB/displacement bytes that follow 'modrm' and emit
   code leaving the effective address in A0.  Handles 32/64-bit
   addressing (with SIB and RIP-relative disp32 in 64-bit mode) and the
   legacy 16-bit register-pair forms.  Segment bases are added when
   addseg is set or an override prefix is active; SS is implied for
   EBP/ESP-based forms.  Advances s->pc past the consumed bytes.
   NOTE(review): a large number of lines (mod/rm switch structure,
   register outputs) are elided in this fragment. */
1427 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1435 int mod, rm, code, override, must_add_seg;
1437 override = s->override;
1438 must_add_seg = s->addseg;
1441 mod = (modrm >> 6) & 3;
/* SIB byte: scale / index (REX.X-extended) / base */
1453 code = ldub_code(s->pc++);
1454 scale = (code >> 6) & 3;
1455 index = ((code >> 3) & 7) | REX_X(s);
/* mod == 0 with base 101: absolute disp32, or RIP-relative in 64-bit
   mode without SIB */
1462 if ((base & 7) == 5) {
1464 disp = (int32_t)ldl_code(s->pc);
1466 if (CODE64(s) && !havesib) {
1467 disp += s->pc + s->rip_offset;
/* mod == 1: disp8 */
1474 disp = (int8_t)ldub_code(s->pc++);
/* mod == 2: disp32 */
1478 disp = ldl_code(s->pc);
1484 /* for correct popl handling with esp */
1485 if (base == 4 && s->popl_esp_hack)
1486 disp += s->popl_esp_hack;
1487 #ifdef TARGET_X86_64
1488 if (s->aflag == 2) {
1489 gen_op_movq_A0_reg[base]();
1491 if ((int32_t)disp == disp)
1492 gen_op_addq_A0_im(disp);
1494 gen_op_addq_A0_im64(disp >> 32, disp);
/* 32-bit: base register plus displacement */
1499 gen_op_movl_A0_reg[base]();
1501 gen_op_addl_A0_im(disp);
/* no base register: displacement only */
1504 #ifdef TARGET_X86_64
1505 if (s->aflag == 2) {
1506 if ((int32_t)disp == disp)
1507 gen_op_movq_A0_im(disp);
1509 gen_op_movq_A0_im64(disp >> 32, disp);
1513 gen_op_movl_A0_im(disp);
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN[scale][index]();
1524 gen_op_addl_A0_reg_sN[scale][index]();
/* SS is the default segment for EBP/ESP bases */
1529 if (base == R_EBP || base == R_ESP)
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1540 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* ---- 16-bit addressing forms below ---- */
1547 disp = lduw_code(s->pc);
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1557 disp = (int8_t)ldub_code(s->pc++);
1561 disp = lduw_code(s->pc);
/* rm selects one of the classic base(+index) pairs */
1567 gen_op_movl_A0_reg[R_EBX]();
1568 gen_op_addl_A0_reg_sN[0][R_ESI]();
1571 gen_op_movl_A0_reg[R_EBX]();
1572 gen_op_addl_A0_reg_sN[0][R_EDI]();
1575 gen_op_movl_A0_reg[R_EBP]();
1576 gen_op_addl_A0_reg_sN[0][R_ESI]();
1579 gen_op_movl_A0_reg[R_EBP]();
1580 gen_op_addl_A0_reg_sN[0][R_EDI]();
1583 gen_op_movl_A0_reg[R_ESI]();
1586 gen_op_movl_A0_reg[R_EDI]();
1589 gen_op_movl_A0_reg[R_EBP]();
1593 gen_op_movl_A0_reg[R_EBX]();
1597 gen_op_addl_A0_im(disp);
1598 gen_op_andl_A0_ffff();
/* BP-based 16-bit forms default to SS */
1602 if (rm == 2 || rm == 3 || rm == 6)
1607 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Emit a ModRM-directed move of size 'ot': register<->register when
   mod == 3, otherwise compute the effective address with gen_lea_modrm
   and load from / store to memory.  'is_store' selects direction;
   'reg' is the register operand (T0 is the intermediate). */
1611 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1613 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1615 int mod, rm, opreg, disp;
1617 mod = (modrm >> 6) & 3;
1618 rm = (modrm & 7) | REX_B(s);
/* register-to-register */
1622 gen_op_mov_TN_reg[ot][0][reg]();
1623 gen_op_mov_reg_T0[ot][rm]();
1625 gen_op_mov_TN_reg[ot][0][rm]();
1627 gen_op_mov_reg_T0[ot][reg]();
/* memory operand */
1630 gen_lea_modrm(s, modrm, &opreg, &disp);
1633 gen_op_mov_TN_reg[ot][0][reg]();
1634 gen_op_st_T0_A0[ot + s->mem_index]();
1636 gen_op_ld_T0_A0[ot + s->mem_index]();
1638 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size 'ot' from the instruction stream at s->pc
   and return it (s->pc advancement lines are partially elided here). */
1643 static inline uint32_t insn_get(DisasContext *s, int ot)
1649 ret = ldub_code(s->pc);
1653 ret = lduw_code(s->pc);
1658 ret = ldl_code(s->pc);
/* map operand size to the byte length of its immediate */
1665 static inline int insn_const_size(unsigned int ot)
/* Emit a conditional jump for Jcc opcode byte 'b': taken -> val, not
   taken -> next_eip.  When the previous instruction left a compatible
   sub/cmp flag state (cc_op in the SUB/ADD families), use the fast
   per-size jcc ops; otherwise compute the condition via the slow setcc
   ops and test T0.  When direct block chaining is possible (s->jmp_opt,
   presumably — the enclosing condition is elided here) each arm ends
   with a goto_tb-style exit carrying (long)tb + 0/1; otherwise plain
   labels/jumps are used. */
1671 static inline void gen_jcc(DisasContext *s, int b,
1672 target_ulong val, target_ulong next_eip)
1674 TranslationBlock *tb;
1681 jcc_op = (b >> 1) & 7;
1685 /* we optimize the cmp/jcc case */
1690 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1693 /* some jumps are easy to compute */
/* ADD-family flag state reduced modulo 4 to pick the size row */
1735 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1738 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
/* slow path: materialize the condition into T0 */
1750 if (s->cc_op != CC_OP_DYNAMIC)
1751 gen_op_set_cc_op(s->cc_op);
1754 gen_setcc_slow[jcc_op]();
1755 func = gen_op_jnz_T0_label;
/* chained-TB exit: T0 carries the tb pointer plus the arm index */
1765 l1 = gen_new_label();
1769 gen_jmp_im(next_eip);
1770 gen_op_movl_T0_im((long)tb + 0);
1776 gen_op_movl_T0_im((long)tb + 1);
/* unchained fallback */
1782 if (s->cc_op != CC_OP_DYNAMIC) {
1783 gen_op_set_cc_op(s->cc_op);
1784 s->cc_op = CC_OP_DYNAMIC;
1786 gen_setcc_slow[jcc_op]();
1792 l1 = gen_new_label();
1793 l2 = gen_new_label();
1794 gen_op_jnz_T0_label(l1);
1795 gen_jmp_im(next_eip);
1796 gen_op_jmp_label(l2);
/* Emit SETcc for opcode byte 'b': put 0/1 in T0 according to the
   condition.  Mirrors gen_jcc's fast path (reuse the per-size setcc op
   when cc_op is in a compatible SUB/ADD family) and slow path (flush
   flags, use gen_setcc_slow). */
1804 static void gen_setcc(DisasContext *s, int b)
1810 jcc_op = (b >> 1) & 7;
1812 /* we optimize the cmp/jcc case */
1817 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1822 /* some jumps are easy to compute */
1849 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1852 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
/* slow path */
1860 if (s->cc_op != CC_OP_DYNAMIC)
1861 gen_op_set_cc_op(s->cc_op);
1862 func = gen_setcc_slow[jcc_op];
/* Load segment register 'seg_reg' from T0.  In protected mode (not
   vm86) the full descriptor load helper is used, which can fault, so
   flags and EIP are synced first; translation then stops (s->is_jmp,
   presumably — the assignment lines are elided here) when the load can
   change addseg/ss32 or, for SS, to inhibit interrupts for one insn.
   In real/vm86 mode only the selector and a flat base are set. */
1871 /* move T0 to seg_reg and compute if the CPU state may change. Never
1872 call this function with seg_reg == R_CS */
1873 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1875 if (s->pe && !s->vm86) {
1876 /* XXX: optimize by finding processor state dynamically */
1877 if (s->cc_op != CC_OP_DYNAMIC)
1878 gen_op_set_cc_op(s->cc_op)
1879 gen_jmp_im(cur_eip);
1880 gen_op_movl_seg_T0(seg_reg);
1881 /* abort translation because the addseg value may change or
1882 because ss32 may change. For R_SS, translation must always
1883 stop as a special handling must be done to disable hardware
1884 interrupts for the next instruction */
1885 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1888 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1889 if (seg_reg == R_SS)
/* Adjust (E/R)SP by 'addend', using specialized micro-ops for the common
   small constants (2/4/8) and the generic _im op otherwise.  Width
   (q/l/w) follows CODE64 / ss32. */
1896 static inline void gen_stack_update(DisasContext *s, int addend)
1898 #ifdef TARGET_X86_64
1901 gen_op_addq_ESP_8();
1903 gen_op_addq_ESP_im(addend);
1908 gen_op_addl_ESP_2();
1909 else if (addend == 4)
1910 gen_op_addl_ESP_4();
1912 gen_op_addl_ESP_im(addend);
1915 gen_op_addw_ESP_2();
1916 else if (addend == 4)
1917 gen_op_addw_ESP_4();
1919 gen_op_addw_ESP_im(addend);
/* Emit a push of T0.  64-bit mode: decrement RSP (elided line),
   store quad, write RSP back from A0.  Legacy modes: compute the new
   ESP in A0 (masked to 16 bits for a 16-bit stack), keep the unbiased
   offset in T1, add the SS base, store, then update ESP either directly
   from A0 (flat 32-bit stack) or from T1 via the sized register move. */
1921 /* generate a push. It depends on ss32, addseg and dflag */
1922 static void gen_push_T0(DisasContext *s)
1924 #ifdef TARGET_X86_64
1926 /* XXX: check 16 bit behaviour */
1927 gen_op_movq_A0_reg[R_ESP]();
1929 gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
1930 gen_op_movq_ESP_A0();
1934 gen_op_movl_A0_reg[R_ESP]();
1941 gen_op_movl_T1_A0();
1942 gen_op_addl_A0_SS();
1945 gen_op_andl_A0_ffff();
1946 gen_op_movl_T1_A0();
1947 gen_op_addl_A0_SS();
1949 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1950 if (s->ss32 && !s->addseg)
1951 gen_op_movl_ESP_A0();
1953 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
/* Same as gen_push_T0 but the value pushed is in T1 (so T1 cannot hold
   the new ESP; the legacy path updates ESP via gen_stack_update
   instead).  Used for call Ev, where T1 carries the target-adjacent
   value being pushed. */
1957 /* generate a push. It depends on ss32, addseg and dflag */
1958 /* slower version for T1, only used for call Ev */
1959 static void gen_push_T1(DisasContext *s)
1961 #ifdef TARGET_X86_64
1963 /* XXX: check 16 bit behaviour */
1964 gen_op_movq_A0_reg[R_ESP]();
1966 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
1967 gen_op_movq_ESP_A0();
1971 gen_op_movl_A0_reg[R_ESP]();
1978 gen_op_addl_A0_SS();
1981 gen_op_andl_A0_ffff();
1982 gen_op_addl_A0_SS();
1984 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
1986 if (s->ss32 && !s->addseg)
1987 gen_op_movl_ESP_A0();
/* (-2) << dflag = -2 or -4: operand-size-dependent decrement */
1989 gen_stack_update(s, (-2) << s->dflag);
/* Load the value at the top of the stack into T0 WITHOUT adjusting ESP;
   the caller invokes gen_pop_update afterwards.  The two-step split
   keeps ESP correct if the load faults (precise exceptions). */
1993 /* two step pop is necessary for precise exceptions */
1994 static void gen_pop_T0(DisasContext *s)
1996 #ifdef TARGET_X86_64
1998 /* XXX: check 16 bit behaviour */
1999 gen_op_movq_A0_reg[R_ESP]();
2000 gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
2004 gen_op_movl_A0_reg[R_ESP]();
2007 gen_op_addl_A0_SS();
2009 gen_op_andl_A0_ffff();
2010 gen_op_addl_A0_SS();
2012 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
/* Second half of a pop: bump ESP by the operand size (8 in 64-bit mode,
   else 2 << dflag). */
2018 static void gen_pop_update(DisasContext *s)
2020 #ifdef TARGET_X86_64
2022 gen_stack_update(s, 8);
2026 gen_stack_update(s, 2 << s->dflag);
/* Point A0 at the current stack top (SS base applied, 16-bit mask when
   the stack is 16-bit); T1 keeps the raw offset. */
2030 static void gen_stack_A0(DisasContext *s)
2032 gen_op_movl_A0_ESP();
2034 gen_op_andl_A0_ffff();
2035 gen_op_movl_T1_A0();
2037 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* PUSHA: store the 8 general registers (EDI first, EAX last — the loop
   walks 7-i) below the current stack top, then set ESP from T1 (the new
   offset saved before the SS base was added). */
2038 /* NOTE: wrap around in 16 bit not fully handled */
2039 static void gen_pusha(DisasContext *s)
2042 gen_op_movl_A0_ESP();
2043 gen_op_addl_A0_im(-16 << s->dflag);
2045 gen_op_andl_A0_ffff();
2046 gen_op_movl_T1_A0();
2048 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2049 for(i = 0;i < 8; i++) {
2050 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2051 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2052 gen_op_addl_A0_im(2 << s->dflag);
2054 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
/* POPA: reload the 8 general registers from the stack (EDI first,
   matching PUSHA's layout).  The stored ESP value is skipped — ESP is
   instead set from T1, the pre-computed offset plus 16<<dflag. */
2057 /* NOTE: wrap around in 16 bit not fully handled */
2058 static void gen_popa(DisasContext *s)
2061 gen_op_movl_A0_ESP();
2063 gen_op_andl_A0_ffff();
2064 gen_op_movl_T1_A0();
2065 gen_op_addl_T1_im(16 << s->dflag);
2067 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2068 for(i = 0;i < 8; i++) {
2069 /* ESP is not reloaded */
2071 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2072 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2074 gen_op_addl_A0_im(2 << s->dflag);
2076 gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
/* ENTER: push EBP, optionally copy 'level' nesting frame pointers (done
   by the gen_op_enter_level helper), set EBP to the new frame base (T1)
   and reserve esp_addend bytes of locals on top. */
2079 static void gen_enter(DisasContext *s, int esp_addend, int level)
2083 ot = s->dflag + OT_WORD;
2085 opsize = 2 << s->dflag;
2087 gen_op_movl_A0_ESP();
2088 gen_op_addl_A0_im(-opsize);
2090 gen_op_andl_A0_ffff();
2091 gen_op_movl_T1_A0();
2093 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
/* push old EBP */
2095 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2096 gen_op_st_T0_A0[ot + s->mem_index]();
2098 gen_op_enter_level(level, s->dflag);
2100 gen_op_mov_reg_T1[ot][R_EBP]();
2101 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2102 gen_op_mov_reg_T1[ot][R_ESP]();
2105 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2107 if (s->cc_op != CC_OP_DYNAMIC)
2108 gen_op_set_cc_op(s->cc_op);
2109 gen_jmp_im(cur_eip);
2110 gen_op_raise_exception(trapno);
2114 /* an interrupt is different from an exception because of the
2115 priviledge checks */
2116 static void gen_interrupt(DisasContext *s, int intno,
2117 target_ulong cur_eip, target_ulong next_eip)
2119 if (s->cc_op != CC_OP_DYNAMIC)
2120 gen_op_set_cc_op(s->cc_op);
2121 gen_jmp_im(cur_eip);
2122 gen_op_raise_interrupt(intno, next_eip);
2126 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2128 if (s->cc_op != CC_OP_DYNAMIC)
2129 gen_op_set_cc_op(s->cc_op);
2130 gen_jmp_im(cur_eip);
2135 /* generate a generic end of block. Trace exception is also generated
2137 static void gen_eob(DisasContext *s)
2139 if (s->cc_op != CC_OP_DYNAMIC)
2140 gen_op_set_cc_op(s->cc_op);
2141 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2142 gen_op_reset_inhibit_irq();
2144 if (s->singlestep_enabled) {
2147 gen_op_raise_exception(EXCP01_SSTP);
2155 /* generate a jump to eip. No segment change must happen before as a
2156 direct call to the next block may occur */
2157 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2159 TranslationBlock *tb = s->tb;
2162 if (s->cc_op != CC_OP_DYNAMIC)
2163 gen_op_set_cc_op(s->cc_op);
2169 gen_op_movl_T0_im((long)tb + tb_num);
2178 static void gen_jmp(DisasContext *s, target_ulong eip)
2180 gen_jmp_tb(s, eip, 0);
2183 static void gen_movtl_T0_im(target_ulong val)
2185 #ifdef TARGET_X86_64
2186 if ((int32_t)val == val) {
2187 gen_op_movl_T0_im(val);
2189 gen_op_movq_T0_im64(val >> 32, val);
2192 gen_op_movl_T0_im(val);
2196 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2197 gen_op_ldo_raw_env_A0,
2198 #ifndef CONFIG_USER_ONLY
2199 gen_op_ldo_kernel_env_A0,
2200 gen_op_ldo_user_env_A0,
2204 static GenOpFunc1 *gen_sto_env_A0[3] = {
2205 gen_op_sto_raw_env_A0,
2206 #ifndef CONFIG_USER_ONLY
2207 gen_op_sto_kernel_env_A0,
2208 gen_op_sto_user_env_A0,
2212 /* convert one instruction. s->is_jmp is set if the translation must
2213 be stopped. Return the next pc value */
2214 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2216 int b, prefixes, aflag, dflag;
2218 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2219 target_ulong next_eip, tval;
2229 #ifdef TARGET_X86_64
2234 s->rip_offset = 0; /* for relative ip address */
2236 b = ldub_code(s->pc);
2238 /* check prefixes */
2239 #ifdef TARGET_X86_64
2243 prefixes |= PREFIX_REPZ;
2246 prefixes |= PREFIX_REPNZ;
2249 prefixes |= PREFIX_LOCK;
2270 prefixes |= PREFIX_DATA;
2273 prefixes |= PREFIX_ADR;
2277 rex_w = (b >> 3) & 1;
2278 rex_r = (b & 0x4) << 1;
2279 s->rex_x = (b & 0x2) << 2;
2280 REX_B(s) = (b & 0x1) << 3;
2281 x86_64_hregs = 1; /* select uniform byte register addressing */
2285 /* 0x66 is ignored if rex.w is set */
2288 if (prefixes & PREFIX_DATA)
2291 if (!(prefixes & PREFIX_ADR))
2298 prefixes |= PREFIX_REPZ;
2301 prefixes |= PREFIX_REPNZ;
2304 prefixes |= PREFIX_LOCK;
2325 prefixes |= PREFIX_DATA;
2328 prefixes |= PREFIX_ADR;
2331 if (prefixes & PREFIX_DATA)
2333 if (prefixes & PREFIX_ADR)
2337 s->prefix = prefixes;
2341 /* lock generation */
2342 if (prefixes & PREFIX_LOCK)
2345 /* now check op code */
2349 /**************************/
2350 /* extended op code */
2351 b = ldub_code(s->pc++) | 0x100;
2354 /**************************/
2372 ot = dflag + OT_WORD;
2375 case 0: /* OP Ev, Gv */
2376 modrm = ldub_code(s->pc++);
2377 reg = ((modrm >> 3) & 7) | rex_r;
2378 mod = (modrm >> 6) & 3;
2379 rm = (modrm & 7) | REX_B(s);
2381 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2383 } else if (op == OP_XORL && rm == reg) {
2385 /* xor reg, reg optimisation */
2387 s->cc_op = CC_OP_LOGICB + ot;
2388 gen_op_mov_reg_T0[ot][reg]();
2389 gen_op_update1_cc();
2394 gen_op_mov_TN_reg[ot][1][reg]();
2395 gen_op(s, op, ot, opreg);
2397 case 1: /* OP Gv, Ev */
2398 modrm = ldub_code(s->pc++);
2399 mod = (modrm >> 6) & 3;
2400 reg = ((modrm >> 3) & 7) | rex_r;
2401 rm = (modrm & 7) | REX_B(s);
2403 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2404 gen_op_ld_T1_A0[ot + s->mem_index]();
2405 } else if (op == OP_XORL && rm == reg) {
2408 gen_op_mov_TN_reg[ot][1][rm]();
2410 gen_op(s, op, ot, reg);
2412 case 2: /* OP A, Iv */
2413 val = insn_get(s, ot);
2414 gen_op_movl_T1_im(val);
2415 gen_op(s, op, ot, OR_EAX);
2421 case 0x80: /* GRP1 */
2431 ot = dflag + OT_WORD;
2433 modrm = ldub_code(s->pc++);
2434 mod = (modrm >> 6) & 3;
2435 rm = (modrm & 7) | REX_B(s);
2436 op = (modrm >> 3) & 7;
2442 s->rip_offset = insn_const_size(ot);
2443 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2454 val = insn_get(s, ot);
2457 val = (int8_t)insn_get(s, OT_BYTE);
2460 gen_op_movl_T1_im(val);
2461 gen_op(s, op, ot, opreg);
2465 /**************************/
2466 /* inc, dec, and other misc arith */
2467 case 0x40 ... 0x47: /* inc Gv */
2468 ot = dflag ? OT_LONG : OT_WORD;
2469 gen_inc(s, ot, OR_EAX + (b & 7), 1);
2471 case 0x48 ... 0x4f: /* dec Gv */
2472 ot = dflag ? OT_LONG : OT_WORD;
2473 gen_inc(s, ot, OR_EAX + (b & 7), -1);
2475 case 0xf6: /* GRP3 */
2480 ot = dflag + OT_WORD;
2482 modrm = ldub_code(s->pc++);
2483 mod = (modrm >> 6) & 3;
2484 rm = (modrm & 7) | REX_B(s);
2485 op = (modrm >> 3) & 7;
2488 s->rip_offset = insn_const_size(ot);
2489 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2490 gen_op_ld_T0_A0[ot + s->mem_index]();
2492 gen_op_mov_TN_reg[ot][0][rm]();
2497 val = insn_get(s, ot);
2498 gen_op_movl_T1_im(val);
2499 gen_op_testl_T0_T1_cc();
2500 s->cc_op = CC_OP_LOGICB + ot;
2505 gen_op_st_T0_A0[ot + s->mem_index]();
2507 gen_op_mov_reg_T0[ot][rm]();
2513 gen_op_st_T0_A0[ot + s->mem_index]();
2515 gen_op_mov_reg_T0[ot][rm]();
2517 gen_op_update_neg_cc();
2518 s->cc_op = CC_OP_SUBB + ot;
2523 gen_op_mulb_AL_T0();
2524 s->cc_op = CC_OP_MULB;
2527 gen_op_mulw_AX_T0();
2528 s->cc_op = CC_OP_MULW;
2532 gen_op_mull_EAX_T0();
2533 s->cc_op = CC_OP_MULL;
2535 #ifdef TARGET_X86_64
2537 gen_op_mulq_EAX_T0();
2538 s->cc_op = CC_OP_MULQ;
2546 gen_op_imulb_AL_T0();
2547 s->cc_op = CC_OP_MULB;
2550 gen_op_imulw_AX_T0();
2551 s->cc_op = CC_OP_MULW;
2555 gen_op_imull_EAX_T0();
2556 s->cc_op = CC_OP_MULL;
2558 #ifdef TARGET_X86_64
2560 gen_op_imulq_EAX_T0();
2561 s->cc_op = CC_OP_MULQ;
2569 gen_jmp_im(pc_start - s->cs_base);
2570 gen_op_divb_AL_T0();
2573 gen_jmp_im(pc_start - s->cs_base);
2574 gen_op_divw_AX_T0();
2578 gen_jmp_im(pc_start - s->cs_base);
2579 gen_op_divl_EAX_T0();
2581 #ifdef TARGET_X86_64
2583 gen_jmp_im(pc_start - s->cs_base);
2584 gen_op_divq_EAX_T0();
2592 gen_jmp_im(pc_start - s->cs_base);
2593 gen_op_idivb_AL_T0();
2596 gen_jmp_im(pc_start - s->cs_base);
2597 gen_op_idivw_AX_T0();
2601 gen_jmp_im(pc_start - s->cs_base);
2602 gen_op_idivl_EAX_T0();
2604 #ifdef TARGET_X86_64
2606 gen_jmp_im(pc_start - s->cs_base);
2607 gen_op_idivq_EAX_T0();
2617 case 0xfe: /* GRP4 */
2618 case 0xff: /* GRP5 */
2622 ot = dflag + OT_WORD;
2624 modrm = ldub_code(s->pc++);
2625 mod = (modrm >> 6) & 3;
2626 rm = (modrm & 7) | REX_B(s);
2627 op = (modrm >> 3) & 7;
2628 if (op >= 2 && b == 0xfe) {
2632 if (op >= 2 && op <= 5) {
2633 /* operand size for jumps is 64 bit */
2635 } else if (op == 6) {
2636 /* default push size is 64 bit */
2637 ot = dflag ? OT_QUAD : OT_WORD;
2641 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2642 if (op >= 2 && op != 3 && op != 5)
2643 gen_op_ld_T0_A0[ot + s->mem_index]();
2645 gen_op_mov_TN_reg[ot][0][rm]();
2649 case 0: /* inc Ev */
2654 gen_inc(s, ot, opreg, 1);
2656 case 1: /* dec Ev */
2661 gen_inc(s, ot, opreg, -1);
2663 case 2: /* call Ev */
2664 /* XXX: optimize if memory (no 'and' is necessary) */
2666 gen_op_andl_T0_ffff();
2667 next_eip = s->pc - s->cs_base;
2668 gen_op_movl_T1_im(next_eip);
2673 case 3: /* lcall Ev */
2674 gen_op_ld_T1_A0[ot + s->mem_index]();
2675 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2676 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2678 if (s->pe && !s->vm86) {
2679 if (s->cc_op != CC_OP_DYNAMIC)
2680 gen_op_set_cc_op(s->cc_op);
2681 gen_jmp_im(pc_start - s->cs_base);
2682 gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
2684 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
2688 case 4: /* jmp Ev */
2690 gen_op_andl_T0_ffff();
2694 case 5: /* ljmp Ev */
2695 gen_op_ld_T1_A0[ot + s->mem_index]();
2696 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2697 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2699 if (s->pe && !s->vm86) {
2700 if (s->cc_op != CC_OP_DYNAMIC)
2701 gen_op_set_cc_op(s->cc_op);
2702 gen_jmp_im(pc_start - s->cs_base);
2703 gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
2705 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
2706 gen_op_movl_T0_T1();
2711 case 6: /* push Ev */
2719 case 0x84: /* test Ev, Gv */
2724 ot = dflag + OT_WORD;
2726 modrm = ldub_code(s->pc++);
2727 mod = (modrm >> 6) & 3;
2728 rm = (modrm & 7) | REX_B(s);
2729 reg = ((modrm >> 3) & 7) | rex_r;
2731 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2732 gen_op_mov_TN_reg[ot][1][reg]();
2733 gen_op_testl_T0_T1_cc();
2734 s->cc_op = CC_OP_LOGICB + ot;
2737 case 0xa8: /* test eAX, Iv */
2742 ot = dflag + OT_WORD;
2743 val = insn_get(s, ot);
2745 gen_op_mov_TN_reg[ot][0][OR_EAX]();
2746 gen_op_movl_T1_im(val);
2747 gen_op_testl_T0_T1_cc();
2748 s->cc_op = CC_OP_LOGICB + ot;
2751 case 0x98: /* CWDE/CBW */
2752 #ifdef TARGET_X86_64
2754 gen_op_movslq_RAX_EAX();
2758 gen_op_movswl_EAX_AX();
2760 gen_op_movsbw_AX_AL();
2762 case 0x99: /* CDQ/CWD */
2763 #ifdef TARGET_X86_64
2765 gen_op_movsqo_RDX_RAX();
2769 gen_op_movslq_EDX_EAX();
2771 gen_op_movswl_DX_AX();
2773 case 0x1af: /* imul Gv, Ev */
2774 case 0x69: /* imul Gv, Ev, I */
2776 ot = dflag + OT_WORD;
2777 modrm = ldub_code(s->pc++);
2778 reg = ((modrm >> 3) & 7) | rex_r;
2780 s->rip_offset = insn_const_size(ot);
2783 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2785 val = insn_get(s, ot);
2786 gen_op_movl_T1_im(val);
2787 } else if (b == 0x6b) {
2788 val = (int8_t)insn_get(s, OT_BYTE);
2789 gen_op_movl_T1_im(val);
2791 gen_op_mov_TN_reg[ot][1][reg]();
2794 #ifdef TARGET_X86_64
2795 if (ot == OT_QUAD) {
2796 gen_op_imulq_T0_T1();
2799 if (ot == OT_LONG) {
2800 gen_op_imull_T0_T1();
2802 gen_op_imulw_T0_T1();
2804 gen_op_mov_reg_T0[ot][reg]();
2805 s->cc_op = CC_OP_MULB + ot;
2808 case 0x1c1: /* xadd Ev, Gv */
2812 ot = dflag + OT_WORD;
2813 modrm = ldub_code(s->pc++);
2814 reg = ((modrm >> 3) & 7) | rex_r;
2815 mod = (modrm >> 6) & 3;
2817 rm = (modrm & 7) | REX_B(s);
2818 gen_op_mov_TN_reg[ot][0][reg]();
2819 gen_op_mov_TN_reg[ot][1][rm]();
2820 gen_op_addl_T0_T1();
2821 gen_op_mov_reg_T1[ot][reg]();
2822 gen_op_mov_reg_T0[ot][rm]();
2824 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2825 gen_op_mov_TN_reg[ot][0][reg]();
2826 gen_op_ld_T1_A0[ot + s->mem_index]();
2827 gen_op_addl_T0_T1();
2828 gen_op_st_T0_A0[ot + s->mem_index]();
2829 gen_op_mov_reg_T1[ot][reg]();
2831 gen_op_update2_cc();
2832 s->cc_op = CC_OP_ADDB + ot;
2835 case 0x1b1: /* cmpxchg Ev, Gv */
2839 ot = dflag + OT_WORD;
2840 modrm = ldub_code(s->pc++);
2841 reg = ((modrm >> 3) & 7) | rex_r;
2842 mod = (modrm >> 6) & 3;
2843 gen_op_mov_TN_reg[ot][1][reg]();
2845 rm = (modrm & 7) | REX_B(s);
2846 gen_op_mov_TN_reg[ot][0][rm]();
2847 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
2848 gen_op_mov_reg_T0[ot][rm]();
2850 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2851 gen_op_ld_T0_A0[ot + s->mem_index]();
2852 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
2854 s->cc_op = CC_OP_SUBB + ot;
2856 case 0x1c7: /* cmpxchg8b */
2857 modrm = ldub_code(s->pc++);
2858 mod = (modrm >> 6) & 3;
2861 if (s->cc_op != CC_OP_DYNAMIC)
2862 gen_op_set_cc_op(s->cc_op);
2863 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2865 s->cc_op = CC_OP_EFLAGS;
2868 /**************************/
2870 case 0x50 ... 0x57: /* push */
2871 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
2874 case 0x58 ... 0x5f: /* pop */
2876 ot = dflag ? OT_QUAD : OT_WORD;
2878 ot = dflag + OT_WORD;
2881 /* NOTE: order is important for pop %sp */
2883 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
2885 case 0x60: /* pusha */
2890 case 0x61: /* popa */
2895 case 0x68: /* push Iv */
2898 ot = dflag ? OT_QUAD : OT_WORD;
2900 ot = dflag + OT_WORD;
2903 val = insn_get(s, ot);
2905 val = (int8_t)insn_get(s, OT_BYTE);
2906 gen_op_movl_T0_im(val);
2909 case 0x8f: /* pop Ev */
2911 ot = dflag ? OT_QUAD : OT_WORD;
2913 ot = dflag + OT_WORD;
2915 modrm = ldub_code(s->pc++);
2916 mod = (modrm >> 6) & 3;
2919 /* NOTE: order is important for pop %sp */
2921 rm = (modrm & 7) | REX_B(s);
2922 gen_op_mov_reg_T0[ot][rm]();
2924 /* NOTE: order is important too for MMU exceptions */
2925 s->popl_esp_hack = 1 << ot;
2926 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2927 s->popl_esp_hack = 0;
2931 case 0xc8: /* enter */
2933 /* XXX: long mode support */
2935 val = lduw_code(s->pc);
2937 level = ldub_code(s->pc++);
2938 gen_enter(s, val, level);
2941 case 0xc9: /* leave */
2942 /* XXX: exception not precise (ESP is updated before potential exception) */
2943 /* XXX: may be invalid for 16 bit in long mode */
2945 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
2946 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
2947 } else if (s->ss32) {
2948 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2949 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
2951 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
2952 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
2956 ot = dflag ? OT_QUAD : OT_WORD;
2958 ot = dflag + OT_WORD;
2960 gen_op_mov_reg_T0[ot][R_EBP]();
2963 case 0x06: /* push es */
2964 case 0x0e: /* push cs */
2965 case 0x16: /* push ss */
2966 case 0x1e: /* push ds */
2969 gen_op_movl_T0_seg(b >> 3);
2972 case 0x1a0: /* push fs */
2973 case 0x1a8: /* push gs */
2974 gen_op_movl_T0_seg((b >> 3) & 7);
2977 case 0x07: /* pop es */
2978 case 0x17: /* pop ss */
2979 case 0x1f: /* pop ds */
2984 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2987 /* if reg == SS, inhibit interrupts/trace. */
2988 /* If several instructions disable interrupts, only the
2990 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
2991 gen_op_set_inhibit_irq();
2995 gen_jmp_im(s->pc - s->cs_base);
2999 case 0x1a1: /* pop fs */
3000 case 0x1a9: /* pop gs */
3002 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3005 gen_jmp_im(s->pc - s->cs_base);
3010 /**************************/
3013 case 0x89: /* mov Gv, Ev */
3017 ot = dflag + OT_WORD;
3018 modrm = ldub_code(s->pc++);
3019 reg = ((modrm >> 3) & 7) | rex_r;
3021 /* generate a generic store */
3022 gen_ldst_modrm(s, modrm, ot, reg, 1);
3025 case 0xc7: /* mov Ev, Iv */
3029 ot = dflag + OT_WORD;
3030 modrm = ldub_code(s->pc++);
3031 mod = (modrm >> 6) & 3;
3033 s->rip_offset = insn_const_size(ot);
3034 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3036 val = insn_get(s, ot);
3037 gen_op_movl_T0_im(val);
3039 gen_op_st_T0_A0[ot + s->mem_index]();
3041 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3044 case 0x8b: /* mov Ev, Gv */
3048 ot = OT_WORD + dflag;
3049 modrm = ldub_code(s->pc++);
3050 reg = ((modrm >> 3) & 7) | rex_r;
3052 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3053 gen_op_mov_reg_T0[ot][reg]();
3055 case 0x8e: /* mov seg, Gv */
3056 modrm = ldub_code(s->pc++);
3057 reg = (modrm >> 3) & 7;
3058 if (reg >= 6 || reg == R_CS)
3060 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3061 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3063 /* if reg == SS, inhibit interrupts/trace */
3064 /* If several instructions disable interrupts, only the
3066 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3067 gen_op_set_inhibit_irq();
3071 gen_jmp_im(s->pc - s->cs_base);
3075 case 0x8c: /* mov Gv, seg */
3076 modrm = ldub_code(s->pc++);
3077 reg = (modrm >> 3) & 7;
3078 mod = (modrm >> 6) & 3;
3081 gen_op_movl_T0_seg(reg);
3083 ot = OT_WORD + dflag;
3086 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3089 case 0x1b6: /* movzbS Gv, Eb */
3090 case 0x1b7: /* movzwS Gv, Eb */
3091 case 0x1be: /* movsbS Gv, Eb */
3092 case 0x1bf: /* movswS Gv, Eb */
3095 /* d_ot is the size of destination */
3096 d_ot = dflag + OT_WORD;
3097 /* ot is the size of source */
3098 ot = (b & 1) + OT_BYTE;
3099 modrm = ldub_code(s->pc++);
3100 reg = ((modrm >> 3) & 7) | rex_r;
3101 mod = (modrm >> 6) & 3;
3102 rm = (modrm & 7) | REX_B(s);
3105 gen_op_mov_TN_reg[ot][0][rm]();
3106 switch(ot | (b & 8)) {
3108 gen_op_movzbl_T0_T0();
3111 gen_op_movsbl_T0_T0();
3114 gen_op_movzwl_T0_T0();
3118 gen_op_movswl_T0_T0();
3121 gen_op_mov_reg_T0[d_ot][reg]();
3123 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3125 gen_op_lds_T0_A0[ot + s->mem_index]();
3127 gen_op_ldu_T0_A0[ot + s->mem_index]();
3129 gen_op_mov_reg_T0[d_ot][reg]();
3134 case 0x8d: /* lea */
3135 ot = dflag + OT_WORD;
3136 modrm = ldub_code(s->pc++);
3137 mod = (modrm >> 6) & 3;
3140 reg = ((modrm >> 3) & 7) | rex_r;
3141 /* we must ensure that no segment is added */
3145 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3147 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3150 case 0xa0: /* mov EAX, Ov */
3152 case 0xa2: /* mov Ov, EAX */
3155 target_ulong offset_addr;
3160 ot = dflag + OT_WORD;
3161 #ifdef TARGET_X86_64
3163 offset_addr = ldq_code(s->pc);
3165 if (offset_addr == (int32_t)offset_addr)
3166 gen_op_movq_A0_im(offset_addr);
3168 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3173 offset_addr = insn_get(s, OT_LONG);
3175 offset_addr = insn_get(s, OT_WORD);
3177 gen_op_movl_A0_im(offset_addr);
3179 /* handle override */
3181 int override, must_add_seg;
3182 must_add_seg = s->addseg;
3183 if (s->override >= 0) {
3184 override = s->override;
3190 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
3194 gen_op_ld_T0_A0[ot + s->mem_index]();
3195 gen_op_mov_reg_T0[ot][R_EAX]();
3197 gen_op_mov_TN_reg[ot][0][R_EAX]();
3198 gen_op_st_T0_A0[ot + s->mem_index]();
3202 case 0xd7: /* xlat */
3203 #ifdef TARGET_X86_64
3205 gen_op_movq_A0_reg[R_EBX]();
3206 gen_op_addq_A0_AL();
3210 gen_op_movl_A0_reg[R_EBX]();
3211 gen_op_addl_A0_AL();
3213 gen_op_andl_A0_ffff();
3215 /* handle override */
3217 int override, must_add_seg;
3218 must_add_seg = s->addseg;
3220 if (s->override >= 0) {
3221 override = s->override;
3227 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
3230 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3231 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3233 case 0xb0 ... 0xb7: /* mov R, Ib */
3234 val = insn_get(s, OT_BYTE);
3235 gen_op_movl_T0_im(val);
3236 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3238 case 0xb8 ... 0xbf: /* mov R, Iv */
3239 #ifdef TARGET_X86_64
3243 tmp = ldq_code(s->pc);
3245 reg = (b & 7) | REX_B(s);
3246 gen_movtl_T0_im(tmp);
3247 gen_op_mov_reg_T0[OT_QUAD][reg]();
3251 ot = dflag ? OT_LONG : OT_WORD;
3252 val = insn_get(s, ot);
3253 reg = (b & 7) | REX_B(s);
3254 gen_op_movl_T0_im(val);
3255 gen_op_mov_reg_T0[ot][reg]();
3259 case 0x91 ... 0x97: /* xchg R, EAX */
3260 ot = dflag + OT_WORD;
3261 reg = (b & 7) | REX_B(s);
3265 case 0x87: /* xchg Ev, Gv */
3269 ot = dflag + OT_WORD;
3270 modrm = ldub_code(s->pc++);
3271 reg = ((modrm >> 3) & 7) | rex_r;
3272 mod = (modrm >> 6) & 3;
3274 rm = (modrm & 7) | REX_B(s);
3276 gen_op_mov_TN_reg[ot][0][reg]();
3277 gen_op_mov_TN_reg[ot][1][rm]();
3278 gen_op_mov_reg_T0[ot][rm]();
3279 gen_op_mov_reg_T1[ot][reg]();
3281 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3282 gen_op_mov_TN_reg[ot][0][reg]();
3283 /* for xchg, lock is implicit */
3284 if (!(prefixes & PREFIX_LOCK))
3286 gen_op_ld_T1_A0[ot + s->mem_index]();
3287 gen_op_st_T0_A0[ot + s->mem_index]();
3288 if (!(prefixes & PREFIX_LOCK))
3290 gen_op_mov_reg_T1[ot][reg]();
3293 case 0xc4: /* les Gv */
3298 case 0xc5: /* lds Gv */
3303 case 0x1b2: /* lss Gv */
3306 case 0x1b4: /* lfs Gv */
3309 case 0x1b5: /* lgs Gv */
3312 ot = dflag ? OT_LONG : OT_WORD;
3313 modrm = ldub_code(s->pc++);
3314 reg = ((modrm >> 3) & 7) | rex_r;
3315 mod = (modrm >> 6) & 3;
3318 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3319 gen_op_ld_T1_A0[ot + s->mem_index]();
3320 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3321 /* load the segment first to handle exceptions properly */
3322 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3323 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
3324 /* then put the data */
3325 gen_op_mov_reg_T1[ot][reg]();
3327 gen_jmp_im(s->pc - s->cs_base);
3332 /************************/
3343 ot = dflag + OT_WORD;
3345 modrm = ldub_code(s->pc++);
3346 mod = (modrm >> 6) & 3;
3347 op = (modrm >> 3) & 7;
3353 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3356 opreg = (modrm & 7) | REX_B(s);
3361 gen_shift(s, op, ot, opreg, OR_ECX);
3364 shift = ldub_code(s->pc++);
3366 gen_shifti(s, op, ot, opreg, shift);
3381 case 0x1a4: /* shld imm */
3385 case 0x1a5: /* shld cl */
3389 case 0x1ac: /* shrd imm */
3393 case 0x1ad: /* shrd cl */
3397 ot = dflag + OT_WORD;
3398 modrm = ldub_code(s->pc++);
3399 mod = (modrm >> 6) & 3;
3400 rm = (modrm & 7) | REX_B(s);
3401 reg = ((modrm >> 3) & 7) | rex_r;
3404 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3405 gen_op_ld_T0_A0[ot + s->mem_index]();
3407 gen_op_mov_TN_reg[ot][0][rm]();
3409 gen_op_mov_TN_reg[ot][1][reg]();
3412 val = ldub_code(s->pc++);
3419 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
3421 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
3422 if (op == 0 && ot != OT_WORD)
3423 s->cc_op = CC_OP_SHLB + ot;
3425 s->cc_op = CC_OP_SARB + ot;
3428 if (s->cc_op != CC_OP_DYNAMIC)
3429 gen_op_set_cc_op(s->cc_op);
3431 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
3433 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
3434 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
3437 gen_op_mov_reg_T0[ot][rm]();
3441 /************************/
3444 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
3445 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
3446 /* XXX: what to do if illegal op ? */
3447 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3450 modrm = ldub_code(s->pc++);
3451 mod = (modrm >> 6) & 3;
3453 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
3456 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3458 case 0x00 ... 0x07: /* fxxxs */
3459 case 0x10 ... 0x17: /* fixxxl */
3460 case 0x20 ... 0x27: /* fxxxl */
3461 case 0x30 ... 0x37: /* fixxx */
3468 gen_op_flds_FT0_A0();
3471 gen_op_fildl_FT0_A0();
3474 gen_op_fldl_FT0_A0();
3478 gen_op_fild_FT0_A0();
3482 gen_op_fp_arith_ST0_FT0[op1]();
3484 /* fcomp needs pop */
3489 case 0x08: /* flds */
3490 case 0x0a: /* fsts */
3491 case 0x0b: /* fstps */
3492 case 0x18: /* fildl */
3493 case 0x1a: /* fistl */
3494 case 0x1b: /* fistpl */
3495 case 0x28: /* fldl */
3496 case 0x2a: /* fstl */
3497 case 0x2b: /* fstpl */
3498 case 0x38: /* filds */
3499 case 0x3a: /* fists */
3500 case 0x3b: /* fistps */
3506 gen_op_flds_ST0_A0();
3509 gen_op_fildl_ST0_A0();
3512 gen_op_fldl_ST0_A0();
3516 gen_op_fild_ST0_A0();
3523 gen_op_fsts_ST0_A0();
3526 gen_op_fistl_ST0_A0();
3529 gen_op_fstl_ST0_A0();
3533 gen_op_fist_ST0_A0();
3541 case 0x0c: /* fldenv mem */
3542 gen_op_fldenv_A0(s->dflag);
3544 case 0x0d: /* fldcw mem */
3547 case 0x0e: /* fnstenv mem */
3548 gen_op_fnstenv_A0(s->dflag);
3550 case 0x0f: /* fnstcw mem */
3553 case 0x1d: /* fldt mem */
3554 gen_op_fldt_ST0_A0();
3556 case 0x1f: /* fstpt mem */
3557 gen_op_fstt_ST0_A0();
3560 case 0x2c: /* frstor mem */
3561 gen_op_frstor_A0(s->dflag);
3563 case 0x2e: /* fnsave mem */
3564 gen_op_fnsave_A0(s->dflag);
3566 case 0x2f: /* fnstsw mem */
3569 case 0x3c: /* fbld */
3570 gen_op_fbld_ST0_A0();
3572 case 0x3e: /* fbstp */
3573 gen_op_fbst_ST0_A0();
3576 case 0x3d: /* fildll */
3577 gen_op_fildll_ST0_A0();
3579 case 0x3f: /* fistpll */
3580 gen_op_fistll_ST0_A0();
3587 /* register float ops */
3591 case 0x08: /* fld sti */
3593 gen_op_fmov_ST0_STN((opreg + 1) & 7);
3595 case 0x09: /* fxchg sti */
3596 case 0x29: /* fxchg4 sti, undocumented op */
3597 case 0x39: /* fxchg7 sti, undocumented op */
3598 gen_op_fxchg_ST0_STN(opreg);
3600 case 0x0a: /* grp d9/2 */
3603 /* check exceptions (FreeBSD FPU probe) */
3604 if (s->cc_op != CC_OP_DYNAMIC)
3605 gen_op_set_cc_op(s->cc_op);
3606 gen_jmp_im(pc_start - s->cs_base);
3613 case 0x0c: /* grp d9/4 */
3623 gen_op_fcom_ST0_FT0();
3632 case 0x0d: /* grp d9/5 */
3641 gen_op_fldl2t_ST0();
3645 gen_op_fldl2e_ST0();
3653 gen_op_fldlg2_ST0();
3657 gen_op_fldln2_ST0();
3668 case 0x0e: /* grp d9/6 */
3679 case 3: /* fpatan */
3682 case 4: /* fxtract */
3685 case 5: /* fprem1 */
3688 case 6: /* fdecstp */
3692 case 7: /* fincstp */
3697 case 0x0f: /* grp d9/7 */
3702 case 1: /* fyl2xp1 */
3708 case 3: /* fsincos */
3711 case 5: /* fscale */
3714 case 4: /* frndint */
3726 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3727 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3728 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3734 gen_op_fp_arith_STN_ST0[op1](opreg);
3738 gen_op_fmov_FT0_STN(opreg);
3739 gen_op_fp_arith_ST0_FT0[op1]();
3743 case 0x02: /* fcom */
3744 case 0x22: /* fcom2, undocumented op */
3745 gen_op_fmov_FT0_STN(opreg);
3746 gen_op_fcom_ST0_FT0();
3748 case 0x03: /* fcomp */
3749 case 0x23: /* fcomp3, undocumented op */
3750 case 0x32: /* fcomp5, undocumented op */
3751 gen_op_fmov_FT0_STN(opreg);
3752 gen_op_fcom_ST0_FT0();
3755 case 0x15: /* da/5 */
3757 case 1: /* fucompp */
3758 gen_op_fmov_FT0_STN(1);
3759 gen_op_fucom_ST0_FT0();
3769 case 0: /* feni (287 only, just do nop here) */
3771 case 1: /* fdisi (287 only, just do nop here) */
3776 case 3: /* fninit */
3779 case 4: /* fsetpm (287 only, just do nop here) */
3785 case 0x1d: /* fucomi */
3786 if (s->cc_op != CC_OP_DYNAMIC)
3787 gen_op_set_cc_op(s->cc_op);
3788 gen_op_fmov_FT0_STN(opreg);
3789 gen_op_fucomi_ST0_FT0();
3790 s->cc_op = CC_OP_EFLAGS;
3792 case 0x1e: /* fcomi */
3793 if (s->cc_op != CC_OP_DYNAMIC)
3794 gen_op_set_cc_op(s->cc_op);
3795 gen_op_fmov_FT0_STN(opreg);
3796 gen_op_fcomi_ST0_FT0();
3797 s->cc_op = CC_OP_EFLAGS;
3799 case 0x28: /* ffree sti */
3800 gen_op_ffree_STN(opreg);
3802 case 0x2a: /* fst sti */
3803 gen_op_fmov_STN_ST0(opreg);
3805 case 0x2b: /* fstp sti */
3806 case 0x0b: /* fstp1 sti, undocumented op */
3807 case 0x3a: /* fstp8 sti, undocumented op */
3808 case 0x3b: /* fstp9 sti, undocumented op */
3809 gen_op_fmov_STN_ST0(opreg);
3812 case 0x2c: /* fucom st(i) */
3813 gen_op_fmov_FT0_STN(opreg);
3814 gen_op_fucom_ST0_FT0();
3816 case 0x2d: /* fucomp st(i) */
3817 gen_op_fmov_FT0_STN(opreg);
3818 gen_op_fucom_ST0_FT0();
3821 case 0x33: /* de/3 */
3823 case 1: /* fcompp */
3824 gen_op_fmov_FT0_STN(1);
3825 gen_op_fcom_ST0_FT0();
3833 case 0x38: /* ffreep sti, undocumented op */
3834 gen_op_ffree_STN(opreg);
3837 case 0x3c: /* df/4 */
3840 gen_op_fnstsw_EAX();
3846 case 0x3d: /* fucomip */
3847 if (s->cc_op != CC_OP_DYNAMIC)
3848 gen_op_set_cc_op(s->cc_op);
3849 gen_op_fmov_FT0_STN(opreg);
3850 gen_op_fucomi_ST0_FT0();
3852 s->cc_op = CC_OP_EFLAGS;
3854 case 0x3e: /* fcomip */
3855 if (s->cc_op != CC_OP_DYNAMIC)
3856 gen_op_set_cc_op(s->cc_op);
3857 gen_op_fmov_FT0_STN(opreg);
3858 gen_op_fcomi_ST0_FT0();
3860 s->cc_op = CC_OP_EFLAGS;
3862 case 0x10 ... 0x13: /* fcmovxx */
3866 const static uint8_t fcmov_cc[8] = {
3872 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
3874 gen_op_fcmov_ST0_STN_T0(opreg);
3881 #ifdef USE_CODE_COPY
3882 s->tb->cflags |= CF_TB_FP_USED;
3885 /************************/
3888 case 0xa4: /* movsS */
3893 ot = dflag + OT_WORD;
3895 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3896 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3902 case 0xaa: /* stosS */
3907 ot = dflag + OT_WORD;
3909 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3910 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3915 case 0xac: /* lodsS */
3920 ot = dflag + OT_WORD;
3921 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3922 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3927 case 0xae: /* scasS */
3932 ot = dflag + OT_WORD;
3933 if (prefixes & PREFIX_REPNZ) {
3934 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3935 } else if (prefixes & PREFIX_REPZ) {
3936 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3939 s->cc_op = CC_OP_SUBB + ot;
3943 case 0xa6: /* cmpsS */
3948 ot = dflag + OT_WORD;
3949 if (prefixes & PREFIX_REPNZ) {
3950 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3951 } else if (prefixes & PREFIX_REPZ) {
3952 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3955 s->cc_op = CC_OP_SUBB + ot;
3958 case 0x6c: /* insS */
3963 ot = dflag ? OT_LONG : OT_WORD;
3964 gen_check_io(s, ot, 1, pc_start - s->cs_base);
3965 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3966 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3971 case 0x6e: /* outsS */
3976 ot = dflag ? OT_LONG : OT_WORD;
3977 gen_check_io(s, ot, 1, pc_start - s->cs_base);
3978 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3979 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3985 /************************/
3992 ot = dflag ? OT_LONG : OT_WORD;
3993 val = ldub_code(s->pc++);
3994 gen_op_movl_T0_im(val);
3995 gen_check_io(s, ot, 0, pc_start - s->cs_base);
3997 gen_op_mov_reg_T1[ot][R_EAX]();
4004 ot = dflag ? OT_LONG : OT_WORD;
4005 val = ldub_code(s->pc++);
4006 gen_op_movl_T0_im(val);
4007 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4008 gen_op_mov_TN_reg[ot][1][R_EAX]();
4016 ot = dflag ? OT_LONG : OT_WORD;
4017 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4018 gen_op_andl_T0_ffff();
4019 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4021 gen_op_mov_reg_T1[ot][R_EAX]();
4028 ot = dflag ? OT_LONG : OT_WORD;
4029 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4030 gen_op_andl_T0_ffff();
4031 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4032 gen_op_mov_TN_reg[ot][1][R_EAX]();
4036 /************************/
4038 case 0xc2: /* ret im */
4039 val = ldsw_code(s->pc);
4042 gen_stack_update(s, val + (2 << s->dflag));
4044 gen_op_andl_T0_ffff();
/* --- disas_insn fragment: control-transfer instructions ---
 * ret / lret / iret / call / lcall / jmp / ljmp / jcc / setcc / cmov.
 * NOTE(review): this listing is gap-sampled -- the embedded source line
 * numbers jump, so break statements, else-arms and some intermediate
 * lines between the visible cases are NOT shown here.  Code lines are
 * kept byte-identical; only comments are added. */
4048 case 0xc3: /* ret */
/* 16-bit operand size: truncate the popped return address to 16 bits */
4052 gen_op_andl_T0_ffff();
4056 case 0xca: /* lret im */
/* immediate = number of extra stack bytes to release after the far return */
4057 val = ldsw_code(s->pc);
/* protected mode (and not vm86): defer all segment/privilege checks to
   the lret helper, after materializing cc state and EIP */
4060 if (s->pe && !s->vm86) {
4061 if (s->cc_op != CC_OP_DYNAMIC)
4062 gen_op_set_cc_op(s->cc_op);
4063 gen_jmp_im(pc_start - s->cs_base);
4064 gen_op_lret_protected(s->dflag, val);
/* real / vm86 mode: pop new EIP, then CS, directly from the stack */
4068 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4070 gen_op_andl_T0_ffff();
4071 /* NOTE: keeping EIP updated is not a problem in case of
4075 gen_op_addl_A0_im(2 << s->dflag);
4076 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4077 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4078 /* add stack offset */
4079 gen_stack_update(s, val + (4 << s->dflag));
4083 case 0xcb: /* lret */
4086 case 0xcf: /* iret */
/* real mode: iret helper, flags become statically known afterwards */
4089 gen_op_iret_real(s->dflag);
4090 s->cc_op = CC_OP_EFLAGS;
4091 } else if (s->vm86) {
/* vm86: GPF path (the IOPL test selecting between the two arms is
   elided in this view), otherwise same real-mode iret helper */
4093 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4095 gen_op_iret_real(s->dflag);
4096 s->cc_op = CC_OP_EFLAGS;
/* protected mode: full iret helper with task/privilege handling */
4099 if (s->cc_op != CC_OP_DYNAMIC)
4100 gen_op_set_cc_op(s->cc_op);
4101 gen_jmp_im(pc_start - s->cs_base);
4102 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4103 s->cc_op = CC_OP_EFLAGS;
4107 case 0xe8: /* call im */
/* sign-extended relative displacement, 32- or 16-bit by operand size */
4110 tval = (int32_t)insn_get(s, OT_LONG);
4112 tval = (int16_t)insn_get(s, OT_WORD);
4113 next_eip = s->pc - s->cs_base;
/* push the return address (next_eip) before jumping */
4117 gen_movtl_T0_im(next_eip);
4122 case 0x9a: /* lcall im */
4124 unsigned int selector, offset;
/* absolute ptr16:16 / ptr16:32 operand: offset first, then selector */
4128 ot = dflag ? OT_LONG : OT_WORD;
4129 offset = insn_get(s, ot);
4130 selector = insn_get(s, OT_WORD);
4132 gen_op_movl_T0_im(selector);
4133 gen_op_movl_T1_imu(offset);
4136 case 0xe9: /* jmp */
4138 tval = (int32_t)insn_get(s, OT_LONG);
4140 tval = (int16_t)insn_get(s, OT_WORD);
/* displacement is relative to the end of the instruction */
4141 tval += s->pc - s->cs_base;
4146 case 0xea: /* ljmp im */
4148 unsigned int selector, offset;
4152 ot = dflag ? OT_LONG : OT_WORD;
4153 offset = insn_get(s, ot);
4154 selector = insn_get(s, OT_WORD);
4156 gen_op_movl_T0_im(selector);
4157 gen_op_movl_T1_imu(offset);
4160 case 0xeb: /* jmp Jb */
4161 tval = (int8_t)insn_get(s, OT_BYTE);
4162 tval += s->pc - s->cs_base;
4167 case 0x70 ... 0x7f: /* jcc Jb */
4168 tval = (int8_t)insn_get(s, OT_BYTE);
4170 case 0x180 ... 0x18f: /* jcc Jv */
4172 tval = (int32_t)insn_get(s, OT_LONG);
4174 tval = (int16_t)insn_get(s, OT_WORD);
4177 next_eip = s->pc - s->cs_base;
/* gen_jcc emits the conditional branch; condition encoded in opcode b */
4181 gen_jcc(s, b, tval, next_eip);
4184 case 0x190 ... 0x19f: /* setcc Gv */
4185 modrm = ldub_code(s->pc++);
/* store the 0/1 condition result byte to reg or memory */
4187 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4189 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4190 ot = dflag + OT_WORD;
4191 modrm = ldub_code(s->pc++);
4192 reg = ((modrm >> 3) & 7) | rex_r;
4193 mod = (modrm >> 6) & 3;
/* NOTE(review): "®_addr" below is mojibake for "&reg_addr" (HTML-entity
   corruption of "&reg"); the same corruption recurs at every
   gen_lea_modrm call throughout this listing. */
4196 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
/* source (Ev) loaded into T1 either from memory ... */
4197 gen_op_ld_T1_A0[ot + s->mem_index]();
/* ... or from a register operand */
4199 rm = (modrm & 7) | REX_B(s);
4200 gen_op_mov_TN_reg[ot][1][rm]();
/* conditionally move T1 into destination register per opcode condition */
4202 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4205 /************************/
/* --- flags instructions: pushf/popf, sahf/lahf, cmc/clc/stc, cld/std --- */
4207 case 0x9c: /* pushf */
/* vm86 with IOPL < 3 faults with #GP */
4208 if (s->vm86 && s->iopl != 3) {
4209 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
/* materialize lazily-tracked condition codes before reading EFLAGS */
4211 if (s->cc_op != CC_OP_DYNAMIC)
4212 gen_op_set_cc_op(s->cc_op);
4213 gen_op_movl_T0_eflags();
4217 case 0x9d: /* popf */
4218 if (s->vm86 && s->iopl != 3) {
4219 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
/* three writer variants below, selected by privilege: _cpl0 (all bits),
   _io (IOPL-privileged subset when cpl <= iopl), plain (user subset);
   each in 32-bit (movl) and 16-bit (movw) operand-size forms */
4224 gen_op_movl_eflags_T0_cpl0();
4226 gen_op_movw_eflags_T0_cpl0();
4229 if (s->cpl <= s->iopl) {
4231 gen_op_movl_eflags_T0_io();
4233 gen_op_movw_eflags_T0_io();
4237 gen_op_movl_eflags_T0();
4239 gen_op_movw_eflags_T0();
4244 s->cc_op = CC_OP_EFLAGS;
4245 /* abort translation because TF flag may change */
4246 gen_jmp_im(s->pc - s->cs_base);
4250 case 0x9e: /* sahf */
/* AH -> SF,ZF,AF,PF,CF */
4253 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4254 if (s->cc_op != CC_OP_DYNAMIC)
4255 gen_op_set_cc_op(s->cc_op);
4256 gen_op_movb_eflags_T0();
4257 s->cc_op = CC_OP_EFLAGS;
4259 case 0x9f: /* lahf */
/* flags -> AH */
4262 if (s->cc_op != CC_OP_DYNAMIC)
4263 gen_op_set_cc_op(s->cc_op);
4264 gen_op_movl_T0_eflags();
4265 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
4267 case 0xf5: /* cmc */
4268 if (s->cc_op != CC_OP_DYNAMIC)
4269 gen_op_set_cc_op(s->cc_op);
4271 s->cc_op = CC_OP_EFLAGS;
4273 case 0xf8: /* clc */
4274 if (s->cc_op != CC_OP_DYNAMIC)
4275 gen_op_set_cc_op(s->cc_op);
4277 s->cc_op = CC_OP_EFLAGS;
4279 case 0xf9: /* stc */
4280 if (s->cc_op != CC_OP_DYNAMIC)
4281 gen_op_set_cc_op(s->cc_op);
4283 s->cc_op = CC_OP_EFLAGS;
4285 case 0xfc: /* cld */
4288 case 0xfd: /* std */
4292 /************************/
4293 /* bit operations */
/* op (from the modrm reg field or the opcode) selects bt/bts/btr/btc
   in the shared gen_op_btx_T0_T1_cc helper table below */
4294 case 0x1ba: /* bt/bts/btr/btc Gv, im */
4295 ot = dflag + OT_WORD;
4296 modrm = ldub_code(s->pc++);
4297 op = ((modrm >> 3) & 7) | rex_r;
4298 mod = (modrm >> 6) & 3;
4299 rm = (modrm & 7) | REX_B(s);
/* NOTE(review): "®_addr" is mojibake for "&reg_addr" (see note above) */
4302 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4303 gen_op_ld_T0_A0[ot + s->mem_index]();
4305 gen_op_mov_TN_reg[ot][0][rm]();
/* immediate bit index goes in T1 */
4308 val = ldub_code(s->pc++);
4309 gen_op_movl_T1_im(val);
4313 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
4314 s->cc_op = CC_OP_SARB + ot;
/* bts/btr/btc write the modified value back (memory or register) */
4317 gen_op_st_T0_A0[ot + s->mem_index]();
4319 gen_op_mov_reg_T0[ot][rm]();
4320 gen_op_update_bt_cc();
4323 case 0x1a3: /* bt Gv, Ev */
4326 case 0x1ab: /* bts */
4329 case 0x1b3: /* btr */
4332 case 0x1bb: /* btc */
4335 ot = dflag + OT_WORD;
4336 modrm = ldub_code(s->pc++);
4337 reg = ((modrm >> 3) & 7) | rex_r;
4338 mod = (modrm >> 6) & 3;
4339 rm = (modrm & 7) | REX_B(s);
/* register form of the bit index: may address beyond the operand, so
   the effective address gets a computed displacement below */
4340 gen_op_mov_TN_reg[OT_LONG][1][reg]();
4342 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4343 /* specific case: we need to add a displacement */
4344 gen_op_add_bit_A0_T1[ot - OT_WORD]();
4345 gen_op_ld_T0_A0[ot + s->mem_index]();
4347 gen_op_mov_TN_reg[ot][0][rm]();
4349 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
4350 s->cc_op = CC_OP_SARB + ot;
4353 gen_op_st_T0_A0[ot + s->mem_index]();
4355 gen_op_mov_reg_T0[ot][rm]();
4356 gen_op_update_bt_cc();
4359 case 0x1bc: /* bsf */
4360 case 0x1bd: /* bsr */
4361 ot = dflag + OT_WORD;
4362 modrm = ldub_code(s->pc++);
4363 reg = ((modrm >> 3) & 7) | rex_r;
4364 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4365 /* NOTE: in order to handle the 0 case, we must load the
4366 result. It could be optimized with a generated jump */
4367 gen_op_mov_TN_reg[ot][1][reg]();
/* b & 1 selects bsf (0x1bc) vs bsr (0x1bd) in the helper table */
4368 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
4369 gen_op_mov_reg_T1[ot][reg]();
4370 s->cc_op = CC_OP_LOGICB + ot;
4372 /************************/
/* --- BCD adjustment instructions; all flush cc state first and let the
   helper compute EFLAGS (hence cc_op = CC_OP_EFLAGS afterwards) --- */
4374 case 0x27: /* daa */
4377 if (s->cc_op != CC_OP_DYNAMIC)
4378 gen_op_set_cc_op(s->cc_op);
4380 s->cc_op = CC_OP_EFLAGS;
4382 case 0x2f: /* das */
4385 if (s->cc_op != CC_OP_DYNAMIC)
4386 gen_op_set_cc_op(s->cc_op);
4388 s->cc_op = CC_OP_EFLAGS;
4390 case 0x37: /* aaa */
4393 if (s->cc_op != CC_OP_DYNAMIC)
4394 gen_op_set_cc_op(s->cc_op);
4396 s->cc_op = CC_OP_EFLAGS;
4398 case 0x3f: /* aas */
4401 if (s->cc_op != CC_OP_DYNAMIC)
4402 gen_op_set_cc_op(s->cc_op);
4404 s->cc_op = CC_OP_EFLAGS;
4406 case 0xd4: /* aam */
/* immediate divisor byte (conventionally 10) */
4409 val = ldub_code(s->pc++);
4411 s->cc_op = CC_OP_LOGICB;
4413 case 0xd5: /* aad */
4416 val = ldub_code(s->pc++);
4418 s->cc_op = CC_OP_LOGICB;
4420 /************************/
/* --- misc instructions: nop/fwait, software interrupts, cli/sti,
   bound, bswap, salc, loop family --- */
4422 case 0x90: /* nop */
4423 /* XXX: xchg + rex handling */
4424 /* XXX: correct lock test for all insn */
4425 if (prefixes & PREFIX_LOCK)
4428 case 0x9b: /* fwait */
/* #NM pre-exception if both MP and TS are set in CR0 shadow flags */
4429 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
4430 (HF_MP_MASK | HF_TS_MASK)) {
4431 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4433 if (s->cc_op != CC_OP_DYNAMIC)
4434 gen_op_set_cc_op(s->cc_op);
4435 gen_jmp_im(pc_start - s->cs_base);
4439 case 0xcc: /* int3 */
4440 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
4442 case 0xcd: /* int N */
4443 val = ldub_code(s->pc++);
/* vm86 with IOPL < 3: #GP instead of the software interrupt */
4444 if (s->vm86 && s->iopl != 3) {
4445 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4447 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
4450 case 0xce: /* into */
4453 if (s->cc_op != CC_OP_DYNAMIC)
4454 gen_op_set_cc_op(s->cc_op);
4455 gen_op_into(s->pc - s->cs_base);
4457 case 0xf1: /* icebp (undocumented, exits to external debugger) */
4459 gen_debug(s, pc_start - s->cs_base);
4462 cpu_set_log(CPU_LOG_TB_IN_ASM | CPU_LOG_PCALL);
4465 case 0xfa: /* cli */
/* allowed only when cpl <= iopl (protected) -- otherwise #GP */
4467 if (s->cpl <= s->iopl) {
4470 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4476 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4480 case 0xfb: /* sti */
4482 if (s->cpl <= s->iopl) {
4485 /* interruptions are enabled only the first insn after sti */
4486 /* If several instructions disable interrupts, only the
4488 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4489 gen_op_set_inhibit_irq();
4490 /* give a chance to handle pending irqs */
4491 gen_jmp_im(s->pc - s->cs_base);
4494 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4500 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4504 case 0x62: /* bound */
4507 ot = dflag ? OT_LONG : OT_WORD;
4508 modrm = ldub_code(s->pc++);
4509 reg = (modrm >> 3) & 7;
4510 mod = (modrm >> 6) & 3;
4513 gen_op_mov_TN_reg[ot][0][reg]();
/* NOTE(review): "®_addr" is mojibake for "&reg_addr" (see earlier note) */
4514 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4515 gen_jmp_im(pc_start - s->cs_base);
4521 case 0x1c8 ... 0x1cf: /* bswap reg */
4522 reg = (b & 7) | REX_B(s);
4523 #ifdef TARGET_X86_64
/* 64-bit form (with REX.W) swaps the full quadword ... */
4525 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
4527 gen_op_mov_reg_T0[OT_QUAD][reg]();
/* ... otherwise only the low 32 bits */
4531 gen_op_mov_TN_reg[OT_LONG][0][reg]();
4533 gen_op_mov_reg_T0[OT_LONG][reg]();
4536 case 0xd6: /* salc */
4539 if (s->cc_op != CC_OP_DYNAMIC)
4540 gen_op_set_cc_op(s->cc_op);
4543 case 0xe0: /* loopnz */
4544 case 0xe1: /* loopz */
/* loopz/loopnz read ZF, so flush cc state first */
4545 if (s->cc_op != CC_OP_DYNAMIC)
4546 gen_op_set_cc_op(s->cc_op);
4548 case 0xe2: /* loop */
4549 case 0xe3: /* jecxz */
4553 tval = (int8_t)insn_get(s, OT_BYTE);
4554 next_eip = s->pc - s->cs_base;
/* l1 = branch taken, l2 = fall through; aflag selects CX vs ECX width */
4559 l1 = gen_new_label();
4560 l2 = gen_new_label();
4563 gen_op_jz_ecx[s->aflag](l1);
4565 gen_op_dec_ECX[s->aflag]();
4566 gen_op_loop[s->aflag][b](l1);
4569 gen_jmp_im(next_eip);
4570 gen_op_jmp_label(l2);
/* --- system / privileged instructions.  Common pattern: non-cpl0 (or
   real/vm86 where required) raises #GP; helpers that can fault or change
   static CPU state get cc state flushed and EIP materialized first. --- */
4577 case 0x130: /* wrmsr */
4578 case 0x132: /* rdmsr */
4580 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4588 case 0x131: /* rdtsc */
4591 case 0x134: /* sysenter */
4595 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
/* flush cc state AND mark it dynamic: the helper rewrites EFLAGS */
4597 if (s->cc_op != CC_OP_DYNAMIC) {
4598 gen_op_set_cc_op(s->cc_op);
4599 s->cc_op = CC_OP_DYNAMIC;
4601 gen_jmp_im(pc_start - s->cs_base);
4606 case 0x135: /* sysexit */
4610 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4612 if (s->cc_op != CC_OP_DYNAMIC) {
4613 gen_op_set_cc_op(s->cc_op);
4614 s->cc_op = CC_OP_DYNAMIC;
4616 gen_jmp_im(pc_start - s->cs_base);
4621 #ifdef TARGET_X86_64
4622 case 0x105: /* syscall */
4623 /* XXX: is it usable in real mode ? */
4624 if (s->cc_op != CC_OP_DYNAMIC) {
4625 gen_op_set_cc_op(s->cc_op);
4626 s->cc_op = CC_OP_DYNAMIC;
4628 gen_jmp_im(pc_start - s->cs_base);
/* helper needs the insn length to compute the return address in RCX */
4629 gen_op_syscall(s->pc - pc_start);
4632 case 0x107: /* sysret */
4634 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4636 if (s->cc_op != CC_OP_DYNAMIC) {
4637 gen_op_set_cc_op(s->cc_op);
4638 s->cc_op = CC_OP_DYNAMIC;
4640 gen_jmp_im(pc_start - s->cs_base);
4641 gen_op_sysret(s->dflag);
4646 case 0x1a2: /* cpuid */
4649 case 0xf4: /* hlt */
4651 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4653 if (s->cc_op != CC_OP_DYNAMIC)
4654 gen_op_set_cc_op(s->cc_op);
4655 gen_jmp_im(s->pc - s->cs_base);
/* group 0F 00: sldt/str/lldt/ltr/verr/verw, dispatched on modrm.reg */
4661 modrm = ldub_code(s->pc++);
4662 mod = (modrm >> 6) & 3;
4663 op = (modrm >> 3) & 7;
/* sldt: read LDT selector from env */
4666 if (!s->pe || s->vm86)
4668 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
4672 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
/* lldt: protected-mode only, cpl0 only (#GP otherwise) */
4675 if (!s->pe || s->vm86)
4678 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4680 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4681 gen_jmp_im(pc_start - s->cs_base);
/* str: read task register selector */
4686 if (!s->pe || s->vm86)
4688 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
4692 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
/* ltr: protected-mode only, cpl0 only */
4695 if (!s->pe || s->vm86)
4698 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4700 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4701 gen_jmp_im(pc_start - s->cs_base);
/* verr/verw: set ZF from descriptor access check */
4707 if (!s->pe || s->vm86)
4709 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4710 if (s->cc_op != CC_OP_DYNAMIC)
4711 gen_op_set_cc_op(s->cc_op);
4716 s->cc_op = CC_OP_EFLAGS;
/* group 0F 01: sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg (+ swapgs) */
4723 modrm = ldub_code(s->pc++);
4724 mod = (modrm >> 6) & 3;
4725 op = (modrm >> 3) & 7;
/* sgdt/sidt: store 16-bit limit then base to memory.
   NOTE(review): "®_addr" is mojibake for "&reg_addr" (see earlier note) */
4731 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4733 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
4735 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
4736 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
4737 #ifdef TARGET_X86_64
4739 gen_op_addq_A0_im(2);
4742 gen_op_addl_A0_im(2);
4744 gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
4746 gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
/* 16-bit operand size stores only a 24-bit base */
4748 gen_op_andl_T0_im(0xffffff);
4749 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
/* lgdt/lidt: inverse operation -- load limit then base from memory */
4756 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4758 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4759 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
4760 #ifdef TARGET_X86_64
4762 gen_op_addq_A0_im(2);
4765 gen_op_addl_A0_im(2);
4766 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
4768 gen_op_andl_T0_im(0xffffff);
4770 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
4771 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
4773 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
4774 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
/* smsw: read low word of CR0 */
4779 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
4780 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
/* lmsw: cpl0 only; changes static CPU state, so end the block */
4784 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4786 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4788 gen_jmp_im(s->pc - s->cs_base);
4792 case 7: /* invlpg */
4794 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4797 #ifdef TARGET_X86_64
/* 64-bit, mod==3, rm==0: swapgs -- exchange GS.base <-> kernelgsbase */
4798 if (CODE64(s) && (modrm & 7) == 0) {
4800 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
4801 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
4802 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
4803 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
4810 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4812 gen_jmp_im(s->pc - s->cs_base);
4821 case 0x108: /* invd */
4822 case 0x109: /* wbinvd */
4824 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
/* group 0F AE: fxsave/fxrstor/fences, dispatched on modrm.reg */
4829 case 0x1ae: /* sfence */
4830 modrm = ldub_code(s->pc++);
4831 mod = (modrm >> 6) & 3;
4832 op = (modrm >> 3) & 7;
4834 case 0: /* fxsave */
/* memory operand required and FXSR must be advertised by CPUID */
4835 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
4837 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
/* dflag == 2 flags the 64-bit (REX.W) layout */
4838 gen_op_fxsave_A0((s->dflag == 2));
4840 case 1: /* fxrstor */
4841 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
4843 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4844 gen_op_fxrstor_A0((s->dflag == 2));
4846 case 5: /* lfence */
4847 case 6: /* mfence */
4848 case 7: /* sfence */
/* fences are no-ops for this translator; only the encoding is validated */
4849 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
4856 case 0x63: /* arpl or movslS (x86_64) */
4857 #ifdef TARGET_X86_64
/* in 64-bit mode opcode 0x63 is movsxd (sign-extend 32->64) */
4860 /* d_ot is the size of destination */
4861 d_ot = dflag + OT_WORD;
4863 modrm = ldub_code(s->pc++);
4864 reg = ((modrm >> 3) & 7) | rex_r;
4865 mod = (modrm >> 6) & 3;
4866 rm = (modrm & 7) | REX_B(s);
/* register source: load 32 bits, sign-extend if destination is 64-bit */
4869 gen_op_mov_TN_reg[OT_LONG][0][rm]();
4871 if (d_ot == OT_QUAD)
4872 gen_op_movslq_T0_T0();
4873 gen_op_mov_reg_T0[d_ot][reg]();
/* memory source: signed load (lds) for 64-bit dest, plain load otherwise.
   NOTE(review): "®_addr" is mojibake for "&reg_addr" (see earlier note) */
4875 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4876 if (d_ot == OT_QUAD) {
4877 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
4879 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
4881 gen_op_mov_reg_T0[d_ot][reg]();
/* legacy arpl: protected mode only */
4886 if (!s->pe || s->vm86)
4888 ot = dflag ? OT_LONG : OT_WORD;
4889 modrm = ldub_code(s->pc++);
4890 reg = (modrm >> 3) & 7;
4891 mod = (modrm >> 6) & 3;
4894 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4895 gen_op_ld_T0_A0[ot + s->mem_index]();
4897 gen_op_mov_TN_reg[ot][0][rm]();
4899 if (s->cc_op != CC_OP_DYNAMIC)
4900 gen_op_set_cc_op(s->cc_op);
4902 s->cc_op = CC_OP_EFLAGS;
/* write back adjusted selector and update ZF */
4904 gen_op_st_T0_A0[ot + s->mem_index]();
4906 gen_op_mov_reg_T0[ot][rm]();
4908 gen_op_arpl_update();
4911 case 0x102: /* lar */
4912 case 0x103: /* lsl */
4913 if (!s->pe || s->vm86)
4915 ot = dflag ? OT_LONG : OT_WORD;
4916 modrm = ldub_code(s->pc++);
4917 reg = ((modrm >> 3) & 7) | rex_r;
4918 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
/* keep the old destination value in T1: on failure only ZF changes */
4919 gen_op_mov_TN_reg[ot][1][reg]();
4920 if (s->cc_op != CC_OP_DYNAMIC)
4921 gen_op_set_cc_op(s->cc_op);
4926 s->cc_op = CC_OP_EFLAGS;
4927 gen_op_mov_reg_T1[ot][reg]();
/* prefetch hint group: compute the address (validating the encoding)
   but generate no other code */
4930 modrm = ldub_code(s->pc++);
4931 mod = (modrm >> 6) & 3;
4932 op = (modrm >> 3) & 7;
4934 case 0: /* prefetchnta */
4935 case 1: /* prefetchnt0 */
4936 case 2: /* prefetchnt0 */
4937 case 3: /* prefetchnt0 */
4940 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4941 /* nothing more to do */
/* --- control/debug register moves, clts, 66-prefixed SSE moves, and
   the end-of-decoder lock/illegal-opcode epilogue --- */
4947 case 0x120: /* mov reg, crN */
4948 case 0x122: /* mov crN, reg */
/* cpl0 only */
4950 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4952 modrm = ldub_code(s->pc++);
/* mod must be 3: crN moves have no memory form */
4953 if ((modrm & 0xc0) != 0xc0)
4955 rm = (modrm & 7) | REX_B(s);
4956 reg = ((modrm >> 3) & 7) | rex_r;
/* write to crN may change paging/static state: end the block after it */
4967 gen_op_mov_TN_reg[ot][0][rm]();
4968 gen_op_movl_crN_T0(reg);
4969 gen_jmp_im(s->pc - s->cs_base);
4972 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
4973 gen_op_mov_reg_T0[ot][rm]();
4976 /* XXX: add CR8 for x86_64 */
4982 case 0x121: /* mov reg, drN */
4983 case 0x123: /* mov drN, reg */
4985 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4987 modrm = ldub_code(s->pc++);
4988 if ((modrm & 0xc0) != 0xc0)
4990 rm = (modrm & 7) | REX_B(s);
4991 reg = ((modrm >> 3) & 7) | rex_r;
4996 /* XXX: do it dynamically with CR4.DE bit */
/* DR4/DR5 are aliases/invalid here; DR8+ do not exist */
4997 if (reg == 4 || reg == 5 || reg >= 8)
5000 gen_op_mov_TN_reg[ot][0][rm]();
5001 gen_op_movl_drN_T0(reg);
5002 gen_jmp_im(s->pc - s->cs_base);
5005 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5006 gen_op_mov_reg_T0[ot][rm]();
5010 case 0x106: /* clts */
5012 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5015 /* abort block because static cpu state changed */
5016 gen_jmp_im(s->pc - s->cs_base);
/* 0x66-prefixed 128-bit SSE moves; SSE must be advertised by CPUID */
5022 if (prefixes & PREFIX_DATA) {
5023 /* movdqa xmm1, xmm2/mem128 */
5024 if (!(s->cpuid_features & CPUID_SSE))
5026 modrm = ldub_code(s->pc++);
5027 reg = ((modrm >> 3) & 7) | rex_r;
5028 mod = (modrm >> 6) & 3;
/* NOTE(review): "®_addr" is mojibake for "&reg_addr" (see earlier note) */
5030 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5031 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
/* register-to-register form: 128-bit env copy */
5033 rm = (modrm & 7) | REX_B(s);
5034 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
5035 offsetof(CPUX86State,xmm_regs[rm]));
5042 if (prefixes & PREFIX_DATA) {
5043 /* movntdq mem128, xmm1 */
5044 if (!(s->cpuid_features & CPUID_SSE))
5046 modrm = ldub_code(s->pc++);
5047 reg = ((modrm >> 3) & 7) | rex_r;
5048 mod = (modrm >> 6) & 3;
5050 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5051 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
5060 if (prefixes & PREFIX_DATA) {
5061 /* movdqa xmm2/mem128, xmm1 */
5062 if (!(s->cpuid_features & CPUID_SSE))
5064 modrm = ldub_code(s->pc++);
5065 reg = ((modrm >> 3) & 7) | rex_r;
5066 mod = (modrm >> 6) & 3;
5068 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5069 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
/* store direction: destination is rm, source is reg */
5071 rm = (modrm & 7) | REX_B(s);
5072 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
5073 offsetof(CPUX86State,xmm_regs[reg]));
/* decoder epilogue: close any pending LOCK, and the illegal-opcode exit */
5082 /* lock generation */
5083 if (s->prefix & PREFIX_LOCK)
5087 if (s->prefix & PREFIX_LOCK)
5089 /* XXX: ensure that no lock was generated */
5090 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
/* Convenience masks covering all six arithmetic EFLAGS bits, with and
   without CF, used by the flag-liveness tables below. */
5094 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5095 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
5097 /* flags read by an operation */
/* Indexed by micro-op; consumed by optimize_flags() below as the set of
   EFLAGS bits each micro-op reads.  Unlisted ops default to 0 (read
   nothing).  NOTE(review): the table's closing brace and the DEF_READF
   invocations are elided from this view. */
5098 static uint16_t opc_read_flags[NB_OPS] = {
5099 [INDEX_op_aas] = CC_A,
5100 [INDEX_op_aaa] = CC_A,
5101 [INDEX_op_das] = CC_A | CC_C,
5102 [INDEX_op_daa] = CC_A | CC_C,
5104 /* subtle: due to the incl/decl implementation, C is used */
5105 [INDEX_op_update_inc_cc] = CC_C,
5107 [INDEX_op_into] = CC_O,
5109 [INDEX_op_jb_subb] = CC_C,
5110 [INDEX_op_jb_subw] = CC_C,
5111 [INDEX_op_jb_subl] = CC_C,
5113 [INDEX_op_jz_subb] = CC_Z,
5114 [INDEX_op_jz_subw] = CC_Z,
5115 [INDEX_op_jz_subl] = CC_Z,
5117 [INDEX_op_jbe_subb] = CC_Z | CC_C,
5118 [INDEX_op_jbe_subw] = CC_Z | CC_C,
5119 [INDEX_op_jbe_subl] = CC_Z | CC_C,
5121 [INDEX_op_js_subb] = CC_S,
5122 [INDEX_op_js_subw] = CC_S,
5123 [INDEX_op_js_subl] = CC_S,
5125 [INDEX_op_jl_subb] = CC_O | CC_S,
5126 [INDEX_op_jl_subw] = CC_O | CC_S,
5127 [INDEX_op_jl_subl] = CC_O | CC_S,
5129 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
5130 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
5131 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
5133 [INDEX_op_loopnzw] = CC_Z,
5134 [INDEX_op_loopnzl] = CC_Z,
5135 [INDEX_op_loopzw] = CC_Z,
5136 [INDEX_op_loopzl] = CC_Z,
5138 [INDEX_op_seto_T0_cc] = CC_O,
5139 [INDEX_op_setb_T0_cc] = CC_C,
5140 [INDEX_op_setz_T0_cc] = CC_Z,
5141 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
5142 [INDEX_op_sets_T0_cc] = CC_S,
5143 [INDEX_op_setp_T0_cc] = CC_P,
5144 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
5145 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
5147 [INDEX_op_setb_T0_subb] = CC_C,
5148 [INDEX_op_setb_T0_subw] = CC_C,
5149 [INDEX_op_setb_T0_subl] = CC_C,
5151 [INDEX_op_setz_T0_subb] = CC_Z,
5152 [INDEX_op_setz_T0_subw] = CC_Z,
5153 [INDEX_op_setz_T0_subl] = CC_Z,
5155 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
5156 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
5157 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
5159 [INDEX_op_sets_T0_subb] = CC_S,
5160 [INDEX_op_sets_T0_subw] = CC_S,
5161 [INDEX_op_sets_T0_subl] = CC_S,
5163 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
5164 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
5165 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
5167 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
5168 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
5169 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
5171 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
5172 [INDEX_op_cmc] = CC_C,
5173 [INDEX_op_salc] = CC_C,
5175 /* needed for correct flag optimisation before string ops */
5176 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
5177 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
5178 [INDEX_op_jz_ecxw] = CC_OSZAPC,
5179 [INDEX_op_jz_ecxl] = CC_OSZAPC,
5181 #ifdef TARGET_X86_64
5182 [INDEX_op_jb_subq] = CC_C,
5183 [INDEX_op_jz_subq] = CC_Z,
5184 [INDEX_op_jbe_subq] = CC_Z | CC_C,
5185 [INDEX_op_js_subq] = CC_S,
5186 [INDEX_op_jl_subq] = CC_O | CC_S,
5187 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
5189 [INDEX_op_loopnzq] = CC_Z,
5190 [INDEX_op_loopzq] = CC_Z,
5192 [INDEX_op_setb_T0_subq] = CC_C,
5193 [INDEX_op_setz_T0_subq] = CC_Z,
5194 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
5195 [INDEX_op_sets_T0_subq] = CC_S,
5196 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
5197 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
5199 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
5200 [INDEX_op_jz_ecxq] = CC_OSZAPC,
/* Expands per memory-access-suffix variant (raw/user/kernel): adc/sbb
   and rcl/rcr all read the incoming carry. */
5203 #define DEF_READF(SUFFIX)\
5204 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5205 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5206 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5207 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5208 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5209 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5210 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5211 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5213 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5214 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5215 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
5216 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5217 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5218 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5219 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5220 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
/* softmmu builds also instantiate the user/kernel access variants */
5224 #ifndef CONFIG_USER_ONLY
5230 /* flags written by an operation */
/* Companion to opc_read_flags: the set of EFLAGS bits each micro-op
   writes.  optimize_flags() uses it to kill dead flag computations.
   NOTE(review): the table's closing brace and the DEF_WRITEF
   invocations are elided from this view. */
5231 static uint16_t opc_write_flags[NB_OPS] = {
5232 [INDEX_op_update2_cc] = CC_OSZAPC,
5233 [INDEX_op_update1_cc] = CC_OSZAPC,
5234 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
5235 [INDEX_op_update_neg_cc] = CC_OSZAPC,
5236 /* subtle: due to the incl/decl implementation, C is used */
5237 [INDEX_op_update_inc_cc] = CC_OSZAPC,
5238 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
5240 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
5241 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
5242 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
5243 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
5244 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
5245 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
5246 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
5247 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
5248 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
5249 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
5250 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
/* BCD helpers recompute all arithmetic flags */
5253 [INDEX_op_aam] = CC_OSZAPC,
5254 [INDEX_op_aad] = CC_OSZAPC,
5255 [INDEX_op_aas] = CC_OSZAPC,
5256 [INDEX_op_aaa] = CC_OSZAPC,
5257 [INDEX_op_das] = CC_OSZAPC,
5258 [INDEX_op_daa] = CC_OSZAPC,
/* sahf-style byte write cannot touch OF; the wider variants can */
5260 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
5261 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
5262 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
5263 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
5264 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
5265 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
5266 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
5267 [INDEX_op_clc] = CC_C,
5268 [INDEX_op_stc] = CC_C,
5269 [INDEX_op_cmc] = CC_C,
5271 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
5272 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
5273 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
5274 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
5275 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
5276 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
5277 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
5278 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
5279 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
5280 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
5281 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
5282 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
5284 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
5285 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
5286 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
5287 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
5288 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
5289 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
5291 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
5292 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
5293 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
5294 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
5296 [INDEX_op_cmpxchg8b] = CC_Z,
5297 [INDEX_op_lar] = CC_Z,
5298 [INDEX_op_lsl] = CC_Z,
5299 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
5300 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
/* Per memory-access-suffix expansion of the write sets for the
   flag-producing ALU/shift/rotate/cmpxchg variants. */
5302 #define DEF_WRITEF(SUFFIX)\
5303 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5304 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5305 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5306 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5307 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5308 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5309 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5310 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5312 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5313 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5314 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5315 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
5316 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5317 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5318 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5319 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
5321 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5322 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5323 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5324 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
5325 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5326 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5327 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5328 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
5330 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5331 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5332 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5333 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5335 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5336 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5337 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5338 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5340 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5341 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5342 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5343 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5345 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
5346 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
5347 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
5348 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
5349 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
5350 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
5352 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
5353 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
5354 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
5355 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
5356 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
5357 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
5359 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
5360 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
5361 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
5362 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
/* softmmu builds also instantiate the user/kernel access variants */
5367 #ifndef CONFIG_USER_ONLY
5373 /* simpler form of an operation if no flags need to be generated */
/* Maps a flag-producing micro-op to its flag-free twin; optimize_flags()
   substitutes the twin when no subsequent op reads the flags it writes.
   Entries left at 0 get an identity default in optimize_flags_init().
   NOTE(review): the closing brace of this table is elided from view. */
5374 static uint16_t opc_simpler[NB_OPS] = {
5375 [INDEX_op_update2_cc] = INDEX_op_nop,
5376 [INDEX_op_update1_cc] = INDEX_op_nop,
5377 [INDEX_op_update_neg_cc] = INDEX_op_nop,
5379 /* broken: CC_OP logic must be rewritten */
5380 [INDEX_op_update_inc_cc] = INDEX_op_nop,
5383 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
5384 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
5385 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
5386 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
5388 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
5389 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
5390 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
5391 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
5393 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
5394 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
5395 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
5396 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* Per memory-access-suffix expansion for the rotate variants */
5398 #define DEF_SIMPLER(SUFFIX)\
5399 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
5400 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
5401 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
5402 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
5404 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
5405 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
5406 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
5407 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
5411 #ifndef CONFIG_USER_ONLY
5412 DEF_SIMPLER(_kernel)
/* One-time init: fill the unset (0) entries of opc_simpler so every op
   has a mapping.  The assignment inside the loop is elided from this
   view -- presumably each default maps the op to itself (identity);
   confirm against the full source. */
5417 void optimize_flags_init(void)
5420 /* put default values in arrays */
5421 for(i = 0; i < NB_OPS; i++) {
5422 if (opc_simpler[i] == 0)
5427 /* CPU flags computation optimization: we move backward thru the
5428 generated code to see which flags are needed. The operation is
5429 modified if suitable */
/* Classic backward liveness analysis over one translated block:
   live_flags tracks the EFLAGS bits still needed by later ops.
   NOTE(review): the opcode fetch that decrements opc_ptr and sets `op`
   inside the loop is elided from this view. */
5430 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
5433 int live_flags, write_flags, op;
5435 opc_ptr = opc_buf + opc_buf_len;
5436 /* live_flags contains the flags needed by the next instructions
5437 in the code. At the end of the bloc, we consider that all the
/* conservative boundary condition: everything live at block exit */
5439 live_flags = CC_OSZAPC;
5440 while (opc_ptr > opc_buf) {
5442 /* if none of the flags written by the instruction is used,
5443 then we can try to find a simpler instruction */
5444 write_flags = opc_write_flags[op];
5445 if ((live_flags & write_flags) == 0) {
/* dead flag computation: substitute the flag-free twin in place */
5446 *opc_ptr = opc_simpler[op];
5448 /* compute the live flags before the instruction */
/* standard dataflow transfer: live = (live - written) + read */
5449 live_flags &= ~write_flags;
5450 live_flags |= opc_read_flags[op];
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
    DisasContext dc1, *dc = &dc1;   /* disassembly context lives on the stack */
    target_ulong pc_ptr;            /* guest PC of the instruction being decoded */
    uint16_t *gen_opc_end;          /* hard limit of the micro-op buffer */
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;
    /* generate intermediate code */
    cs_base = tb->cs_base;
    cflags = tb->cflags;
    /* Unpack the mode bits cached in 'flags' into the disas context;
       they control how every instruction of this block is decoded. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;        /* x86 trap flag: single-step */
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;  /* condition codes not statically known yet */
    dc->cs_base = cs_base;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    if (flags & HF_SOFTMMU_MASK) {
        /* NOTE(review): elided branches around these two assignments are
           not visible here — confirm index selection against full source */
            dc->mem_index = 2 * 4;
            dc->mem_index = 1 * 4;
    dc->cpuid_features = env->cpuid_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
    /* direct block chaining is only allowed when nothing forces a
       return to the main loop after each instruction */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");

    /* reset the micro-op output buffers for this block */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;

    dc->is_jmp = DISAS_NEXT;
        /* emit a debug trap when a breakpoint falls on the current PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
            /* record the micro-op index -> guest PC mapping */
            j = gen_opc_ptr - gen_opc_buf;
                    gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        /* decode one guest instruction; returns the PC after it */
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the mapping entries for the final values */
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
    /* optional tracing of CPU state, guest asm and generated micro-ops */
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    if (loglevel & CPU_LOG_TB_OP) {
        fprintf(logfile, "OP:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    tb->size = pc_ptr - pc_start;
/* Translate basic block 'tb' without recording per-micro-op PC
   mapping information (search_pc = 0). */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
    return gen_intermediate_code_internal(env, tb, 0);
/* Same as gen_intermediate_code(), but also generates the PC
   information for each intermediate instruction (search_pc = 1). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
    return gen_intermediate_code_internal(env, tb, 1);