4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Instruction-prefix flags OR'ed together while decoding one x86 insn. */
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
/* x86-64 build: 64-bit-only table entries are kept, and REX state is
   read from the DisasContext. */
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
/* 32-bit-only build: 64-bit table entries collapse to NULL / nothing. */
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
60 /* global register indexes */
/* cpu_T[0]/cpu_T[1] are the two generic operand temporaries, cpu_A0 the
   effective-address temporary; cpu_cc_op/src/dst hold the lazy condition
   code state. */
61 static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
63 /* local register indexes (only used inside old micro ops) */
64 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65 static TCGv cpu_tmp5, cpu_tmp6;
/* NOTE(review): when non-zero, byte accesses to regs 4-7 use the low byte
   (SPL..DIL) instead of AH..BH — see gen_op_mov_reg_TN; presumably set when
   a REX prefix is decoded — confirm at the decode site (not visible here). */
68 static int x86_64_hregs;
/* Per-translation-block decoder state: everything the instruction decoder
   needs to know about the current insn and the CPU mode of the block. */
71 typedef struct DisasContext {
72 /* current insn context */
73 int override; /* -1 if no override */
76 target_ulong pc; /* pc = eip + cs_base */
77 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
78 static state change (stop translation) */
79 /* current block context */
80 target_ulong cs_base; /* base of CS segment */
81 int pe; /* protected mode */
82 int code32; /* 32 bit code segment */
84 int lma; /* long mode active */
85 int code64; /* 64 bit code segment */
88 int ss32; /* 32 bit stack segment */
89 int cc_op; /* current CC operation */
90 int addseg; /* non zero if either DS/ES/SS have a non zero base */
91 int f_st; /* currently unused */
92 int vm86; /* vm86 mode */
95 int tf; /* TF cpu flag */
96 int singlestep_enabled; /* "hardware" single step enabled */
97 int jmp_opt; /* use direct block chaining for direct jumps */
98 int mem_index; /* select memory access functions */
99 uint64_t flags; /* all execution flags */
100 struct TranslationBlock *tb;
101 int popl_esp_hack; /* for correct popl with esp base handling */
102 int rip_offset; /* only used in x86_64, but left for simplicity */
104 int cpuid_ext_features;
105 int cpuid_ext2_features;
/* Forward declarations: end-of-block, direct jump, and chained-jump emitters
   used throughout the generator below. */
108 static void gen_eob(DisasContext *s);
109 static void gen_jmp(DisasContext *s, target_ulong eip);
110 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
112 /* i386 arith/logic operations */
132 OP_SHL1, /* undocumented */
145 /* I386 int registers */
146 OR_EAX, /* MUST be even numbered */
155 OR_TMP0 = 16, /* temporary operand register */
157 OR_A0, /* temporary register used when doing address evaluation */
/* --- Basic micro-op helpers: load immediates / copy between the generic
   temporaries T0, T1 and the address temporary A0. --- */
160 static inline void gen_op_movl_T0_0(void)
162 tcg_gen_movi_tl(cpu_T[0], 0);
165 static inline void gen_op_movl_T0_im(int32_t val)
167 tcg_gen_movi_tl(cpu_T[0], val);
170 static inline void gen_op_movl_T0_imu(uint32_t val)
172 tcg_gen_movi_tl(cpu_T[0], val);
175 static inline void gen_op_movl_T1_im(int32_t val)
177 tcg_gen_movi_tl(cpu_T[1], val);
180 static inline void gen_op_movl_T1_imu(uint32_t val)
182 tcg_gen_movi_tl(cpu_T[1], val);
185 static inline void gen_op_movl_A0_im(uint32_t val)
187 tcg_gen_movi_tl(cpu_A0, val);
191 static inline void gen_op_movq_A0_im(int64_t val)
193 tcg_gen_movi_tl(cpu_A0, val);
/* target_ulong-width immediates (32 or 64 bit depending on target). */
197 static inline void gen_movtl_T0_im(target_ulong val)
199 tcg_gen_movi_tl(cpu_T[0], val);
202 static inline void gen_movtl_T1_im(target_ulong val)
204 tcg_gen_movi_tl(cpu_T[1], val);
/* Masking helpers used for 16-bit operand/address sizes. */
207 static inline void gen_op_andl_T0_ffff(void)
209 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
212 static inline void gen_op_andl_T0_im(uint32_t val)
214 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
217 static inline void gen_op_movl_T0_T1(void)
219 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
222 static inline void gen_op_andl_A0_ffff(void)
224 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
/* Number of operand sizes in op tables: b/w/l (+q on x86-64). */
229 #define NB_OP_SIZES 4
/* Expands one table entry per architectural integer register
   (16 regs on x86-64, 8 otherwise). */
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,\
240 prefix ## R8 ## suffix,\
241 prefix ## R9 ## suffix,\
242 prefix ## R10 ## suffix,\
243 prefix ## R11 ## suffix,\
244 prefix ## R12 ## suffix,\
245 prefix ## R13 ## suffix,\
246 prefix ## R14 ## suffix,\
247 prefix ## R15 ## suffix,
249 #else /* !TARGET_X86_64 */
251 #define NB_OP_SIZES 3
253 #define DEF_REGS(prefix, suffix) \
254 prefix ## EAX ## suffix,\
255 prefix ## ECX ## suffix,\
256 prefix ## EDX ## suffix,\
257 prefix ## EBX ## suffix,\
258 prefix ## ESP ## suffix,\
259 prefix ## EBP ## suffix,\
260 prefix ## ESI ## suffix,\
261 prefix ## EDI ## suffix,
263 #endif /* !TARGET_X86_64 */
/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   register slot; they differ by host endianness. */
265 #if defined(WORDS_BIGENDIAN)
266 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
267 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
268 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
269 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
270 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
272 #define REG_B_OFFSET 0
273 #define REG_H_OFFSET 1
274 #define REG_W_OFFSET 0
275 #define REG_L_OFFSET 0
276 #define REG_LH_OFFSET 4
/* Store temporary T[t_index] into architectural register 'reg' at operand
   size 'ot' (byte/word/long/quad cases; switch scaffolding not visible in
   this dump). */
279 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
/* Byte case: regs 0-3 (and, on x86-64, regs >= 8 or any reg when
   x86_64_hregs is set) use the low byte; otherwise regs 4-7 map to the
   legacy high-byte AH/CH/DH/BH at regs[reg - 4] + REG_H_OFFSET. */
283 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
284 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
286 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
290 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
/* 32-bit store on a 64-bit target zero-extends: the high half of the
   register slot is explicitly cleared. */
294 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
295 /* high part of register set to zero */
296 tcg_gen_movi_tl(cpu_tmp0, 0);
297 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
301 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
306 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
/* Convenience wrappers for the common T0 / T1 sources. */
312 static inline void gen_op_mov_reg_T0(int ot, int reg)
314 gen_op_mov_reg_TN(ot, 0, reg);
317 static inline void gen_op_mov_reg_T1(int ot, int reg)
319 gen_op_mov_reg_TN(ot, 1, reg);
/* Store the address temporary A0 into register 'reg' (word/long/quad). */
322 static inline void gen_op_mov_reg_A0(int size, int reg)
326 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
330 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
331 /* high part of register set to zero */
332 tcg_gen_movi_tl(cpu_tmp0, 0);
333 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
337 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
342 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
/* Load register 'reg' into T[t_index]; same byte-register selection rule
   as gen_op_mov_reg_TN above. */
348 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
352 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
355 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
360 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
/* Load the 32-bit part of register 'reg' into the address temporary A0. */
365 static inline void gen_op_movl_A0_reg(int reg)
367 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
/* Add an immediate to A0, truncating to 32 bits (the mask is in the
   64-bit-target path not fully visible here). */
370 static inline void gen_op_addl_A0_im(int32_t val)
372 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
374 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
/* 64-bit variant: full-width add, no truncation. */
379 static inline void gen_op_addq_A0_im(int64_t val)
381 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
/* Dispatch on 64- vs 32-bit address computation for the current block. */
385 static void gen_add_A0_im(DisasContext *s, int val)
389 gen_op_addq_A0_im(val);
392 gen_op_addl_A0_im(val);
395 static inline void gen_op_addl_T0_T1(void)
397 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Set EIP from T0 (indirect jump target). */
400 static inline void gen_op_jmp_T0(void)
402 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
/* ESP += val at word size: only the low 16 bits are written back. */
405 static inline void gen_op_addw_ESP_im(int32_t val)
407 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
408 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
/* ESP += val at long size (masked to 32 bits on 64-bit targets). */
412 static inline void gen_op_addl_ESP_im(int32_t val)
414 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
415 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
417 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
419 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
/* RSP += val, full 64-bit width. */
423 static inline void gen_op_addq_ESP_im(int32_t val)
425 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
427 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
/* Record which lazy condition-code operation produced cc_src/cc_dst. */
431 static inline void gen_op_set_cc_op(int32_t val)
433 tcg_gen_movi_i32(cpu_cc_op, val);
/* A0 += reg << shift, 32-bit masked (scaled-index address component). */
436 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
438 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
440 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
441 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
443 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
/* A0 = segment base (32-bit). */
447 static inline void gen_op_movl_A0_seg(int reg)
449 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
/* A0 += segment base, 32-bit masked. */
452 static inline void gen_op_addl_A0_seg(int reg)
454 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
455 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
457 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
/* 64-bit variants: full-width segment base / register address math. */
462 static inline void gen_op_movq_A0_seg(int reg)
464 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
467 static inline void gen_op_addq_A0_seg(int reg)
469 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
470 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
473 static inline void gen_op_movq_A0_reg(int reg)
475 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
478 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
480 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
482 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
483 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
/* Dispatch tables [operand size][register] built via DEF_REGS; byte size
   is excluded (NB_OP_SIZES - 1). */
487 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
489 DEF_REGS(gen_op_cmovw_, _T1_T0)
492 DEF_REGS(gen_op_cmovl_, _T1_T0)
496 DEF_REGS(gen_op_cmovq_, _T1_T0)
/* [operand size][0=bsf,1=bsr] — presumably; entries not visible here. */
501 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
/* Sign-extending guest load of [A0] into T0. 'idx' packs the operand size
   in the low bits and the memory index in bits >= 2. */
518 static inline void gen_op_lds_T0_A0(int idx)
520 int mem_index = (idx >> 2) - 1;
523 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
526 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
530 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
535 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Zero-extending guest load of [A0] into T0 (b/w/l/q by size bits). */
536 static inline void gen_op_ld_T0_A0(int idx)
538 int mem_index = (idx >> 2) - 1;
541 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
544 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
547 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
551 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
/* Alias of the unsigned load (kept for call-site symmetry). */
556 static inline void gen_op_ldu_T0_A0(int idx)
558 gen_op_ld_T0_A0(idx);
/* Same as gen_op_ld_T0_A0 but targets T1. */
561 static inline void gen_op_ld_T1_A0(int idx)
563 int mem_index = (idx >> 2) - 1;
566 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
569 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
572 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
576 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
/* Guest store of T0 to [A0], sized by the low bits of 'idx'. */
581 static inline void gen_op_st_T0_A0(int idx)
583 int mem_index = (idx >> 2) - 1;
586 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
589 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
592 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
596 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
/* Guest store of T1 to [A0]. */
601 static inline void gen_op_st_T1_A0(int idx)
603 int mem_index = (idx >> 2) - 1;
606 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
609 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
612 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
616 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
/* Write an immediate EIP value into the CPU state (used before helper
   calls / exceptions that need an exact PC). */
621 static inline void gen_jmp_im(target_ulong pc)
623 tcg_gen_movi_tl(cpu_tmp0, pc);
624 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
/* Compute A0 = seg_base + (E)SI for a string instruction source, honouring
   segment override, address size, and the addseg optimisation. */
627 static inline void gen_string_movl_A0_ESI(DisasContext *s)
631 override = s->override;
635 gen_op_movq_A0_seg(override);
636 gen_op_addq_A0_reg_sN(0, R_ESI);
638 gen_op_movq_A0_reg(R_ESI);
/* 32-bit path: add the segment base only when needed. */
644 if (s->addseg && override < 0)
647 gen_op_movl_A0_seg(override);
648 gen_op_addl_A0_reg_sN(0, R_ESI);
650 gen_op_movl_A0_reg(R_ESI);
653 /* 16 address, always override */
656 gen_op_movl_A0_reg(R_ESI);
657 gen_op_andl_A0_ffff();
658 gen_op_addl_A0_seg(override);
/* Compute A0 = ES base + (E)DI for a string destination; the destination
   segment is always ES and cannot be overridden. */
662 static inline void gen_string_movl_A0_EDI(DisasContext *s)
666 gen_op_movq_A0_reg(R_EDI);
671 gen_op_movl_A0_seg(R_ES);
672 gen_op_addl_A0_reg_sN(0, R_EDI);
674 gen_op_movl_A0_reg(R_EDI);
677 gen_op_movl_A0_reg(R_EDI);
678 gen_op_andl_A0_ffff();
679 gen_op_addl_A0_seg(R_ES);
/* Per-operand-size tables: load the DF-based element stride into T0,
   test/decrement (E)CX per address size, and the repz/repnz loop-exit
   compare jumps. 64-bit entries only exist on x86-64 builds. */
683 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
684 gen_op_movl_T0_Dshiftb,
685 gen_op_movl_T0_Dshiftw,
686 gen_op_movl_T0_Dshiftl,
687 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
690 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
693 X86_64_ONLY(gen_op_jnz_ecxq),
696 static GenOpFunc1 *gen_op_jz_ecx[3] = {
699 X86_64_ONLY(gen_op_jz_ecxq),
702 static GenOpFunc *gen_op_dec_ECX[3] = {
705 X86_64_ONLY(gen_op_decq_ECX),
708 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
713 X86_64_ONLY(gen_op_jnz_subq),
719 X86_64_ONLY(gen_op_jz_subq),
/* I/O port access helpers indexed by operand size (b/w/l). */
723 static void *helper_in_func[3] = {
729 static void *helper_out_func[3] = {
735 static void *gen_check_io_func[3] = {
/* Emit the permission / interception checks required before an I/O port
   access: TSS I/O-bitmap check when CPL > IOPL or in vm86 mode, and the
   SVM IOIO intercept when enabled. The port number is taken from T0. */
741 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
745 target_ulong next_eip;
748 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
/* Flush lazy flags before calling a helper that may raise #GP. */
749 if (s->cc_op != CC_OP_DYNAMIC)
750 gen_op_set_cc_op(s->cc_op);
753 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
754 tcg_gen_helper_0_1(gen_check_io_func[ot],
/* SVM: if the guest hypervisor intercepts protected-mode I/O, call the
   svm check helper with the access size encoded in svm_flags. */
757 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
759 if (s->cc_op != CC_OP_DYNAMIC)
760 gen_op_set_cc_op(s->cc_op);
764 svm_flags |= (1 << (4 + ot));
765 next_eip = s->pc - s->cs_base;
766 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
767 tcg_gen_helper_0_3(helper_svm_check_io,
769 tcg_const_i32(svm_flags),
770 tcg_const_i32(next_eip - cur_eip));
/* MOVS: load from [seg:SI], store to [ES:DI], then advance SI and DI by
   the DF-based stride (T0) at the current address size. */
774 static inline void gen_movs(DisasContext *s, int ot)
776 gen_string_movl_A0_ESI(s);
777 gen_op_ld_T0_A0(ot + s->mem_index);
778 gen_string_movl_A0_EDI(s);
779 gen_op_st_T0_A0(ot + s->mem_index);
780 gen_op_movl_T0_Dshift[ot]();
783 gen_op_addq_ESI_T0();
784 gen_op_addq_EDI_T0();
788 gen_op_addl_ESI_T0();
789 gen_op_addl_EDI_T0();
791 gen_op_addw_ESI_T0();
792 gen_op_addw_EDI_T0();
/* Materialise the pending lazy cc_op into the CPU state and mark the
   translation-time state dynamic. */
796 static inline void gen_update_cc_op(DisasContext *s)
798 if (s->cc_op != CC_OP_DYNAMIC) {
799 gen_op_set_cc_op(s->cc_op);
800 s->cc_op = CC_OP_DYNAMIC;
/* Lazy-flag source/dest updates: logic ops need only the result (dst),
   arith ops also record the second operand (src). */
804 static void gen_op_update1_cc(void)
806 tcg_gen_discard_tl(cpu_cc_src);
807 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
810 static void gen_op_update2_cc(void)
812 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
813 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
/* CMP: flags from T0 - T1 without writing a result back. */
816 static inline void gen_op_cmpl_T0_T1_cc(void)
818 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
819 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
/* TEST: flags from T0 & T1. */
822 static inline void gen_op_testl_T0_T1_cc(void)
824 tcg_gen_discard_tl(cpu_cc_src);
825 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
/* NEG: cc_src = -result so the subtract flags can be reconstructed. */
828 static void gen_op_update_neg_cc(void)
830 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
831 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
834 /* XXX: does not work with gdbstub "ice" single step - not a
/* Emit the leading (E)CX == 0 test of a REP string op: jump past the body
   to next_eip when the count is already zero; returns the exit label. */
836 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
840 l1 = gen_new_label();
841 l2 = gen_new_label();
842 gen_op_jnz_ecx[s->aflag](l1);
844 gen_jmp_tb(s, next_eip, 1);
/* STOS: store AL/AX/EAX (always loaded at long size) to [ES:DI], then
   advance DI. */
849 static inline void gen_stos(DisasContext *s, int ot)
851 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
852 gen_string_movl_A0_EDI(s);
853 gen_op_st_T0_A0(ot + s->mem_index);
854 gen_op_movl_T0_Dshift[ot]();
857 gen_op_addq_EDI_T0();
861 gen_op_addl_EDI_T0();
863 gen_op_addw_EDI_T0();
/* LODS: load [seg:SI] into the accumulator, then advance SI. */
867 static inline void gen_lods(DisasContext *s, int ot)
869 gen_string_movl_A0_ESI(s);
870 gen_op_ld_T0_A0(ot + s->mem_index);
871 gen_op_mov_reg_T0(ot, R_EAX);
872 gen_op_movl_T0_Dshift[ot]();
875 gen_op_addq_ESI_T0();
879 gen_op_addl_ESI_T0();
881 gen_op_addw_ESI_T0();
/* SCAS: compare accumulator with [ES:DI] (flags only), then advance DI. */
885 static inline void gen_scas(DisasContext *s, int ot)
887 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
888 gen_string_movl_A0_EDI(s);
889 gen_op_ld_T1_A0(ot + s->mem_index);
890 gen_op_cmpl_T0_T1_cc();
891 gen_op_movl_T0_Dshift[ot]();
894 gen_op_addq_EDI_T0();
898 gen_op_addl_EDI_T0();
900 gen_op_addw_EDI_T0();
/* CMPS: compare [seg:SI] with [ES:DI] (flags only), then advance both. */
904 static inline void gen_cmps(DisasContext *s, int ot)
906 gen_string_movl_A0_ESI(s);
907 gen_op_ld_T0_A0(ot + s->mem_index);
908 gen_string_movl_A0_EDI(s);
909 gen_op_ld_T1_A0(ot + s->mem_index);
910 gen_op_cmpl_T0_T1_cc();
911 gen_op_movl_T0_Dshift[ot]();
914 gen_op_addq_ESI_T0();
915 gen_op_addq_EDI_T0();
919 gen_op_addl_ESI_T0();
920 gen_op_addl_EDI_T0();
922 gen_op_addw_ESI_T0();
923 gen_op_addw_EDI_T0();
/* INS: read a port (DX, masked to 16 bits) via helper_in_func and store
   the value to [ES:DI]; note the store at line 931 precedes the port read
   in this dump — intervening lines are missing here, so consult the full
   source before reasoning about the exact ordering. */
927 static inline void gen_ins(DisasContext *s, int ot)
929 gen_string_movl_A0_EDI(s);
931 gen_op_st_T0_A0(ot + s->mem_index);
932 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
933 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
934 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
935 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
936 gen_op_st_T0_A0(ot + s->mem_index);
937 gen_op_movl_T0_Dshift[ot]();
940 gen_op_addq_EDI_T0();
944 gen_op_addl_EDI_T0();
946 gen_op_addw_EDI_T0();
/* OUTS: load [seg:SI] and write it to port DX via helper_out_func, then
   advance SI. */
950 static inline void gen_outs(DisasContext *s, int ot)
952 gen_string_movl_A0_ESI(s);
953 gen_op_ld_T0_A0(ot + s->mem_index);
955 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
956 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
957 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
958 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
959 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
961 gen_op_movl_T0_Dshift[ot]();
964 gen_op_addq_ESI_T0();
968 gen_op_addl_ESI_T0();
970 gen_op_addw_ESI_T0();
974 /* same method as Valgrind : we generate jumps to current or next
/* GEN_REPZ: wrap a string op in a REP loop — test CX, run one iteration,
   decrement CX, loop back by jumping to cur_eip. One insn per iteration
   keeps single-stepping and interrupt delivery correct. */
976 #define GEN_REPZ(op) \
977 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
978 target_ulong cur_eip, target_ulong next_eip) \
981 gen_update_cc_op(s); \
982 l2 = gen_jz_ecx_string(s, next_eip); \
984 gen_op_dec_ECX[s->aflag](); \
985 /* a loop would cause two single step exceptions if ECX = 1 \
986 before rep string_insn */ \
988 gen_op_jz_ecx[s->aflag](l2); \
989 gen_jmp(s, cur_eip); \
/* GEN_REPZ2: REPZ/REPNZ variant for SCAS/CMPS — additionally exits the
   loop on the ZF condition selected by 'nz' after each compare. */
992 #define GEN_REPZ2(op) \
993 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
994 target_ulong cur_eip, \
995 target_ulong next_eip, \
999 gen_update_cc_op(s); \
1000 l2 = gen_jz_ecx_string(s, next_eip); \
1001 gen_ ## op(s, ot); \
1002 gen_op_dec_ECX[s->aflag](); \
1003 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1004 gen_op_string_jnz_sub[nz][ot](l2);\
1006 gen_op_jz_ecx[s->aflag](l2); \
1007 gen_jmp(s, cur_eip); \
/* Conditional-jump ops indexed [operand size][condition]; the 64-bit rows
   use BUGGY_64 (== NULL) for entries the old codegen could not emit. */
1029 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1060 #ifdef TARGET_X86_64
1063 BUGGY_64(gen_op_jb_subq),
1065 BUGGY_64(gen_op_jbe_subq),
1068 BUGGY_64(gen_op_jl_subq),
1069 BUGGY_64(gen_op_jle_subq),
/* LOOP/LOOPZ/LOOPNZ/JCXZ ops indexed [address size][variant]. */
1073 static GenOpFunc1 *gen_op_loop[3][4] = {
1084 #ifdef TARGET_X86_64
/* SETcc fallback using fully computed eflags, one entry per condition. */
1093 static GenOpFunc *gen_setcc_slow[8] = {
/* SETcc fast path after a subtract, indexed [operand size][condition]. */
1104 static GenOpFunc *gen_setcc_sub[4][8] = {
1107 gen_op_setb_T0_subb,
1108 gen_op_setz_T0_subb,
1109 gen_op_setbe_T0_subb,
1110 gen_op_sets_T0_subb,
1112 gen_op_setl_T0_subb,
1113 gen_op_setle_T0_subb,
1117 gen_op_setb_T0_subw,
1118 gen_op_setz_T0_subw,
1119 gen_op_setbe_T0_subw,
1120 gen_op_sets_T0_subw,
1122 gen_op_setl_T0_subw,
1123 gen_op_setle_T0_subw,
1127 gen_op_setb_T0_subl,
1128 gen_op_setz_T0_subl,
1129 gen_op_setbe_T0_subl,
1130 gen_op_sets_T0_subl,
1132 gen_op_setl_T0_subl,
1133 gen_op_setle_T0_subl,
1135 #ifdef TARGET_X86_64
1138 gen_op_setb_T0_subq,
1139 gen_op_setz_T0_subq,
1140 gen_op_setbe_T0_subq,
1141 gen_op_sets_T0_subq,
1143 gen_op_setl_T0_subq,
1144 gen_op_setle_T0_subq,
/* x87 arithmetic helpers indexed by the 3-bit op field of the FP opcode;
   fcom appears twice because ops 2 and 3 (fcom/fcomp) share the compare. */
1149 static void *helper_fp_arith_ST0_FT0[8] = {
1150 helper_fadd_ST0_FT0,
1151 helper_fmul_ST0_FT0,
1152 helper_fcom_ST0_FT0,
1153 helper_fcom_ST0_FT0,
1154 helper_fsub_ST0_FT0,
1155 helper_fsubr_ST0_FT0,
1156 helper_fdiv_ST0_FT0,
1157 helper_fdivr_ST0_FT0,
1160 /* NOTE the exception in "r" op ordering */
1161 static void *helper_fp_arith_STN_ST0[8] = {
1162 helper_fadd_STN_ST0,
1163 helper_fmul_STN_ST0,
1166 helper_fsubr_STN_ST0,
1167 helper_fsub_STN_ST0,
1168 helper_fdivr_STN_ST0,
1169 helper_fdiv_STN_ST0,
1172 /* compute eflags.C to reg */
/* Indirect call through cc_table[cc_op].compute_c: index the table with
   the current cc_op and call the resulting function pointer. The shift
   differs per host because a CCTable entry is 8 bytes on 32-bit hosts
   (shl 3) and 16 bytes on 64-bit hosts (shl 4). */
1173 static void gen_compute_eflags_c(TCGv reg)
1175 #if TCG_TARGET_REG_BITS == 32
1176 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1177 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1178 (long)cc_table + offsetof(CCTable, compute_c));
1179 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1180 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1181 1, &cpu_tmp2_i32, 0, NULL);
1183 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1184 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1185 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1186 (long)cc_table + offsetof(CCTable, compute_c));
1187 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1188 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1189 1, &cpu_tmp2_i32, 0, NULL);
1191 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1194 /* compute all eflags to cc_src */
/* Same dispatch as above but through CCTable.compute_all: the full flag
   set is materialised into 'reg'. */
1195 static void gen_compute_eflags(TCGv reg)
1197 #if TCG_TARGET_REG_BITS == 32
1198 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1199 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1200 (long)cc_table + offsetof(CCTable, compute_all));
1201 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1202 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1203 1, &cpu_tmp2_i32, 0, NULL);
1205 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1206 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1207 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1208 (long)cc_table + offsetof(CCTable, compute_all));
1209 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1210 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1211 1, &cpu_tmp2_i32, 0, NULL);
1213 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1216 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation T0 op= T1 at operand size 'ot'; destination is a
   register unless d == OR_TMP0 (then [A0]). Also sets up the lazy flag
   state for the operation. */
1217 static void gen_op(DisasContext *s1, int op, int ot, int d)
1220 gen_op_mov_TN_reg(ot, 0, d);
1222 gen_op_ld_T0_A0(ot + s1->mem_index);
/* ADC: needs the current carry, so eflags.C is computed first and added
   in; the final cc_op is made dynamic because it depends on whether the
   carry was set (cc_op = CC_OP_ADDB + ot + 2*carry via the shifted add). */
1226 if (s1->cc_op != CC_OP_DYNAMIC)
1227 gen_op_set_cc_op(s1->cc_op);
1228 gen_compute_eflags_c(cpu_tmp4);
1229 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1230 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1232 gen_op_mov_reg_T0(ot, d);
1234 gen_op_st_T0_A0(ot + s1->mem_index);
1235 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1236 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1237 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4)
1238 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1239 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1240 s1->cc_op = CC_OP_DYNAMIC;
/* SBB: mirror of ADC with subtraction and CC_OP_SUBB base. */
1243 if (s1->cc_op != CC_OP_DYNAMIC)
1244 gen_op_set_cc_op(s1->cc_op);
1245 gen_compute_eflags_c(cpu_tmp4);
1246 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1247 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1249 gen_op_mov_reg_T0(ot, d);
1251 gen_op_st_T0_A0(ot + s1->mem_index);
1252 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1253 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1254 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1255 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1256 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1257 s1->cc_op = CC_OP_DYNAMIC;
/* ADD / SUB: plain arithmetic, two-operand lazy flags. */
1260 gen_op_addl_T0_T1();
1262 gen_op_mov_reg_T0(ot, d);
1264 gen_op_st_T0_A0(ot + s1->mem_index);
1265 gen_op_update2_cc();
1266 s1->cc_op = CC_OP_ADDB + ot;
1269 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1271 gen_op_mov_reg_T0(ot, d);
1273 gen_op_st_T0_A0(ot + s1->mem_index);
1274 gen_op_update2_cc();
1275 s1->cc_op = CC_OP_SUBB + ot;
/* AND / OR / XOR: logic ops, result-only lazy flags. */
1279 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1281 gen_op_mov_reg_T0(ot, d);
1283 gen_op_st_T0_A0(ot + s1->mem_index);
1284 gen_op_update1_cc();
1285 s1->cc_op = CC_OP_LOGICB + ot;
1288 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1290 gen_op_mov_reg_T0(ot, d);
1292 gen_op_st_T0_A0(ot + s1->mem_index);
1293 gen_op_update1_cc();
1294 s1->cc_op = CC_OP_LOGICB + ot;
1297 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1299 gen_op_mov_reg_T0(ot, d);
1301 gen_op_st_T0_A0(ot + s1->mem_index);
1302 gen_op_update1_cc();
1303 s1->cc_op = CC_OP_LOGICB + ot;
/* CMP: flags only, nothing written back. */
1306 gen_op_cmpl_T0_T1_cc();
1307 s1->cc_op = CC_OP_SUBB + ot;
1312 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* INC/DEC (c = +1 / -1): unlike ADD/SUB these preserve CF, so the current
   carry is captured into cc_src and the cc_op becomes INCB/DECB + ot. */
1313 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1316 gen_op_mov_TN_reg(ot, 0, d);
1318 gen_op_ld_T0_A0(ot + s1->mem_index);
1319 if (s1->cc_op != CC_OP_DYNAMIC)
1320 gen_op_set_cc_op(s1->cc_op);
1322 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1323 s1->cc_op = CC_OP_INCB + ot;
1325 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1326 s1->cc_op = CC_OP_DECB + ot;
1329 gen_op_mov_reg_T0(ot, d);
1331 gen_op_st_T0_A0(ot + s1->mem_index);
1332 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
/* preserved CF goes into cc_src */
1333 gen_compute_eflags_c(cpu_cc_src);
/* Zero-extend 'reg' in place to the width given by 'ot'. */
1336 static void gen_extu(int ot, TCGv reg)
1340 tcg_gen_ext8u_tl(reg, reg);
1343 tcg_gen_ext16u_tl(reg, reg);
1346 tcg_gen_ext32u_tl(reg, reg);
/* Sign-extend 'reg' in place to the width given by 'ot'. */
1353 static void gen_exts(int ot, TCGv reg)
1357 tcg_gen_ext8s_tl(reg, reg);
1360 tcg_gen_ext16s_tl(reg, reg);
1363 tcg_gen_ext32s_tl(reg, reg);
1370 /* XXX: add faster immediate case */
/* SHL/SHR/SAR with a variable count in T1: operand is loaded (memory if
   op1 == OR_TMP0, else register), the count is masked, and the value
   shifted by count-1 is kept in cpu_T3 so CF/OF can be derived when the
   flags are updated. Flags are only changed when the count is non-zero,
   hence the runtime branch and CC_OP_DYNAMIC afterwards. */
1371 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1372 int is_right, int is_arith)
1384 gen_op_ld_T0_A0(ot + s->mem_index);
1386 gen_op_mov_TN_reg(ot, 0, op1);
1388 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1390 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
/* arithmetic right shift: operand is sign-extended first */
1394 gen_exts(ot, cpu_T[0]);
1395 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1396 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* logical right shift: zero-extended operand */
1398 gen_extu(ot, cpu_T[0]);
1399 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1400 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* left shift */
1403 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1404 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1409 gen_op_st_T0_A0(ot + s->mem_index);
1411 gen_op_mov_reg_T0(ot, op1);
1413 /* update eflags if non zero shift */
1414 if (s->cc_op != CC_OP_DYNAMIC)
1415 gen_op_set_cc_op(s->cc_op);
1417 shift_label = gen_new_label();
1418 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
1420 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1421 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1423 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1425 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1427 gen_set_label(shift_label);
1428 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Shift by a signed immediate: left when arg2 >= 0, right by -arg2
   otherwise. */
1431 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1434 tcg_gen_shli_tl(ret, arg1, arg2);
1436 tcg_gen_shri_tl(ret, arg1, -arg2);
1439 /* XXX: add faster immediate case */
/* ROL/ROR with a variable count in T1: implemented as two opposing shifts
   OR'ed together. The zero-count case is branched around entirely (both
   the rotate and the flag update), since a zero rotate must leave the
   value and the flags untouched. */
1440 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1444 int label1, label2, data_bits;
1453 gen_op_ld_T0_A0(ot + s->mem_index);
1455 gen_op_mov_TN_reg(ot, 0, op1);
1457 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1459 /* Must test zero case to avoid using undefined behaviour in TCG
1461 label1 = gen_new_label();
1462 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
/* effective count: masked modulo the data width for sub-word sizes */
1465 tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
1467 tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
/* keep the pre-rotate value in cpu_T3 for the OF computation below */
1469 gen_extu(ot, cpu_T[0]);
1470 tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
1472 data_bits = 8 << ot;
1473 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1474 fix TCG definition) */
/* right rotate: (x >> n) | (x << (width - n)); left is the mirror */
1476 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1477 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1478 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1480 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1481 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1482 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1484 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1486 gen_set_label(label1);
1489 gen_op_st_T0_A0(ot + s->mem_index);
1491 gen_op_mov_reg_T0(ot, op1);
/* flag update (skipped when count == 0): CF from the rotated-out bit,
   OF from old^new sign bit, folded into fully-computed eflags */
1494 if (s->cc_op != CC_OP_DYNAMIC)
1495 gen_op_set_cc_op(s->cc_op);
1497 label2 = gen_new_label();
1498 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);
1500 gen_compute_eflags(cpu_cc_src);
1501 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1502 tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
1503 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1504 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1505 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1507 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
1509 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
1510 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
1512 tcg_gen_discard_tl(cpu_cc_dst);
1513 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1515 gen_set_label(label2);
1516 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* RCL/RCR helpers indexed [ot + 4*is_right]; 64-bit entries only on
   x86-64 builds. */
1519 static void *helper_rotc[8] = {
1523 X86_64_ONLY(helper_rclq),
1527 X86_64_ONLY(helper_rcrq),
1530 /* XXX: add faster immediate = 1 case */
/* RCL/RCR (rotate through carry): done entirely in a C helper since the
   carry participates; the helper signals "flags unchanged" by leaving
   cpu_T3 at -1, which the branch below tests before updating eflags. */
1531 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1536 if (s->cc_op != CC_OP_DYNAMIC)
1537 gen_op_set_cc_op(s->cc_op);
1541 gen_op_ld_T0_A0(ot + s->mem_index);
1543 gen_op_mov_TN_reg(ot, 0, op1);
1545 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1546 cpu_T[0], cpu_T[0], cpu_T[1]);
1549 gen_op_st_T0_A0(ot + s->mem_index);
1551 gen_op_mov_reg_T0(ot, op1);
1554 label1 = gen_new_label();
1555 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
1557 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1558 tcg_gen_discard_tl(cpu_cc_dst);
1559 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1561 gen_set_label(label1);
1562 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1565 /* XXX: add faster immediate case */
/* SHLD/SHRD (double-precision shift) with a variable count in T3: shifts
   T0 while feeding bits in from T1. The word-sized case concatenates the
   two 16-bit values into one 32-bit quantity first (Intel behaviour for
   counts > 16). cpu_tmp4 accumulates the value shifted by count-1 for
   the later CF derivation; the zero-count case branches over everything. */
1566 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1569 int label1, label2, data_bits;
1579 gen_op_ld_T0_A0(ot + s->mem_index);
1581 gen_op_mov_TN_reg(ot, 0, op1);
1583 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1584 /* Must test zero case to avoid using undefined behaviour in TCG
1586 label1 = gen_new_label();
1587 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
1589 tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
1590 if (ot == OT_WORD) {
1591 /* Note: we implement the Intel behaviour for shift count > 16 */
/* right shift: low half = T0, high half = T1 */
1593 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1594 tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
1595 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1596 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1598 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1600 /* only needed if count > 16, but a test would complicate */
1601 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1602 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
1604 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1606 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1608 /* XXX: not optimal */
/* left shift: high half = T0, low half = T1 */
1609 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1610 tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
1611 tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
1612 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1614 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1615 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1616 tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
1617 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1619 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1620 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1621 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1622 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* long/quad sizes: genuine double shift across T0:T1 */
1625 data_bits = 8 << ot;
1628 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1630 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1632 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1633 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1634 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1635 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1639 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1641 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1643 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1644 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1645 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1646 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1649 tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);
1651 gen_set_label(label1);
1654 gen_op_st_T0_A0(ot + s->mem_index);
1656 gen_op_mov_reg_T0(ot, op1);
/* flag update, skipped for zero count, same scheme as gen_shift_rm_T1 */
1659 if (s->cc_op != CC_OP_DYNAMIC)
1660 gen_op_set_cc_op(s->cc_op);
1662 label2 = gen_new_label();
1663 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);
1665 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1666 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1668 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1670 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1672 gen_set_label(label2);
1673 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Emit code for a shift/rotate whose count comes from register 's':
   loads the count into T1 and dispatches on 'op' (ROL/ROR/SHL/SHR/SAR/RCL/RCR)
   to the rot/shift/rotc generators. 'ot' is the operand size, 'd' the
   destination operand. The switch labels are not visible in this chunk. */
1676 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1679 gen_op_mov_TN_reg(ot, 1, s);
1682 gen_rot_rm_T1(s1, ot, d, 0);
1685 gen_rot_rm_T1(s1, ot, d, 1);
1689 gen_shift_rm_T1(s1, ot, d, 0, 0);
1692 gen_shift_rm_T1(s1, ot, d, 1, 0);
1695 gen_shift_rm_T1(s1, ot, d, 1, 1);
1698 gen_rotc_rm_T1(s1, ot, d, 0);
1701 gen_rotc_rm_T1(s1, ot, d, 1);
/* Shift/rotate by an immediate count 'c': reuses gen_shift() by first
   materializing the constant in T1 (OR_TMP1). */
1706 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1708 /* currently not optimized */
1709 gen_op_movl_T1_im(c);
1710 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the addressing part of a modrm byte and emit TCG ops that leave
   the effective address in A0. Handles 32/64-bit addressing (with optional
   SIB byte, RIP-relative disp32 in long mode) and legacy 16-bit addressing
   modes. Segment-base addition is applied when 's->addseg' requires it or
   an explicit override prefix is active. The out parameters reg_ptr /
   offset_ptr return decode results to the caller.
   NOTE(review): many interior lines (switch labels, braces, else arms) are
   elided in this view; comments below mark the visible phases only. */
1713 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1721 int mod, rm, code, override, must_add_seg;
1723 override = s->override;
1724 must_add_seg = s->addseg;
1727 mod = (modrm >> 6) & 3;
/* SIB byte: scale/index/base fields; index is REX-extended. */
1739 code = ldub_code(s->pc++);
1740 scale = (code >> 6) & 3;
1741 index = ((code >> 3) & 7) | REX_X(s);
1748 if ((base & 7) == 5) {
1750 disp = (int32_t)ldl_code(s->pc);
/* In long mode without SIB, mod=0/base=5 means RIP-relative. */
1752 if (CODE64(s) && !havesib) {
1753 disp += s->pc + s->rip_offset;
1760 disp = (int8_t)ldub_code(s->pc++);
1764 disp = ldl_code(s->pc);
1770 /* for correct popl handling with esp */
1771 if (base == 4 && s->popl_esp_hack)
1772 disp += s->popl_esp_hack;
1773 #ifdef TARGET_X86_64
1774 if (s->aflag == 2) {
1775 gen_op_movq_A0_reg(base);
1777 gen_op_addq_A0_im(disp);
1782 gen_op_movl_A0_reg(base);
1784 gen_op_addl_A0_im(disp);
1787 #ifdef TARGET_X86_64
1788 if (s->aflag == 2) {
1789 gen_op_movq_A0_im(disp);
1793 gen_op_movl_A0_im(disp);
1796 /* XXX: index == 4 is always invalid */
1797 if (havesib && (index != 4 || scale != 0)) {
1798 #ifdef TARGET_X86_64
1799 if (s->aflag == 2) {
1800 gen_op_addq_A0_reg_sN(scale, index);
1804 gen_op_addl_A0_reg_sN(scale, index);
/* Default segment is SS when the base is EBP/ESP, DS otherwise. */
1809 if (base == R_EBP || base == R_ESP)
1814 #ifdef TARGET_X86_64
1815 if (s->aflag == 2) {
1816 gen_op_addq_A0_seg(override);
1820 gen_op_addl_A0_seg(override);
/* 16-bit addressing modes: fixed register pair combinations + disp. */
1827 disp = lduw_code(s->pc);
1829 gen_op_movl_A0_im(disp);
1830 rm = 0; /* avoid SS override */
1837 disp = (int8_t)ldub_code(s->pc++);
1841 disp = lduw_code(s->pc);
1847 gen_op_movl_A0_reg(R_EBX);
1848 gen_op_addl_A0_reg_sN(0, R_ESI);
1851 gen_op_movl_A0_reg(R_EBX);
1852 gen_op_addl_A0_reg_sN(0, R_EDI);
1855 gen_op_movl_A0_reg(R_EBP);
1856 gen_op_addl_A0_reg_sN(0, R_ESI);
1859 gen_op_movl_A0_reg(R_EBP);
1860 gen_op_addl_A0_reg_sN(0, R_EDI);
1863 gen_op_movl_A0_reg(R_ESI);
1866 gen_op_movl_A0_reg(R_EDI);
1869 gen_op_movl_A0_reg(R_EBP);
1873 gen_op_movl_A0_reg(R_EBX);
/* 16-bit addresses wrap at 64K, hence the mask after adding disp. */
1877 gen_op_addl_A0_im(disp);
1878 gen_op_andl_A0_ffff();
1882 if (rm == 2 || rm == 3 || rm == 6)
1887 gen_op_addl_A0_seg(override);
/* Skip over (decode without emitting address computation for) the modrm
   bytes of an instruction whose memory operand is ignored, e.g. multi-byte
   NOP / prefetch forms. Advances s->pc past SIB/displacement as needed. */
1897 static void gen_nop_modrm(DisasContext *s, int modrm)
1899 int mod, rm, base, code;
1901 mod = (modrm >> 6) & 3;
1911 code = ldub_code(s->pc++);
1947 /* used for LEA and MOV AX, mem */
/* Add the DS segment base (or the active override segment) to A0 when the
   current mode requires explicit segment addition (s->addseg). */
1948 static void gen_add_A0_ds_seg(DisasContext *s)
1950 int override, must_add_seg;
1951 must_add_seg = s->addseg;
1953 if (s->override >= 0) {
1954 override = s->override;
1960 #ifdef TARGET_X86_64
1962 gen_op_addq_A0_seg(override);
1966 gen_op_addl_A0_seg(override);
1971 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
/* Register form (mod == 3): a plain register-to-register move through T0.
   Memory form: compute the address with gen_lea_modrm(), then load or
   store T0 depending on 'is_store'. */
1973 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1975 int mod, rm, opreg, disp;
1977 mod = (modrm >> 6) & 3;
1978 rm = (modrm & 7) | REX_B(s);
1982 gen_op_mov_TN_reg(ot, 0, reg);
1983 gen_op_mov_reg_T0(ot, rm);
1985 gen_op_mov_TN_reg(ot, 0, rm);
1987 gen_op_mov_reg_T0(ot, reg);
1990 gen_lea_modrm(s, modrm, &opreg, &disp);
1993 gen_op_mov_TN_reg(ot, 0, reg);
1994 gen_op_st_T0_A0(ot + s->mem_index);
1996 gen_op_ld_T0_A0(ot + s->mem_index);
1998 gen_op_mov_reg_T0(ot, reg);
/* Fetch an immediate of size 'ot' (byte/word/long) from the instruction
   stream at s->pc and return it; s->pc advancement lines are elided here. */
2003 static inline uint32_t insn_get(DisasContext *s, int ot)
2009 ret = ldub_code(s->pc);
2013 ret = lduw_code(s->pc);
2018 ret = ldl_code(s->pc);
/* NOTE(review): returns the byte length of an immediate operand for operand
   type 'ot' — body not visible in this chunk; confirm against full source. */
2025 static inline int insn_const_size(unsigned int ot)
/* Emit a jump to 'eip'. If the target lies in the same guest page as the
   current TB (or the page of the last fetched byte), use a direct
   chained-TB jump; otherwise fall back to the slow path. */
2033 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2035 TranslationBlock *tb;
2038 pc = s->cs_base + eip;
2040 /* NOTE: we handle the case where the TB spans two pages here */
2041 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2042 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2043 /* jump to same page: we can use a direct jump */
2044 tcg_gen_goto_tb(tb_num);
2046 tcg_gen_exit_tb((long)tb + tb_num);
2048 /* jump to another page: currently not optimized */
/* Emit a conditional jump: taken -> 'val', not taken -> 'next_eip'.
   Fast path: when the condition-code state statically comes from a
   SUB/CMP (or compatible ADD-family op), pick a specialized jump helper
   from gen_jcc_sub. Slow path: flush cc state and evaluate the condition
   via gen_setcc_slow, then branch on T0. */
2054 static inline void gen_jcc(DisasContext *s, int b,
2055 target_ulong val, target_ulong next_eip)
2057 TranslationBlock *tb;
2064 jcc_op = (b >> 1) & 7;
2068 /* we optimize the cmp/jcc case */
2073 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
2076 /* some jumps are easy to compute */
/* '% 4' folds the ADD-family cc_op down to its operand-size row. */
2118 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2121 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2133 if (s->cc_op != CC_OP_DYNAMIC) {
2134 gen_op_set_cc_op(s->cc_op);
2135 s->cc_op = CC_OP_DYNAMIC;
2139 gen_setcc_slow[jcc_op]();
2140 func = gen_op_jnz_T0_label;
2150 l1 = gen_new_label();
2153 gen_goto_tb(s, 0, next_eip);
2156 gen_goto_tb(s, 1, val);
2161 if (s->cc_op != CC_OP_DYNAMIC) {
2162 gen_op_set_cc_op(s->cc_op);
2163 s->cc_op = CC_OP_DYNAMIC;
2165 gen_setcc_slow[jcc_op]();
2171 l1 = gen_new_label();
2172 l2 = gen_new_label();
2173 gen_op_jnz_T0_label(l1);
2174 gen_jmp_im(next_eip);
2175 gen_op_jmp_label(l2);
/* Emit SETcc: compute the condition 'b' into T0 (0/1). Mirrors gen_jcc's
   fast/slow path structure using the gen_setcc_sub / gen_setcc_slow tables. */
2183 static void gen_setcc(DisasContext *s, int b)
2189 jcc_op = (b >> 1) & 7;
2191 /* we optimize the cmp/jcc case */
2196 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
2201 /* some jumps are easy to compute */
2228 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2231 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2239 if (s->cc_op != CC_OP_DYNAMIC)
2240 gen_op_set_cc_op(s->cc_op);
2241 func = gen_setcc_slow[jcc_op];
2250 /* move T0 to seg_reg and compute if the CPU state may change. Never
2251    call this function with seg_reg == R_CS */
/* Protected mode: go through the helper_load_seg helper (which can fault),
   after flushing cc state and the current eip. Real/vm86 mode: a simple
   direct selector store. */
2252 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2254 if (s->pe && !s->vm86) {
2255 /* XXX: optimize by finding processor state dynamically */
2256 if (s->cc_op != CC_OP_DYNAMIC)
2257 gen_op_set_cc_op(s->cc_op);
2258 gen_jmp_im(cur_eip);
2259 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2260 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2261 /* abort translation because the addseg value may change or
2262    because ss32 may change. For R_SS, translation must always
2263    stop as a special handling must be done to disable hardware
2264    interrupts for the next instruction */
2265 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2268 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
2269 if (seg_reg == R_SS)
/* SVM exit-info helper: 8 when a REP/REPNZ prefix is active, else 0. */
2274 static inline int svm_is_rep(int prefixes)
2276 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
/* Emit an SVM intercept check for exit code 'type' with extra info 'param'.
   Returns early when SVM interception is not active at all. For CR/DR and
   MSR accesses it calls helper_svm_check_intercept_param; for the INTR-range
   exits it calls helper_vmexit directly. cc state and eip are flushed first
   because the helpers may trigger a #VMEXIT. */
2280 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2281 uint32_t type, uint64_t param)
2283 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2284 /* no SVM activated */
2287 /* CRx and DRx reads/writes */
2288 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2289 if (s->cc_op != CC_OP_DYNAMIC) {
2290 gen_op_set_cc_op(s->cc_op);
2292 gen_jmp_im(pc_start - s->cs_base);
2293 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2294 tcg_const_i32(type), tcg_const_i64(param));
2295 /* this is a special case as we do not know if the interception occurs
2296    so we assume there was none */
2299 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2300 if (s->cc_op != CC_OP_DYNAMIC) {
2301 gen_op_set_cc_op(s->cc_op);
2303 gen_jmp_im(pc_start - s->cs_base);
2304 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2305 tcg_const_i32(type), tcg_const_i64(param));
2306 /* this is a special case as we do not know if the interception occurs
2307    so we assume there was none */
2312 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2313 if (s->cc_op != CC_OP_DYNAMIC) {
2314 gen_op_set_cc_op(s->cc_op);
2316 gen_jmp_im(pc_start - s->cs_base);
2317 tcg_gen_helper_0_2(helper_vmexit,
2318 tcg_const_i32(type), tcg_const_i64(param));
2319 /* we can optimize this one so TBs don't get longer
2320    than up to vmexit */
/* Convenience wrapper: intercept check with a zero parameter. */
2329 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2331 return gen_svm_check_intercept_param(s, pc_start, type, 0);
/* Adjust ESP/RSP by 'addend', choosing the 64/32/16-bit add depending on
   the current mode (the surrounding conditionals are elided in this view). */
2334 static inline void gen_stack_update(DisasContext *s, int addend)
2336 #ifdef TARGET_X86_64
2338 gen_op_addq_ESP_im(addend);
2342 gen_op_addl_ESP_im(addend);
2344 gen_op_addw_ESP_im(addend);
2348 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0 on the stack: decrement a copy of ESP in A0, apply the SS base
   when needed, store T0 with the operand size implied by dflag, then write
   the new stack pointer back (either from A0 or from the saved T1 copy). */
2349 static void gen_push_T0(DisasContext *s)
2351 #ifdef TARGET_X86_64
2353 gen_op_movq_A0_reg(R_ESP);
2355 gen_op_addq_A0_im(-8);
2356 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2358 gen_op_addq_A0_im(-2);
2359 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2361 gen_op_mov_reg_A0(2, R_ESP);
2365 gen_op_movl_A0_reg(R_ESP);
2367 gen_op_addl_A0_im(-2);
2369 gen_op_addl_A0_im(-4);
/* Keep the unsegmented new ESP in T1 before adding the SS base. */
2372 gen_op_movl_T1_A0();
2373 gen_op_addl_A0_seg(R_SS);
2376 gen_op_andl_A0_ffff();
2377 gen_op_movl_T1_A0();
2378 gen_op_addl_A0_seg(R_SS);
2380 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2381 if (s->ss32 && !s->addseg)
2382 gen_op_mov_reg_A0(1, R_ESP);
2384 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2388 /* generate a push. It depends on ss32, addseg and dflag */
2389 /* slower version for T1, only used for call Ev */
2390 static void gen_push_T1(DisasContext *s)
2392 #ifdef TARGET_X86_64
2394 gen_op_movq_A0_reg(R_ESP);
2396 gen_op_addq_A0_im(-8);
2397 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2399 gen_op_addq_A0_im(-2);
/* NOTE(review): this stores T0, but this is the T1 push helper and the
   QUAD path above stores T1 — looks like a copy/paste bug; should
   presumably be gen_op_st_T1_A0. Confirm against upstream history. */
2400 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2402 gen_op_mov_reg_A0(2, R_ESP);
2406 gen_op_movl_A0_reg(R_ESP);
2408 gen_op_addl_A0_im(-2);
2410 gen_op_addl_A0_im(-4);
2413 gen_op_addl_A0_seg(R_SS);
2416 gen_op_andl_A0_ffff();
2417 gen_op_addl_A0_seg(R_SS);
2419 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2421 if (s->ss32 && !s->addseg)
2422 gen_op_mov_reg_A0(1, R_ESP);
2424 gen_stack_update(s, (-2) << s->dflag);
2428 /* two step pop is necessary for precise exceptions */
/* Load the top of stack into T0 without yet adjusting ESP; the caller
   completes the pop with gen_pop_update() after the load cannot fault. */
2429 static void gen_pop_T0(DisasContext *s)
2431 #ifdef TARGET_X86_64
2433 gen_op_movq_A0_reg(R_ESP);
2434 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2438 gen_op_movl_A0_reg(R_ESP);
2441 gen_op_addl_A0_seg(R_SS);
2443 gen_op_andl_A0_ffff();
2444 gen_op_addl_A0_seg(R_SS);
2446 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
/* Second half of a pop: bump the stack pointer by the operand size
   (8 in 64-bit-operand long mode, else 2 or 4 via dflag). */
2450 static void gen_pop_update(DisasContext *s)
2452 #ifdef TARGET_X86_64
2453 if (CODE64(s) && s->dflag) {
2454 gen_stack_update(s, 8);
2458 gen_stack_update(s, 2 << s->dflag);
/* Compute the current stack address in A0 (ESP, masked to 16 bits when
   !ss32, plus the SS base when required); the raw offset is kept in T1. */
2462 static void gen_stack_A0(DisasContext *s)
2464 gen_op_movl_A0_reg(R_ESP);
2466 gen_op_andl_A0_ffff();
2467 gen_op_movl_T1_A0();
2469 gen_op_addl_A0_seg(R_SS);
2472 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: pre-decrement ESP by 8 slots, then store registers
   EDI..EAX (index 7-i walks from EAX downwards in stack order). */
2473 static void gen_pusha(DisasContext *s)
2476 gen_op_movl_A0_reg(R_ESP);
2477 gen_op_addl_A0_im(-16 << s->dflag);
2479 gen_op_andl_A0_ffff();
2480 gen_op_movl_T1_A0();
2482 gen_op_addl_A0_seg(R_SS);
2483 for(i = 0;i < 8; i++) {
2484 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2485 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2486 gen_op_addl_A0_im(2 << s->dflag);
2488 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2491 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: load 8 stack slots back into the registers (the slot for
   ESP itself is skipped per the ISA), then set ESP from the saved T1. */
2492 static void gen_popa(DisasContext *s)
2495 gen_op_movl_A0_reg(R_ESP);
2497 gen_op_andl_A0_ffff();
2498 gen_op_movl_T1_A0();
2499 gen_op_addl_T1_im(16 << s->dflag);
2501 gen_op_addl_A0_seg(R_SS);
2502 for(i = 0;i < 8; i++) {
2503 /* ESP is not reloaded */
2505 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2506 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2508 gen_op_addl_A0_im(2 << s->dflag);
2510 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* ENTER instruction: push EBP, optionally copy 'level' stack-frame
   pointers via a helper, set EBP to the new frame, and reserve
   'esp_addend' bytes of locals. Separate 64-bit and legacy paths. */
2513 static void gen_enter(DisasContext *s, int esp_addend, int level)
2518 #ifdef TARGET_X86_64
2520 ot = s->dflag ? OT_QUAD : OT_WORD;
2523 gen_op_movl_A0_reg(R_ESP);
2524 gen_op_addq_A0_im(-opsize);
2525 gen_op_movl_T1_A0();
/* push ebp */
2528 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2529 gen_op_st_T0_A0(ot + s->mem_index);
2531 /* XXX: must save state */
2532 tcg_gen_helper_0_3(helper_enter64_level,
2533 tcg_const_i32(level),
2534 tcg_const_i32((ot == OT_QUAD)),
2537 gen_op_mov_reg_T1(ot, R_EBP);
2538 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2539 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2543 ot = s->dflag + OT_WORD;
2544 opsize = 2 << s->dflag;
2546 gen_op_movl_A0_reg(R_ESP);
2547 gen_op_addl_A0_im(-opsize);
2549 gen_op_andl_A0_ffff();
2550 gen_op_movl_T1_A0();
2552 gen_op_addl_A0_seg(R_SS);
2554 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2555 gen_op_st_T0_A0(ot + s->mem_index);
2557 /* XXX: must save state */
2558 tcg_gen_helper_0_3(helper_enter_level,
2559 tcg_const_i32(level),
2560 tcg_const_i32(s->dflag),
2563 gen_op_mov_reg_T1(ot, R_EBP);
2564 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2565 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* Raise guest exception 'trapno' at 'cur_eip': flush cc state, set eip,
   and call the raise helper (which does not return to the TB). */
2569 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2571 if (s->cc_op != CC_OP_DYNAMIC)
2572 gen_op_set_cc_op(s->cc_op);
2573 gen_jmp_im(cur_eip);
2574 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2578 /* an interrupt is different from an exception because of the
/* Raise software interrupt 'intno'; the helper also receives the
   instruction length (next_eip - cur_eip) so IRET returns past INT. */
2580 static void gen_interrupt(DisasContext *s, int intno,
2581 target_ulong cur_eip, target_ulong next_eip)
2583 if (s->cc_op != CC_OP_DYNAMIC)
2584 gen_op_set_cc_op(s->cc_op);
2585 gen_jmp_im(cur_eip);
2586 tcg_gen_helper_0_2(helper_raise_interrupt,
2587 tcg_const_i32(intno),
2588 tcg_const_i32(next_eip - cur_eip));
/* Enter the debugger at 'cur_eip' (used for breakpoints): flush cc state,
   set eip, and call helper_debug. */
2592 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2594 if (s->cc_op != CC_OP_DYNAMIC)
2595 gen_op_set_cc_op(s->cc_op);
2596 gen_jmp_im(cur_eip);
2597 tcg_gen_helper_0_0(helper_debug);
2601 /* generate a generic end of block. Trace exception is also generated
/* Flushes cc state, clears the interrupt-inhibit flag if set, and honours
   single-step/debug before ending the TB. */
2603 static void gen_eob(DisasContext *s)
2605 if (s->cc_op != CC_OP_DYNAMIC)
2606 gen_op_set_cc_op(s->cc_op);
2607 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2608 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2610 if (s->singlestep_enabled) {
2611 tcg_gen_helper_0_0(helper_debug);
2613 tcg_gen_helper_0_0(helper_single_step);
2620 /* generate a jump to eip. No segment change must happen before as a
2621    direct call to the next block may occur */
2622 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2625 if (s->cc_op != CC_OP_DYNAMIC) {
2626 gen_op_set_cc_op(s->cc_op);
2627 s->cc_op = CC_OP_DYNAMIC;
2629 gen_goto_tb(s, tb_num, eip);
/* Unconditional jump to 'eip' using TB slot 0. */
2637 static void gen_jmp(DisasContext *s, target_ulong eip)
2639 gen_jmp_tb(s, eip, 0);
/* Load a 64-bit value from guest address A0 into the CPU state at
   'offset'. 'idx' encodes the memory index (see the (idx >> 2) - 1). */
2642 static inline void gen_ldq_env_A0(int idx, int offset)
2644 int mem_index = (idx >> 2) - 1;
2645 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2646 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
/* Store a 64-bit value from CPU state at 'offset' to guest address A0. */
2649 static inline void gen_stq_env_A0(int idx, int offset)
2651 int mem_index = (idx >> 2) - 1;
2652 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2653 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
/* Load a 128-bit XMM value from A0 into env at 'offset' as two 64-bit
   halves (low quadword, then A0+8 into the high quadword). */
2656 static inline void gen_ldo_env_A0(int idx, int offset)
2658 int mem_index = (idx >> 2) - 1;
2659 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2660 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2661 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2662 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2663 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
/* Store a 128-bit XMM value from env at 'offset' to A0 as two 64-bit
   halves (low quadword, then the high quadword at A0+8). */
2666 static inline void gen_sto_env_A0(int idx, int offset)
2668 int mem_index = (idx >> 2) - 1;
2669 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2670 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2671 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2672 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2673 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
/* Copy a 128-bit (octa) value between two env offsets, 64 bits at a time. */
2676 static inline void gen_op_movo(int d_offset, int s_offset)
2678 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2679 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2680 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2681 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
/* Copy a 64-bit value between two env offsets. */
2684 static inline void gen_op_movq(int d_offset, int s_offset)
2686 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2687 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
/* Copy a 32-bit value between two env offsets. */
2690 static inline void gen_op_movl(int d_offset, int s_offset)
2692 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2693 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
/* Zero the 64-bit value at env offset 'd_offset'. */
2696 static inline void gen_op_movq_env_0(int d_offset)
2698 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2699 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
/* Table markers: SSE_SPECIAL entries are decoded by hand in gen_sse();
   SSE_DUMMY marks opcodes handled but with no generic helper. */
2702 #define SSE_SPECIAL ((void *)1)
2703 #define SSE_DUMMY ((void *)2)
/* Entry-pair/quad builders: {mmx, xmm} and {ps, pd, ss, sd} helper sets. */
2705 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2706 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2707 helper_ ## x ## ss, helper_ ## x ## sd, }
/* Main SSE/MMX dispatch table, indexed by [second opcode byte][prefix]:
   column 0 = no prefix, 1 = 0x66 (operand size), 2 = 0xF3 (REPZ),
   3 = 0xF2 (REPNZ). SSE_SPECIAL/SSE_DUMMY entries are decoded inline
   in gen_sse(); everything else is a generic 2-operand helper. */
2709 static void *sse_op_table1[256][4] = {
2710 /* 3DNow! extensions */
2711 [0x0e] = { SSE_DUMMY }, /* femms */
2712 [0x0f] = { SSE_DUMMY }, /* pf... */
2713 /* pure SSE operations */
2714 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2715 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2716 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2717 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2718 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2719 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2720 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2721 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2723 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2724 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2725 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2726 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2727 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2728 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2729 [0x2e] = { helper_ucomiss, helper_ucomisd },
2730 [0x2f] = { helper_comiss, helper_comisd },
2731 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2732 [0x51] = SSE_FOP(sqrt),
2733 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2734 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2735 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2736 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2737 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2738 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2739 [0x58] = SSE_FOP(add),
2740 [0x59] = SSE_FOP(mul),
2741 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2742 helper_cvtss2sd, helper_cvtsd2ss },
2743 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2744 [0x5c] = SSE_FOP(sub),
2745 [0x5d] = SSE_FOP(min),
2746 [0x5e] = SSE_FOP(div),
2747 [0x5f] = SSE_FOP(max),
2749 [0xc2] = SSE_FOP(cmpeq),
2750 [0xc6] = { helper_shufps, helper_shufpd },
2752 /* MMX ops and their SSE extensions */
2753 [0x60] = MMX_OP2(punpcklbw),
2754 [0x61] = MMX_OP2(punpcklwd),
2755 [0x62] = MMX_OP2(punpckldq),
2756 [0x63] = MMX_OP2(packsswb),
2757 [0x64] = MMX_OP2(pcmpgtb),
2758 [0x65] = MMX_OP2(pcmpgtw),
2759 [0x66] = MMX_OP2(pcmpgtl),
2760 [0x67] = MMX_OP2(packuswb),
2761 [0x68] = MMX_OP2(punpckhbw),
2762 [0x69] = MMX_OP2(punpckhwd),
2763 [0x6a] = MMX_OP2(punpckhdq),
2764 [0x6b] = MMX_OP2(packssdw),
2765 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2766 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2767 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2768 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2769 [0x70] = { helper_pshufw_mmx,
2772 helper_pshuflw_xmm },
2773 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2774 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2775 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2776 [0x74] = MMX_OP2(pcmpeqb),
2777 [0x75] = MMX_OP2(pcmpeqw),
2778 [0x76] = MMX_OP2(pcmpeql),
2779 [0x77] = { SSE_DUMMY }, /* emms */
2780 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2781 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2782 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2783 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2784 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2785 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2786 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2787 [0xd1] = MMX_OP2(psrlw),
2788 [0xd2] = MMX_OP2(psrld),
2789 [0xd3] = MMX_OP2(psrlq),
2790 [0xd4] = MMX_OP2(paddq),
2791 [0xd5] = MMX_OP2(pmullw),
2792 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2793 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2794 [0xd8] = MMX_OP2(psubusb),
2795 [0xd9] = MMX_OP2(psubusw),
2796 [0xda] = MMX_OP2(pminub),
2797 [0xdb] = MMX_OP2(pand),
2798 [0xdc] = MMX_OP2(paddusb),
2799 [0xdd] = MMX_OP2(paddusw),
2800 [0xde] = MMX_OP2(pmaxub),
2801 [0xdf] = MMX_OP2(pandn),
2802 [0xe0] = MMX_OP2(pavgb),
2803 [0xe1] = MMX_OP2(psraw),
2804 [0xe2] = MMX_OP2(psrad),
2805 [0xe3] = MMX_OP2(pavgw),
2806 [0xe4] = MMX_OP2(pmulhuw),
2807 [0xe5] = MMX_OP2(pmulhw),
2808 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2809 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
2810 [0xe8] = MMX_OP2(psubsb),
2811 [0xe9] = MMX_OP2(psubsw),
2812 [0xea] = MMX_OP2(pminsw),
2813 [0xeb] = MMX_OP2(por),
2814 [0xec] = MMX_OP2(paddsb),
2815 [0xed] = MMX_OP2(paddsw),
2816 [0xee] = MMX_OP2(pmaxsw),
2817 [0xef] = MMX_OP2(pxor),
2818 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2819 [0xf1] = MMX_OP2(psllw),
2820 [0xf2] = MMX_OP2(pslld),
2821 [0xf3] = MMX_OP2(psllq),
2822 [0xf4] = MMX_OP2(pmuludq),
2823 [0xf5] = MMX_OP2(pmaddwd),
2824 [0xf6] = MMX_OP2(psadbw),
2825 [0xf7] = MMX_OP2(maskmov),
2826 [0xf8] = MMX_OP2(psubb),
2827 [0xf9] = MMX_OP2(psubw),
2828 [0xfa] = MMX_OP2(psubl),
2829 [0xfb] = MMX_OP2(psubq),
2830 [0xfc] = MMX_OP2(paddb),
2831 [0xfd] = MMX_OP2(paddw),
2832 [0xfe] = MMX_OP2(paddl),
/* Immediate shift group (0x71/0x72/0x73): rows of 8 per element width
   (word / dword / qword), indexed by the modrm /reg field; column 0 = MMX,
   1 = XMM. psrldq/pslldq exist only in XMM form. */
2835 static void *sse_op_table2[3 * 8][2] = {
2836 [0 + 2] = MMX_OP2(psrlw),
2837 [0 + 4] = MMX_OP2(psraw),
2838 [0 + 6] = MMX_OP2(psllw),
2839 [8 + 2] = MMX_OP2(psrld),
2840 [8 + 4] = MMX_OP2(psrad),
2841 [8 + 6] = MMX_OP2(pslld),
2842 [16 + 2] = MMX_OP2(psrlq),
2843 [16 + 3] = { NULL, helper_psrldq_xmm },
2844 [16 + 6] = MMX_OP2(psllq),
2845 [16 + 7] = { NULL, helper_pslldq_xmm },
/* int <-> scalar-float conversion helpers; the 64-bit integer variants
   exist only when TARGET_X86_64 (X86_64_ONLY expands to NULL otherwise).
   The 32-bit entries between the visible ones are elided in this view. */
2848 static void *sse_op_table3[4 * 3] = {
2851 X86_64_ONLY(helper_cvtsq2ss),
2852 X86_64_ONLY(helper_cvtsq2sd),
2856 X86_64_ONLY(helper_cvttss2sq),
2857 X86_64_ONLY(helper_cvttsd2sq),
2861 X86_64_ONLY(helper_cvtss2sq),
2862 X86_64_ONLY(helper_cvtsd2sq),
/* NOTE(review): 8x4 helper table — entries not visible in this chunk;
   presumably compare/packed ops keyed like sse_op_table1's columns. */
2865 static void *sse_op_table4[8][4] = {
/* 3DNow! dispatch table, indexed by the opcode-suffix byte that follows
   the modrm bytes of 0x0F 0x0F instructions. */
2876 static void *sse_op_table5[256] = {
2877 [0x0c] = helper_pi2fw,
2878 [0x0d] = helper_pi2fd,
2879 [0x1c] = helper_pf2iw,
2880 [0x1d] = helper_pf2id,
2881 [0x8a] = helper_pfnacc,
2882 [0x8e] = helper_pfpnacc,
2883 [0x90] = helper_pfcmpge,
2884 [0x94] = helper_pfmin,
2885 [0x96] = helper_pfrcp,
2886 [0x97] = helper_pfrsqrt,
2887 [0x9a] = helper_pfsub,
2888 [0x9e] = helper_pfadd,
2889 [0xa0] = helper_pfcmpgt,
2890 [0xa4] = helper_pfmax,
2891 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2892 [0xa7] = helper_movq, /* pfrsqit1 */
2893 [0xaa] = helper_pfsubr,
2894 [0xae] = helper_pfacc,
2895 [0xb0] = helper_pfcmpeq,
2896 [0xb4] = helper_pfmul,
2897 [0xb6] = helper_movq, /* pfrcpit2 */
2898 [0xb7] = helper_pmulhrw_mmx,
2899 [0xbb] = helper_pswapd,
2900 [0xbf] = helper_pavgb_mmx /* pavgusb */
2903 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2905 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2906 int modrm, mod, rm, reg, reg_addr, offset_addr;
2910 if (s->prefix & PREFIX_DATA)
2912 else if (s->prefix & PREFIX_REPZ)
2914 else if (s->prefix & PREFIX_REPNZ)
2918 sse_op2 = sse_op_table1[b][b1];
2921 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2931 /* simple MMX/SSE operation */
2932 if (s->flags & HF_TS_MASK) {
2933 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2936 if (s->flags & HF_EM_MASK) {
2938 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2941 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2944 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2947 tcg_gen_helper_0_0(helper_emms);
2952 tcg_gen_helper_0_0(helper_emms);
2955 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2956 the static cpu state) */
2958 tcg_gen_helper_0_0(helper_enter_mmx);
2961 modrm = ldub_code(s->pc++);
2962 reg = ((modrm >> 3) & 7);
2965 mod = (modrm >> 6) & 3;
2966 if (sse_op2 == SSE_SPECIAL) {
2969 case 0x0e7: /* movntq */
2972 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2973 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2975 case 0x1e7: /* movntdq */
2976 case 0x02b: /* movntps */
2977 case 0x12b: /* movntps */
2978 case 0x3f0: /* lddqu */
2981 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2982 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2984 case 0x6e: /* movd mm, ea */
2985 #ifdef TARGET_X86_64
2986 if (s->dflag == 2) {
2987 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2988 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2992 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2993 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2994 offsetof(CPUX86State,fpregs[reg].mmx));
2995 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2998 case 0x16e: /* movd xmm, ea */
2999 #ifdef TARGET_X86_64
3000 if (s->dflag == 2) {
3001 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3002 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3003 offsetof(CPUX86State,xmm_regs[reg]));
3004 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3008 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3009 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3010 offsetof(CPUX86State,xmm_regs[reg]));
3011 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3012 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3015 case 0x6f: /* movq mm, ea */
3017 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3018 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3021 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3022 offsetof(CPUX86State,fpregs[rm].mmx));
3023 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3024 offsetof(CPUX86State,fpregs[reg].mmx));
3027 case 0x010: /* movups */
3028 case 0x110: /* movupd */
3029 case 0x028: /* movaps */
3030 case 0x128: /* movapd */
3031 case 0x16f: /* movdqa xmm, ea */
3032 case 0x26f: /* movdqu xmm, ea */
3034 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3035 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3037 rm = (modrm & 7) | REX_B(s);
3038 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3039 offsetof(CPUX86State,xmm_regs[rm]));
3042 case 0x210: /* movss xmm, ea */
3044 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3045 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3046 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3048 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3049 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3050 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3052 rm = (modrm & 7) | REX_B(s);
3053 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3054 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3057 case 0x310: /* movsd xmm, ea */
3059 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3060 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3062 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3063 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3065 rm = (modrm & 7) | REX_B(s);
3066 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3067 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3070 case 0x012: /* movlps */
3071 case 0x112: /* movlpd */
3073 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3074 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3077 rm = (modrm & 7) | REX_B(s);
3078 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3079 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3082 case 0x212: /* movsldup */
3084 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3085 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3087 rm = (modrm & 7) | REX_B(s);
3088 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3089 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3090 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3091 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3093 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3094 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3095 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3096 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3098 case 0x312: /* movddup */
3100 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3101 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3103 rm = (modrm & 7) | REX_B(s);
3104 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3105 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3107 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3108 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3110 case 0x016: /* movhps */
3111 case 0x116: /* movhpd */
3113 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3114 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3117 rm = (modrm & 7) | REX_B(s);
3118 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3119 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3122 case 0x216: /* movshdup */
3124 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3125 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3127 rm = (modrm & 7) | REX_B(s);
3128 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3129 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3130 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3131 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3133 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3134 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3135 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3136 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3138 case 0x7e: /* movd ea, mm */
3139 #ifdef TARGET_X86_64
3140 if (s->dflag == 2) {
3141 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3142 offsetof(CPUX86State,fpregs[reg].mmx));
3143 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3147 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3148 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3149 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3152 case 0x17e: /* movd ea, xmm */
3153 #ifdef TARGET_X86_64
3154 if (s->dflag == 2) {
3155 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3156 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3157 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3161 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3162 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3163 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3166 case 0x27e: /* movq xmm, ea */
3168 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3169 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3171 rm = (modrm & 7) | REX_B(s);
3172 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3173 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3175 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3177 case 0x7f: /* movq ea, mm */
3179 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3180 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3183 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3184 offsetof(CPUX86State,fpregs[reg].mmx));
3187 case 0x011: /* movups */
3188 case 0x111: /* movupd */
3189 case 0x029: /* movaps */
3190 case 0x129: /* movapd */
3191 case 0x17f: /* movdqa ea, xmm */
3192 case 0x27f: /* movdqu ea, xmm */
3194 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3195 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3197 rm = (modrm & 7) | REX_B(s);
3198 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3199 offsetof(CPUX86State,xmm_regs[reg]));
3202 case 0x211: /* movss ea, xmm */
3204 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3205 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3206 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3208 rm = (modrm & 7) | REX_B(s);
3209 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3210 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3213 case 0x311: /* movsd ea, xmm */
3215 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3216 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3218 rm = (modrm & 7) | REX_B(s);
3219 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3220 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3223 case 0x013: /* movlps */
3224 case 0x113: /* movlpd */
3226 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3227 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3232 case 0x017: /* movhps */
3233 case 0x117: /* movhpd */
3235 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3236 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3241 case 0x71: /* shift mm, im */
3244 case 0x171: /* shift xmm, im */
3247 val = ldub_code(s->pc++);
3249 gen_op_movl_T0_im(val);
3250 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3252 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3253 op1_offset = offsetof(CPUX86State,xmm_t0);
3255 gen_op_movl_T0_im(val);
3256 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3258 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3259 op1_offset = offsetof(CPUX86State,mmx_t0);
3261 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3265 rm = (modrm & 7) | REX_B(s);
3266 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3269 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3271 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3272 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3273 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3275 case 0x050: /* movmskps */
3276 rm = (modrm & 7) | REX_B(s);
3277 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3278 offsetof(CPUX86State,xmm_regs[rm]));
3279 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3280 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3281 gen_op_mov_reg_T0(OT_LONG, reg);
3283 case 0x150: /* movmskpd */
3284 rm = (modrm & 7) | REX_B(s);
3285 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3286 offsetof(CPUX86State,xmm_regs[rm]));
3287 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3288 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3289 gen_op_mov_reg_T0(OT_LONG, reg);
3291 case 0x02a: /* cvtpi2ps */
3292 case 0x12a: /* cvtpi2pd */
3293 tcg_gen_helper_0_0(helper_enter_mmx);
3295 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3296 op2_offset = offsetof(CPUX86State,mmx_t0);
3297 gen_ldq_env_A0(s->mem_index, op2_offset);
3300 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3302 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3303 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3304 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3307 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3311 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3315 case 0x22a: /* cvtsi2ss */
3316 case 0x32a: /* cvtsi2sd */
3317 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3318 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3319 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3320 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3321 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3322 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3323 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3325 case 0x02c: /* cvttps2pi */
3326 case 0x12c: /* cvttpd2pi */
3327 case 0x02d: /* cvtps2pi */
3328 case 0x12d: /* cvtpd2pi */
3329 tcg_gen_helper_0_0(helper_enter_mmx);
3331 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3332 op2_offset = offsetof(CPUX86State,xmm_t0);
3333 gen_ldo_env_A0(s->mem_index, op2_offset);
3335 rm = (modrm & 7) | REX_B(s);
3336 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3338 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3339 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3340 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3343 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3346 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3349 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3352 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3356 case 0x22c: /* cvttss2si */
3357 case 0x32c: /* cvttsd2si */
3358 case 0x22d: /* cvtss2si */
3359 case 0x32d: /* cvtsd2si */
3360 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3362 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3364 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3366 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3367 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3369 op2_offset = offsetof(CPUX86State,xmm_t0);
3371 rm = (modrm & 7) | REX_B(s);
3372 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3374 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3376 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3377 if (ot == OT_LONG) {
3378 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3379 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3381 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3383 gen_op_mov_reg_T0(ot, reg);
3385 case 0xc4: /* pinsrw */
3388 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3389 val = ldub_code(s->pc++);
3392 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3393 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3396 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3397 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3400 case 0xc5: /* pextrw */
3404 val = ldub_code(s->pc++);
3407 rm = (modrm & 7) | REX_B(s);
3408 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3409 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3413 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3414 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3416 reg = ((modrm >> 3) & 7) | rex_r;
3417 gen_op_mov_reg_T0(OT_LONG, reg);
3419 case 0x1d6: /* movq ea, xmm */
3421 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3422 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3424 rm = (modrm & 7) | REX_B(s);
3425 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3426 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3427 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3430 case 0x2d6: /* movq2dq */
3431 tcg_gen_helper_0_0(helper_enter_mmx);
3433 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3434 offsetof(CPUX86State,fpregs[rm].mmx));
3435 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3437 case 0x3d6: /* movdq2q */
3438 tcg_gen_helper_0_0(helper_enter_mmx);
3439 rm = (modrm & 7) | REX_B(s);
3440 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3441 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3443 case 0xd7: /* pmovmskb */
3448 rm = (modrm & 7) | REX_B(s);
3449 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3450 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3453 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3454 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3456 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3457 reg = ((modrm >> 3) & 7) | rex_r;
3458 gen_op_mov_reg_T0(OT_LONG, reg);
3464 /* generic MMX or SSE operation */
3466 case 0x70: /* pshufx insn */
3467 case 0xc6: /* pshufx insn */
3468 case 0xc2: /* compare insns */
3475 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3477 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3478 op2_offset = offsetof(CPUX86State,xmm_t0);
3479 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3481 /* specific case for SSE single instructions */
3484 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3485 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3488 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3491 gen_ldo_env_A0(s->mem_index, op2_offset);
3494 rm = (modrm & 7) | REX_B(s);
3495 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3498 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3500 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3501 op2_offset = offsetof(CPUX86State,mmx_t0);
3502 gen_ldq_env_A0(s->mem_index, op2_offset);
3505 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3509 case 0x0f: /* 3DNow! data insns */
3510 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3512 val = ldub_code(s->pc++);
3513 sse_op2 = sse_op_table5[val];
3516 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3517 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3518 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3520 case 0x70: /* pshufx insn */
3521 case 0xc6: /* pshufx insn */
3522 val = ldub_code(s->pc++);
3523 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3524 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3525 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3529 val = ldub_code(s->pc++);
3532 sse_op2 = sse_op_table4[val][b1];
3533 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3534 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3535 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3538 /* maskmov : we must prepare A0 */
3541 #ifdef TARGET_X86_64
3542 if (s->aflag == 2) {
3543 gen_op_movq_A0_reg(R_EDI);
3547 gen_op_movl_A0_reg(R_EDI);
3549 gen_op_andl_A0_ffff();
3551 gen_add_A0_ds_seg(s);
3553 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3554 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3555 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3558 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3559 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3560 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3563 if (b == 0x2e || b == 0x2f) {
3564 /* just to keep the EFLAGS optimization correct */
3566 s->cc_op = CC_OP_EFLAGS;
3571 /* convert one instruction. s->is_jmp is set if the translation must
3572 be stopped. Return the next pc value */
3573 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3575 int b, prefixes, aflag, dflag;
3577 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3578 target_ulong next_eip, tval;
3588 #ifdef TARGET_X86_64
3593 s->rip_offset = 0; /* for relative ip address */
3595 b = ldub_code(s->pc);
3597 /* check prefixes */
3598 #ifdef TARGET_X86_64
3602 prefixes |= PREFIX_REPZ;
3605 prefixes |= PREFIX_REPNZ;
3608 prefixes |= PREFIX_LOCK;
3629 prefixes |= PREFIX_DATA;
3632 prefixes |= PREFIX_ADR;
3636 rex_w = (b >> 3) & 1;
3637 rex_r = (b & 0x4) << 1;
3638 s->rex_x = (b & 0x2) << 2;
3639 REX_B(s) = (b & 0x1) << 3;
3640 x86_64_hregs = 1; /* select uniform byte register addressing */
3644 /* 0x66 is ignored if rex.w is set */
3647 if (prefixes & PREFIX_DATA)
3650 if (!(prefixes & PREFIX_ADR))
3657 prefixes |= PREFIX_REPZ;
3660 prefixes |= PREFIX_REPNZ;
3663 prefixes |= PREFIX_LOCK;
3684 prefixes |= PREFIX_DATA;
3687 prefixes |= PREFIX_ADR;
3690 if (prefixes & PREFIX_DATA)
3692 if (prefixes & PREFIX_ADR)
3696 s->prefix = prefixes;
3700 /* lock generation */
3701 if (prefixes & PREFIX_LOCK)
3702 tcg_gen_helper_0_0(helper_lock);
3704 /* now check op code */
3708 /**************************/
3709 /* extended op code */
3710 b = ldub_code(s->pc++) | 0x100;
3713 /**************************/
3731 ot = dflag + OT_WORD;
3734 case 0: /* OP Ev, Gv */
3735 modrm = ldub_code(s->pc++);
3736 reg = ((modrm >> 3) & 7) | rex_r;
3737 mod = (modrm >> 6) & 3;
3738 rm = (modrm & 7) | REX_B(s);
3740 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3742 } else if (op == OP_XORL && rm == reg) {
3744 /* xor reg, reg optimisation */
3746 s->cc_op = CC_OP_LOGICB + ot;
3747 gen_op_mov_reg_T0(ot, reg);
3748 gen_op_update1_cc();
3753 gen_op_mov_TN_reg(ot, 1, reg);
3754 gen_op(s, op, ot, opreg);
3756 case 1: /* OP Gv, Ev */
3757 modrm = ldub_code(s->pc++);
3758 mod = (modrm >> 6) & 3;
3759 reg = ((modrm >> 3) & 7) | rex_r;
3760 rm = (modrm & 7) | REX_B(s);
3762 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3763 gen_op_ld_T1_A0(ot + s->mem_index);
3764 } else if (op == OP_XORL && rm == reg) {
3767 gen_op_mov_TN_reg(ot, 1, rm);
3769 gen_op(s, op, ot, reg);
3771 case 2: /* OP A, Iv */
3772 val = insn_get(s, ot);
3773 gen_op_movl_T1_im(val);
3774 gen_op(s, op, ot, OR_EAX);
3780 case 0x80: /* GRP1 */
3790 ot = dflag + OT_WORD;
3792 modrm = ldub_code(s->pc++);
3793 mod = (modrm >> 6) & 3;
3794 rm = (modrm & 7) | REX_B(s);
3795 op = (modrm >> 3) & 7;
3801 s->rip_offset = insn_const_size(ot);
3802 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3813 val = insn_get(s, ot);
3816 val = (int8_t)insn_get(s, OT_BYTE);
3819 gen_op_movl_T1_im(val);
3820 gen_op(s, op, ot, opreg);
3824 /**************************/
3825 /* inc, dec, and other misc arith */
3826 case 0x40 ... 0x47: /* inc Gv */
3827 ot = dflag ? OT_LONG : OT_WORD;
3828 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3830 case 0x48 ... 0x4f: /* dec Gv */
3831 ot = dflag ? OT_LONG : OT_WORD;
3832 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3834 case 0xf6: /* GRP3 */
3839 ot = dflag + OT_WORD;
3841 modrm = ldub_code(s->pc++);
3842 mod = (modrm >> 6) & 3;
3843 rm = (modrm & 7) | REX_B(s);
3844 op = (modrm >> 3) & 7;
3847 s->rip_offset = insn_const_size(ot);
3848 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3849 gen_op_ld_T0_A0(ot + s->mem_index);
3851 gen_op_mov_TN_reg(ot, 0, rm);
3856 val = insn_get(s, ot);
3857 gen_op_movl_T1_im(val);
3858 gen_op_testl_T0_T1_cc();
3859 s->cc_op = CC_OP_LOGICB + ot;
3862 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3864 gen_op_st_T0_A0(ot + s->mem_index);
3866 gen_op_mov_reg_T0(ot, rm);
3870 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3872 gen_op_st_T0_A0(ot + s->mem_index);
3874 gen_op_mov_reg_T0(ot, rm);
3876 gen_op_update_neg_cc();
3877 s->cc_op = CC_OP_SUBB + ot;
3882 gen_op_mulb_AL_T0();
3883 s->cc_op = CC_OP_MULB;
3886 gen_op_mulw_AX_T0();
3887 s->cc_op = CC_OP_MULW;
3891 gen_op_mull_EAX_T0();
3892 s->cc_op = CC_OP_MULL;
3894 #ifdef TARGET_X86_64
3896 gen_op_mulq_EAX_T0();
3897 s->cc_op = CC_OP_MULQ;
3905 gen_op_imulb_AL_T0();
3906 s->cc_op = CC_OP_MULB;
3909 gen_op_imulw_AX_T0();
3910 s->cc_op = CC_OP_MULW;
3914 gen_op_imull_EAX_T0();
3915 s->cc_op = CC_OP_MULL;
3917 #ifdef TARGET_X86_64
3919 gen_op_imulq_EAX_T0();
3920 s->cc_op = CC_OP_MULQ;
3928 gen_jmp_im(pc_start - s->cs_base);
3929 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3932 gen_jmp_im(pc_start - s->cs_base);
3933 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3937 gen_jmp_im(pc_start - s->cs_base);
3938 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3940 #ifdef TARGET_X86_64
3942 gen_jmp_im(pc_start - s->cs_base);
3943 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3951 gen_jmp_im(pc_start - s->cs_base);
3952 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3955 gen_jmp_im(pc_start - s->cs_base);
3956 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3960 gen_jmp_im(pc_start - s->cs_base);
3961 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3963 #ifdef TARGET_X86_64
3965 gen_jmp_im(pc_start - s->cs_base);
3966 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3976 case 0xfe: /* GRP4 */
3977 case 0xff: /* GRP5 */
3981 ot = dflag + OT_WORD;
3983 modrm = ldub_code(s->pc++);
3984 mod = (modrm >> 6) & 3;
3985 rm = (modrm & 7) | REX_B(s);
3986 op = (modrm >> 3) & 7;
3987 if (op >= 2 && b == 0xfe) {
3991 if (op == 2 || op == 4) {
3992 /* operand size for jumps is 64 bit */
3994 } else if (op == 3 || op == 5) {
3995 /* for far calls, the operand is 16 or 32 bit, even
3997 ot = dflag ? OT_LONG : OT_WORD;
3998 } else if (op == 6) {
3999 /* default push size is 64 bit */
4000 ot = dflag ? OT_QUAD : OT_WORD;
4004 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4005 if (op >= 2 && op != 3 && op != 5)
4006 gen_op_ld_T0_A0(ot + s->mem_index);
4008 gen_op_mov_TN_reg(ot, 0, rm);
4012 case 0: /* inc Ev */
4017 gen_inc(s, ot, opreg, 1);
4019 case 1: /* dec Ev */
4024 gen_inc(s, ot, opreg, -1);
4026 case 2: /* call Ev */
4027 /* XXX: optimize if memory (no 'and' is necessary) */
4029 gen_op_andl_T0_ffff();
4030 next_eip = s->pc - s->cs_base;
4031 gen_movtl_T1_im(next_eip);
4036 case 3: /* lcall Ev */
4037 gen_op_ld_T1_A0(ot + s->mem_index);
4038 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4039 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4041 if (s->pe && !s->vm86) {
4042 if (s->cc_op != CC_OP_DYNAMIC)
4043 gen_op_set_cc_op(s->cc_op);
4044 gen_jmp_im(pc_start - s->cs_base);
4045 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4046 tcg_gen_helper_0_4(helper_lcall_protected,
4047 cpu_tmp2_i32, cpu_T[1],
4048 tcg_const_i32(dflag),
4049 tcg_const_i32(s->pc - pc_start));
4051 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4052 tcg_gen_helper_0_4(helper_lcall_real,
4053 cpu_tmp2_i32, cpu_T[1],
4054 tcg_const_i32(dflag),
4055 tcg_const_i32(s->pc - s->cs_base));
4059 case 4: /* jmp Ev */
4061 gen_op_andl_T0_ffff();
4065 case 5: /* ljmp Ev */
4066 gen_op_ld_T1_A0(ot + s->mem_index);
4067 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4068 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4070 if (s->pe && !s->vm86) {
4071 if (s->cc_op != CC_OP_DYNAMIC)
4072 gen_op_set_cc_op(s->cc_op);
4073 gen_jmp_im(pc_start - s->cs_base);
4074 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4075 tcg_gen_helper_0_3(helper_ljmp_protected,
4078 tcg_const_i32(s->pc - pc_start));
4080 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4081 gen_op_movl_T0_T1();
4086 case 6: /* push Ev */
4094 case 0x84: /* test Ev, Gv */
4099 ot = dflag + OT_WORD;
4101 modrm = ldub_code(s->pc++);
4102 mod = (modrm >> 6) & 3;
4103 rm = (modrm & 7) | REX_B(s);
4104 reg = ((modrm >> 3) & 7) | rex_r;
4106 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4107 gen_op_mov_TN_reg(ot, 1, reg);
4108 gen_op_testl_T0_T1_cc();
4109 s->cc_op = CC_OP_LOGICB + ot;
4112 case 0xa8: /* test eAX, Iv */
4117 ot = dflag + OT_WORD;
4118 val = insn_get(s, ot);
4120 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4121 gen_op_movl_T1_im(val);
4122 gen_op_testl_T0_T1_cc();
4123 s->cc_op = CC_OP_LOGICB + ot;
4126 case 0x98: /* CWDE/CBW */
4127 #ifdef TARGET_X86_64
4129 gen_op_movslq_RAX_EAX();
4133 gen_op_movswl_EAX_AX();
4135 gen_op_movsbw_AX_AL();
4137 case 0x99: /* CDQ/CWD */
4138 #ifdef TARGET_X86_64
4140 gen_op_movsqo_RDX_RAX();
4144 gen_op_movslq_EDX_EAX();
4146 gen_op_movswl_DX_AX();
4148 case 0x1af: /* imul Gv, Ev */
4149 case 0x69: /* imul Gv, Ev, I */
4151 ot = dflag + OT_WORD;
4152 modrm = ldub_code(s->pc++);
4153 reg = ((modrm >> 3) & 7) | rex_r;
4155 s->rip_offset = insn_const_size(ot);
4158 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4160 val = insn_get(s, ot);
4161 gen_op_movl_T1_im(val);
4162 } else if (b == 0x6b) {
4163 val = (int8_t)insn_get(s, OT_BYTE);
4164 gen_op_movl_T1_im(val);
4166 gen_op_mov_TN_reg(ot, 1, reg);
4169 #ifdef TARGET_X86_64
4170 if (ot == OT_QUAD) {
4171 gen_op_imulq_T0_T1();
4174 if (ot == OT_LONG) {
4175 gen_op_imull_T0_T1();
4177 gen_op_imulw_T0_T1();
4179 gen_op_mov_reg_T0(ot, reg);
4180 s->cc_op = CC_OP_MULB + ot;
4183 case 0x1c1: /* xadd Ev, Gv */
4187 ot = dflag + OT_WORD;
4188 modrm = ldub_code(s->pc++);
4189 reg = ((modrm >> 3) & 7) | rex_r;
4190 mod = (modrm >> 6) & 3;
4192 rm = (modrm & 7) | REX_B(s);
4193 gen_op_mov_TN_reg(ot, 0, reg);
4194 gen_op_mov_TN_reg(ot, 1, rm);
4195 gen_op_addl_T0_T1();
4196 gen_op_mov_reg_T1(ot, reg);
4197 gen_op_mov_reg_T0(ot, rm);
4199 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4200 gen_op_mov_TN_reg(ot, 0, reg);
4201 gen_op_ld_T1_A0(ot + s->mem_index);
4202 gen_op_addl_T0_T1();
4203 gen_op_st_T0_A0(ot + s->mem_index);
4204 gen_op_mov_reg_T1(ot, reg);
4206 gen_op_update2_cc();
4207 s->cc_op = CC_OP_ADDB + ot;
4210 case 0x1b1: /* cmpxchg Ev, Gv */
4217 ot = dflag + OT_WORD;
4218 modrm = ldub_code(s->pc++);
4219 reg = ((modrm >> 3) & 7) | rex_r;
4220 mod = (modrm >> 6) & 3;
4221 gen_op_mov_TN_reg(ot, 1, reg);
4223 rm = (modrm & 7) | REX_B(s);
4224 gen_op_mov_TN_reg(ot, 0, rm);
4226 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4227 gen_op_ld_T0_A0(ot + s->mem_index);
4228 rm = 0; /* avoid warning */
4230 label1 = gen_new_label();
4231 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4232 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4233 gen_extu(ot, cpu_T3);
4234 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4235 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4236 gen_op_mov_reg_T0(ot, R_EAX);
4237 gen_set_label(label1);
4239 gen_op_mov_reg_T1(ot, rm);
4241 gen_op_st_T1_A0(ot + s->mem_index);
4243 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4244 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4245 s->cc_op = CC_OP_SUBB + ot;
4248 case 0x1c7: /* cmpxchg8b */
4249 modrm = ldub_code(s->pc++);
4250 mod = (modrm >> 6) & 3;
4251 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4253 gen_jmp_im(pc_start - s->cs_base);
4254 if (s->cc_op != CC_OP_DYNAMIC)
4255 gen_op_set_cc_op(s->cc_op);
4256 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4258 s->cc_op = CC_OP_EFLAGS;
4261 /**************************/
4263 case 0x50 ... 0x57: /* push */
4264 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4267 case 0x58 ... 0x5f: /* pop */
4269 ot = dflag ? OT_QUAD : OT_WORD;
4271 ot = dflag + OT_WORD;
4274 /* NOTE: order is important for pop %sp */
4276 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4278 case 0x60: /* pusha */
4283 case 0x61: /* popa */
4288 case 0x68: /* push Iv */
4291 ot = dflag ? OT_QUAD : OT_WORD;
4293 ot = dflag + OT_WORD;
4296 val = insn_get(s, ot);
4298 val = (int8_t)insn_get(s, OT_BYTE);
4299 gen_op_movl_T0_im(val);
4302 case 0x8f: /* pop Ev */
4304 ot = dflag ? OT_QUAD : OT_WORD;
4306 ot = dflag + OT_WORD;
4308 modrm = ldub_code(s->pc++);
4309 mod = (modrm >> 6) & 3;
4312 /* NOTE: order is important for pop %sp */
4314 rm = (modrm & 7) | REX_B(s);
4315 gen_op_mov_reg_T0(ot, rm);
4317 /* NOTE: order is important too for MMU exceptions */
4318 s->popl_esp_hack = 1 << ot;
4319 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4320 s->popl_esp_hack = 0;
4324 case 0xc8: /* enter */
4327 val = lduw_code(s->pc);
4329 level = ldub_code(s->pc++);
4330 gen_enter(s, val, level);
4333 case 0xc9: /* leave */
4334 /* XXX: exception not precise (ESP is updated before potential exception) */
4336 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4337 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4338 } else if (s->ss32) {
4339 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4340 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4342 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4343 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4347 ot = dflag ? OT_QUAD : OT_WORD;
4349 ot = dflag + OT_WORD;
4351 gen_op_mov_reg_T0(ot, R_EBP);
4354 case 0x06: /* push es */
4355 case 0x0e: /* push cs */
4356 case 0x16: /* push ss */
4357 case 0x1e: /* push ds */
4360 gen_op_movl_T0_seg(b >> 3);
4363 case 0x1a0: /* push fs */
4364 case 0x1a8: /* push gs */
4365 gen_op_movl_T0_seg((b >> 3) & 7);
4368 case 0x07: /* pop es */
4369 case 0x17: /* pop ss */
4370 case 0x1f: /* pop ds */
4375 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4378 /* if reg == SS, inhibit interrupts/trace. */
4379 /* If several instructions disable interrupts, only the
4381 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4382 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4386 gen_jmp_im(s->pc - s->cs_base);
4390 case 0x1a1: /* pop fs */
4391 case 0x1a9: /* pop gs */
4393 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4396 gen_jmp_im(s->pc - s->cs_base);
4401 /**************************/
4404 case 0x89: /* mov Gv, Ev */
4408 ot = dflag + OT_WORD;
4409 modrm = ldub_code(s->pc++);
4410 reg = ((modrm >> 3) & 7) | rex_r;
4412 /* generate a generic store */
4413 gen_ldst_modrm(s, modrm, ot, reg, 1);
4416 case 0xc7: /* mov Ev, Iv */
4420 ot = dflag + OT_WORD;
4421 modrm = ldub_code(s->pc++);
4422 mod = (modrm >> 6) & 3;
4424 s->rip_offset = insn_const_size(ot);
4425 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4427 val = insn_get(s, ot);
4428 gen_op_movl_T0_im(val);
4430 gen_op_st_T0_A0(ot + s->mem_index);
4432 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4435 case 0x8b: /* mov Ev, Gv */
4439 ot = OT_WORD + dflag;
4440 modrm = ldub_code(s->pc++);
4441 reg = ((modrm >> 3) & 7) | rex_r;
4443 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4444 gen_op_mov_reg_T0(ot, reg);
4446 case 0x8e: /* mov seg, Gv */
4447 modrm = ldub_code(s->pc++);
4448 reg = (modrm >> 3) & 7;
4449 if (reg >= 6 || reg == R_CS)
4451 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4452 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4454 /* if reg == SS, inhibit interrupts/trace */
4455 /* If several instructions disable interrupts, only the
4457 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4458 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4462 gen_jmp_im(s->pc - s->cs_base);
4466 case 0x8c: /* mov Gv, seg */
4467 modrm = ldub_code(s->pc++);
4468 reg = (modrm >> 3) & 7;
4469 mod = (modrm >> 6) & 3;
4472 gen_op_movl_T0_seg(reg);
4474 ot = OT_WORD + dflag;
4477 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4480 case 0x1b6: /* movzbS Gv, Eb */
4481 case 0x1b7: /* movzwS Gv, Eb */
4482 case 0x1be: /* movsbS Gv, Eb */
4483 case 0x1bf: /* movswS Gv, Eb */
4486 /* d_ot is the size of destination */
4487 d_ot = dflag + OT_WORD;
4488 /* ot is the size of source */
4489 ot = (b & 1) + OT_BYTE;
4490 modrm = ldub_code(s->pc++);
4491 reg = ((modrm >> 3) & 7) | rex_r;
4492 mod = (modrm >> 6) & 3;
4493 rm = (modrm & 7) | REX_B(s);
4496 gen_op_mov_TN_reg(ot, 0, rm);
4497 switch(ot | (b & 8)) {
4499 gen_op_movzbl_T0_T0();
4502 gen_op_movsbl_T0_T0();
4505 gen_op_movzwl_T0_T0();
4509 gen_op_movswl_T0_T0();
4512 gen_op_mov_reg_T0(d_ot, reg);
4514 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4516 gen_op_lds_T0_A0(ot + s->mem_index);
4518 gen_op_ldu_T0_A0(ot + s->mem_index);
4520 gen_op_mov_reg_T0(d_ot, reg);
4525 case 0x8d: /* lea */
4526 ot = dflag + OT_WORD;
4527 modrm = ldub_code(s->pc++);
4528 mod = (modrm >> 6) & 3;
4531 reg = ((modrm >> 3) & 7) | rex_r;
4532 /* we must ensure that no segment is added */
4536 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4538 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4541 case 0xa0: /* mov EAX, Ov */
4543 case 0xa2: /* mov Ov, EAX */
4546 target_ulong offset_addr;
4551 ot = dflag + OT_WORD;
4552 #ifdef TARGET_X86_64
4553 if (s->aflag == 2) {
4554 offset_addr = ldq_code(s->pc);
4556 gen_op_movq_A0_im(offset_addr);
4561 offset_addr = insn_get(s, OT_LONG);
4563 offset_addr = insn_get(s, OT_WORD);
4565 gen_op_movl_A0_im(offset_addr);
4567 gen_add_A0_ds_seg(s);
4569 gen_op_ld_T0_A0(ot + s->mem_index);
4570 gen_op_mov_reg_T0(ot, R_EAX);
4572 gen_op_mov_TN_reg(ot, 0, R_EAX);
4573 gen_op_st_T0_A0(ot + s->mem_index);
4577 case 0xd7: /* xlat */
4578 #ifdef TARGET_X86_64
4579 if (s->aflag == 2) {
4580 gen_op_movq_A0_reg(R_EBX);
4581 gen_op_addq_A0_AL();
4585 gen_op_movl_A0_reg(R_EBX);
4586 gen_op_addl_A0_AL();
4588 gen_op_andl_A0_ffff();
4590 gen_add_A0_ds_seg(s);
4591 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4592 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4594 case 0xb0 ... 0xb7: /* mov R, Ib */
4595 val = insn_get(s, OT_BYTE);
4596 gen_op_movl_T0_im(val);
4597 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4599 case 0xb8 ... 0xbf: /* mov R, Iv */
4600 #ifdef TARGET_X86_64
4604 tmp = ldq_code(s->pc);
4606 reg = (b & 7) | REX_B(s);
4607 gen_movtl_T0_im(tmp);
4608 gen_op_mov_reg_T0(OT_QUAD, reg);
4612 ot = dflag ? OT_LONG : OT_WORD;
4613 val = insn_get(s, ot);
4614 reg = (b & 7) | REX_B(s);
4615 gen_op_movl_T0_im(val);
4616 gen_op_mov_reg_T0(ot, reg);
4620 case 0x91 ... 0x97: /* xchg R, EAX */
4621 ot = dflag + OT_WORD;
4622 reg = (b & 7) | REX_B(s);
4626 case 0x87: /* xchg Ev, Gv */
4630 ot = dflag + OT_WORD;
4631 modrm = ldub_code(s->pc++);
4632 reg = ((modrm >> 3) & 7) | rex_r;
4633 mod = (modrm >> 6) & 3;
4635 rm = (modrm & 7) | REX_B(s);
4637 gen_op_mov_TN_reg(ot, 0, reg);
4638 gen_op_mov_TN_reg(ot, 1, rm);
4639 gen_op_mov_reg_T0(ot, rm);
4640 gen_op_mov_reg_T1(ot, reg);
4642 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4643 gen_op_mov_TN_reg(ot, 0, reg);
4644 /* for xchg, lock is implicit */
4645 if (!(prefixes & PREFIX_LOCK))
4646 tcg_gen_helper_0_0(helper_lock);
4647 gen_op_ld_T1_A0(ot + s->mem_index);
4648 gen_op_st_T0_A0(ot + s->mem_index);
4649 if (!(prefixes & PREFIX_LOCK))
4650 tcg_gen_helper_0_0(helper_unlock);
4651 gen_op_mov_reg_T1(ot, reg);
4654 case 0xc4: /* les Gv */
4659 case 0xc5: /* lds Gv */
4664 case 0x1b2: /* lss Gv */
4667 case 0x1b4: /* lfs Gv */
4670 case 0x1b5: /* lgs Gv */
4673 ot = dflag ? OT_LONG : OT_WORD;
4674 modrm = ldub_code(s->pc++);
4675 reg = ((modrm >> 3) & 7) | rex_r;
4676 mod = (modrm >> 6) & 3;
4679 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4680 gen_op_ld_T1_A0(ot + s->mem_index);
4681 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4682 /* load the segment first to handle exceptions properly */
4683 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4684 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4685 /* then put the data */
4686 gen_op_mov_reg_T1(ot, reg);
4688 gen_jmp_im(s->pc - s->cs_base);
4693 /************************/
4704 ot = dflag + OT_WORD;
4706 modrm = ldub_code(s->pc++);
4707 mod = (modrm >> 6) & 3;
4708 op = (modrm >> 3) & 7;
4714 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4717 opreg = (modrm & 7) | REX_B(s);
4722 gen_shift(s, op, ot, opreg, OR_ECX);
4725 shift = ldub_code(s->pc++);
4727 gen_shifti(s, op, ot, opreg, shift);
4742 case 0x1a4: /* shld imm */
4746 case 0x1a5: /* shld cl */
4750 case 0x1ac: /* shrd imm */
4754 case 0x1ad: /* shrd cl */
4758 ot = dflag + OT_WORD;
4759 modrm = ldub_code(s->pc++);
4760 mod = (modrm >> 6) & 3;
4761 rm = (modrm & 7) | REX_B(s);
4762 reg = ((modrm >> 3) & 7) | rex_r;
4764 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4769 gen_op_mov_TN_reg(ot, 1, reg);
4772 val = ldub_code(s->pc++);
4773 tcg_gen_movi_tl(cpu_T3, val);
4775 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4777 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4780 /************************/
4783 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4784 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4785 /* XXX: what to do if illegal op ? */
4786 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4789 modrm = ldub_code(s->pc++);
4790 mod = (modrm >> 6) & 3;
4792 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4795 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4797 case 0x00 ... 0x07: /* fxxxs */
4798 case 0x10 ... 0x17: /* fixxxl */
4799 case 0x20 ... 0x27: /* fxxxl */
4800 case 0x30 ... 0x37: /* fixxx */
4807 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4808 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4809 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4812 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4813 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4814 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4817 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4818 (s->mem_index >> 2) - 1);
4819 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4823 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4824 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4825 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4829 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4831 /* fcomp needs pop */
4832 tcg_gen_helper_0_0(helper_fpop);
4836 case 0x08: /* flds */
4837 case 0x0a: /* fsts */
4838 case 0x0b: /* fstps */
4839 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4840 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4841 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4846 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4847 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4848 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4851 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4852 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4853 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4856 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4857 (s->mem_index >> 2) - 1);
4858 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4862 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4863 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4864 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4869 /* XXX: the corresponding CPUID bit must be tested ! */
4872 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4873 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4874 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4877 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4878 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4879 (s->mem_index >> 2) - 1);
4883 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4884 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4885 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4888 tcg_gen_helper_0_0(helper_fpop);
4893 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4894 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4895 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4898 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4899 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4900 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4903 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4904 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4905 (s->mem_index >> 2) - 1);
4909 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4910 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4911 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4915 tcg_gen_helper_0_0(helper_fpop);
4919 case 0x0c: /* fldenv mem */
4920 if (s->cc_op != CC_OP_DYNAMIC)
4921 gen_op_set_cc_op(s->cc_op);
4922 gen_jmp_im(pc_start - s->cs_base);
4923 tcg_gen_helper_0_2(helper_fldenv,
4924 cpu_A0, tcg_const_i32(s->dflag));
4926 case 0x0d: /* fldcw mem */
4927 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4928 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4929 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4931 case 0x0e: /* fnstenv mem */
4932 if (s->cc_op != CC_OP_DYNAMIC)
4933 gen_op_set_cc_op(s->cc_op);
4934 gen_jmp_im(pc_start - s->cs_base);
4935 tcg_gen_helper_0_2(helper_fstenv,
4936 cpu_A0, tcg_const_i32(s->dflag));
4938 case 0x0f: /* fnstcw mem */
4939 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4940 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4941 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4943 case 0x1d: /* fldt mem */
4944 if (s->cc_op != CC_OP_DYNAMIC)
4945 gen_op_set_cc_op(s->cc_op);
4946 gen_jmp_im(pc_start - s->cs_base);
4947 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4949 case 0x1f: /* fstpt mem */
4950 if (s->cc_op != CC_OP_DYNAMIC)
4951 gen_op_set_cc_op(s->cc_op);
4952 gen_jmp_im(pc_start - s->cs_base);
4953 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4954 tcg_gen_helper_0_0(helper_fpop);
4956 case 0x2c: /* frstor mem */
4957 if (s->cc_op != CC_OP_DYNAMIC)
4958 gen_op_set_cc_op(s->cc_op);
4959 gen_jmp_im(pc_start - s->cs_base);
4960 tcg_gen_helper_0_2(helper_frstor,
4961 cpu_A0, tcg_const_i32(s->dflag));
4963 case 0x2e: /* fnsave mem */
4964 if (s->cc_op != CC_OP_DYNAMIC)
4965 gen_op_set_cc_op(s->cc_op);
4966 gen_jmp_im(pc_start - s->cs_base);
4967 tcg_gen_helper_0_2(helper_fsave,
4968 cpu_A0, tcg_const_i32(s->dflag));
4970 case 0x2f: /* fnstsw mem */
4971 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
4972 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4973 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4975 case 0x3c: /* fbld */
4976 if (s->cc_op != CC_OP_DYNAMIC)
4977 gen_op_set_cc_op(s->cc_op);
4978 gen_jmp_im(pc_start - s->cs_base);
4979 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4981 case 0x3e: /* fbstp */
4982 if (s->cc_op != CC_OP_DYNAMIC)
4983 gen_op_set_cc_op(s->cc_op);
4984 gen_jmp_im(pc_start - s->cs_base);
4985 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4986 tcg_gen_helper_0_0(helper_fpop);
4988 case 0x3d: /* fildll */
4989 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4990 (s->mem_index >> 2) - 1);
4991 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
4993 case 0x3f: /* fistpll */
4994 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
4995 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4996 (s->mem_index >> 2) - 1);
4997 tcg_gen_helper_0_0(helper_fpop);
5003 /* register float ops */
5007 case 0x08: /* fld sti */
5008 tcg_gen_helper_0_0(helper_fpush);
5009 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5011 case 0x09: /* fxchg sti */
5012 case 0x29: /* fxchg4 sti, undocumented op */
5013 case 0x39: /* fxchg7 sti, undocumented op */
5014 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5016 case 0x0a: /* grp d9/2 */
5019 /* check exceptions (FreeBSD FPU probe) */
5020 if (s->cc_op != CC_OP_DYNAMIC)
5021 gen_op_set_cc_op(s->cc_op);
5022 gen_jmp_im(pc_start - s->cs_base);
5023 tcg_gen_helper_0_0(helper_fwait);
5029 case 0x0c: /* grp d9/4 */
5032 tcg_gen_helper_0_0(helper_fchs_ST0);
5035 tcg_gen_helper_0_0(helper_fabs_ST0);
5038 tcg_gen_helper_0_0(helper_fldz_FT0);
5039 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5042 tcg_gen_helper_0_0(helper_fxam_ST0);
5048 case 0x0d: /* grp d9/5 */
5052 tcg_gen_helper_0_0(helper_fpush);
5053 tcg_gen_helper_0_0(helper_fld1_ST0);
5056 tcg_gen_helper_0_0(helper_fpush);
5057 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5060 tcg_gen_helper_0_0(helper_fpush);
5061 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5064 tcg_gen_helper_0_0(helper_fpush);
5065 tcg_gen_helper_0_0(helper_fldpi_ST0);
5068 tcg_gen_helper_0_0(helper_fpush);
5069 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5072 tcg_gen_helper_0_0(helper_fpush);
5073 tcg_gen_helper_0_0(helper_fldln2_ST0);
5076 tcg_gen_helper_0_0(helper_fpush);
5077 tcg_gen_helper_0_0(helper_fldz_ST0);
5084 case 0x0e: /* grp d9/6 */
5087 tcg_gen_helper_0_0(helper_f2xm1);
5090 tcg_gen_helper_0_0(helper_fyl2x);
5093 tcg_gen_helper_0_0(helper_fptan);
5095 case 3: /* fpatan */
5096 tcg_gen_helper_0_0(helper_fpatan);
5098 case 4: /* fxtract */
5099 tcg_gen_helper_0_0(helper_fxtract);
5101 case 5: /* fprem1 */
5102 tcg_gen_helper_0_0(helper_fprem1);
5104 case 6: /* fdecstp */
5105 tcg_gen_helper_0_0(helper_fdecstp);
5108 case 7: /* fincstp */
5109 tcg_gen_helper_0_0(helper_fincstp);
5113 case 0x0f: /* grp d9/7 */
5116 tcg_gen_helper_0_0(helper_fprem);
5118 case 1: /* fyl2xp1 */
5119 tcg_gen_helper_0_0(helper_fyl2xp1);
5122 tcg_gen_helper_0_0(helper_fsqrt);
5124 case 3: /* fsincos */
5125 tcg_gen_helper_0_0(helper_fsincos);
5127 case 5: /* fscale */
5128 tcg_gen_helper_0_0(helper_fscale);
5130 case 4: /* frndint */
5131 tcg_gen_helper_0_0(helper_frndint);
5134 tcg_gen_helper_0_0(helper_fsin);
5138 tcg_gen_helper_0_0(helper_fcos);
5142 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5143 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5144 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5150 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5152 tcg_gen_helper_0_0(helper_fpop);
5154 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5155 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5159 case 0x02: /* fcom */
5160 case 0x22: /* fcom2, undocumented op */
5161 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5162 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5164 case 0x03: /* fcomp */
5165 case 0x23: /* fcomp3, undocumented op */
5166 case 0x32: /* fcomp5, undocumented op */
5167 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5168 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5169 tcg_gen_helper_0_0(helper_fpop);
5171 case 0x15: /* da/5 */
5173 case 1: /* fucompp */
5174 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5175 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5176 tcg_gen_helper_0_0(helper_fpop);
5177 tcg_gen_helper_0_0(helper_fpop);
5185 case 0: /* feni (287 only, just do nop here) */
5187 case 1: /* fdisi (287 only, just do nop here) */
5190 tcg_gen_helper_0_0(helper_fclex);
5192 case 3: /* fninit */
5193 tcg_gen_helper_0_0(helper_fninit);
5195 case 4: /* fsetpm (287 only, just do nop here) */
5201 case 0x1d: /* fucomi */
5202 if (s->cc_op != CC_OP_DYNAMIC)
5203 gen_op_set_cc_op(s->cc_op);
5204 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5205 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5206 gen_op_fcomi_dummy();
5207 s->cc_op = CC_OP_EFLAGS;
5209 case 0x1e: /* fcomi */
5210 if (s->cc_op != CC_OP_DYNAMIC)
5211 gen_op_set_cc_op(s->cc_op);
5212 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5213 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5214 gen_op_fcomi_dummy();
5215 s->cc_op = CC_OP_EFLAGS;
5217 case 0x28: /* ffree sti */
5218 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5220 case 0x2a: /* fst sti */
5221 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5223 case 0x2b: /* fstp sti */
5224 case 0x0b: /* fstp1 sti, undocumented op */
5225 case 0x3a: /* fstp8 sti, undocumented op */
5226 case 0x3b: /* fstp9 sti, undocumented op */
5227 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5228 tcg_gen_helper_0_0(helper_fpop);
5230 case 0x2c: /* fucom st(i) */
5231 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5232 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5234 case 0x2d: /* fucomp st(i) */
5235 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5236 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5237 tcg_gen_helper_0_0(helper_fpop);
5239 case 0x33: /* de/3 */
5241 case 1: /* fcompp */
5242 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5243 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5244 tcg_gen_helper_0_0(helper_fpop);
5245 tcg_gen_helper_0_0(helper_fpop);
5251 case 0x38: /* ffreep sti, undocumented op */
5252 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5253 tcg_gen_helper_0_0(helper_fpop);
5255 case 0x3c: /* df/4 */
5258 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5259 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5260 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5266 case 0x3d: /* fucomip */
5267 if (s->cc_op != CC_OP_DYNAMIC)
5268 gen_op_set_cc_op(s->cc_op);
5269 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5270 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5271 tcg_gen_helper_0_0(helper_fpop);
5272 gen_op_fcomi_dummy();
5273 s->cc_op = CC_OP_EFLAGS;
5275 case 0x3e: /* fcomip */
5276 if (s->cc_op != CC_OP_DYNAMIC)
5277 gen_op_set_cc_op(s->cc_op);
5278 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5279 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5280 tcg_gen_helper_0_0(helper_fpop);
5281 gen_op_fcomi_dummy();
5282 s->cc_op = CC_OP_EFLAGS;
5284 case 0x10 ... 0x13: /* fcmovxx */
5288 const static uint8_t fcmov_cc[8] = {
5294 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5296 l1 = gen_new_label();
5297 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5298 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5307 /************************/
5310 case 0xa4: /* movsS */
5315 ot = dflag + OT_WORD;
5317 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5318 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5324 case 0xaa: /* stosS */
5329 ot = dflag + OT_WORD;
5331 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5332 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5337 case 0xac: /* lodsS */
5342 ot = dflag + OT_WORD;
5343 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5344 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5349 case 0xae: /* scasS */
5354 ot = dflag + OT_WORD;
5355 if (prefixes & PREFIX_REPNZ) {
5356 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5357 } else if (prefixes & PREFIX_REPZ) {
5358 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5361 s->cc_op = CC_OP_SUBB + ot;
5365 case 0xa6: /* cmpsS */
5370 ot = dflag + OT_WORD;
5371 if (prefixes & PREFIX_REPNZ) {
5372 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5373 } else if (prefixes & PREFIX_REPZ) {
5374 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5377 s->cc_op = CC_OP_SUBB + ot;
5380 case 0x6c: /* insS */
5385 ot = dflag ? OT_LONG : OT_WORD;
5386 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5387 gen_op_andl_T0_ffff();
5388 gen_check_io(s, ot, pc_start - s->cs_base,
5389 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5390 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5391 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5396 case 0x6e: /* outsS */
5401 ot = dflag ? OT_LONG : OT_WORD;
5402 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5403 gen_op_andl_T0_ffff();
5404 gen_check_io(s, ot, pc_start - s->cs_base,
5405 svm_is_rep(prefixes) | 4);
5406 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5407 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5413 /************************/
5421 ot = dflag ? OT_LONG : OT_WORD;
5422 val = ldub_code(s->pc++);
5423 gen_op_movl_T0_im(val);
5424 gen_check_io(s, ot, pc_start - s->cs_base,
5425 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5426 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5427 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5428 gen_op_mov_reg_T1(ot, R_EAX);
5435 ot = dflag ? OT_LONG : OT_WORD;
5436 val = ldub_code(s->pc++);
5437 gen_op_movl_T0_im(val);
5438 gen_check_io(s, ot, pc_start - s->cs_base,
5439 svm_is_rep(prefixes));
5440 gen_op_mov_TN_reg(ot, 1, R_EAX);
5442 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5443 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5444 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5445 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5452 ot = dflag ? OT_LONG : OT_WORD;
5453 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5454 gen_op_andl_T0_ffff();
5455 gen_check_io(s, ot, pc_start - s->cs_base,
5456 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5457 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5458 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5459 gen_op_mov_reg_T1(ot, R_EAX);
5466 ot = dflag ? OT_LONG : OT_WORD;
5467 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5468 gen_op_andl_T0_ffff();
5469 gen_check_io(s, ot, pc_start - s->cs_base,
5470 svm_is_rep(prefixes));
5471 gen_op_mov_TN_reg(ot, 1, R_EAX);
5473 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5474 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5475 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5476 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5479 /************************/
5481 case 0xc2: /* ret im */
5482 val = ldsw_code(s->pc);
5485 if (CODE64(s) && s->dflag)
5487 gen_stack_update(s, val + (2 << s->dflag));
5489 gen_op_andl_T0_ffff();
5493 case 0xc3: /* ret */
5497 gen_op_andl_T0_ffff();
5501 case 0xca: /* lret im */
5502 val = ldsw_code(s->pc);
5505 if (s->pe && !s->vm86) {
5506 if (s->cc_op != CC_OP_DYNAMIC)
5507 gen_op_set_cc_op(s->cc_op);
5508 gen_jmp_im(pc_start - s->cs_base);
5509 tcg_gen_helper_0_2(helper_lret_protected,
5510 tcg_const_i32(s->dflag),
5511 tcg_const_i32(val));
5515 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5517 gen_op_andl_T0_ffff();
5518 /* NOTE: keeping EIP updated is not a problem in case of
5522 gen_op_addl_A0_im(2 << s->dflag);
5523 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5524 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5525 /* add stack offset */
5526 gen_stack_update(s, val + (4 << s->dflag));
5530 case 0xcb: /* lret */
5533 case 0xcf: /* iret */
5534 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5538 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5539 s->cc_op = CC_OP_EFLAGS;
5540 } else if (s->vm86) {
5542 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5544 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5545 s->cc_op = CC_OP_EFLAGS;
5548 if (s->cc_op != CC_OP_DYNAMIC)
5549 gen_op_set_cc_op(s->cc_op);
5550 gen_jmp_im(pc_start - s->cs_base);
5551 tcg_gen_helper_0_2(helper_iret_protected,
5552 tcg_const_i32(s->dflag),
5553 tcg_const_i32(s->pc - s->cs_base));
5554 s->cc_op = CC_OP_EFLAGS;
5558 case 0xe8: /* call im */
5561 tval = (int32_t)insn_get(s, OT_LONG);
5563 tval = (int16_t)insn_get(s, OT_WORD);
5564 next_eip = s->pc - s->cs_base;
5568 gen_movtl_T0_im(next_eip);
5573 case 0x9a: /* lcall im */
5575 unsigned int selector, offset;
5579 ot = dflag ? OT_LONG : OT_WORD;
5580 offset = insn_get(s, ot);
5581 selector = insn_get(s, OT_WORD);
5583 gen_op_movl_T0_im(selector);
5584 gen_op_movl_T1_imu(offset);
5587 case 0xe9: /* jmp im */
5589 tval = (int32_t)insn_get(s, OT_LONG);
5591 tval = (int16_t)insn_get(s, OT_WORD);
5592 tval += s->pc - s->cs_base;
5597 case 0xea: /* ljmp im */
5599 unsigned int selector, offset;
5603 ot = dflag ? OT_LONG : OT_WORD;
5604 offset = insn_get(s, ot);
5605 selector = insn_get(s, OT_WORD);
5607 gen_op_movl_T0_im(selector);
5608 gen_op_movl_T1_imu(offset);
5611 case 0xeb: /* jmp Jb */
5612 tval = (int8_t)insn_get(s, OT_BYTE);
5613 tval += s->pc - s->cs_base;
5618 case 0x70 ... 0x7f: /* jcc Jb */
5619 tval = (int8_t)insn_get(s, OT_BYTE);
5621 case 0x180 ... 0x18f: /* jcc Jv */
5623 tval = (int32_t)insn_get(s, OT_LONG);
5625 tval = (int16_t)insn_get(s, OT_WORD);
5628 next_eip = s->pc - s->cs_base;
5632 gen_jcc(s, b, tval, next_eip);
5635 case 0x190 ... 0x19f: /* setcc Gv */
5636 modrm = ldub_code(s->pc++);
5638 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5640 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5641 ot = dflag + OT_WORD;
5642 modrm = ldub_code(s->pc++);
5643 reg = ((modrm >> 3) & 7) | rex_r;
5644 mod = (modrm >> 6) & 3;
5647 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5648 gen_op_ld_T1_A0(ot + s->mem_index);
5650 rm = (modrm & 7) | REX_B(s);
5651 gen_op_mov_TN_reg(ot, 1, rm);
5653 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5656 /************************/
5658 case 0x9c: /* pushf */
5659 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5661 if (s->vm86 && s->iopl != 3) {
5662 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5664 if (s->cc_op != CC_OP_DYNAMIC)
5665 gen_op_set_cc_op(s->cc_op);
5666 gen_op_movl_T0_eflags();
5670 case 0x9d: /* popf */
5671 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5673 if (s->vm86 && s->iopl != 3) {
5674 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5679 gen_op_movl_eflags_T0_cpl0();
5681 gen_op_movw_eflags_T0_cpl0();
5684 if (s->cpl <= s->iopl) {
5686 gen_op_movl_eflags_T0_io();
5688 gen_op_movw_eflags_T0_io();
5692 gen_op_movl_eflags_T0();
5694 gen_op_movw_eflags_T0();
5699 s->cc_op = CC_OP_EFLAGS;
5700 /* abort translation because TF flag may change */
5701 gen_jmp_im(s->pc - s->cs_base);
5705 case 0x9e: /* sahf */
5708 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5709 if (s->cc_op != CC_OP_DYNAMIC)
5710 gen_op_set_cc_op(s->cc_op);
5711 gen_op_movb_eflags_T0();
5712 s->cc_op = CC_OP_EFLAGS;
5714 case 0x9f: /* lahf */
5717 if (s->cc_op != CC_OP_DYNAMIC)
5718 gen_op_set_cc_op(s->cc_op);
5719 gen_op_movl_T0_eflags();
5720 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5722 case 0xf5: /* cmc */
5723 if (s->cc_op != CC_OP_DYNAMIC)
5724 gen_op_set_cc_op(s->cc_op);
5726 s->cc_op = CC_OP_EFLAGS;
5728 case 0xf8: /* clc */
5729 if (s->cc_op != CC_OP_DYNAMIC)
5730 gen_op_set_cc_op(s->cc_op);
5732 s->cc_op = CC_OP_EFLAGS;
5734 case 0xf9: /* stc */
5735 if (s->cc_op != CC_OP_DYNAMIC)
5736 gen_op_set_cc_op(s->cc_op);
5738 s->cc_op = CC_OP_EFLAGS;
5740 case 0xfc: /* cld */
5741 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5742 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5744 case 0xfd: /* std */
5745 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5746 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5749 /************************/
5750 /* bit operations */
5751 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5752 ot = dflag + OT_WORD;
5753 modrm = ldub_code(s->pc++);
5754 op = (modrm >> 3) & 7;
5755 mod = (modrm >> 6) & 3;
5756 rm = (modrm & 7) | REX_B(s);
5759 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5760 gen_op_ld_T0_A0(ot + s->mem_index);
5762 gen_op_mov_TN_reg(ot, 0, rm);
5765 val = ldub_code(s->pc++);
5766 gen_op_movl_T1_im(val);
5771 case 0x1a3: /* bt Gv, Ev */
5774 case 0x1ab: /* bts */
5777 case 0x1b3: /* btr */
5780 case 0x1bb: /* btc */
5783 ot = dflag + OT_WORD;
5784 modrm = ldub_code(s->pc++);
5785 reg = ((modrm >> 3) & 7) | rex_r;
5786 mod = (modrm >> 6) & 3;
5787 rm = (modrm & 7) | REX_B(s);
5788 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5790 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5791 /* specific case: we need to add a displacement */
5792 gen_exts(ot, cpu_T[1]);
5793 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5794 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5795 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5796 gen_op_ld_T0_A0(ot + s->mem_index);
5798 gen_op_mov_TN_reg(ot, 0, rm);
5801 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5804 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5805 tcg_gen_movi_tl(cpu_cc_dst, 0);
5808 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5809 tcg_gen_movi_tl(cpu_tmp0, 1);
5810 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5811 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5814 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5815 tcg_gen_movi_tl(cpu_tmp0, 1);
5816 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5817 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5818 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5822 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5823 tcg_gen_movi_tl(cpu_tmp0, 1);
5824 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5825 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5828 s->cc_op = CC_OP_SARB + ot;
5831 gen_op_st_T0_A0(ot + s->mem_index);
5833 gen_op_mov_reg_T0(ot, rm);
5834 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5835 tcg_gen_movi_tl(cpu_cc_dst, 0);
5838 case 0x1bc: /* bsf */
5839 case 0x1bd: /* bsr */
5840 ot = dflag + OT_WORD;
5841 modrm = ldub_code(s->pc++);
5842 reg = ((modrm >> 3) & 7) | rex_r;
5843 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5844 /* NOTE: in order to handle the 0 case, we must load the
5845 result. It could be optimized with a generated jump */
5846 gen_op_mov_TN_reg(ot, 1, reg);
5847 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5848 gen_op_mov_reg_T1(ot, reg);
5849 s->cc_op = CC_OP_LOGICB + ot;
5851 /************************/
5853 case 0x27: /* daa */
5856 if (s->cc_op != CC_OP_DYNAMIC)
5857 gen_op_set_cc_op(s->cc_op);
5859 s->cc_op = CC_OP_EFLAGS;
5861 case 0x2f: /* das */
5864 if (s->cc_op != CC_OP_DYNAMIC)
5865 gen_op_set_cc_op(s->cc_op);
5867 s->cc_op = CC_OP_EFLAGS;
5869 case 0x37: /* aaa */
5872 if (s->cc_op != CC_OP_DYNAMIC)
5873 gen_op_set_cc_op(s->cc_op);
5875 s->cc_op = CC_OP_EFLAGS;
5877 case 0x3f: /* aas */
5880 if (s->cc_op != CC_OP_DYNAMIC)
5881 gen_op_set_cc_op(s->cc_op);
5883 s->cc_op = CC_OP_EFLAGS;
5885 case 0xd4: /* aam */
5888 val = ldub_code(s->pc++);
5890 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5893 s->cc_op = CC_OP_LOGICB;
5896 case 0xd5: /* aad */
5899 val = ldub_code(s->pc++);
5901 s->cc_op = CC_OP_LOGICB;
5903 /************************/
5905 case 0x90: /* nop */
5906 /* XXX: xchg + rex handling */
5907 /* XXX: correct lock test for all insn */
5908 if (prefixes & PREFIX_LOCK)
5910 if (prefixes & PREFIX_REPZ) {
5911 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5914 case 0x9b: /* fwait */
5915 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5916 (HF_MP_MASK | HF_TS_MASK)) {
5917 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5919 if (s->cc_op != CC_OP_DYNAMIC)
5920 gen_op_set_cc_op(s->cc_op);
5921 gen_jmp_im(pc_start - s->cs_base);
5922 tcg_gen_helper_0_0(helper_fwait);
5925 case 0xcc: /* int3 */
5926 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5928 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5930 case 0xcd: /* int N */
5931 val = ldub_code(s->pc++);
5932 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5934 if (s->vm86 && s->iopl != 3) {
5935 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5937 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5940 case 0xce: /* into */
5943 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5945 if (s->cc_op != CC_OP_DYNAMIC)
5946 gen_op_set_cc_op(s->cc_op);
5947 gen_jmp_im(pc_start - s->cs_base);
5948 gen_op_into(s->pc - pc_start);
5950 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5951 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5954 gen_debug(s, pc_start - s->cs_base);
5957 tb_flush(cpu_single_env);
5958 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5961 case 0xfa: /* cli */
5963 if (s->cpl <= s->iopl) {
5964 tcg_gen_helper_0_0(helper_cli);
5966 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5970 tcg_gen_helper_0_0(helper_cli);
5972 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5976 case 0xfb: /* sti */
5978 if (s->cpl <= s->iopl) {
5980 tcg_gen_helper_0_0(helper_sti);
5981 /* interruptions are enabled only the first insn after sti */
5982 /* If several instructions disable interrupts, only the
5984 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5985 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5986 /* give a chance to handle pending irqs */
5987 gen_jmp_im(s->pc - s->cs_base);
5990 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5996 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6000 case 0x62: /* bound */
6003 ot = dflag ? OT_LONG : OT_WORD;
6004 modrm = ldub_code(s->pc++);
6005 reg = (modrm >> 3) & 7;
6006 mod = (modrm >> 6) & 3;
6009 gen_op_mov_TN_reg(ot, 0, reg);
6010 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6011 gen_jmp_im(pc_start - s->cs_base);
6012 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6014 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6016 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6018 case 0x1c8 ... 0x1cf: /* bswap reg */
6019 reg = (b & 7) | REX_B(s);
6020 #ifdef TARGET_X86_64
6022 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6023 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6024 gen_op_mov_reg_T0(OT_QUAD, reg);
6028 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6030 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6031 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6032 tcg_gen_bswap_i32(tmp0, tmp0);
6033 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6034 gen_op_mov_reg_T0(OT_LONG, reg);
6038 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6039 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6040 gen_op_mov_reg_T0(OT_LONG, reg);
6044 case 0xd6: /* salc */
6047 if (s->cc_op != CC_OP_DYNAMIC)
6048 gen_op_set_cc_op(s->cc_op);
6051 case 0xe0: /* loopnz */
6052 case 0xe1: /* loopz */
6053 if (s->cc_op != CC_OP_DYNAMIC)
6054 gen_op_set_cc_op(s->cc_op);
6056 case 0xe2: /* loop */
6057 case 0xe3: /* jecxz */
6061 tval = (int8_t)insn_get(s, OT_BYTE);
6062 next_eip = s->pc - s->cs_base;
6067 l1 = gen_new_label();
6068 l2 = gen_new_label();
6071 gen_op_jz_ecx[s->aflag](l1);
6073 gen_op_dec_ECX[s->aflag]();
6076 gen_op_loop[s->aflag][b](l1);
6079 gen_jmp_im(next_eip);
6080 gen_op_jmp_label(l2);
6087 case 0x130: /* wrmsr */
6088 case 0x132: /* rdmsr */
6090 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6094 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6095 tcg_gen_helper_0_0(helper_rdmsr);
6097 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6098 tcg_gen_helper_0_0(helper_wrmsr);
6104 case 0x131: /* rdtsc */
6105 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6107 gen_jmp_im(pc_start - s->cs_base);
6108 tcg_gen_helper_0_0(helper_rdtsc);
6110 case 0x133: /* rdpmc */
6111 gen_jmp_im(pc_start - s->cs_base);
6112 tcg_gen_helper_0_0(helper_rdpmc);
6114 case 0x134: /* sysenter */
6118 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6120 if (s->cc_op != CC_OP_DYNAMIC) {
6121 gen_op_set_cc_op(s->cc_op);
6122 s->cc_op = CC_OP_DYNAMIC;
6124 gen_jmp_im(pc_start - s->cs_base);
6125 tcg_gen_helper_0_0(helper_sysenter);
6129 case 0x135: /* sysexit */
6133 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6135 if (s->cc_op != CC_OP_DYNAMIC) {
6136 gen_op_set_cc_op(s->cc_op);
6137 s->cc_op = CC_OP_DYNAMIC;
6139 gen_jmp_im(pc_start - s->cs_base);
6140 tcg_gen_helper_0_0(helper_sysexit);
6144 #ifdef TARGET_X86_64
6145 case 0x105: /* syscall */
6146 /* XXX: is it usable in real mode ? */
6147 if (s->cc_op != CC_OP_DYNAMIC) {
6148 gen_op_set_cc_op(s->cc_op);
6149 s->cc_op = CC_OP_DYNAMIC;
6151 gen_jmp_im(pc_start - s->cs_base);
6152 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6155 case 0x107: /* sysret */
6157 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6159 if (s->cc_op != CC_OP_DYNAMIC) {
6160 gen_op_set_cc_op(s->cc_op);
6161 s->cc_op = CC_OP_DYNAMIC;
6163 gen_jmp_im(pc_start - s->cs_base);
6164 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6165 /* condition codes are modified only in long mode */
6167 s->cc_op = CC_OP_EFLAGS;
6172 case 0x1a2: /* cpuid */
6173 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6175 tcg_gen_helper_0_0(helper_cpuid);
6177 case 0xf4: /* hlt */
6179 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6181 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6183 if (s->cc_op != CC_OP_DYNAMIC)
6184 gen_op_set_cc_op(s->cc_op);
6185 gen_jmp_im(s->pc - s->cs_base);
6186 tcg_gen_helper_0_0(helper_hlt);
6191 modrm = ldub_code(s->pc++);
6192 mod = (modrm >> 6) & 3;
6193 op = (modrm >> 3) & 7;
6196 if (!s->pe || s->vm86)
6198 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6200 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6204 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6207 if (!s->pe || s->vm86)
6210 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6212 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6214 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6215 gen_jmp_im(pc_start - s->cs_base);
6216 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6217 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6221 if (!s->pe || s->vm86)
6223 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6225 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6229 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6232 if (!s->pe || s->vm86)
6235 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6237 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6239 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6240 gen_jmp_im(pc_start - s->cs_base);
6241 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6242 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6247 if (!s->pe || s->vm86)
6249 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6250 if (s->cc_op != CC_OP_DYNAMIC)
6251 gen_op_set_cc_op(s->cc_op);
6256 s->cc_op = CC_OP_EFLAGS;
6263 modrm = ldub_code(s->pc++);
6264 mod = (modrm >> 6) & 3;
6265 op = (modrm >> 3) & 7;
6271 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6273 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6274 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6275 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6276 gen_add_A0_im(s, 2);
6277 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6279 gen_op_andl_T0_im(0xffffff);
6280 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6285 case 0: /* monitor */
6286 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6289 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6291 gen_jmp_im(pc_start - s->cs_base);
6292 #ifdef TARGET_X86_64
6293 if (s->aflag == 2) {
6294 gen_op_movq_A0_reg(R_EBX);
6295 gen_op_addq_A0_AL();
6299 gen_op_movl_A0_reg(R_EBX);
6300 gen_op_addl_A0_AL();
6302 gen_op_andl_A0_ffff();
6304 gen_add_A0_ds_seg(s);
6305 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6308 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6311 if (s->cc_op != CC_OP_DYNAMIC) {
6312 gen_op_set_cc_op(s->cc_op);
6313 s->cc_op = CC_OP_DYNAMIC;
6315 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6317 gen_jmp_im(s->pc - s->cs_base);
6318 tcg_gen_helper_0_0(helper_mwait);
6325 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6327 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6328 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6329 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6330 gen_add_A0_im(s, 2);
6331 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6333 gen_op_andl_T0_im(0xffffff);
6334 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6342 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6344 if (s->cc_op != CC_OP_DYNAMIC)
6345 gen_op_set_cc_op(s->cc_op);
6346 gen_jmp_im(s->pc - s->cs_base);
6347 tcg_gen_helper_0_0(helper_vmrun);
6348 s->cc_op = CC_OP_EFLAGS;
6351 case 1: /* VMMCALL */
6352 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6354 /* FIXME: cause #UD if hflags & SVM */
6355 tcg_gen_helper_0_0(helper_vmmcall);
6357 case 2: /* VMLOAD */
6358 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6360 tcg_gen_helper_0_0(helper_vmload);
6362 case 3: /* VMSAVE */
6363 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6365 tcg_gen_helper_0_0(helper_vmsave);
6368 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6370 tcg_gen_helper_0_0(helper_stgi);
6373 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6375 tcg_gen_helper_0_0(helper_clgi);
6377 case 6: /* SKINIT */
6378 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6380 tcg_gen_helper_0_0(helper_skinit);
6382 case 7: /* INVLPGA */
6383 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6385 tcg_gen_helper_0_0(helper_invlpga);
6390 } else if (s->cpl != 0) {
6391 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6393 if (gen_svm_check_intercept(s, pc_start,
6394 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6396 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6397 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6398 gen_add_A0_im(s, 2);
6399 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6401 gen_op_andl_T0_im(0xffffff);
6403 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6404 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6406 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6407 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6412 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6414 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6415 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6419 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6421 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6423 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6424 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6425 gen_jmp_im(s->pc - s->cs_base);
6429 case 7: /* invlpg */
6431 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6434 #ifdef TARGET_X86_64
6435 if (CODE64(s) && rm == 0) {
6437 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6438 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6439 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6440 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6447 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6449 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6450 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6451 gen_jmp_im(s->pc - s->cs_base);
6460 case 0x108: /* invd */
6461 case 0x109: /* wbinvd */
6463 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6465 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6470 case 0x63: /* arpl or movslS (x86_64) */
6471 #ifdef TARGET_X86_64
6474 /* d_ot is the size of destination */
6475 d_ot = dflag + OT_WORD;
6477 modrm = ldub_code(s->pc++);
6478 reg = ((modrm >> 3) & 7) | rex_r;
6479 mod = (modrm >> 6) & 3;
6480 rm = (modrm & 7) | REX_B(s);
6483 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6485 if (d_ot == OT_QUAD)
6486 gen_op_movslq_T0_T0();
6487 gen_op_mov_reg_T0(d_ot, reg);
6489 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6490 if (d_ot == OT_QUAD) {
6491 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6493 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6495 gen_op_mov_reg_T0(d_ot, reg);
6500 if (!s->pe || s->vm86)
6502 ot = dflag ? OT_LONG : OT_WORD;
6503 modrm = ldub_code(s->pc++);
6504 reg = (modrm >> 3) & 7;
6505 mod = (modrm >> 6) & 3;
6508 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6509 gen_op_ld_T0_A0(ot + s->mem_index);
6511 gen_op_mov_TN_reg(ot, 0, rm);
6513 gen_op_mov_TN_reg(ot, 1, reg);
6514 if (s->cc_op != CC_OP_DYNAMIC)
6515 gen_op_set_cc_op(s->cc_op);
6517 s->cc_op = CC_OP_EFLAGS;
6519 gen_op_st_T0_A0(ot + s->mem_index);
6521 gen_op_mov_reg_T0(ot, rm);
6523 gen_op_arpl_update();
6526 case 0x102: /* lar */
6527 case 0x103: /* lsl */
6528 if (!s->pe || s->vm86)
6530 ot = dflag ? OT_LONG : OT_WORD;
6531 modrm = ldub_code(s->pc++);
6532 reg = ((modrm >> 3) & 7) | rex_r;
6533 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6534 gen_op_mov_TN_reg(ot, 1, reg);
6535 if (s->cc_op != CC_OP_DYNAMIC)
6536 gen_op_set_cc_op(s->cc_op);
6541 s->cc_op = CC_OP_EFLAGS;
6542 gen_op_mov_reg_T1(ot, reg);
6545 modrm = ldub_code(s->pc++);
6546 mod = (modrm >> 6) & 3;
6547 op = (modrm >> 3) & 7;
6549 case 0: /* prefetchnta */
6550 case 1: /* prefetchnt0 */
6551 case 2: /* prefetchnt0 */
6552 case 3: /* prefetchnt0 */
6555 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6556 /* nothing more to do */
6558 default: /* nop (multi byte) */
6559 gen_nop_modrm(s, modrm);
6563 case 0x119 ... 0x11f: /* nop (multi byte) */
6564 modrm = ldub_code(s->pc++);
6565 gen_nop_modrm(s, modrm);
6567 case 0x120: /* mov reg, crN */
6568 case 0x122: /* mov crN, reg */
6570 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6572 modrm = ldub_code(s->pc++);
6573 if ((modrm & 0xc0) != 0xc0)
6575 rm = (modrm & 7) | REX_B(s);
6576 reg = ((modrm >> 3) & 7) | rex_r;
6588 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6589 gen_op_mov_TN_reg(ot, 0, rm);
6590 tcg_gen_helper_0_2(helper_movl_crN_T0,
6591 tcg_const_i32(reg), cpu_T[0]);
6592 gen_jmp_im(s->pc - s->cs_base);
6595 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6596 #if !defined(CONFIG_USER_ONLY)
6598 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6601 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6602 gen_op_mov_reg_T0(ot, rm);
6610 case 0x121: /* mov reg, drN */
6611 case 0x123: /* mov drN, reg */
6613 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6615 modrm = ldub_code(s->pc++);
6616 if ((modrm & 0xc0) != 0xc0)
6618 rm = (modrm & 7) | REX_B(s);
6619 reg = ((modrm >> 3) & 7) | rex_r;
6624 /* XXX: do it dynamically with CR4.DE bit */
6625 if (reg == 4 || reg == 5 || reg >= 8)
6628 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6629 gen_op_mov_TN_reg(ot, 0, rm);
6630 tcg_gen_helper_0_2(helper_movl_drN_T0,
6631 tcg_const_i32(reg), cpu_T[0]);
6632 gen_jmp_im(s->pc - s->cs_base);
6635 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6636 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6637 gen_op_mov_reg_T0(ot, rm);
6641 case 0x106: /* clts */
6643 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6645 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6646 tcg_gen_helper_0_0(helper_clts);
6647 /* abort block because static cpu state changed */
6648 gen_jmp_im(s->pc - s->cs_base);
6652 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6653 case 0x1c3: /* MOVNTI reg, mem */
6654 if (!(s->cpuid_features & CPUID_SSE2))
6656 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6657 modrm = ldub_code(s->pc++);
6658 mod = (modrm >> 6) & 3;
6661 reg = ((modrm >> 3) & 7) | rex_r;
6662 /* generate a generic store */
6663 gen_ldst_modrm(s, modrm, ot, reg, 1);
6666 modrm = ldub_code(s->pc++);
6667 mod = (modrm >> 6) & 3;
6668 op = (modrm >> 3) & 7;
6670 case 0: /* fxsave */
6671 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6672 (s->flags & HF_EM_MASK))
6674 if (s->flags & HF_TS_MASK) {
6675 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6678 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6679 if (s->cc_op != CC_OP_DYNAMIC)
6680 gen_op_set_cc_op(s->cc_op);
6681 gen_jmp_im(pc_start - s->cs_base);
6682 tcg_gen_helper_0_2(helper_fxsave,
6683 cpu_A0, tcg_const_i32((s->dflag == 2)));
6685 case 1: /* fxrstor */
6686 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6687 (s->flags & HF_EM_MASK))
6689 if (s->flags & HF_TS_MASK) {
6690 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6693 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6694 if (s->cc_op != CC_OP_DYNAMIC)
6695 gen_op_set_cc_op(s->cc_op);
6696 gen_jmp_im(pc_start - s->cs_base);
6697 tcg_gen_helper_0_2(helper_fxrstor,
6698 cpu_A0, tcg_const_i32((s->dflag == 2)));
6700 case 2: /* ldmxcsr */
6701 case 3: /* stmxcsr */
6702 if (s->flags & HF_TS_MASK) {
6703 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6706 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6709 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6711 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6712 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6714 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6715 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6718 case 5: /* lfence */
6719 case 6: /* mfence */
6720 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6723 case 7: /* sfence / clflush */
6724 if ((modrm & 0xc7) == 0xc0) {
6726 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6727 if (!(s->cpuid_features & CPUID_SSE))
6731 if (!(s->cpuid_features & CPUID_CLFLUSH))
6733 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6740 case 0x10d: /* 3DNow! prefetch(w) */
6741 modrm = ldub_code(s->pc++);
6742 mod = (modrm >> 6) & 3;
6745 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
6746 /* ignore for now */
6748 case 0x1aa: /* rsm */
6749 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6751 if (!(s->flags & HF_SMM_MASK))
6753 if (s->cc_op != CC_OP_DYNAMIC) {
6754 gen_op_set_cc_op(s->cc_op);
6755 s->cc_op = CC_OP_DYNAMIC;
6757 gen_jmp_im(s->pc - s->cs_base);
6758 tcg_gen_helper_0_0(helper_rsm);
6761 case 0x10e ... 0x10f:
6762 /* 3DNow! instructions, ignore prefixes */
6763 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6764 case 0x110 ... 0x117:
6765 case 0x128 ... 0x12f:
6766 case 0x150 ... 0x177:
6767 case 0x17c ... 0x17f:
6769 case 0x1c4 ... 0x1c6:
6770 case 0x1d0 ... 0x1fe:
6771 gen_sse(s, b, pc_start, rex_r);
6776 /* lock generation */
6777 if (s->prefix & PREFIX_LOCK)
6778 tcg_gen_helper_0_0(helper_unlock);
6781 if (s->prefix & PREFIX_LOCK)
6782 tcg_gen_helper_0_0(helper_unlock);
6783 /* XXX: ensure that no lock was generated */
6784 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
/* Callback registered via tcg_set_macro_func() (see optimize_flags_init
   below): expands a deferred TCG "macro" op identified by macro_id into
   real ops.  NOTE(review): this excerpt is a gappy line-numbered listing;
   the function's interior (opening brace, switch over macro_id, etc.) is
   missing between the two visible lines. */
6788 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/* One visible expansion: 32-bit unsigned divide of EDX:EAX by T0,
   via the divl helper — presumably the MACRO_TEST / divl case. */
6793 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
/* One-time initialization of the TCG frontend: sanity-check CCTable's
   size (it is indexed with a shift, so it must be an exact power of two
   per host register width), register the macro expansion callback, and
   create the global TCG values (env pointer, T0/T1/A0 temporaries and
   the lazy condition-code state cc_op/cc_src/cc_dst).
   NOTE(review): several lines ( braces, #else / #endif arms ) are
   missing from this excerpt between the visible lines. */
6799 void optimize_flags_init(void)
6801 #if TCG_TARGET_REG_BITS == 32
/* 32-bit host: each CCTable entry is 8 bytes; 16 on 64-bit hosts. */
6802 assert(sizeof(CCTable) == (1 << 3));
6804 assert(sizeof(CCTable) == (1 << 4));
6806 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
/* env lives permanently in the fixed host register TCG_AREG0. */
6808 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6809 #if TARGET_LONG_BITS > HOST_LONG_BITS
/* Target word wider than host word: T0/T1/A0 cannot fit in single host
   registers, so back them with CPUState memory slots instead. */
6810 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6811 TCG_AREG0, offsetof(CPUState, t0), "T0");
6812 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6813 TCG_AREG0, offsetof(CPUState, t1), "T1");
6814 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6815 TCG_AREG0, offsetof(CPUState, t2), "A0");
/* (else branch, original #else line missing here) map T0/T1/A0 onto
   fixed host registers AREG1..AREG3. */
6817 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6818 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6819 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6821 cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6822 TCG_AREG0, offsetof(CPUState, t3), "T3");
6823 #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6824 /* XXX: must be suppressed once there are less fixed registers */
/* i386 host hack: build one 64-bit global out of the AREG1/AREG2 pair. */
6825 cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
/* Lazy eflags evaluation state, kept in CPUState and read/written by
   gen_op_set_cc_op() and the cc helpers. */
6827 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6828 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6829 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6830 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6831 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6832 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
6835 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6836 basic block 'tb'. If search_pc is TRUE, also generate PC
6837 information for each intermediate instruction. */
/* Translate the guest basic block described by 'tb' into TCG ops in
   gen_opc_buf/gen_opparam_buf, one guest instruction at a time via
   disas_insn(), until a jump / static-state change / size limit stops it.
   The third parameter's declaration line is missing from this excerpt —
   presumably 'search_pc' (cf. the two wrappers below passing 0 and 1),
   which when true also records per-op guest PC / cc_op information for
   later exception restoration.  NOTE(review): this is a gappy
   line-numbered listing; braces, labels and several statements are
   missing between the visible lines. */
6838 static inline int gen_intermediate_code_internal(CPUState *env,
6839 TranslationBlock *tb,
6842 DisasContext dc1, *dc = &dc1;
6843 target_ulong pc_ptr;
6844 uint16_t *gen_opc_end;
6847 target_ulong pc_start;
6848 target_ulong cs_base;
6850 /* generate intermediate code */
6852 cs_base = tb->cs_base;
6854 cflags = tb->cflags;
/* Decode the static CPU state word (tb->flags) into the DisasContext so
   the decoder never has to consult env directly while translating. */
6856 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6857 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6858 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6859 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6861 dc->vm86 = (flags >> VM_SHIFT) & 1;
6862 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6863 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6864 dc->tf = (flags >> TF_SHIFT) & 1;
6865 dc->singlestep_enabled = env->singlestep_enabled;
/* Condition codes start unknown; instructions update cc_op lazily. */
6866 dc->cc_op = CC_OP_DYNAMIC;
6867 dc->cs_base = cs_base;
6869 dc->popl_esp_hack = 0;
6870 /* select memory access functions */
/* mem_index encodes the softmmu access index (kernel vs user) used as
   an offset into the ld/st op tables; hence the * 4 scaling. */
6872 if (flags & HF_SOFTMMU_MASK) {
6874 dc->mem_index = 2 * 4;
6876 dc->mem_index = 1 * 4;
/* Snapshot CPUID feature words so the decoder can reject instructions
   the configured CPU model does not advertise. */
6878 dc->cpuid_features = env->cpuid_features;
6879 dc->cpuid_ext_features = env->cpuid_ext_features;
6880 dc->cpuid_ext2_features = env->cpuid_ext2_features;
6881 #ifdef TARGET_X86_64
6882 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6883 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* Direct block chaining is only safe when no single-stepping, no TF
   trap and no pending interrupt-inhibit must be honoured. */
6886 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6887 (flags & HF_INHIBIT_IRQ_MASK)
6888 #ifndef CONFIG_SOFTMMU
6889 || (flags & HF_SOFTMMU_MASK)
6893 /* check addseg logic */
6894 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6895 printf("ERROR addseg\n");
/* Allocate the per-translation TCG temporaries used by the op helpers. */
6898 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6899 #if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
/* On other hosts tmp1 is a plain temp; on i386 it was created as a
   fixed register pair in optimize_flags_init(). */
6900 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6902 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6903 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6904 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6905 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6906 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6907 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6908 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6910 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6912 dc->is_jmp = DISAS_NEXT;
/* --- main translation loop (loop head missing from this excerpt) --- */
/* Stop at any debugger breakpoint set on the current guest PC. */
6917 if (env->nb_breakpoints > 0) {
6918 for(j = 0; j < env->nb_breakpoints; j++) {
6919 if (env->breakpoints[j] == pc_ptr) {
6920 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc bookkeeping: pad gen_opc_instr_start up to the current op
   index, then record PC and cc_op for this instruction's first op. */
6926 j = gen_opc_ptr - gen_opc_buf;
6930 gen_opc_instr_start[lj++] = 0;
6932 gen_opc_pc[lj] = pc_ptr;
6933 gen_opc_cc_op[lj] = dc->cc_op;
6934 gen_opc_instr_start[lj] = 1;
/* Decode and translate one guest instruction; returns the next PC. */
6936 pc_ptr = disas_insn(dc, pc_ptr);
6937 /* stop translation if indicated */
6940 /* if single step mode, we generate only one instruction and
6941 generate an exception */
6942 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6943 the flag and abort the translation to give the irqs a
6944 chance to happen */
6945 if (dc->tf || dc->singlestep_enabled ||
6946 (flags & HF_INHIBIT_IRQ_MASK) ||
6947 (cflags & CF_SINGLE_INSN)) {
6948 gen_jmp_im(pc_ptr - dc->cs_base);
6952 /* if too long translation, stop generation too */
/* Cap blocks so they fit in the op buffer and never span close to a
   guest page boundary (32-byte margin for the longest x86 insn). */
6953 if (gen_opc_ptr >= gen_opc_end ||
6954 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6955 gen_jmp_im(pc_ptr - dc->cs_base);
6960 *gen_opc_ptr = INDEX_op_end;
6961 /* we don't forget to fill the last values */
6963 j = gen_opc_ptr - gen_opc_buf;
6966 gen_opc_instr_start[lj++] = 0;
/* Optional logging of CPU state, guest disassembly and raw TCG ops. */
6970 if (loglevel & CPU_LOG_TB_CPU) {
6971 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6973 if (loglevel & CPU_LOG_TB_IN_ASM) {
6975 fprintf(logfile, "----------------\n");
6976 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6977 #ifdef TARGET_X86_64
/* disas_flags selects 16/32/64-bit disassembly mode for target_disas;
   the 64-bit branch is among the lines missing from this excerpt. */
6982 disas_flags = !dc->code32;
6983 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6984 fprintf(logfile, "\n");
6985 if (loglevel & CPU_LOG_TB_OP_OPT) {
6986 fprintf(logfile, "OP before opt:\n");
6987 tcg_dump_ops(&tcg_ctx, logfile);
6988 fprintf(logfile, "\n");
6994 tb->size = pc_ptr - pc_start;
/* Public entry point: translate 'tb' without per-op PC recording
   (search_pc = 0).  Brace lines are missing from this excerpt. */
6998 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7000 return gen_intermediate_code_internal(env, tb, 0);
/* Public entry point used for exception restoration: translate 'tb'
   WITH per-op PC/cc_op recording (search_pc = 1), so gen_pc_load() can
   later map a host PC back to a guest eip.  Brace lines are missing
   from this excerpt. */
7003 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7005 return gen_intermediate_code_internal(env, tb, 1);
7008 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7009 unsigned long searched_pc, int pc_pos, void *puc)
7013 if (loglevel & CPU_LOG_TB_OP) {
7015 fprintf(logfile, "RESTORE:\n");
7016 for(i = 0;i <= pc_pos; i++) {
7017 if (gen_opc_instr_start[i]) {
7018 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7021 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7022 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7023 (uint32_t)tb->cs_base);
7026 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7027 cc_op = gen_opc_cc_op[pc_pos];
7028 if (cc_op != CC_OP_DYNAMIC)