4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
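/* the PREFIX_* constants are bit flags; the decoder ORs them into the
   per instruction prefix state as prefixes are consumed and tests them
   with '&' (e.g. s->prefix & PREFIX_DATA) */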
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
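/* X86_64_ONLY()/X86_64_DEF() expand to their argument only when
   TARGET_X86_64 is defined; otherwise the helper table entries become
   NULL and the extra arguments vanish, so the same tables and function
   signatures compile on 32 bit only targets */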
58 //#define MACRO_TEST 1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
63 /* local register indexes (only used inside old micro ops) */
64 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65 static TCGv cpu_tmp5, cpu_tmp6;
68 static int x86_64_hregs;
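/* set when the current instruction has a REX prefix: with REX, byte
   register encodings 4-7 select SPL/BPL/SIL/DIL instead of AH/CH/DH/BH,
   which is why gen_op_mov_reg_TN/gen_op_mov_TN_reg test it */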
71 typedef struct DisasContext {
72 /* current insn context */
73 int override; /* -1 if no override */
76 target_ulong pc; /* pc = eip + cs_base */
77 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
78 static state change (stop translation) */
79 /* current block context */
80 target_ulong cs_base; /* base of CS segment */
81 int pe; /* protected mode */
82 int code32; /* 32 bit code segment */
84 int lma; /* long mode active */
85 int code64; /* 64 bit code segment */
88 int ss32; /* 32 bit stack segment */
89 int cc_op; /* current CC operation */
90 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
91 int f_st; /* currently unused */
92 int vm86; /* vm86 mode */
95 int tf; /* TF cpu flag */
96 int singlestep_enabled; /* "hardware" single step enabled */
97 int jmp_opt; /* use direct block chaining for direct jumps */
98 int mem_index; /* select memory access functions */
99 uint64_t flags; /* all execution flags */
100 struct TranslationBlock *tb;
101 int popl_esp_hack; /* for correct popl with esp base handling */
102 int rip_offset; /* only used in x86_64, but left for simplicity */
104 int cpuid_ext_features;
105 int cpuid_ext2_features;
106 int cpuid_ext3_features;
109 static void gen_eob(DisasContext *s);
110 static void gen_jmp(DisasContext *s, target_ulong eip);
111 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
113 /* i386 arith/logic operations */
133 OP_SHL1, /* undocumented */
157 /* I386 int registers */
158 OR_EAX, /* MUST be even numbered */
167 OR_TMP0 = 16, /* temporary operand register */
169 OR_A0, /* temporary register used when doing address evaluation */
172 static inline void gen_op_movl_T0_0(void)
174 tcg_gen_movi_tl(cpu_T[0], 0);
177 static inline void gen_op_movl_T0_im(int32_t val)
179 tcg_gen_movi_tl(cpu_T[0], val);
182 static inline void gen_op_movl_T0_imu(uint32_t val)
184 tcg_gen_movi_tl(cpu_T[0], val);
187 static inline void gen_op_movl_T1_im(int32_t val)
189 tcg_gen_movi_tl(cpu_T[1], val);
192 static inline void gen_op_movl_T1_imu(uint32_t val)
194 tcg_gen_movi_tl(cpu_T[1], val);
197 static inline void gen_op_movl_A0_im(uint32_t val)
199 tcg_gen_movi_tl(cpu_A0, val);
203 static inline void gen_op_movq_A0_im(int64_t val)
205 tcg_gen_movi_tl(cpu_A0, val);
209 static inline void gen_movtl_T0_im(target_ulong val)
211 tcg_gen_movi_tl(cpu_T[0], val);
214 static inline void gen_movtl_T1_im(target_ulong val)
216 tcg_gen_movi_tl(cpu_T[1], val);
219 static inline void gen_op_andl_T0_ffff(void)
221 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
224 static inline void gen_op_andl_T0_im(uint32_t val)
226 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
229 static inline void gen_op_movl_T0_T1(void)
231 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
234 static inline void gen_op_andl_A0_ffff(void)
236 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
241 #define NB_OP_SIZES 4
243 #else /* !TARGET_X86_64 */
245 #define NB_OP_SIZES 3
247 #endif /* !TARGET_X86_64 */
249 #if defined(WORDS_BIGENDIAN)
250 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
251 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
252 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
253 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
254 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
256 #define REG_B_OFFSET 0
257 #define REG_H_OFFSET 1
258 #define REG_W_OFFSET 0
259 #define REG_L_OFFSET 0
260 #define REG_LH_OFFSET 4
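/* the REG_*_OFFSET values are byte offsets inside the target_ulong slot
   of CPUState.regs[], so that 8/16/32 bit stores hit the architecturally
   correct bytes on both endiannesses: REG_B_OFFSET addresses the low
   byte, REG_H_OFFSET the high byte of the low word, REG_W_OFFSET the low
   word, REG_L_OFFSET the low 32 bits and REG_LH_OFFSET the upper 32 bits */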
263 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
267 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
268 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
270 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
274 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
278 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
279 /* high part of register set to zero */
280 tcg_gen_movi_tl(cpu_tmp0, 0);
281 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
285 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
290 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
296 static inline void gen_op_mov_reg_T0(int ot, int reg)
298 gen_op_mov_reg_TN(ot, 0, reg);
301 static inline void gen_op_mov_reg_T1(int ot, int reg)
303 gen_op_mov_reg_TN(ot, 1, reg);
306 static inline void gen_op_mov_reg_A0(int size, int reg)
310 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
314 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
315 /* high part of register set to zero */
316 tcg_gen_movi_tl(cpu_tmp0, 0);
317 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
321 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
326 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
332 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
336 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
339 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
344 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
349 static inline void gen_op_movl_A0_reg(int reg)
351 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
354 static inline void gen_op_addl_A0_im(int32_t val)
356 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
358 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
363 static inline void gen_op_addq_A0_im(int64_t val)
365 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
369 static void gen_add_A0_im(DisasContext *s, int val)
373 gen_op_addq_A0_im(val);
376 gen_op_addl_A0_im(val);
379 static inline void gen_op_addl_T0_T1(void)
381 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
384 static inline void gen_op_jmp_T0(void)
386 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
389 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
393 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
394 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
395 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
398 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
399 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
401 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
403 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
407 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
408 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
415 static inline void gen_op_add_reg_T0(int size, int reg)
419 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
420 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
421 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
424 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
425 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
427 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
429 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
433 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
434 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
435 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
441 static inline void gen_op_set_cc_op(int32_t val)
443 tcg_gen_movi_i32(cpu_cc_op, val);
446 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
448 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
450 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
451 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
453 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
457 static inline void gen_op_movl_A0_seg(int reg)
459 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
462 static inline void gen_op_addl_A0_seg(int reg)
464 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
465 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
467 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
472 static inline void gen_op_movq_A0_seg(int reg)
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
477 static inline void gen_op_addq_A0_seg(int reg)
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
480 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
483 static inline void gen_op_movq_A0_reg(int reg)
485 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
488 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
490 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
492 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
493 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
497 static inline void gen_op_lds_T0_A0(int idx)
499 int mem_index = (idx >> 2) - 1;
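/* 'idx' is ot + s->mem_index: the low two bits carry the operand size,
   while s->mem_index seemingly holds the softmmu memory index biased by
   one and pre-shifted left by two, hence the (idx >> 2) - 1 above */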
502 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
505 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
509 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
514 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
515 static inline void gen_op_ld_T0_A0(int idx)
517 int mem_index = (idx >> 2) - 1;
520 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
523 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
526 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
530 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
535 static inline void gen_op_ldu_T0_A0(int idx)
537 gen_op_ld_T0_A0(idx);
540 static inline void gen_op_ld_T1_A0(int idx)
542 int mem_index = (idx >> 2) - 1;
545 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
548 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
551 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
555 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
560 static inline void gen_op_st_T0_A0(int idx)
562 int mem_index = (idx >> 2) - 1;
565 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
568 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
571 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
575 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
580 static inline void gen_op_st_T1_A0(int idx)
582 int mem_index = (idx >> 2) - 1;
585 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
588 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
591 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
595 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
600 static inline void gen_jmp_im(target_ulong pc)
602 tcg_gen_movi_tl(cpu_tmp0, pc);
603 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
606 static inline void gen_string_movl_A0_ESI(DisasContext *s)
610 override = s->override;
614 gen_op_movq_A0_seg(override);
615 gen_op_addq_A0_reg_sN(0, R_ESI);
617 gen_op_movq_A0_reg(R_ESI);
623 if (s->addseg && override < 0)
626 gen_op_movl_A0_seg(override);
627 gen_op_addl_A0_reg_sN(0, R_ESI);
629 gen_op_movl_A0_reg(R_ESI);
632 /* 16 bit address, always override */
635 gen_op_movl_A0_reg(R_ESI);
636 gen_op_andl_A0_ffff();
637 gen_op_addl_A0_seg(override);
641 static inline void gen_string_movl_A0_EDI(DisasContext *s)
645 gen_op_movq_A0_reg(R_EDI);
650 gen_op_movl_A0_seg(R_ES);
651 gen_op_addl_A0_reg_sN(0, R_EDI);
653 gen_op_movl_A0_reg(R_EDI);
656 gen_op_movl_A0_reg(R_EDI);
657 gen_op_andl_A0_ffff();
658 gen_op_addl_A0_seg(R_ES);
662 static inline void gen_op_movl_T0_Dshift(int ot)
664 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
665 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
668 static void gen_extu(int ot, TCGv reg)
672 tcg_gen_ext8u_tl(reg, reg);
675 tcg_gen_ext16u_tl(reg, reg);
678 tcg_gen_ext32u_tl(reg, reg);
685 static void gen_exts(int ot, TCGv reg)
689 tcg_gen_ext8s_tl(reg, reg);
692 tcg_gen_ext16s_tl(reg, reg);
695 tcg_gen_ext32s_tl(reg, reg);
702 static inline void gen_op_jnz_ecx(int size, int label1)
704 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
705 gen_extu(size + 1, cpu_tmp0);
706 tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
709 static inline void gen_op_jz_ecx(int size, int label1)
711 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
712 gen_extu(size + 1, cpu_tmp0);
713 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
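/* note: s->aflag is 0/1/2 for 16/32/64 bit address size, so size + 1
   yields the OT_WORD/OT_LONG/OT_QUAD code expected by gen_extu */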
716 static void *helper_in_func[3] = {
722 static void *helper_out_func[3] = {
728 static void *gen_check_io_func[3] = {
734 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
738 target_ulong next_eip;
741 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
742 if (s->cc_op != CC_OP_DYNAMIC)
743 gen_op_set_cc_op(s->cc_op);
746 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
747 tcg_gen_helper_0_1(gen_check_io_func[ot],
750 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
752 if (s->cc_op != CC_OP_DYNAMIC)
753 gen_op_set_cc_op(s->cc_op);
757 svm_flags |= (1 << (4 + ot));
758 next_eip = s->pc - s->cs_base;
759 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
760 tcg_gen_helper_0_3(helper_svm_check_io,
762 tcg_const_i32(svm_flags),
763 tcg_const_i32(next_eip - cur_eip));
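/* two independent checks may be generated above: a privilege check
   (CPL > IOPL or vm86 mode) through gen_check_io_func, and an SVM IOIO
   intercept check through helper_svm_check_io when that intercept is
   enabled */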
767 static inline void gen_movs(DisasContext *s, int ot)
769 gen_string_movl_A0_ESI(s);
770 gen_op_ld_T0_A0(ot + s->mem_index);
771 gen_string_movl_A0_EDI(s);
772 gen_op_st_T0_A0(ot + s->mem_index);
773 gen_op_movl_T0_Dshift(ot);
774 gen_op_add_reg_T0(s->aflag, R_ESI);
775 gen_op_add_reg_T0(s->aflag, R_EDI);
778 static inline void gen_update_cc_op(DisasContext *s)
780 if (s->cc_op != CC_OP_DYNAMIC) {
781 gen_op_set_cc_op(s->cc_op);
782 s->cc_op = CC_OP_DYNAMIC;
786 static void gen_op_update1_cc(void)
788 tcg_gen_discard_tl(cpu_cc_src);
789 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
792 static void gen_op_update2_cc(void)
794 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
795 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
798 static inline void gen_op_cmpl_T0_T1_cc(void)
800 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
801 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
804 static inline void gen_op_testl_T0_T1_cc(void)
806 tcg_gen_discard_tl(cpu_cc_src);
807 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
810 static void gen_op_update_neg_cc(void)
812 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
813 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
816 /* compute eflags.C to reg */
817 static void gen_compute_eflags_c(TCGv reg)
819 #if TCG_TARGET_REG_BITS == 32
820 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
821 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
822 (long)cc_table + offsetof(CCTable, compute_c));
823 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
824 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
825 1, &cpu_tmp2_i32, 0, NULL);
827 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
828 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
829 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
830 (long)cc_table + offsetof(CCTable, compute_c));
831 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
832 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
833 1, &cpu_tmp2_i32, 0, NULL);
835 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
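/* cc_table is indexed by cc_op; each CCTable entry holds two function
   pointers (compute_all, compute_c), hence the shift by 3 on 32 bit
   hosts and by 4 on 64 bit hosts to form the entry address before the
   indirect call */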
838 /* compute all eflags into 'reg' */
839 static void gen_compute_eflags(TCGv reg)
841 #if TCG_TARGET_REG_BITS == 32
842 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
843 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
844 (long)cc_table + offsetof(CCTable, compute_all));
845 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
846 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
847 1, &cpu_tmp2_i32, 0, NULL);
849 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
850 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
851 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
852 (long)cc_table + offsetof(CCTable, compute_all));
853 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
854 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
855 1, &cpu_tmp2_i32, 0, NULL);
857 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
860 static inline void gen_setcc_slow_T0(int op)
864 gen_compute_eflags(cpu_T[0]);
865 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
866 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
869 gen_compute_eflags_c(cpu_T[0]);
872 gen_compute_eflags(cpu_T[0]);
873 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
874 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
877 gen_compute_eflags(cpu_tmp0);
878 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
879 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
880 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
883 gen_compute_eflags(cpu_T[0]);
884 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
885 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
888 gen_compute_eflags(cpu_T[0]);
889 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
890 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
893 gen_compute_eflags(cpu_tmp0);
894 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
895 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
896 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
897 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
901 gen_compute_eflags(cpu_tmp0);
902 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
903 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
904 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
905 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
906 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
907 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
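/* the shift counts above are the EFLAGS bit positions: CF is bit 0,
   PF bit 2, ZF bit 6, SF bit 7 and OF bit 11 */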
912 /* return true if setcc_slow is not needed (WARNING: must be kept in
913 sync with gen_jcc1) */
914 static int is_fast_jcc_case(DisasContext *s, int b)
917 jcc_op = (b >> 1) & 7;
919 /* we optimize the cmp/jcc case */
924 if (jcc_op == JCC_O || jcc_op == JCC_P)
928 /* some jumps are easy to compute */
953 if (jcc_op != JCC_Z && jcc_op != JCC_S)
963 /* generate a conditional jump to label 'l1' according to jump opcode
964 value 'b'. In the fast case, T0 is guaranteed not to be used. */
965 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
967 int inv, jcc_op, size, cond;
971 jcc_op = (b >> 1) & 7;
974 /* we optimize the cmp/jcc case */
980 size = cc_op - CC_OP_SUBB;
986 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
990 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
995 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1003 tcg_gen_brcond_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0,
1004 tcg_const_tl(0), l1);
1010 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1011 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1012 tcg_const_tl(0), l1);
1015 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1016 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1017 tcg_const_tl(0), l1);
1019 #ifdef TARGET_X86_64
1021 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1022 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1023 tcg_const_tl(0), l1);
1027 tcg_gen_brcond_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1028 tcg_const_tl(0), l1);
1034 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1037 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1039 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1043 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1044 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1048 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1049 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1051 #ifdef TARGET_X86_64
1054 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1055 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1062 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1066 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1069 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1071 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1075 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1076 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1080 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1081 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1083 #ifdef TARGET_X86_64
1086 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1087 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1094 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1102 /* some jumps are easy to compute */
1144 size = (cc_op - CC_OP_ADDB) & 3;
1147 size = (cc_op - CC_OP_ADDB) & 3;
1155 gen_setcc_slow_T0(jcc_op);
1156 tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1157 cpu_T[0], tcg_const_tl(0), l1);
1162 /* XXX: does not work with gdbstub "ice" single step - not a
1164 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1168 l1 = gen_new_label();
1169 l2 = gen_new_label();
1170 gen_op_jnz_ecx(s->aflag, l1);
1172 gen_jmp_tb(s, next_eip, 1);
1177 static inline void gen_stos(DisasContext *s, int ot)
1179 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1180 gen_string_movl_A0_EDI(s);
1181 gen_op_st_T0_A0(ot + s->mem_index);
1182 gen_op_movl_T0_Dshift(ot);
1183 gen_op_add_reg_T0(s->aflag, R_EDI);
1186 static inline void gen_lods(DisasContext *s, int ot)
1188 gen_string_movl_A0_ESI(s);
1189 gen_op_ld_T0_A0(ot + s->mem_index);
1190 gen_op_mov_reg_T0(ot, R_EAX);
1191 gen_op_movl_T0_Dshift(ot);
1192 gen_op_add_reg_T0(s->aflag, R_ESI);
1195 static inline void gen_scas(DisasContext *s, int ot)
1197 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1198 gen_string_movl_A0_EDI(s);
1199 gen_op_ld_T1_A0(ot + s->mem_index);
1200 gen_op_cmpl_T0_T1_cc();
1201 gen_op_movl_T0_Dshift(ot);
1202 gen_op_add_reg_T0(s->aflag, R_EDI);
1205 static inline void gen_cmps(DisasContext *s, int ot)
1207 gen_string_movl_A0_ESI(s);
1208 gen_op_ld_T0_A0(ot + s->mem_index);
1209 gen_string_movl_A0_EDI(s);
1210 gen_op_ld_T1_A0(ot + s->mem_index);
1211 gen_op_cmpl_T0_T1_cc();
1212 gen_op_movl_T0_Dshift(ot);
1213 gen_op_add_reg_T0(s->aflag, R_ESI);
1214 gen_op_add_reg_T0(s->aflag, R_EDI);
1217 static inline void gen_ins(DisasContext *s, int ot)
1219 gen_string_movl_A0_EDI(s);
1220 /* Note: we must do this dummy write first to be restartable in
1221 case of page fault. */
1223 gen_op_st_T0_A0(ot + s->mem_index);
1224 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1225 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1226 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1227 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1228 gen_op_st_T0_A0(ot + s->mem_index);
1229 gen_op_movl_T0_Dshift(ot);
1230 gen_op_add_reg_T0(s->aflag, R_EDI);
1233 static inline void gen_outs(DisasContext *s, int ot)
1235 gen_string_movl_A0_ESI(s);
1236 gen_op_ld_T0_A0(ot + s->mem_index);
1238 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1239 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1240 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1241 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1242 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1244 gen_op_movl_T0_Dshift(ot);
1245 gen_op_add_reg_T0(s->aflag, R_ESI);
1248 /* same method as Valgrind : we generate jumps to current or next
1250 #define GEN_REPZ(op) \
1251 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1252 target_ulong cur_eip, target_ulong next_eip) \
1255 gen_update_cc_op(s); \
1256 l2 = gen_jz_ecx_string(s, next_eip); \
1257 gen_ ## op(s, ot); \
1258 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1259 /* a loop would cause two single step exceptions if ECX = 1 \
1260 before rep string_insn */ \
1262 gen_op_jz_ecx(s->aflag, l2); \
1263 gen_jmp(s, cur_eip); \
1266 #define GEN_REPZ2(op) \
1267 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1268 target_ulong cur_eip, \
1269 target_ulong next_eip, \
1273 gen_update_cc_op(s); \
1274 l2 = gen_jz_ecx_string(s, next_eip); \
1275 gen_ ## op(s, ot); \
1276 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1277 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1278 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1280 gen_op_jz_ecx(s->aflag, l2); \
1281 gen_jmp(s, cur_eip); \
1292 static void *helper_fp_arith_ST0_FT0[8] = {
1293 helper_fadd_ST0_FT0,
1294 helper_fmul_ST0_FT0,
1295 helper_fcom_ST0_FT0,
1296 helper_fcom_ST0_FT0,
1297 helper_fsub_ST0_FT0,
1298 helper_fsubr_ST0_FT0,
1299 helper_fdiv_ST0_FT0,
1300 helper_fdivr_ST0_FT0,
1303 /* NOTE the exception in "r" op ordering */
1304 static void *helper_fp_arith_STN_ST0[8] = {
1305 helper_fadd_STN_ST0,
1306 helper_fmul_STN_ST0,
1309 helper_fsubr_STN_ST0,
1310 helper_fsub_STN_ST0,
1311 helper_fdivr_STN_ST0,
1312 helper_fdiv_STN_ST0,
1315 /* if d == OR_TMP0, it means memory operand (address in A0) */
1316 static void gen_op(DisasContext *s1, int op, int ot, int d)
1319 gen_op_mov_TN_reg(ot, 0, d);
1321 gen_op_ld_T0_A0(ot + s1->mem_index);
1325 if (s1->cc_op != CC_OP_DYNAMIC)
1326 gen_op_set_cc_op(s1->cc_op);
1327 gen_compute_eflags_c(cpu_tmp4);
1328 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1329 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1331 gen_op_mov_reg_T0(ot, d);
1333 gen_op_st_T0_A0(ot + s1->mem_index);
1334 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1335 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1336 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1337 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1338 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1339 s1->cc_op = CC_OP_DYNAMIC;
1342 if (s1->cc_op != CC_OP_DYNAMIC)
1343 gen_op_set_cc_op(s1->cc_op);
1344 gen_compute_eflags_c(cpu_tmp4);
1345 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1346 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1348 gen_op_mov_reg_T0(ot, d);
1350 gen_op_st_T0_A0(ot + s1->mem_index);
1351 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1352 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1353 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1354 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1355 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1356 s1->cc_op = CC_OP_DYNAMIC;
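/* for ADC and SBB the resulting cc_op depends on the run-time carry:
   cpu_tmp4 holds the carry in (0 or 1), and shifting it left by 2 before
   adding CC_OP_ADDB + ot (resp. CC_OP_SUBB + ot) selects either the
   ADD/SUB or the ADC/SBB group, since each CC_OP group spans the four
   operand sizes */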
1359 gen_op_addl_T0_T1();
1361 gen_op_mov_reg_T0(ot, d);
1363 gen_op_st_T0_A0(ot + s1->mem_index);
1364 gen_op_update2_cc();
1365 s1->cc_op = CC_OP_ADDB + ot;
1368 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1370 gen_op_mov_reg_T0(ot, d);
1372 gen_op_st_T0_A0(ot + s1->mem_index);
1373 gen_op_update2_cc();
1374 s1->cc_op = CC_OP_SUBB + ot;
1378 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1380 gen_op_mov_reg_T0(ot, d);
1382 gen_op_st_T0_A0(ot + s1->mem_index);
1383 gen_op_update1_cc();
1384 s1->cc_op = CC_OP_LOGICB + ot;
1387 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1389 gen_op_mov_reg_T0(ot, d);
1391 gen_op_st_T0_A0(ot + s1->mem_index);
1392 gen_op_update1_cc();
1393 s1->cc_op = CC_OP_LOGICB + ot;
1396 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1398 gen_op_mov_reg_T0(ot, d);
1400 gen_op_st_T0_A0(ot + s1->mem_index);
1401 gen_op_update1_cc();
1402 s1->cc_op = CC_OP_LOGICB + ot;
1405 gen_op_cmpl_T0_T1_cc();
1406 s1->cc_op = CC_OP_SUBB + ot;
1411 /* if d == OR_TMP0, it means memory operand (address in A0) */
1412 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1415 gen_op_mov_TN_reg(ot, 0, d);
1417 gen_op_ld_T0_A0(ot + s1->mem_index);
1418 if (s1->cc_op != CC_OP_DYNAMIC)
1419 gen_op_set_cc_op(s1->cc_op);
1421 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1422 s1->cc_op = CC_OP_INCB + ot;
1424 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1425 s1->cc_op = CC_OP_DECB + ot;
1428 gen_op_mov_reg_T0(ot, d);
1430 gen_op_st_T0_A0(ot + s1->mem_index);
1431 gen_compute_eflags_c(cpu_cc_src);
1432 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1435 /* XXX: add faster immediate case */
1436 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1437 int is_right, int is_arith)
1449 gen_op_ld_T0_A0(ot + s->mem_index);
1451 gen_op_mov_TN_reg(ot, 0, op1);
1453 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1455 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1459 gen_exts(ot, cpu_T[0]);
1460 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1461 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1463 gen_extu(ot, cpu_T[0]);
1464 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1465 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1468 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1469 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1474 gen_op_st_T0_A0(ot + s->mem_index);
1476 gen_op_mov_reg_T0(ot, op1);
1478 /* update eflags if non zero shift */
1479 if (s->cc_op != CC_OP_DYNAMIC)
1480 gen_op_set_cc_op(s->cc_op);
1482 shift_label = gen_new_label();
1483 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
1485 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1486 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1488 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1490 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1492 gen_set_label(shift_label);
1493 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
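/* the flags are only written under the run-time branch above because
   x86 leaves them unchanged when the masked shift count is zero; since
   the outcome is unknown at translation time, cc_op must stay dynamic */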
1496 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1499 tcg_gen_shli_tl(ret, arg1, arg2);
1501 tcg_gen_shri_tl(ret, arg1, -arg2);
1504 /* XXX: add faster immediate case */
1505 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1509 int label1, label2, data_bits;
1518 gen_op_ld_T0_A0(ot + s->mem_index);
1520 gen_op_mov_TN_reg(ot, 0, op1);
1522 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1524 /* Must test zero case to avoid using undefined behaviour in TCG
1526 label1 = gen_new_label();
1527 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
1530 tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
1532 tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
1534 gen_extu(ot, cpu_T[0]);
1535 tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
1537 data_bits = 8 << ot;
1538 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1539 fix TCG definition) */
1541 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1542 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1543 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1545 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1546 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1547 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1549 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1551 gen_set_label(label1);
1554 gen_op_st_T0_A0(ot + s->mem_index);
1556 gen_op_mov_reg_T0(ot, op1);
1559 if (s->cc_op != CC_OP_DYNAMIC)
1560 gen_op_set_cc_op(s->cc_op);
1562 label2 = gen_new_label();
1563 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);
1565 gen_compute_eflags(cpu_cc_src);
1566 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1567 tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
1568 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1569 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1570 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1572 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
1574 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
1575 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
1577 tcg_gen_discard_tl(cpu_cc_dst);
1578 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1580 gen_set_label(label2);
1581 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
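/* rotates only update C and O: when the count is non-zero the code
   above recomputes the full eflags, clears C and O, then ORs in C taken
   from the bit rotated into the carry position and O derived from the
   change of the top bit; nothing is written when the count is zero */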
1584 static void *helper_rotc[8] = {
1588 X86_64_ONLY(helper_rclq),
1592 X86_64_ONLY(helper_rcrq),
1595 /* XXX: add faster immediate = 1 case */
1596 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1601 if (s->cc_op != CC_OP_DYNAMIC)
1602 gen_op_set_cc_op(s->cc_op);
1606 gen_op_ld_T0_A0(ot + s->mem_index);
1608 gen_op_mov_TN_reg(ot, 0, op1);
1610 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1611 cpu_T[0], cpu_T[0], cpu_T[1]);
1614 gen_op_st_T0_A0(ot + s->mem_index);
1616 gen_op_mov_reg_T0(ot, op1);
1619 label1 = gen_new_label();
1620 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
1622 tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1623 tcg_gen_discard_tl(cpu_cc_dst);
1624 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1626 gen_set_label(label1);
1627 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1630 /* XXX: add faster immediate case */
1631 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1634 int label1, label2, data_bits;
1644 gen_op_ld_T0_A0(ot + s->mem_index);
1646 gen_op_mov_TN_reg(ot, 0, op1);
1648 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1649 /* Must test zero case to avoid using undefined behaviour in TCG
1651 label1 = gen_new_label();
1652 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
1654 tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
1655 if (ot == OT_WORD) {
1656 /* Note: we implement the Intel behaviour for shift count > 16 */
1658 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1659 tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
1660 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1661 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1663 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1665 /* only needed if count > 16, but a test would complicate the code */
1666 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1667 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
1669 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1671 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1673 /* XXX: not optimal */
1674 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1675 tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
1676 tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
1677 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1679 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1680 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1681 tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
1682 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1684 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1685 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1686 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1687 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1690 data_bits = 8 << ot;
1693 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1695 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1697 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1698 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1699 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1700 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1704 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1706 tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1708 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1709 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1710 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1711 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1714 tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);
1716 gen_set_label(label1);
1719 gen_op_st_T0_A0(ot + s->mem_index);
1721 gen_op_mov_reg_T0(ot, op1);
1724 if (s->cc_op != CC_OP_DYNAMIC)
1725 gen_op_set_cc_op(s->cc_op);
1727 label2 = gen_new_label();
1728 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);
1730 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1731 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1733 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1735 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1737 gen_set_label(label2);
1738 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1741 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1744 gen_op_mov_TN_reg(ot, 1, s);
1747 gen_rot_rm_T1(s1, ot, d, 0);
1750 gen_rot_rm_T1(s1, ot, d, 1);
1754 gen_shift_rm_T1(s1, ot, d, 0, 0);
1757 gen_shift_rm_T1(s1, ot, d, 1, 0);
1760 gen_shift_rm_T1(s1, ot, d, 1, 1);
1763 gen_rotc_rm_T1(s1, ot, d, 0);
1766 gen_rotc_rm_T1(s1, ot, d, 1);
1771 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1773 /* currently not optimized */
1774 gen_op_movl_T1_im(c);
1775 gen_shift(s1, op, ot, d, OR_TMP1);
1778 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1786 int mod, rm, code, override, must_add_seg;
1788 override = s->override;
1789 must_add_seg = s->addseg;
1792 mod = (modrm >> 6) & 3;
1804 code = ldub_code(s->pc++);
1805 scale = (code >> 6) & 3;
1806 index = ((code >> 3) & 7) | REX_X(s);
1813 if ((base & 7) == 5) {
1815 disp = (int32_t)ldl_code(s->pc);
1817 if (CODE64(s) && !havesib) {
1818 disp += s->pc + s->rip_offset;
1825 disp = (int8_t)ldub_code(s->pc++);
1829 disp = ldl_code(s->pc);
1835 /* for correct popl handling with esp */
1836 if (base == 4 && s->popl_esp_hack)
1837 disp += s->popl_esp_hack;
1838 #ifdef TARGET_X86_64
1839 if (s->aflag == 2) {
1840 gen_op_movq_A0_reg(base);
1842 gen_op_addq_A0_im(disp);
1847 gen_op_movl_A0_reg(base);
1849 gen_op_addl_A0_im(disp);
1852 #ifdef TARGET_X86_64
1853 if (s->aflag == 2) {
1854 gen_op_movq_A0_im(disp);
1858 gen_op_movl_A0_im(disp);
1861 /* XXX: index == 4 is always invalid */
1862 if (havesib && (index != 4 || scale != 0)) {
1863 #ifdef TARGET_X86_64
1864 if (s->aflag == 2) {
1865 gen_op_addq_A0_reg_sN(scale, index);
1869 gen_op_addl_A0_reg_sN(scale, index);
1874 if (base == R_EBP || base == R_ESP)
1879 #ifdef TARGET_X86_64
1880 if (s->aflag == 2) {
1881 gen_op_addq_A0_seg(override);
1885 gen_op_addl_A0_seg(override);
1892 disp = lduw_code(s->pc);
1894 gen_op_movl_A0_im(disp);
1895 rm = 0; /* avoid SS override */
1902 disp = (int8_t)ldub_code(s->pc++);
1906 disp = lduw_code(s->pc);
1912 gen_op_movl_A0_reg(R_EBX);
1913 gen_op_addl_A0_reg_sN(0, R_ESI);
1916 gen_op_movl_A0_reg(R_EBX);
1917 gen_op_addl_A0_reg_sN(0, R_EDI);
1920 gen_op_movl_A0_reg(R_EBP);
1921 gen_op_addl_A0_reg_sN(0, R_ESI);
1924 gen_op_movl_A0_reg(R_EBP);
1925 gen_op_addl_A0_reg_sN(0, R_EDI);
1928 gen_op_movl_A0_reg(R_ESI);
1931 gen_op_movl_A0_reg(R_EDI);
1934 gen_op_movl_A0_reg(R_EBP);
1938 gen_op_movl_A0_reg(R_EBX);
1942 gen_op_addl_A0_im(disp);
1943 gen_op_andl_A0_ffff();
1947 if (rm == 2 || rm == 3 || rm == 6)
1952 gen_op_addl_A0_seg(override);
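/* rm 2, 3 and 6 use BP as a base and therefore default to the SS
   segment unless an override prefix is present (the pure disp16 form
   was remapped to rm = 0 above to avoid this) */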
1962 static void gen_nop_modrm(DisasContext *s, int modrm)
1964 int mod, rm, base, code;
1966 mod = (modrm >> 6) & 3;
1976 code = ldub_code(s->pc++);
2012 /* used for LEA and MOV AX, mem */
2013 static void gen_add_A0_ds_seg(DisasContext *s)
2015 int override, must_add_seg;
2016 must_add_seg = s->addseg;
2018 if (s->override >= 0) {
2019 override = s->override;
2025 #ifdef TARGET_X86_64
2027 gen_op_addq_A0_seg(override);
2031 gen_op_addl_A0_seg(override);
2036 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
2038 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2040 int mod, rm, opreg, disp;
2042 mod = (modrm >> 6) & 3;
2043 rm = (modrm & 7) | REX_B(s);
2047 gen_op_mov_TN_reg(ot, 0, reg);
2048 gen_op_mov_reg_T0(ot, rm);
2050 gen_op_mov_TN_reg(ot, 0, rm);
2052 gen_op_mov_reg_T0(ot, reg);
2055 gen_lea_modrm(s, modrm, &opreg, &disp);
2058 gen_op_mov_TN_reg(ot, 0, reg);
2059 gen_op_st_T0_A0(ot + s->mem_index);
2061 gen_op_ld_T0_A0(ot + s->mem_index);
2063 gen_op_mov_reg_T0(ot, reg);
2068 static inline uint32_t insn_get(DisasContext *s, int ot)
2074 ret = ldub_code(s->pc);
2078 ret = lduw_code(s->pc);
2083 ret = ldl_code(s->pc);
2090 static inline int insn_const_size(unsigned int ot)
2098 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2100 TranslationBlock *tb;
2103 pc = s->cs_base + eip;
2105 /* NOTE: we handle the case where the TB spans two pages here */
2106 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2107 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2108 /* jump to same page: we can use a direct jump */
2109 tcg_gen_goto_tb(tb_num);
2111 tcg_gen_exit_tb((long)tb + tb_num);
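/* tcg_gen_goto_tb emits a patchable direct jump and
   tcg_gen_exit_tb((long)tb + tb_num) returns the TB pointer with the
   jump slot number in the low bits, which lets the execution loop chain
   this TB to its successor */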
2113 /* jump to another page: currently not optimized */
2119 static inline void gen_jcc(DisasContext *s, int b,
2120 target_ulong val, target_ulong next_eip)
2125 if (s->cc_op != CC_OP_DYNAMIC) {
2126 gen_op_set_cc_op(s->cc_op);
2127 s->cc_op = CC_OP_DYNAMIC;
2130 l1 = gen_new_label();
2131 gen_jcc1(s, cc_op, b, l1);
2133 gen_goto_tb(s, 0, next_eip);
2136 gen_goto_tb(s, 1, val);
2140 l1 = gen_new_label();
2141 l2 = gen_new_label();
2142 gen_jcc1(s, cc_op, b, l1);
2144 gen_jmp_im(next_eip);
2154 static void gen_setcc(DisasContext *s, int b)
2156 int inv, jcc_op, l1;
2158 if (is_fast_jcc_case(s, b)) {
2159 /* nominal case: we use a jump */
2160 tcg_gen_movi_tl(cpu_T[0], 0);
2161 l1 = gen_new_label();
2162 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2163 tcg_gen_movi_tl(cpu_T[0], 1);
2166 /* slow case: it is more efficient not to generate a jump,
2167 although it is questionable whether this optimization is
2170 jcc_op = (b >> 1) & 7;
2171 if (s->cc_op != CC_OP_DYNAMIC)
2172 gen_op_set_cc_op(s->cc_op);
2173 gen_setcc_slow_T0(jcc_op);
2175 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2180 static inline void gen_op_movl_T0_seg(int seg_reg)
2182 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2183 offsetof(CPUX86State,segs[seg_reg].selector));
2186 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2188 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2189 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2190 offsetof(CPUX86State,segs[seg_reg].selector));
2191 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2192 tcg_gen_st_tl(cpu_T[0], cpu_env,
2193 offsetof(CPUX86State,segs[seg_reg].base));
2196 /* move T0 to seg_reg and compute if the CPU state may change. Never
2197 call this function with seg_reg == R_CS */
2198 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2200 if (s->pe && !s->vm86) {
2201 /* XXX: optimize by finding processor state dynamically */
2202 if (s->cc_op != CC_OP_DYNAMIC)
2203 gen_op_set_cc_op(s->cc_op);
2204 gen_jmp_im(cur_eip);
2205 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2206 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2207 /* abort translation because the addseg value may change or
2208 because ss32 may change. For R_SS, translation must always
2209 stop as a special handling must be done to disable hardware
2210 interrupts for the next instruction */
2211 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2214 gen_op_movl_seg_T0_vm(seg_reg);
2215 if (seg_reg == R_SS)
2220 static inline int svm_is_rep(int prefixes)
2222 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2226 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2227 uint32_t type, uint64_t param)
2229 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2230 /* no SVM activated */
2233 /* CRx and DRx reads/writes */
2234 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2235 if (s->cc_op != CC_OP_DYNAMIC) {
2236 gen_op_set_cc_op(s->cc_op);
2238 gen_jmp_im(pc_start - s->cs_base);
2239 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2240 tcg_const_i32(type), tcg_const_i64(param));
2241 /* this is a special case as we do not know if the interception occurs
2242 so we assume there was none */
2245 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2246 if (s->cc_op != CC_OP_DYNAMIC) {
2247 gen_op_set_cc_op(s->cc_op);
2249 gen_jmp_im(pc_start - s->cs_base);
2250 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2251 tcg_const_i32(type), tcg_const_i64(param));
2252 /* this is a special case as we do not know if the interception occurs
2253 so we assume there was none */
2258 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2259 if (s->cc_op != CC_OP_DYNAMIC) {
2260 gen_op_set_cc_op(s->cc_op);
2262 gen_jmp_im(pc_start - s->cs_base);
2263 tcg_gen_helper_0_2(helper_vmexit,
2264 tcg_const_i32(type), tcg_const_i64(param));
2265 /* we can optimize this one so TBs don't get longer
2266 than up to vmexit */
2275 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2277 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2280 static inline void gen_stack_update(DisasContext *s, int addend)
2282 #ifdef TARGET_X86_64
2284 gen_op_add_reg_im(2, R_ESP, addend);
2288 gen_op_add_reg_im(1, R_ESP, addend);
2290 gen_op_add_reg_im(0, R_ESP, addend);
2294 /* generate a push. It depends on ss32, addseg and dflag */
2295 static void gen_push_T0(DisasContext *s)
2297 #ifdef TARGET_X86_64
2299 gen_op_movq_A0_reg(R_ESP);
2301 gen_op_addq_A0_im(-8);
2302 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2304 gen_op_addq_A0_im(-2);
2305 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2307 gen_op_mov_reg_A0(2, R_ESP);
2311 gen_op_movl_A0_reg(R_ESP);
2313 gen_op_addl_A0_im(-2);
2315 gen_op_addl_A0_im(-4);
2318 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2319 gen_op_addl_A0_seg(R_SS);
2322 gen_op_andl_A0_ffff();
2323 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2324 gen_op_addl_A0_seg(R_SS);
2326 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2327 if (s->ss32 && !s->addseg)
2328 gen_op_mov_reg_A0(1, R_ESP);
2330 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2334 /* generate a push. It depends on ss32, addseg and dflag */
2335 /* slower version for T1, only used for call Ev */
2336 static void gen_push_T1(DisasContext *s)
2338 #ifdef TARGET_X86_64
2340 gen_op_movq_A0_reg(R_ESP);
2342 gen_op_addq_A0_im(-8);
2343 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2345 gen_op_addq_A0_im(-2);
2346 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2348 gen_op_mov_reg_A0(2, R_ESP);
2352 gen_op_movl_A0_reg(R_ESP);
2354 gen_op_addl_A0_im(-2);
2356 gen_op_addl_A0_im(-4);
2359 gen_op_addl_A0_seg(R_SS);
2362 gen_op_andl_A0_ffff();
2363 gen_op_addl_A0_seg(R_SS);
2365 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2367 if (s->ss32 && !s->addseg)
2368 gen_op_mov_reg_A0(1, R_ESP);
2370 gen_stack_update(s, (-2) << s->dflag);
2374 /* two step pop is necessary for precise exceptions */
2375 static void gen_pop_T0(DisasContext *s)
2377 #ifdef TARGET_X86_64
2379 gen_op_movq_A0_reg(R_ESP);
2380 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2384 gen_op_movl_A0_reg(R_ESP);
2387 gen_op_addl_A0_seg(R_SS);
2389 gen_op_andl_A0_ffff();
2390 gen_op_addl_A0_seg(R_SS);
2392 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2396 static void gen_pop_update(DisasContext *s)
2398 #ifdef TARGET_X86_64
2399 if (CODE64(s) && s->dflag) {
2400 gen_stack_update(s, 8);
2404 gen_stack_update(s, 2 << s->dflag);
2408 static void gen_stack_A0(DisasContext *s)
2410 gen_op_movl_A0_reg(R_ESP);
2412 gen_op_andl_A0_ffff();
2413 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2415 gen_op_addl_A0_seg(R_SS);
2418 /* NOTE: wrap around in 16 bit not fully handled */
2419 static void gen_pusha(DisasContext *s)
2422 gen_op_movl_A0_reg(R_ESP);
2423 gen_op_addl_A0_im(-16 << s->dflag);
2425 gen_op_andl_A0_ffff();
2426 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2428 gen_op_addl_A0_seg(R_SS);
2429 for(i = 0;i < 8; i++) {
2430 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2431 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2432 gen_op_addl_A0_im(2 << s->dflag);
2434 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2437 /* NOTE: wrap around in 16 bit not fully handled */
2438 static void gen_popa(DisasContext *s)
2441 gen_op_movl_A0_reg(R_ESP);
2443 gen_op_andl_A0_ffff();
2444 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2445 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2447 gen_op_addl_A0_seg(R_SS);
2448 for(i = 0;i < 8; i++) {
2449 /* ESP is not reloaded */
2451 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2452 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2454 gen_op_addl_A0_im(2 << s->dflag);
2456 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2459 static void gen_enter(DisasContext *s, int esp_addend, int level)
2464 #ifdef TARGET_X86_64
2466 ot = s->dflag ? OT_QUAD : OT_WORD;
2469 gen_op_movl_A0_reg(R_ESP);
2470 gen_op_addq_A0_im(-opsize);
2471 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2474 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2475 gen_op_st_T0_A0(ot + s->mem_index);
2477 /* XXX: must save state */
2478 tcg_gen_helper_0_3(helper_enter64_level,
2479 tcg_const_i32(level),
2480 tcg_const_i32((ot == OT_QUAD)),
2483 gen_op_mov_reg_T1(ot, R_EBP);
2484 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2485 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2489 ot = s->dflag + OT_WORD;
2490 opsize = 2 << s->dflag;
2492 gen_op_movl_A0_reg(R_ESP);
2493 gen_op_addl_A0_im(-opsize);
2495 gen_op_andl_A0_ffff();
2496 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2498 gen_op_addl_A0_seg(R_SS);
2500 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2501 gen_op_st_T0_A0(ot + s->mem_index);
2503 /* XXX: must save state */
2504 tcg_gen_helper_0_3(helper_enter_level,
2505 tcg_const_i32(level),
2506 tcg_const_i32(s->dflag),
2509 gen_op_mov_reg_T1(ot, R_EBP);
2510 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2511 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2515 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2517 if (s->cc_op != CC_OP_DYNAMIC)
2518 gen_op_set_cc_op(s->cc_op);
2519 gen_jmp_im(cur_eip);
2520 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2524 /* an interrupt is different from an exception because of the
2526 static void gen_interrupt(DisasContext *s, int intno,
2527 target_ulong cur_eip, target_ulong next_eip)
2529 if (s->cc_op != CC_OP_DYNAMIC)
2530 gen_op_set_cc_op(s->cc_op);
2531 gen_jmp_im(cur_eip);
2532 tcg_gen_helper_0_2(helper_raise_interrupt,
2533 tcg_const_i32(intno),
2534 tcg_const_i32(next_eip - cur_eip));
2538 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2540 if (s->cc_op != CC_OP_DYNAMIC)
2541 gen_op_set_cc_op(s->cc_op);
2542 gen_jmp_im(cur_eip);
2543 tcg_gen_helper_0_0(helper_debug);
2547 /* generate a generic end of block. Trace exception is also generated
2549 static void gen_eob(DisasContext *s)
2551 if (s->cc_op != CC_OP_DYNAMIC)
2552 gen_op_set_cc_op(s->cc_op);
2553 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2554 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2556 if (s->singlestep_enabled) {
2557 tcg_gen_helper_0_0(helper_debug);
2559 tcg_gen_helper_0_0(helper_single_step);
2566 /* generate a jump to eip. No segment change must happen before as a
2567 direct call to the next block may occur */
2568 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2571 if (s->cc_op != CC_OP_DYNAMIC) {
2572 gen_op_set_cc_op(s->cc_op);
2573 s->cc_op = CC_OP_DYNAMIC;
2575 gen_goto_tb(s, tb_num, eip);
2583 static void gen_jmp(DisasContext *s, target_ulong eip)
2585 gen_jmp_tb(s, eip, 0);
2588 static inline void gen_ldq_env_A0(int idx, int offset)
2590 int mem_index = (idx >> 2) - 1;
2591 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2592 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2595 static inline void gen_stq_env_A0(int idx, int offset)
2597 int mem_index = (idx >> 2) - 1;
2598 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2599 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2602 static inline void gen_ldo_env_A0(int idx, int offset)
2604 int mem_index = (idx >> 2) - 1;
2605 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2606 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2607 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2608 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2609 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2612 static inline void gen_sto_env_A0(int idx, int offset)
2614 int mem_index = (idx >> 2) - 1;
2615 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2616 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2617 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2618 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2619 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2622 static inline void gen_op_movo(int d_offset, int s_offset)
2624 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2625 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2626 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2627 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2630 static inline void gen_op_movq(int d_offset, int s_offset)
2632 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2633 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2636 static inline void gen_op_movl(int d_offset, int s_offset)
2638 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2639 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2642 static inline void gen_op_movq_env_0(int d_offset)
2644 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2645 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2648 #define SSE_SPECIAL ((void *)1)
2649 #define SSE_DUMMY ((void *)2)
2651 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2652 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2653 helper_ ## x ## ss, helper_ ## x ## sd, }
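/* sse_op_table1 is indexed by the second opcode byte (after 0x0f); the
   four columns correspond to the mandatory prefix: none, 0x66, 0xf3 and
   0xf2.  SSE_SPECIAL marks opcodes that are decoded by hand below and
   SSE_DUMMY opcodes that only need the MMX/FPU state checks */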
2655 static void *sse_op_table1[256][4] = {
2656 /* 3DNow! extensions */
2657 [0x0e] = { SSE_DUMMY }, /* femms */
2658 [0x0f] = { SSE_DUMMY }, /* pf... */
2659 /* pure SSE operations */
2660 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2661 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2662 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2663 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2664 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2665 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2666 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2667 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2669 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2670 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2671 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2672 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2673 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2674 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2675 [0x2e] = { helper_ucomiss, helper_ucomisd },
2676 [0x2f] = { helper_comiss, helper_comisd },
2677 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2678 [0x51] = SSE_FOP(sqrt),
2679 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2680 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2681 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2682 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2683 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2684 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2685 [0x58] = SSE_FOP(add),
2686 [0x59] = SSE_FOP(mul),
2687 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2688 helper_cvtss2sd, helper_cvtsd2ss },
2689 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2690 [0x5c] = SSE_FOP(sub),
2691 [0x5d] = SSE_FOP(min),
2692 [0x5e] = SSE_FOP(div),
2693 [0x5f] = SSE_FOP(max),
2695 [0xc2] = SSE_FOP(cmpeq),
2696 [0xc6] = { helper_shufps, helper_shufpd },
2698 /* MMX ops and their SSE extensions */
2699 [0x60] = MMX_OP2(punpcklbw),
2700 [0x61] = MMX_OP2(punpcklwd),
2701 [0x62] = MMX_OP2(punpckldq),
2702 [0x63] = MMX_OP2(packsswb),
2703 [0x64] = MMX_OP2(pcmpgtb),
2704 [0x65] = MMX_OP2(pcmpgtw),
2705 [0x66] = MMX_OP2(pcmpgtl),
2706 [0x67] = MMX_OP2(packuswb),
2707 [0x68] = MMX_OP2(punpckhbw),
2708 [0x69] = MMX_OP2(punpckhwd),
2709 [0x6a] = MMX_OP2(punpckhdq),
2710 [0x6b] = MMX_OP2(packssdw),
2711 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2712 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2713 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2714 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2715 [0x70] = { helper_pshufw_mmx,
2718 helper_pshuflw_xmm },
2719 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2720 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2721 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2722 [0x74] = MMX_OP2(pcmpeqb),
2723 [0x75] = MMX_OP2(pcmpeqw),
2724 [0x76] = MMX_OP2(pcmpeql),
2725 [0x77] = { SSE_DUMMY }, /* emms */
2726 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2727 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2728 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2729 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2730 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2731 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2732 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2733 [0xd1] = MMX_OP2(psrlw),
2734 [0xd2] = MMX_OP2(psrld),
2735 [0xd3] = MMX_OP2(psrlq),
2736 [0xd4] = MMX_OP2(paddq),
2737 [0xd5] = MMX_OP2(pmullw),
2738 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2739 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2740 [0xd8] = MMX_OP2(psubusb),
2741 [0xd9] = MMX_OP2(psubusw),
2742 [0xda] = MMX_OP2(pminub),
2743 [0xdb] = MMX_OP2(pand),
2744 [0xdc] = MMX_OP2(paddusb),
2745 [0xdd] = MMX_OP2(paddusw),
2746 [0xde] = MMX_OP2(pmaxub),
2747 [0xdf] = MMX_OP2(pandn),
2748 [0xe0] = MMX_OP2(pavgb),
2749 [0xe1] = MMX_OP2(psraw),
2750 [0xe2] = MMX_OP2(psrad),
2751 [0xe3] = MMX_OP2(pavgw),
2752 [0xe4] = MMX_OP2(pmulhuw),
2753 [0xe5] = MMX_OP2(pmulhw),
2754 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2755 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2756 [0xe8] = MMX_OP2(psubsb),
2757 [0xe9] = MMX_OP2(psubsw),
2758 [0xea] = MMX_OP2(pminsw),
2759 [0xeb] = MMX_OP2(por),
2760 [0xec] = MMX_OP2(paddsb),
2761 [0xed] = MMX_OP2(paddsw),
2762 [0xee] = MMX_OP2(pmaxsw),
2763 [0xef] = MMX_OP2(pxor),
2764 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2765 [0xf1] = MMX_OP2(psllw),
2766 [0xf2] = MMX_OP2(pslld),
2767 [0xf3] = MMX_OP2(psllq),
2768 [0xf4] = MMX_OP2(pmuludq),
2769 [0xf5] = MMX_OP2(pmaddwd),
2770 [0xf6] = MMX_OP2(psadbw),
2771 [0xf7] = MMX_OP2(maskmov),
2772 [0xf8] = MMX_OP2(psubb),
2773 [0xf9] = MMX_OP2(psubw),
2774 [0xfa] = MMX_OP2(psubl),
2775 [0xfb] = MMX_OP2(psubq),
2776 [0xfc] = MMX_OP2(paddb),
2777 [0xfd] = MMX_OP2(paddw),
2778 [0xfe] = MMX_OP2(paddl),
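/* shift group (0x71/0x72/0x73): rows are 0/8/16 for word/dword/qword
   widths, columns follow the modrm reg field (2 = logical right, 4 =
   arithmetic right, 6 = left, plus the xmm-only byte shifts at 3/7);
   the second index picks the MMX or the XMM helper */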
2781 static void *sse_op_table2[3 * 8][2] = {
2782 [0 + 2] = MMX_OP2(psrlw),
2783 [0 + 4] = MMX_OP2(psraw),
2784 [0 + 6] = MMX_OP2(psllw),
2785 [8 + 2] = MMX_OP2(psrld),
2786 [8 + 4] = MMX_OP2(psrad),
2787 [8 + 6] = MMX_OP2(pslld),
2788 [16 + 2] = MMX_OP2(psrlq),
2789 [16 + 3] = { NULL, helper_psrldq_xmm },
2790 [16 + 6] = MMX_OP2(psllq),
2791 [16 + 7] = { NULL, helper_pslldq_xmm },
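/* scalar int<->float conversions; each group of four is
   { ss 32-bit, sd 32-bit, ss 64-bit, sd 64-bit }, the 64-bit source or
   destination forms being X86_64_ONLY (see the cvtsi2ss/cvttss2si
   handling below for the index computation) */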
2794 static void *sse_op_table3[4 * 3] = {
2797 X86_64_ONLY(helper_cvtsq2ss),
2798 X86_64_ONLY(helper_cvtsq2sd),
2802 X86_64_ONLY(helper_cvttss2sq),
2803 X86_64_ONLY(helper_cvttsd2sq),
2807 X86_64_ONLY(helper_cvtss2sq),
2808 X86_64_ONLY(helper_cvtsd2sq),
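/* cmpps/cmppd/cmpss/cmpsd: indexed by the imm8 compare predicate (0..7)
   and by the prefix form, see the 0xc2 case below */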
2811 static void *sse_op_table4[8][4] = {
2822 static void *sse_op_table5[256] = {
2823 [0x0c] = helper_pi2fw,
2824 [0x0d] = helper_pi2fd,
2825 [0x1c] = helper_pf2iw,
2826 [0x1d] = helper_pf2id,
2827 [0x8a] = helper_pfnacc,
2828 [0x8e] = helper_pfpnacc,
2829 [0x90] = helper_pfcmpge,
2830 [0x94] = helper_pfmin,
2831 [0x96] = helper_pfrcp,
2832 [0x97] = helper_pfrsqrt,
2833 [0x9a] = helper_pfsub,
2834 [0x9e] = helper_pfadd,
2835 [0xa0] = helper_pfcmpgt,
2836 [0xa4] = helper_pfmax,
2837 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2838 [0xa7] = helper_movq, /* pfrsqit1 */
2839 [0xaa] = helper_pfsubr,
2840 [0xae] = helper_pfacc,
2841 [0xb0] = helper_pfcmpeq,
2842 [0xb4] = helper_pfmul,
2843 [0xb6] = helper_movq, /* pfrcpit2 */
2844 [0xb7] = helper_pmulhrw_mmx,
2845 [0xbb] = helper_pswapd,
2846 [0xbf] = helper_pavgb_mmx /* pavgusb */
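/* decode one MMX/SSE/SSE2/SSE3/3DNow! insn. b is the opcode byte; in the
   SSE_SPECIAL paths the mandatory prefix index is folded into bits 8-9 of
   b, hence case labels such as 0x1e7, 0x2d6 or 0x310. rex_r is the
   pre-shifted REX.R bit used for the modrm reg field. */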
2849 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2851 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2852 int modrm, mod, rm, reg, reg_addr, offset_addr;
2856 if (s->prefix & PREFIX_DATA)
2858 else if (s->prefix & PREFIX_REPZ)
2860 else if (s->prefix & PREFIX_REPNZ)
2864 sse_op2 = sse_op_table1[b][b1];
2867 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2877 /* simple MMX/SSE operation */
2878 if (s->flags & HF_TS_MASK) {
2879 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2882 if (s->flags & HF_EM_MASK) {
2884 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2887 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2890 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2893 tcg_gen_helper_0_0(helper_emms);
2898 tcg_gen_helper_0_0(helper_emms);
2901 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2902 the static cpu state) */
2904 tcg_gen_helper_0_0(helper_enter_mmx);
2907 modrm = ldub_code(s->pc++);
2908 reg = ((modrm >> 3) & 7);
2911 mod = (modrm >> 6) & 3;
2912 if (sse_op2 == SSE_SPECIAL) {
2915 case 0x0e7: /* movntq */
2918 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2919 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2921 case 0x1e7: /* movntdq */
2922 case 0x02b: /* movntps */
2923 case 0x12b: /* movntpd */
2924 case 0x3f0: /* lddqu */
2927 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2928 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2930 case 0x6e: /* movd mm, ea */
2931 #ifdef TARGET_X86_64
2932 if (s->dflag == 2) {
2933 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2934 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2938 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2939 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2940 offsetof(CPUX86State,fpregs[reg].mmx));
2941 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2944 case 0x16e: /* movd xmm, ea */
2945 #ifdef TARGET_X86_64
2946 if (s->dflag == 2) {
2947 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2948 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2949 offsetof(CPUX86State,xmm_regs[reg]));
2950 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2954 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2955 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2956 offsetof(CPUX86State,xmm_regs[reg]));
2957 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2958 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2961 case 0x6f: /* movq mm, ea */
2963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2964 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2967 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2968 offsetof(CPUX86State,fpregs[rm].mmx));
2969 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2970 offsetof(CPUX86State,fpregs[reg].mmx));
2973 case 0x010: /* movups */
2974 case 0x110: /* movupd */
2975 case 0x028: /* movaps */
2976 case 0x128: /* movapd */
2977 case 0x16f: /* movdqa xmm, ea */
2978 case 0x26f: /* movdqu xmm, ea */
2980 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2981 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2983 rm = (modrm & 7) | REX_B(s);
2984 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2985 offsetof(CPUX86State,xmm_regs[rm]));
2988 case 0x210: /* movss xmm, ea */
2990 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2991 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2992 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2994 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2995 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2996 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2998 rm = (modrm & 7) | REX_B(s);
2999 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3000 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3003 case 0x310: /* movsd xmm, ea */
3005 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3006 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3008 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3009 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3011 rm = (modrm & 7) | REX_B(s);
3012 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3013 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3016 case 0x012: /* movlps */
3017 case 0x112: /* movlpd */
3019 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3020 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3023 rm = (modrm & 7) | REX_B(s);
3024 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3025 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3028 case 0x212: /* movsldup */
3030 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3031 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3033 rm = (modrm & 7) | REX_B(s);
3034 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3035 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3036 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3037 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3039 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3040 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3041 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3042 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3044 case 0x312: /* movddup */
3046 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3047 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3049 rm = (modrm & 7) | REX_B(s);
3050 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3051 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3053 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3054 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3056 case 0x016: /* movhps */
3057 case 0x116: /* movhpd */
3059 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3060 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3063 rm = (modrm & 7) | REX_B(s);
3064 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3065 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3068 case 0x216: /* movshdup */
3070 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3071 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3073 rm = (modrm & 7) | REX_B(s);
3074 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3075 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3076 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3077 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3079 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3080 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3081 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3082 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3084 case 0x7e: /* movd ea, mm */
3085 #ifdef TARGET_X86_64
3086 if (s->dflag == 2) {
3087 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3088 offsetof(CPUX86State,fpregs[reg].mmx));
3089 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3093 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3094 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3095 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3098 case 0x17e: /* movd ea, xmm */
3099 #ifdef TARGET_X86_64
3100 if (s->dflag == 2) {
3101 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3102 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3103 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3107 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3108 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3109 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3112 case 0x27e: /* movq xmm, ea */
3114 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3115 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3117 rm = (modrm & 7) | REX_B(s);
3118 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3119 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3121 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3123 case 0x7f: /* movq ea, mm */
3125 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3126 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3129 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3130 offsetof(CPUX86State,fpregs[reg].mmx));
3133 case 0x011: /* movups */
3134 case 0x111: /* movupd */
3135 case 0x029: /* movaps */
3136 case 0x129: /* movapd */
3137 case 0x17f: /* movdqa ea, xmm */
3138 case 0x27f: /* movdqu ea, xmm */
3140 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3141 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3143 rm = (modrm & 7) | REX_B(s);
3144 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3145 offsetof(CPUX86State,xmm_regs[reg]));
3148 case 0x211: /* movss ea, xmm */
3150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3151 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3152 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3154 rm = (modrm & 7) | REX_B(s);
3155 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3156 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3159 case 0x311: /* movsd ea, xmm */
3161 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3162 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3164 rm = (modrm & 7) | REX_B(s);
3165 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3166 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3169 case 0x013: /* movlps */
3170 case 0x113: /* movlpd */
3172 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3173 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3178 case 0x017: /* movhps */
3179 case 0x117: /* movhpd */
3181 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3182 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3187 case 0x71: /* shift mm, im */
3190 case 0x171: /* shift xmm, im */
3193 val = ldub_code(s->pc++);
3195 gen_op_movl_T0_im(val);
3196 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3198 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3199 op1_offset = offsetof(CPUX86State,xmm_t0);
3201 gen_op_movl_T0_im(val);
3202 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3204 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3205 op1_offset = offsetof(CPUX86State,mmx_t0);
3207 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3211 rm = (modrm & 7) | REX_B(s);
3212 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3215 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3217 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3218 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3219 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3221 case 0x050: /* movmskps */
3222 rm = (modrm & 7) | REX_B(s);
3223 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3224 offsetof(CPUX86State,xmm_regs[rm]));
3225 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3226 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3227 gen_op_mov_reg_T0(OT_LONG, reg);
3229 case 0x150: /* movmskpd */
3230 rm = (modrm & 7) | REX_B(s);
3231 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3232 offsetof(CPUX86State,xmm_regs[rm]));
3233 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3234 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3235 gen_op_mov_reg_T0(OT_LONG, reg);
3237 case 0x02a: /* cvtpi2ps */
3238 case 0x12a: /* cvtpi2pd */
3239 tcg_gen_helper_0_0(helper_enter_mmx);
3241 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3242 op2_offset = offsetof(CPUX86State,mmx_t0);
3243 gen_ldq_env_A0(s->mem_index, op2_offset);
3246 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3248 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3249 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3250 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3253 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3257 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3261 case 0x22a: /* cvtsi2ss */
3262 case 0x32a: /* cvtsi2sd */
3263 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3264 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3265 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3266 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3267 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3268 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3269 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3271 case 0x02c: /* cvttps2pi */
3272 case 0x12c: /* cvttpd2pi */
3273 case 0x02d: /* cvtps2pi */
3274 case 0x12d: /* cvtpd2pi */
3275 tcg_gen_helper_0_0(helper_enter_mmx);
3277 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3278 op2_offset = offsetof(CPUX86State,xmm_t0);
3279 gen_ldo_env_A0(s->mem_index, op2_offset);
3281 rm = (modrm & 7) | REX_B(s);
3282 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3284 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3285 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3286 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3289 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3292 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3295 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3298 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3302 case 0x22c: /* cvttss2si */
3303 case 0x32c: /* cvttsd2si */
3304 case 0x22d: /* cvtss2si */
3305 case 0x32d: /* cvtsd2si */
3306 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3308 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3310 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3312 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3313 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3315 op2_offset = offsetof(CPUX86State,xmm_t0);
3317 rm = (modrm & 7) | REX_B(s);
3318 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3320 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3322 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3323 if (ot == OT_LONG) {
3324 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3325 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3327 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3329 gen_op_mov_reg_T0(ot, reg);
3331 case 0xc4: /* pinsrw */
3334 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3335 val = ldub_code(s->pc++);
3338 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3339 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3342 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3343 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3346 case 0xc5: /* pextrw */
3350 val = ldub_code(s->pc++);
3353 rm = (modrm & 7) | REX_B(s);
3354 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3355 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3359 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3360 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3362 reg = ((modrm >> 3) & 7) | rex_r;
3363 gen_op_mov_reg_T0(OT_LONG, reg);
3365 case 0x1d6: /* movq ea, xmm */
3367 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3368 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3370 rm = (modrm & 7) | REX_B(s);
3371 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3372 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3373 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3376 case 0x2d6: /* movq2dq */
3377 tcg_gen_helper_0_0(helper_enter_mmx);
3379 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3380 offsetof(CPUX86State,fpregs[rm].mmx));
3381 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3383 case 0x3d6: /* movdq2q */
3384 tcg_gen_helper_0_0(helper_enter_mmx);
3385 rm = (modrm & 7) | REX_B(s);
3386 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3387 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3389 case 0xd7: /* pmovmskb */
3394 rm = (modrm & 7) | REX_B(s);
3395 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3396 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3399 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3400 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3402 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3403 reg = ((modrm >> 3) & 7) | rex_r;
3404 gen_op_mov_reg_T0(OT_LONG, reg);
3410 /* generic MMX or SSE operation */
3412 case 0x70: /* pshufx insn */
3413 case 0xc6: /* shufps/shufpd */
3414 case 0xc2: /* compare insns */
3421 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3423 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3424 op2_offset = offsetof(CPUX86State,xmm_t0);
3425 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3427 /* specific case for SSE single instructions */
3430 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3431 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3434 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3437 gen_ldo_env_A0(s->mem_index, op2_offset);
3440 rm = (modrm & 7) | REX_B(s);
3441 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3444 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3446 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3447 op2_offset = offsetof(CPUX86State,mmx_t0);
3448 gen_ldq_env_A0(s->mem_index, op2_offset);
3451 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3455 case 0x0f: /* 3DNow! data insns */
3456 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3458 val = ldub_code(s->pc++);
3459 sse_op2 = sse_op_table5[val];
3462 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3463 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3464 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3466 case 0x70: /* pshufx insn */
3467 case 0xc6: /* shufps/shufpd */
3468 val = ldub_code(s->pc++);
3469 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3470 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3471 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3475 val = ldub_code(s->pc++);
3478 sse_op2 = sse_op_table4[val][b1];
3479 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3480 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3481 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3484 /* maskmov: we must prepare A0 */
3487 #ifdef TARGET_X86_64
3488 if (s->aflag == 2) {
3489 gen_op_movq_A0_reg(R_EDI);
3493 gen_op_movl_A0_reg(R_EDI);
3495 gen_op_andl_A0_ffff();
3497 gen_add_A0_ds_seg(s);
3499 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3500 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3501 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3504 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3505 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3506 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3509 if (b == 0x2e || b == 0x2f) {
3510 s->cc_op = CC_OP_EFLAGS;
3515 /* convert one instruction. s->is_jmp is set if the translation must
3516 be stopped. Return the next pc value */
3517 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3519 int b, prefixes, aflag, dflag;
3521 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3522 target_ulong next_eip, tval;
3532 #ifdef TARGET_X86_64
3537 s->rip_offset = 0; /* for relative ip address */
3539 b = ldub_code(s->pc);
3541 /* check prefixes */
3542 #ifdef TARGET_X86_64
3546 prefixes |= PREFIX_REPZ;
3549 prefixes |= PREFIX_REPNZ;
3552 prefixes |= PREFIX_LOCK;
3573 prefixes |= PREFIX_DATA;
3576 prefixes |= PREFIX_ADR;
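/* REX prefix: W/R/X/B are pre-shifted so that they can simply be OR'ed
   into the register numbers decoded from the opcode and modrm bytes */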
3580 rex_w = (b >> 3) & 1;
3581 rex_r = (b & 0x4) << 1;
3582 s->rex_x = (b & 0x2) << 2;
3583 REX_B(s) = (b & 0x1) << 3;
3584 x86_64_hregs = 1; /* select uniform byte register addressing */
3588 /* 0x66 is ignored if rex.w is set */
3591 if (prefixes & PREFIX_DATA)
3594 if (!(prefixes & PREFIX_ADR))
3601 prefixes |= PREFIX_REPZ;
3604 prefixes |= PREFIX_REPNZ;
3607 prefixes |= PREFIX_LOCK;
3628 prefixes |= PREFIX_DATA;
3631 prefixes |= PREFIX_ADR;
3634 if (prefixes & PREFIX_DATA)
3636 if (prefixes & PREFIX_ADR)
3640 s->prefix = prefixes;
3644 /* lock generation */
3645 if (prefixes & PREFIX_LOCK)
3646 tcg_gen_helper_0_0(helper_lock);
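/* the matching helper_unlock is emitted once the insn has been
   translated (and on the illegal opcode path), so a LOCK prefix
   brackets the whole translated insn */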
3648 /* now check op code */
3652 /**************************/
3653 /* extended op code */
3654 b = ldub_code(s->pc++) | 0x100;
3657 /**************************/
3675 ot = dflag + OT_WORD;
3678 case 0: /* OP Ev, Gv */
3679 modrm = ldub_code(s->pc++);
3680 reg = ((modrm >> 3) & 7) | rex_r;
3681 mod = (modrm >> 6) & 3;
3682 rm = (modrm & 7) | REX_B(s);
3684 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3686 } else if (op == OP_XORL && rm == reg) {
3688 /* xor reg, reg optimisation */
3690 s->cc_op = CC_OP_LOGICB + ot;
3691 gen_op_mov_reg_T0(ot, reg);
3692 gen_op_update1_cc();
3697 gen_op_mov_TN_reg(ot, 1, reg);
3698 gen_op(s, op, ot, opreg);
3700 case 1: /* OP Gv, Ev */
3701 modrm = ldub_code(s->pc++);
3702 mod = (modrm >> 6) & 3;
3703 reg = ((modrm >> 3) & 7) | rex_r;
3704 rm = (modrm & 7) | REX_B(s);
3706 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3707 gen_op_ld_T1_A0(ot + s->mem_index);
3708 } else if (op == OP_XORL && rm == reg) {
3711 gen_op_mov_TN_reg(ot, 1, rm);
3713 gen_op(s, op, ot, reg);
3715 case 2: /* OP A, Iv */
3716 val = insn_get(s, ot);
3717 gen_op_movl_T1_im(val);
3718 gen_op(s, op, ot, OR_EAX);
3724 case 0x80: /* GRP1 */
3734 ot = dflag + OT_WORD;
3736 modrm = ldub_code(s->pc++);
3737 mod = (modrm >> 6) & 3;
3738 rm = (modrm & 7) | REX_B(s);
3739 op = (modrm >> 3) & 7;
3745 s->rip_offset = insn_const_size(ot);
3746 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3757 val = insn_get(s, ot);
3760 val = (int8_t)insn_get(s, OT_BYTE);
3763 gen_op_movl_T1_im(val);
3764 gen_op(s, op, ot, opreg);
3768 /**************************/
3769 /* inc, dec, and other misc arith */
3770 case 0x40 ... 0x47: /* inc Gv */
3771 ot = dflag ? OT_LONG : OT_WORD;
3772 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3774 case 0x48 ... 0x4f: /* dec Gv */
3775 ot = dflag ? OT_LONG : OT_WORD;
3776 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3778 case 0xf6: /* GRP3 */
3783 ot = dflag + OT_WORD;
3785 modrm = ldub_code(s->pc++);
3786 mod = (modrm >> 6) & 3;
3787 rm = (modrm & 7) | REX_B(s);
3788 op = (modrm >> 3) & 7;
3791 s->rip_offset = insn_const_size(ot);
3792 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3793 gen_op_ld_T0_A0(ot + s->mem_index);
3795 gen_op_mov_TN_reg(ot, 0, rm);
3800 val = insn_get(s, ot);
3801 gen_op_movl_T1_im(val);
3802 gen_op_testl_T0_T1_cc();
3803 s->cc_op = CC_OP_LOGICB + ot;
3806 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3808 gen_op_st_T0_A0(ot + s->mem_index);
3810 gen_op_mov_reg_T0(ot, rm);
3814 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3816 gen_op_st_T0_A0(ot + s->mem_index);
3818 gen_op_mov_reg_T0(ot, rm);
3820 gen_op_update_neg_cc();
3821 s->cc_op = CC_OP_SUBB + ot;
3826 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3827 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3828 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3829 /* XXX: use 32 bit mul which could be faster */
3830 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3831 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3832 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3833 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3834 s->cc_op = CC_OP_MULB;
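/* for MUL, cc_dst holds the (low) result and cc_src the high part of
   the product; CF/OF are later derived from cc_src being non zero */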
3837 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3838 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3839 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3840 /* XXX: use 32 bit mul which could be faster */
3841 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3842 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3843 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3844 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3845 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3846 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3847 s->cc_op = CC_OP_MULW;
3851 #ifdef TARGET_X86_64
3852 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3853 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3854 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3855 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3856 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3857 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3858 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3859 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3860 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3864 t0 = tcg_temp_new(TCG_TYPE_I64);
3865 t1 = tcg_temp_new(TCG_TYPE_I64);
3866 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3867 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3868 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3869 tcg_gen_mul_i64(t0, t0, t1);
3870 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3871 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3872 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3873 tcg_gen_shri_i64(t0, t0, 32);
3874 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3875 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3876 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3879 s->cc_op = CC_OP_MULL;
3881 #ifdef TARGET_X86_64
3883 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3884 s->cc_op = CC_OP_MULQ;
3892 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3893 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3894 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3895 /* XXX: use 32 bit mul which could be faster */
3896 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3897 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3898 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3899 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3900 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3901 s->cc_op = CC_OP_MULB;
3904 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3905 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3906 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3907 /* XXX: use 32 bit mul which could be faster */
3908 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3909 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3910 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3911 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3912 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3913 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3914 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3915 s->cc_op = CC_OP_MULW;
3919 #ifdef TARGET_X86_64
3920 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3921 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3922 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3923 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3924 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3925 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3926 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3927 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3928 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3929 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3933 t0 = tcg_temp_new(TCG_TYPE_I64);
3934 t1 = tcg_temp_new(TCG_TYPE_I64);
3935 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3936 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3937 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3938 tcg_gen_mul_i64(t0, t0, t1);
3939 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3940 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3941 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3942 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3943 tcg_gen_shri_i64(t0, t0, 32);
3944 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3945 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3946 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3949 s->cc_op = CC_OP_MULL;
3951 #ifdef TARGET_X86_64
3953 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3954 s->cc_op = CC_OP_MULQ;
3962 gen_jmp_im(pc_start - s->cs_base);
3963 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3966 gen_jmp_im(pc_start - s->cs_base);
3967 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3971 gen_jmp_im(pc_start - s->cs_base);
3972 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3974 #ifdef TARGET_X86_64
3976 gen_jmp_im(pc_start - s->cs_base);
3977 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3985 gen_jmp_im(pc_start - s->cs_base);
3986 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3989 gen_jmp_im(pc_start - s->cs_base);
3990 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3994 gen_jmp_im(pc_start - s->cs_base);
3995 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3997 #ifdef TARGET_X86_64
3999 gen_jmp_im(pc_start - s->cs_base);
4000 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4010 case 0xfe: /* GRP4 */
4011 case 0xff: /* GRP5 */
4015 ot = dflag + OT_WORD;
4017 modrm = ldub_code(s->pc++);
4018 mod = (modrm >> 6) & 3;
4019 rm = (modrm & 7) | REX_B(s);
4020 op = (modrm >> 3) & 7;
4021 if (op >= 2 && b == 0xfe) {
4025 if (op == 2 || op == 4) {
4026 /* operand size for jumps is 64 bit */
4028 } else if (op == 3 || op == 5) {
4029 /* for lcall/ljmp, the operand is 16 or 32 bit, even in long mode */
4031 ot = dflag ? OT_LONG : OT_WORD;
4032 } else if (op == 6) {
4033 /* default push size is 64 bit */
4034 ot = dflag ? OT_QUAD : OT_WORD;
4038 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4039 if (op >= 2 && op != 3 && op != 5)
4040 gen_op_ld_T0_A0(ot + s->mem_index);
4042 gen_op_mov_TN_reg(ot, 0, rm);
4046 case 0: /* inc Ev */
4051 gen_inc(s, ot, opreg, 1);
4053 case 1: /* dec Ev */
4058 gen_inc(s, ot, opreg, -1);
4060 case 2: /* call Ev */
4061 /* XXX: optimize the memory operand case (no 'and' is necessary) */
4063 gen_op_andl_T0_ffff();
4064 next_eip = s->pc - s->cs_base;
4065 gen_movtl_T1_im(next_eip);
4070 case 3: /* lcall Ev */
4071 gen_op_ld_T1_A0(ot + s->mem_index);
4072 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4073 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4075 if (s->pe && !s->vm86) {
4076 if (s->cc_op != CC_OP_DYNAMIC)
4077 gen_op_set_cc_op(s->cc_op);
4078 gen_jmp_im(pc_start - s->cs_base);
4079 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4080 tcg_gen_helper_0_4(helper_lcall_protected,
4081 cpu_tmp2_i32, cpu_T[1],
4082 tcg_const_i32(dflag),
4083 tcg_const_i32(s->pc - pc_start));
4085 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4086 tcg_gen_helper_0_4(helper_lcall_real,
4087 cpu_tmp2_i32, cpu_T[1],
4088 tcg_const_i32(dflag),
4089 tcg_const_i32(s->pc - s->cs_base));
4093 case 4: /* jmp Ev */
4095 gen_op_andl_T0_ffff();
4099 case 5: /* ljmp Ev */
4100 gen_op_ld_T1_A0(ot + s->mem_index);
4101 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4102 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4104 if (s->pe && !s->vm86) {
4105 if (s->cc_op != CC_OP_DYNAMIC)
4106 gen_op_set_cc_op(s->cc_op);
4107 gen_jmp_im(pc_start - s->cs_base);
4108 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4109 tcg_gen_helper_0_3(helper_ljmp_protected,
4112 tcg_const_i32(s->pc - pc_start));
4114 gen_op_movl_seg_T0_vm(R_CS);
4115 gen_op_movl_T0_T1();
4120 case 6: /* push Ev */
4128 case 0x84: /* test Ev, Gv */
4133 ot = dflag + OT_WORD;
4135 modrm = ldub_code(s->pc++);
4136 mod = (modrm >> 6) & 3;
4137 rm = (modrm & 7) | REX_B(s);
4138 reg = ((modrm >> 3) & 7) | rex_r;
4140 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4141 gen_op_mov_TN_reg(ot, 1, reg);
4142 gen_op_testl_T0_T1_cc();
4143 s->cc_op = CC_OP_LOGICB + ot;
4146 case 0xa8: /* test eAX, Iv */
4151 ot = dflag + OT_WORD;
4152 val = insn_get(s, ot);
4154 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4155 gen_op_movl_T1_im(val);
4156 gen_op_testl_T0_T1_cc();
4157 s->cc_op = CC_OP_LOGICB + ot;
4160 case 0x98: /* CWDE/CBW */
4161 #ifdef TARGET_X86_64
4163 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4164 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4165 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4169 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4170 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4171 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4173 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4174 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4175 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4178 case 0x99: /* CDQ/CWD */
4179 #ifdef TARGET_X86_64
4181 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4182 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4183 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4187 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4188 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4189 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4190 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4192 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4193 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4194 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4195 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4198 case 0x1af: /* imul Gv, Ev */
4199 case 0x69: /* imul Gv, Ev, I */
4201 ot = dflag + OT_WORD;
4202 modrm = ldub_code(s->pc++);
4203 reg = ((modrm >> 3) & 7) | rex_r;
4205 s->rip_offset = insn_const_size(ot);
4208 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4210 val = insn_get(s, ot);
4211 gen_op_movl_T1_im(val);
4212 } else if (b == 0x6b) {
4213 val = (int8_t)insn_get(s, OT_BYTE);
4214 gen_op_movl_T1_im(val);
4216 gen_op_mov_TN_reg(ot, 1, reg);
4219 #ifdef TARGET_X86_64
4220 if (ot == OT_QUAD) {
4221 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4224 if (ot == OT_LONG) {
4225 #ifdef TARGET_X86_64
4226 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4227 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4228 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4229 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4230 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4231 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4235 t0 = tcg_temp_new(TCG_TYPE_I64);
4236 t1 = tcg_temp_new(TCG_TYPE_I64);
4237 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4238 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4239 tcg_gen_mul_i64(t0, t0, t1);
4240 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4241 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4242 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4243 tcg_gen_shri_i64(t0, t0, 32);
4244 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4245 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4249 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4250 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4251 /* XXX: use 32 bit mul which could be faster */
4252 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4253 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4254 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4255 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4257 gen_op_mov_reg_T0(ot, reg);
4258 s->cc_op = CC_OP_MULB + ot;
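/* for IMUL, cc_src is set to (high result - sign extension of the low
   result): it is non zero exactly when the signed product overflowed
   the destination size, which is what CF/OF are derived from */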
4261 case 0x1c1: /* xadd Ev, Gv */
4265 ot = dflag + OT_WORD;
4266 modrm = ldub_code(s->pc++);
4267 reg = ((modrm >> 3) & 7) | rex_r;
4268 mod = (modrm >> 6) & 3;
4270 rm = (modrm & 7) | REX_B(s);
4271 gen_op_mov_TN_reg(ot, 0, reg);
4272 gen_op_mov_TN_reg(ot, 1, rm);
4273 gen_op_addl_T0_T1();
4274 gen_op_mov_reg_T1(ot, reg);
4275 gen_op_mov_reg_T0(ot, rm);
4277 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4278 gen_op_mov_TN_reg(ot, 0, reg);
4279 gen_op_ld_T1_A0(ot + s->mem_index);
4280 gen_op_addl_T0_T1();
4281 gen_op_st_T0_A0(ot + s->mem_index);
4282 gen_op_mov_reg_T1(ot, reg);
4284 gen_op_update2_cc();
4285 s->cc_op = CC_OP_ADDB + ot;
4288 case 0x1b1: /* cmpxchg Ev, Gv */
4295 ot = dflag + OT_WORD;
4296 modrm = ldub_code(s->pc++);
4297 reg = ((modrm >> 3) & 7) | rex_r;
4298 mod = (modrm >> 6) & 3;
4299 gen_op_mov_TN_reg(ot, 1, reg);
4301 rm = (modrm & 7) | REX_B(s);
4302 gen_op_mov_TN_reg(ot, 0, rm);
4304 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4305 gen_op_ld_T0_A0(ot + s->mem_index);
4306 rm = 0; /* avoid warning */
4308 label1 = gen_new_label();
4309 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4310 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4311 gen_extu(ot, cpu_T3);
4312 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4314 label2 = gen_new_label();
4315 gen_op_mov_reg_T0(ot, R_EAX);
4317 gen_set_label(label1);
4318 gen_op_mov_reg_T1(ot, rm);
4319 gen_set_label(label2);
4321 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4322 gen_op_mov_reg_T0(ot, R_EAX);
4323 gen_set_label(label1);
4325 gen_op_st_T1_A0(ot + s->mem_index);
4327 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4328 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4329 s->cc_op = CC_OP_SUBB + ot;
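/* cmpxchg sets the flags as a CMP of EAX with the destination: cc_dst
   holds EAX - dest (computed above in cpu_T3) and cc_src the original
   destination value */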
4332 case 0x1c7: /* cmpxchg8b */
4333 modrm = ldub_code(s->pc++);
4334 mod = (modrm >> 6) & 3;
4335 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4337 #ifdef TARGET_X86_64
4339 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4341 gen_jmp_im(pc_start - s->cs_base);
4342 if (s->cc_op != CC_OP_DYNAMIC)
4343 gen_op_set_cc_op(s->cc_op);
4344 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4345 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4349 if (!(s->cpuid_features & CPUID_CX8))
4351 gen_jmp_im(pc_start - s->cs_base);
4352 if (s->cc_op != CC_OP_DYNAMIC)
4353 gen_op_set_cc_op(s->cc_op);
4354 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4355 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4357 s->cc_op = CC_OP_EFLAGS;
4360 /**************************/
4362 case 0x50 ... 0x57: /* push */
4363 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4366 case 0x58 ... 0x5f: /* pop */
4368 ot = dflag ? OT_QUAD : OT_WORD;
4370 ot = dflag + OT_WORD;
4373 /* NOTE: order is important for pop %sp */
4375 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4377 case 0x60: /* pusha */
4382 case 0x61: /* popa */
4387 case 0x68: /* push Iv */
4390 ot = dflag ? OT_QUAD : OT_WORD;
4392 ot = dflag + OT_WORD;
4395 val = insn_get(s, ot);
4397 val = (int8_t)insn_get(s, OT_BYTE);
4398 gen_op_movl_T0_im(val);
4401 case 0x8f: /* pop Ev */
4403 ot = dflag ? OT_QUAD : OT_WORD;
4405 ot = dflag + OT_WORD;
4407 modrm = ldub_code(s->pc++);
4408 mod = (modrm >> 6) & 3;
4411 /* NOTE: order is important for pop %sp */
4413 rm = (modrm & 7) | REX_B(s);
4414 gen_op_mov_reg_T0(ot, rm);
4416 /* NOTE: order is important too for MMU exceptions */
4417 s->popl_esp_hack = 1 << ot;
4418 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4419 s->popl_esp_hack = 0;
4423 case 0xc8: /* enter */
4426 val = lduw_code(s->pc);
4428 level = ldub_code(s->pc++);
4429 gen_enter(s, val, level);
4432 case 0xc9: /* leave */
4433 /* XXX: exception not precise (ESP is updated before potential exception) */
4435 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4436 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4437 } else if (s->ss32) {
4438 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4439 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4441 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4442 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4446 ot = dflag ? OT_QUAD : OT_WORD;
4448 ot = dflag + OT_WORD;
4450 gen_op_mov_reg_T0(ot, R_EBP);
4453 case 0x06: /* push es */
4454 case 0x0e: /* push cs */
4455 case 0x16: /* push ss */
4456 case 0x1e: /* push ds */
4459 gen_op_movl_T0_seg(b >> 3);
4462 case 0x1a0: /* push fs */
4463 case 0x1a8: /* push gs */
4464 gen_op_movl_T0_seg((b >> 3) & 7);
4467 case 0x07: /* pop es */
4468 case 0x17: /* pop ss */
4469 case 0x1f: /* pop ds */
4474 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4477 /* if reg == SS, inhibit interrupts/trace. */
4478 /* If several instructions disable interrupts, only the first does it. */
4480 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4481 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4485 gen_jmp_im(s->pc - s->cs_base);
4489 case 0x1a1: /* pop fs */
4490 case 0x1a9: /* pop gs */
4492 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4495 gen_jmp_im(s->pc - s->cs_base);
4500 /**************************/
4503 case 0x89: /* mov Gv, Ev */
4507 ot = dflag + OT_WORD;
4508 modrm = ldub_code(s->pc++);
4509 reg = ((modrm >> 3) & 7) | rex_r;
4511 /* generate a generic store */
4512 gen_ldst_modrm(s, modrm, ot, reg, 1);
4515 case 0xc7: /* mov Ev, Iv */
4519 ot = dflag + OT_WORD;
4520 modrm = ldub_code(s->pc++);
4521 mod = (modrm >> 6) & 3;
4523 s->rip_offset = insn_const_size(ot);
4524 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4526 val = insn_get(s, ot);
4527 gen_op_movl_T0_im(val);
4529 gen_op_st_T0_A0(ot + s->mem_index);
4531 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4534 case 0x8b: /* mov Ev, Gv */
4538 ot = OT_WORD + dflag;
4539 modrm = ldub_code(s->pc++);
4540 reg = ((modrm >> 3) & 7) | rex_r;
4542 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4543 gen_op_mov_reg_T0(ot, reg);
4545 case 0x8e: /* mov seg, Gv */
4546 modrm = ldub_code(s->pc++);
4547 reg = (modrm >> 3) & 7;
4548 if (reg >= 6 || reg == R_CS)
4550 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4551 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4553 /* if reg == SS, inhibit interrupts/trace */
4554 /* If several instructions disable interrupts, only the first does it. */
4556 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4557 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4561 gen_jmp_im(s->pc - s->cs_base);
4565 case 0x8c: /* mov Gv, seg */
4566 modrm = ldub_code(s->pc++);
4567 reg = (modrm >> 3) & 7;
4568 mod = (modrm >> 6) & 3;
4571 gen_op_movl_T0_seg(reg);
4573 ot = OT_WORD + dflag;
4576 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4579 case 0x1b6: /* movzbS Gv, Eb */
4580 case 0x1b7: /* movzwS Gv, Eb */
4581 case 0x1be: /* movsbS Gv, Eb */
4582 case 0x1bf: /* movswS Gv, Eb */
4585 /* d_ot is the size of the destination */
4586 d_ot = dflag + OT_WORD;
4587 /* ot is the size of the source */
4588 ot = (b & 1) + OT_BYTE;
4589 modrm = ldub_code(s->pc++);
4590 reg = ((modrm >> 3) & 7) | rex_r;
4591 mod = (modrm >> 6) & 3;
4592 rm = (modrm & 7) | REX_B(s);
4595 gen_op_mov_TN_reg(ot, 0, rm);
4596 switch(ot | (b & 8)) {
4598 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4601 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4604 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4608 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4611 gen_op_mov_reg_T0(d_ot, reg);
4613 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4615 gen_op_lds_T0_A0(ot + s->mem_index);
4617 gen_op_ldu_T0_A0(ot + s->mem_index);
4619 gen_op_mov_reg_T0(d_ot, reg);
4624 case 0x8d: /* lea */
4625 ot = dflag + OT_WORD;
4626 modrm = ldub_code(s->pc++);
4627 mod = (modrm >> 6) & 3;
4630 reg = ((modrm >> 3) & 7) | rex_r;
4631 /* we must ensure that no segment is added */
4635 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4637 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4640 case 0xa0: /* mov EAX, Ov */
4642 case 0xa2: /* mov Ov, EAX */
4645 target_ulong offset_addr;
4650 ot = dflag + OT_WORD;
4651 #ifdef TARGET_X86_64
4652 if (s->aflag == 2) {
4653 offset_addr = ldq_code(s->pc);
4655 gen_op_movq_A0_im(offset_addr);
4660 offset_addr = insn_get(s, OT_LONG);
4662 offset_addr = insn_get(s, OT_WORD);
4664 gen_op_movl_A0_im(offset_addr);
4666 gen_add_A0_ds_seg(s);
4668 gen_op_ld_T0_A0(ot + s->mem_index);
4669 gen_op_mov_reg_T0(ot, R_EAX);
4671 gen_op_mov_TN_reg(ot, 0, R_EAX);
4672 gen_op_st_T0_A0(ot + s->mem_index);
4676 case 0xd7: /* xlat */
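/* AL = [seg_base + (E)BX + zero extended AL], DS being the default
   segment */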
4677 #ifdef TARGET_X86_64
4678 if (s->aflag == 2) {
4679 gen_op_movq_A0_reg(R_EBX);
4680 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4681 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4682 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4686 gen_op_movl_A0_reg(R_EBX);
4687 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4688 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4689 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4691 gen_op_andl_A0_ffff();
4693 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4695 gen_add_A0_ds_seg(s);
4696 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4697 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4699 case 0xb0 ... 0xb7: /* mov R, Ib */
4700 val = insn_get(s, OT_BYTE);
4701 gen_op_movl_T0_im(val);
4702 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4704 case 0xb8 ... 0xbf: /* mov R, Iv */
4705 #ifdef TARGET_X86_64
4709 tmp = ldq_code(s->pc);
4711 reg = (b & 7) | REX_B(s);
4712 gen_movtl_T0_im(tmp);
4713 gen_op_mov_reg_T0(OT_QUAD, reg);
4717 ot = dflag ? OT_LONG : OT_WORD;
4718 val = insn_get(s, ot);
4719 reg = (b & 7) | REX_B(s);
4720 gen_op_movl_T0_im(val);
4721 gen_op_mov_reg_T0(ot, reg);
4725 case 0x91 ... 0x97: /* xchg R, EAX */
4726 ot = dflag + OT_WORD;
4727 reg = (b & 7) | REX_B(s);
4731 case 0x87: /* xchg Ev, Gv */
4735 ot = dflag + OT_WORD;
4736 modrm = ldub_code(s->pc++);
4737 reg = ((modrm >> 3) & 7) | rex_r;
4738 mod = (modrm >> 6) & 3;
4740 rm = (modrm & 7) | REX_B(s);
4742 gen_op_mov_TN_reg(ot, 0, reg);
4743 gen_op_mov_TN_reg(ot, 1, rm);
4744 gen_op_mov_reg_T0(ot, rm);
4745 gen_op_mov_reg_T1(ot, reg);
4747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4748 gen_op_mov_TN_reg(ot, 0, reg);
4749 /* for xchg, lock is implicit */
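/* if a LOCK prefix was present, helper_lock has already been emitted
   during prefix decoding, so the lock is only taken here when the
   prefix is absent */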
4750 if (!(prefixes & PREFIX_LOCK))
4751 tcg_gen_helper_0_0(helper_lock);
4752 gen_op_ld_T1_A0(ot + s->mem_index);
4753 gen_op_st_T0_A0(ot + s->mem_index);
4754 if (!(prefixes & PREFIX_LOCK))
4755 tcg_gen_helper_0_0(helper_unlock);
4756 gen_op_mov_reg_T1(ot, reg);
4759 case 0xc4: /* les Gv */
4764 case 0xc5: /* lds Gv */
4769 case 0x1b2: /* lss Gv */
4772 case 0x1b4: /* lfs Gv */
4775 case 0x1b5: /* lgs Gv */
4778 ot = dflag ? OT_LONG : OT_WORD;
4779 modrm = ldub_code(s->pc++);
4780 reg = ((modrm >> 3) & 7) | rex_r;
4781 mod = (modrm >> 6) & 3;
4784 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4785 gen_op_ld_T1_A0(ot + s->mem_index);
4786 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4787 /* load the segment first to handle exceptions properly */
4788 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4789 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4790 /* then put the data */
4791 gen_op_mov_reg_T1(ot, reg);
4793 gen_jmp_im(s->pc - s->cs_base);
4798 /************************/
4809 ot = dflag + OT_WORD;
4811 modrm = ldub_code(s->pc++);
4812 mod = (modrm >> 6) & 3;
4813 op = (modrm >> 3) & 7;
4819 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4822 opreg = (modrm & 7) | REX_B(s);
4827 gen_shift(s, op, ot, opreg, OR_ECX);
4830 shift = ldub_code(s->pc++);
4832 gen_shifti(s, op, ot, opreg, shift);
4847 case 0x1a4: /* shld imm */
4851 case 0x1a5: /* shld cl */
4855 case 0x1ac: /* shrd imm */
4859 case 0x1ad: /* shrd cl */
4863 ot = dflag + OT_WORD;
4864 modrm = ldub_code(s->pc++);
4865 mod = (modrm >> 6) & 3;
4866 rm = (modrm & 7) | REX_B(s);
4867 reg = ((modrm >> 3) & 7) | rex_r;
4869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4874 gen_op_mov_TN_reg(ot, 1, reg);
4877 val = ldub_code(s->pc++);
4878 tcg_gen_movi_tl(cpu_T3, val);
4880 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4882 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4885 /************************/
4888 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4889 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4890 /* XXX: what to do if illegal op ? */
4891 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4894 modrm = ldub_code(s->pc++);
4895 mod = (modrm >> 6) & 3;
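/* op packs the low 3 bits of the escape opcode (0xd8..0xdf) with the
   modrm reg field, giving the 6 bit index used by the switches below */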
4897 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4900 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4902 case 0x00 ... 0x07: /* fxxxs */
4903 case 0x10 ... 0x17: /* fixxxl */
4904 case 0x20 ... 0x27: /* fxxxl */
4905 case 0x30 ... 0x37: /* fixxx */
4912 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4913 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4914 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4917 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4918 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4919 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4922 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4923 (s->mem_index >> 2) - 1);
4924 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4928 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4929 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4930 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4934 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4936 /* fcomp needs pop */
4937 tcg_gen_helper_0_0(helper_fpop);
4941 case 0x08: /* flds */
4942 case 0x0a: /* fsts */
4943 case 0x0b: /* fstps */
4944 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4945 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4946 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4951 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4952 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4953 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4956 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4957 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4958 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4961 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4962 (s->mem_index >> 2) - 1);
4963 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4967 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4968 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4969 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4974 /* XXX: the corresponding CPUID bit must be tested ! */
4977 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4978 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4979 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4982 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4983 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4984 (s->mem_index >> 2) - 1);
4988 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4989 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4990 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4993 tcg_gen_helper_0_0(helper_fpop);
4998 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4999 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5000 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5003 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5004 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5005 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5008 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5009 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5010 (s->mem_index >> 2) - 1);
5014 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5015 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5016 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5020 tcg_gen_helper_0_0(helper_fpop);
5024 case 0x0c: /* fldenv mem */
5025 if (s->cc_op != CC_OP_DYNAMIC)
5026 gen_op_set_cc_op(s->cc_op);
5027 gen_jmp_im(pc_start - s->cs_base);
5028 tcg_gen_helper_0_2(helper_fldenv,
5029 cpu_A0, tcg_const_i32(s->dflag));
5031 case 0x0d: /* fldcw mem */
5032 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5033 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5034 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5036 case 0x0e: /* fnstenv mem */
5037 if (s->cc_op != CC_OP_DYNAMIC)
5038 gen_op_set_cc_op(s->cc_op);
5039 gen_jmp_im(pc_start - s->cs_base);
5040 tcg_gen_helper_0_2(helper_fstenv,
5041 cpu_A0, tcg_const_i32(s->dflag));
5043 case 0x0f: /* fnstcw mem */
5044 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5045 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5046 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5048 case 0x1d: /* fldt mem */
5049 if (s->cc_op != CC_OP_DYNAMIC)
5050 gen_op_set_cc_op(s->cc_op);
5051 gen_jmp_im(pc_start - s->cs_base);
5052 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5054 case 0x1f: /* fstpt mem */
5055 if (s->cc_op != CC_OP_DYNAMIC)
5056 gen_op_set_cc_op(s->cc_op);
5057 gen_jmp_im(pc_start - s->cs_base);
5058 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5059 tcg_gen_helper_0_0(helper_fpop);
5061 case 0x2c: /* frstor mem */
5062 if (s->cc_op != CC_OP_DYNAMIC)
5063 gen_op_set_cc_op(s->cc_op);
5064 gen_jmp_im(pc_start - s->cs_base);
5065 tcg_gen_helper_0_2(helper_frstor,
5066 cpu_A0, tcg_const_i32(s->dflag));
5068 case 0x2e: /* fnsave mem */
5069 if (s->cc_op != CC_OP_DYNAMIC)
5070 gen_op_set_cc_op(s->cc_op);
5071 gen_jmp_im(pc_start - s->cs_base);
5072 tcg_gen_helper_0_2(helper_fsave,
5073 cpu_A0, tcg_const_i32(s->dflag));
5075 case 0x2f: /* fnstsw mem */
5076 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5077 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5078 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5080 case 0x3c: /* fbld */
5081 if (s->cc_op != CC_OP_DYNAMIC)
5082 gen_op_set_cc_op(s->cc_op);
5083 gen_jmp_im(pc_start - s->cs_base);
5084 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5086 case 0x3e: /* fbstp */
5087 if (s->cc_op != CC_OP_DYNAMIC)
5088 gen_op_set_cc_op(s->cc_op);
5089 gen_jmp_im(pc_start - s->cs_base);
5090 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5091 tcg_gen_helper_0_0(helper_fpop);
5093 case 0x3d: /* fildll */
5094 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5095 (s->mem_index >> 2) - 1);
5096 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5098 case 0x3f: /* fistpll */
5099 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5100 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5101 (s->mem_index >> 2) - 1);
5102 tcg_gen_helper_0_0(helper_fpop);
5108 /* register float ops */
5112 case 0x08: /* fld sti */
5113 tcg_gen_helper_0_0(helper_fpush);
5114 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5116 case 0x09: /* fxchg sti */
5117 case 0x29: /* fxchg4 sti, undocumented op */
5118 case 0x39: /* fxchg7 sti, undocumented op */
5119 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5121 case 0x0a: /* grp d9/2 */
5124 /* check exceptions (FreeBSD FPU probe) */
5125 if (s->cc_op != CC_OP_DYNAMIC)
5126 gen_op_set_cc_op(s->cc_op);
5127 gen_jmp_im(pc_start - s->cs_base);
5128 tcg_gen_helper_0_0(helper_fwait);
5134 case 0x0c: /* grp d9/4 */
5137 tcg_gen_helper_0_0(helper_fchs_ST0);
5140 tcg_gen_helper_0_0(helper_fabs_ST0);
5143 tcg_gen_helper_0_0(helper_fldz_FT0);
5144 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5147 tcg_gen_helper_0_0(helper_fxam_ST0);
5153 case 0x0d: /* grp d9/5 */
5157 tcg_gen_helper_0_0(helper_fpush);
5158 tcg_gen_helper_0_0(helper_fld1_ST0);
5161 tcg_gen_helper_0_0(helper_fpush);
5162 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5165 tcg_gen_helper_0_0(helper_fpush);
5166 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5169 tcg_gen_helper_0_0(helper_fpush);
5170 tcg_gen_helper_0_0(helper_fldpi_ST0);
5173 tcg_gen_helper_0_0(helper_fpush);
5174 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5177 tcg_gen_helper_0_0(helper_fpush);
5178 tcg_gen_helper_0_0(helper_fldln2_ST0);
5181 tcg_gen_helper_0_0(helper_fpush);
5182 tcg_gen_helper_0_0(helper_fldz_ST0);
5189 case 0x0e: /* grp d9/6 */
5192 tcg_gen_helper_0_0(helper_f2xm1);
5195 tcg_gen_helper_0_0(helper_fyl2x);
5198 tcg_gen_helper_0_0(helper_fptan);
5200 case 3: /* fpatan */
5201 tcg_gen_helper_0_0(helper_fpatan);
5203 case 4: /* fxtract */
5204 tcg_gen_helper_0_0(helper_fxtract);
5206 case 5: /* fprem1 */
5207 tcg_gen_helper_0_0(helper_fprem1);
5209 case 6: /* fdecstp */
5210 tcg_gen_helper_0_0(helper_fdecstp);
5213 case 7: /* fincstp */
5214 tcg_gen_helper_0_0(helper_fincstp);
5218 case 0x0f: /* grp d9/7 */
5221 tcg_gen_helper_0_0(helper_fprem);
5223 case 1: /* fyl2xp1 */
5224 tcg_gen_helper_0_0(helper_fyl2xp1);
5227 tcg_gen_helper_0_0(helper_fsqrt);
5229 case 3: /* fsincos */
5230 tcg_gen_helper_0_0(helper_fsincos);
5232 case 5: /* fscale */
5233 tcg_gen_helper_0_0(helper_fscale);
5235 case 4: /* frndint */
5236 tcg_gen_helper_0_0(helper_frndint);
5239 tcg_gen_helper_0_0(helper_fsin);
5243 tcg_gen_helper_0_0(helper_fcos);
5247 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5248 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5249 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5255 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5257 tcg_gen_helper_0_0(helper_fpop);
5259 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5260 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5264 case 0x02: /* fcom */
5265 case 0x22: /* fcom2, undocumented op */
5266 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5267 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5269 case 0x03: /* fcomp */
5270 case 0x23: /* fcomp3, undocumented op */
5271 case 0x32: /* fcomp5, undocumented op */
5272 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5273 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5274 tcg_gen_helper_0_0(helper_fpop);
5276 case 0x15: /* da/5 */
5278 case 1: /* fucompp */
5279 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5280 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5281 tcg_gen_helper_0_0(helper_fpop);
5282 tcg_gen_helper_0_0(helper_fpop);
5290 case 0: /* feni (287 only, just do nop here) */
5292 case 1: /* fdisi (287 only, just do nop here) */
5295 tcg_gen_helper_0_0(helper_fclex);
5297 case 3: /* fninit */
5298 tcg_gen_helper_0_0(helper_fninit);
5300 case 4: /* fsetpm (287 only, just do nop here) */
5306 case 0x1d: /* fucomi */
5307 if (s->cc_op != CC_OP_DYNAMIC)
5308 gen_op_set_cc_op(s->cc_op);
5309 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5310 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5311 s->cc_op = CC_OP_EFLAGS;
5313 case 0x1e: /* fcomi */
5314 if (s->cc_op != CC_OP_DYNAMIC)
5315 gen_op_set_cc_op(s->cc_op);
5316 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5317 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5318 s->cc_op = CC_OP_EFLAGS;
5320 case 0x28: /* ffree sti */
5321 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5323 case 0x2a: /* fst sti */
5324 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5326 case 0x2b: /* fstp sti */
5327 case 0x0b: /* fstp1 sti, undocumented op */
5328 case 0x3a: /* fstp8 sti, undocumented op */
5329 case 0x3b: /* fstp9 sti, undocumented op */
5330 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5331 tcg_gen_helper_0_0(helper_fpop);
5333 case 0x2c: /* fucom st(i) */
5334 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5335 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5337 case 0x2d: /* fucomp st(i) */
5338 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5339 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5340 tcg_gen_helper_0_0(helper_fpop);
5342 case 0x33: /* de/3 */
5344 case 1: /* fcompp */
5345 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5346 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5347 tcg_gen_helper_0_0(helper_fpop);
5348 tcg_gen_helper_0_0(helper_fpop);
5354 case 0x38: /* ffreep sti, undocumented op */
5355 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5356 tcg_gen_helper_0_0(helper_fpop);
5358 case 0x3c: /* df/4 */
5361 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5362 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5363 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5369 case 0x3d: /* fucomip */
5370 if (s->cc_op != CC_OP_DYNAMIC)
5371 gen_op_set_cc_op(s->cc_op);
5372 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5373 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5374 tcg_gen_helper_0_0(helper_fpop);
5375 s->cc_op = CC_OP_EFLAGS;
5377 case 0x3e: /* fcomip */
5378 if (s->cc_op != CC_OP_DYNAMIC)
5379 gen_op_set_cc_op(s->cc_op);
5380 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5381 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5382 tcg_gen_helper_0_0(helper_fpop);
5383 s->cc_op = CC_OP_EFLAGS;
5385 case 0x10 ... 0x13: /* fcmovxx */
5389 static const uint8_t fcmov_cc[8] = {
5395 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
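/* the condition value ends up in T0; when it is zero, the fmov to ST0 below
   is skipped via a forward branch */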
5397 l1 = gen_new_label();
5398 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5399 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5408 /************************/
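/* string ops */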
5411 case 0xa4: /* movsS */
5416 ot = dflag + OT_WORD;
5418 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5419 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5425 case 0xaa: /* stosS */
5430 ot = dflag + OT_WORD;
5432 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5433 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5438 case 0xac: /* lodsS */
5443 ot = dflag + OT_WORD;
5444 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5445 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5450 case 0xae: /* scasS */
5455 ot = dflag + OT_WORD;
5456 if (prefixes & PREFIX_REPNZ) {
5457 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5458 } else if (prefixes & PREFIX_REPZ) {
5459 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5462 s->cc_op = CC_OP_SUBB + ot;
5466 case 0xa6: /* cmpsS */
5471 ot = dflag + OT_WORD;
5472 if (prefixes & PREFIX_REPNZ) {
5473 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5474 } else if (prefixes & PREFIX_REPZ) {
5475 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5478 s->cc_op = CC_OP_SUBB + ot;
5481 case 0x6c: /* insS */
5486 ot = dflag ? OT_LONG : OT_WORD;
5487 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5488 gen_op_andl_T0_ffff();
5489 gen_check_io(s, ot, pc_start - s->cs_base,
5490 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
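/* gen_check_io checks the I/O permission bitmap (raising #GP on failure)
   and handles the SVM I/O intercept before the access */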
5491 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5492 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5497 case 0x6e: /* outsS */
5502 ot = dflag ? OT_LONG : OT_WORD;
5503 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5504 gen_op_andl_T0_ffff();
5505 gen_check_io(s, ot, pc_start - s->cs_base,
5506 svm_is_rep(prefixes) | 4);
5507 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5508 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5514 /************************/
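/* port I/O */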
5522 ot = dflag ? OT_LONG : OT_WORD;
5523 val = ldub_code(s->pc++);
5524 gen_op_movl_T0_im(val);
5525 gen_check_io(s, ot, pc_start - s->cs_base,
5526 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5527 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5528 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5529 gen_op_mov_reg_T1(ot, R_EAX);
5536 ot = dflag ? OT_LONG : OT_WORD;
5537 val = ldub_code(s->pc++);
5538 gen_op_movl_T0_im(val);
5539 gen_check_io(s, ot, pc_start - s->cs_base,
5540 svm_is_rep(prefixes));
5541 gen_op_mov_TN_reg(ot, 1, R_EAX);
5543 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5544 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5545 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5546 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5553 ot = dflag ? OT_LONG : OT_WORD;
5554 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5555 gen_op_andl_T0_ffff();
5556 gen_check_io(s, ot, pc_start - s->cs_base,
5557 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5558 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5559 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5560 gen_op_mov_reg_T1(ot, R_EAX);
5567 ot = dflag ? OT_LONG : OT_WORD;
5568 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5569 gen_op_andl_T0_ffff();
5570 gen_check_io(s, ot, pc_start - s->cs_base,
5571 svm_is_rep(prefixes));
5572 gen_op_mov_TN_reg(ot, 1, R_EAX);
5574 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5575 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5576 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5577 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5580 /************************/
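/* control */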
5582 case 0xc2: /* ret im */
5583 val = ldsw_code(s->pc);
5586 if (CODE64(s) && s->dflag)
5588 gen_stack_update(s, val + (2 << s->dflag));
5590 gen_op_andl_T0_ffff();
5594 case 0xc3: /* ret */
5598 gen_op_andl_T0_ffff();
5602 case 0xca: /* lret im */
5603 val = ldsw_code(s->pc);
5606 if (s->pe && !s->vm86) {
5607 if (s->cc_op != CC_OP_DYNAMIC)
5608 gen_op_set_cc_op(s->cc_op);
5609 gen_jmp_im(pc_start - s->cs_base);
5610 tcg_gen_helper_0_2(helper_lret_protected,
5611 tcg_const_i32(s->dflag),
5612 tcg_const_i32(val));
5616 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5618 gen_op_andl_T0_ffff();
5619 /* NOTE: keeping EIP updated is not a problem in case of exception */
5623 gen_op_addl_A0_im(2 << s->dflag);
5624 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5625 gen_op_movl_seg_T0_vm(R_CS);
5626 /* add stack offset */
5627 gen_stack_update(s, val + (4 << s->dflag));
5631 case 0xcb: /* lret */
5634 case 0xcf: /* iret */
5635 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
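/* real mode uses helper_iret_real; vm86 mode requires IOPL 3 (otherwise #GP);
   protected mode goes through helper_iret_protected with the return address */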
5639 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5640 s->cc_op = CC_OP_EFLAGS;
5641 } else if (s->vm86) {
5643 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5645 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5646 s->cc_op = CC_OP_EFLAGS;
5649 if (s->cc_op != CC_OP_DYNAMIC)
5650 gen_op_set_cc_op(s->cc_op);
5651 gen_jmp_im(pc_start - s->cs_base);
5652 tcg_gen_helper_0_2(helper_iret_protected,
5653 tcg_const_i32(s->dflag),
5654 tcg_const_i32(s->pc - s->cs_base));
5655 s->cc_op = CC_OP_EFLAGS;
5659 case 0xe8: /* call im */
5662 tval = (int32_t)insn_get(s, OT_LONG);
5664 tval = (int16_t)insn_get(s, OT_WORD);
5665 next_eip = s->pc - s->cs_base;
5669 gen_movtl_T0_im(next_eip);
5674 case 0x9a: /* lcall im */
5676 unsigned int selector, offset;
5680 ot = dflag ? OT_LONG : OT_WORD;
5681 offset = insn_get(s, ot);
5682 selector = insn_get(s, OT_WORD);
5684 gen_op_movl_T0_im(selector);
5685 gen_op_movl_T1_imu(offset);
5688 case 0xe9: /* jmp im */
5690 tval = (int32_t)insn_get(s, OT_LONG);
5692 tval = (int16_t)insn_get(s, OT_WORD);
5693 tval += s->pc - s->cs_base;
5698 case 0xea: /* ljmp im */
5700 unsigned int selector, offset;
5704 ot = dflag ? OT_LONG : OT_WORD;
5705 offset = insn_get(s, ot);
5706 selector = insn_get(s, OT_WORD);
5708 gen_op_movl_T0_im(selector);
5709 gen_op_movl_T1_imu(offset);
5712 case 0xeb: /* jmp Jb */
5713 tval = (int8_t)insn_get(s, OT_BYTE);
5714 tval += s->pc - s->cs_base;
5719 case 0x70 ... 0x7f: /* jcc Jb */
5720 tval = (int8_t)insn_get(s, OT_BYTE);
5722 case 0x180 ... 0x18f: /* jcc Jv */
5724 tval = (int32_t)insn_get(s, OT_LONG);
5726 tval = (int16_t)insn_get(s, OT_WORD);
5729 next_eip = s->pc - s->cs_base;
5733 gen_jcc(s, b, tval, next_eip);
5736 case 0x190 ... 0x19f: /* setcc Gv */
5737 modrm = ldub_code(s->pc++);
5739 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5741 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5744 ot = dflag + OT_WORD;
5745 modrm = ldub_code(s->pc++);
5746 reg = ((modrm >> 3) & 7) | rex_r;
5747 mod = (modrm >> 6) & 3;
5749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5750 gen_op_ld_T1_A0(ot + s->mem_index);
5752 rm = (modrm & 7) | REX_B(s);
5753 gen_op_mov_TN_reg(ot, 1, rm);
5755 if (s->cc_op != CC_OP_DYNAMIC)
5756 gen_op_set_cc_op(s->cc_op);
5757 #ifdef TARGET_X86_64
5758 if (ot == OT_LONG) {
5759 /* XXX: specific Intel behaviour ? */
5760 l1 = gen_new_label();
5761 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5762 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5764 tcg_gen_movi_tl(cpu_tmp0, 0);
5765 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5769 l1 = gen_new_label();
5770 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5771 gen_op_mov_reg_T1(ot, reg);
5777 /************************/
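/* flags */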
5779 case 0x9c: /* pushf */
5780 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5782 if (s->vm86 && s->iopl != 3) {
5783 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5785 if (s->cc_op != CC_OP_DYNAMIC)
5786 gen_op_set_cc_op(s->cc_op);
5787 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5791 case 0x9d: /* popf */
5792 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5794 if (s->vm86 && s->iopl != 3) {
5795 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
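/* which eflags bits may be written depends on privilege: CPL 0 may also
   change IOPL, CPL <= IOPL may change IF, otherwise neither; the 16-bit
   forms mask the written bits to the low word */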
5800 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5801 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5803 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5804 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5807 if (s->cpl <= s->iopl) {
5809 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5810 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5812 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5813 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5817 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5818 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5820 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5821 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5826 s->cc_op = CC_OP_EFLAGS;
5827 /* abort translation because TF flag may change */
5828 gen_jmp_im(s->pc - s->cs_base);
5832 case 0x9e: /* sahf */
5833 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5835 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5836 if (s->cc_op != CC_OP_DYNAMIC)
5837 gen_op_set_cc_op(s->cc_op);
5838 gen_compute_eflags(cpu_cc_src);
5839 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5840 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5841 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5842 s->cc_op = CC_OP_EFLAGS;
5844 case 0x9f: /* lahf */
5845 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5847 if (s->cc_op != CC_OP_DYNAMIC)
5848 gen_op_set_cc_op(s->cc_op);
5849 gen_compute_eflags(cpu_T[0]);
5850 /* Note: gen_compute_eflags() only gives the condition codes */
5851 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5852 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5854 case 0xf5: /* cmc */
5855 if (s->cc_op != CC_OP_DYNAMIC)
5856 gen_op_set_cc_op(s->cc_op);
5857 gen_compute_eflags(cpu_cc_src);
5858 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5859 s->cc_op = CC_OP_EFLAGS;
5861 case 0xf8: /* clc */
5862 if (s->cc_op != CC_OP_DYNAMIC)
5863 gen_op_set_cc_op(s->cc_op);
5864 gen_compute_eflags(cpu_cc_src);
5865 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5866 s->cc_op = CC_OP_EFLAGS;
5868 case 0xf9: /* stc */
5869 if (s->cc_op != CC_OP_DYNAMIC)
5870 gen_op_set_cc_op(s->cc_op);
5871 gen_compute_eflags(cpu_cc_src);
5872 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5873 s->cc_op = CC_OP_EFLAGS;
5875 case 0xfc: /* cld */
5876 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5877 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5879 case 0xfd: /* std */
5880 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5881 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5884 /************************/
5885 /* bit operations */
5886 case 0x1ba: /* bt/bts/btr/btc Gv, im */
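/* group 8: the reg field of the modrm byte selects bt/bts/btr/btc; the bit
   index comes from an immediate byte */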
5887 ot = dflag + OT_WORD;
5888 modrm = ldub_code(s->pc++);
5889 op = (modrm >> 3) & 7;
5890 mod = (modrm >> 6) & 3;
5891 rm = (modrm & 7) | REX_B(s);
5894 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5895 gen_op_ld_T0_A0(ot + s->mem_index);
5897 gen_op_mov_TN_reg(ot, 0, rm);
5900 val = ldub_code(s->pc++);
5901 gen_op_movl_T1_im(val);
5906 case 0x1a3: /* bt Gv, Ev */
5909 case 0x1ab: /* bts */
5912 case 0x1b3: /* btr */
5915 case 0x1bb: /* btc */
5918 ot = dflag + OT_WORD;
5919 modrm = ldub_code(s->pc++);
5920 reg = ((modrm >> 3) & 7) | rex_r;
5921 mod = (modrm >> 6) & 3;
5922 rm = (modrm & 7) | REX_B(s);
5923 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5925 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5926 /* specific case: we need to add a displacement */
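/* the bit index may address memory outside the operand: advance A0 by
   (index / operand width) operand-sized units */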
5927 gen_exts(ot, cpu_T[1]);
5928 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5929 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5930 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5931 gen_op_ld_T0_A0(ot + s->mem_index);
5933 gen_op_mov_TN_reg(ot, 0, rm);
5936 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
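/* CF is derived from the addressed bit (cc_op = CC_OP_SARB + ot below);
   bts, btr and btc additionally set, clear or complement that bit */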
5939 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5940 tcg_gen_movi_tl(cpu_cc_dst, 0);
5943 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5944 tcg_gen_movi_tl(cpu_tmp0, 1);
5945 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5946 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5949 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5950 tcg_gen_movi_tl(cpu_tmp0, 1);
5951 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5952 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5953 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5957 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5958 tcg_gen_movi_tl(cpu_tmp0, 1);
5959 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5960 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5963 s->cc_op = CC_OP_SARB + ot;
5966 gen_op_st_T0_A0(ot + s->mem_index);
5968 gen_op_mov_reg_T0(ot, rm);
5969 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5970 tcg_gen_movi_tl(cpu_cc_dst, 0);
5973 case 0x1bc: /* bsf */
5974 case 0x1bd: /* bsr */
5977 ot = dflag + OT_WORD;
5978 modrm = ldub_code(s->pc++);
5979 reg = ((modrm >> 3) & 7) | rex_r;
5980 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5981 gen_extu(ot, cpu_T[0]);
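/* when the source is zero, ZF is set (cc_dst stays 0) and the destination
   is left unchanged; otherwise the helper returns the bit index and ZF is
   cleared */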
5982 label1 = gen_new_label();
5983 tcg_gen_movi_tl(cpu_cc_dst, 0);
5984 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5986 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5988 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5990 gen_op_mov_reg_T0(ot, reg);
5991 tcg_gen_movi_tl(cpu_cc_dst, 1);
5992 gen_set_label(label1);
5993 tcg_gen_discard_tl(cpu_cc_src);
5994 s->cc_op = CC_OP_LOGICB + ot;
5997 /************************/
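/* bcd */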
5999 case 0x27: /* daa */
6002 if (s->cc_op != CC_OP_DYNAMIC)
6003 gen_op_set_cc_op(s->cc_op);
6004 tcg_gen_helper_0_0(helper_daa);
6005 s->cc_op = CC_OP_EFLAGS;
6007 case 0x2f: /* das */
6010 if (s->cc_op != CC_OP_DYNAMIC)
6011 gen_op_set_cc_op(s->cc_op);
6012 tcg_gen_helper_0_0(helper_das);
6013 s->cc_op = CC_OP_EFLAGS;
6015 case 0x37: /* aaa */
6018 if (s->cc_op != CC_OP_DYNAMIC)
6019 gen_op_set_cc_op(s->cc_op);
6020 tcg_gen_helper_0_0(helper_aaa);
6021 s->cc_op = CC_OP_EFLAGS;
6023 case 0x3f: /* aas */
6026 if (s->cc_op != CC_OP_DYNAMIC)
6027 gen_op_set_cc_op(s->cc_op);
6028 tcg_gen_helper_0_0(helper_aas);
6029 s->cc_op = CC_OP_EFLAGS;
6031 case 0xd4: /* aam */
6034 val = ldub_code(s->pc++);
6036 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6038 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6039 s->cc_op = CC_OP_LOGICB;
6042 case 0xd5: /* aad */
6045 val = ldub_code(s->pc++);
6046 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6047 s->cc_op = CC_OP_LOGICB;
6049 /************************/
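/* misc */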
6051 case 0x90: /* nop */
6052 /* XXX: xchg + rex handling */
6053 /* XXX: correct lock test for all insn */
6054 if (prefixes & PREFIX_LOCK)
6056 if (prefixes & PREFIX_REPZ) {
6057 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6060 case 0x9b: /* fwait */
6061 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6062 (HF_MP_MASK | HF_TS_MASK)) {
6063 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6065 if (s->cc_op != CC_OP_DYNAMIC)
6066 gen_op_set_cc_op(s->cc_op);
6067 gen_jmp_im(pc_start - s->cs_base);
6068 tcg_gen_helper_0_0(helper_fwait);
6071 case 0xcc: /* int3 */
6072 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6074 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6076 case 0xcd: /* int N */
6077 val = ldub_code(s->pc++);
6078 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6080 if (s->vm86 && s->iopl != 3) {
6081 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6083 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6086 case 0xce: /* into */
6089 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6091 if (s->cc_op != CC_OP_DYNAMIC)
6092 gen_op_set_cc_op(s->cc_op);
6093 gen_jmp_im(pc_start - s->cs_base);
6094 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6096 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6097 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6100 gen_debug(s, pc_start - s->cs_base);
6103 tb_flush(cpu_single_env);
6104 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6107 case 0xfa: /* cli */
6109 if (s->cpl <= s->iopl) {
6110 tcg_gen_helper_0_0(helper_cli);
6112 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6116 tcg_gen_helper_0_0(helper_cli);
6118 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6122 case 0xfb: /* sti */
6124 if (s->cpl <= s->iopl) {
6126 tcg_gen_helper_0_0(helper_sti);
6127 /* interrupts become enabled only after the insn following sti */
6128 /* if several instructions disable interrupts, only the first one does it */
6130 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6131 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6132 /* give a chance to handle pending irqs */
6133 gen_jmp_im(s->pc - s->cs_base);
6136 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6142 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6146 case 0x62: /* bound */
6149 ot = dflag ? OT_LONG : OT_WORD;
6150 modrm = ldub_code(s->pc++);
6151 reg = (modrm >> 3) & 7;
6152 mod = (modrm >> 6) & 3;
6155 gen_op_mov_TN_reg(ot, 0, reg);
6156 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6157 gen_jmp_im(pc_start - s->cs_base);
6158 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6160 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6162 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6164 case 0x1c8 ... 0x1cf: /* bswap reg */
6165 reg = (b & 7) | REX_B(s);
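/* 64-bit operands are swapped with bswap_i64; 32-bit operands are swapped
   as 32-bit values, via a temporary when target registers are 64 bits wide */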
6166 #ifdef TARGET_X86_64
6168 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6169 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6170 gen_op_mov_reg_T0(OT_QUAD, reg);
6174 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6176 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6177 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6178 tcg_gen_bswap_i32(tmp0, tmp0);
6179 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6180 gen_op_mov_reg_T0(OT_LONG, reg);
6184 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6185 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6186 gen_op_mov_reg_T0(OT_LONG, reg);
6190 case 0xd6: /* salc */
6193 if (s->cc_op != CC_OP_DYNAMIC)
6194 gen_op_set_cc_op(s->cc_op);
6195 gen_compute_eflags_c(cpu_T[0]);
6196 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6197 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6199 case 0xe0: /* loopnz */
6200 case 0xe1: /* loopz */
6201 case 0xe2: /* loop */
6202 case 0xe3: /* jecxz */
6206 tval = (int8_t)insn_get(s, OT_BYTE);
6207 next_eip = s->pc - s->cs_base;
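/* the low two opcode bits select loopnz, loopz, loop or jecxz; all but
   jecxz decrement ECX before testing it */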
6212 l1 = gen_new_label();
6213 l2 = gen_new_label();
6214 l3 = gen_new_label();
6217 case 0: /* loopnz */
6219 if (s->cc_op != CC_OP_DYNAMIC)
6220 gen_op_set_cc_op(s->cc_op);
6221 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6222 gen_op_jz_ecx(s->aflag, l3);
6223 gen_compute_eflags(cpu_tmp0);
6224 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6226 tcg_gen_brcond_tl(TCG_COND_EQ,
6227 cpu_tmp0, tcg_const_tl(0), l1);
6229 tcg_gen_brcond_tl(TCG_COND_NE,
6230 cpu_tmp0, tcg_const_tl(0), l1);
6234 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6235 gen_op_jnz_ecx(s->aflag, l1);
6239 gen_op_jz_ecx(s->aflag, l1);
6244 gen_jmp_im(next_eip);
6253 case 0x130: /* wrmsr */
6254 case 0x132: /* rdmsr */
6256 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6260 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6261 tcg_gen_helper_0_0(helper_rdmsr);
6263 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6264 tcg_gen_helper_0_0(helper_wrmsr);
6270 case 0x131: /* rdtsc */
6271 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6273 gen_jmp_im(pc_start - s->cs_base);
6274 tcg_gen_helper_0_0(helper_rdtsc);
6276 case 0x133: /* rdpmc */
6277 gen_jmp_im(pc_start - s->cs_base);
6278 tcg_gen_helper_0_0(helper_rdpmc);
6280 case 0x134: /* sysenter */
6284 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6286 if (s->cc_op != CC_OP_DYNAMIC) {
6287 gen_op_set_cc_op(s->cc_op);
6288 s->cc_op = CC_OP_DYNAMIC;
6290 gen_jmp_im(pc_start - s->cs_base);
6291 tcg_gen_helper_0_0(helper_sysenter);
6295 case 0x135: /* sysexit */
6299 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6301 if (s->cc_op != CC_OP_DYNAMIC) {
6302 gen_op_set_cc_op(s->cc_op);
6303 s->cc_op = CC_OP_DYNAMIC;
6305 gen_jmp_im(pc_start - s->cs_base);
6306 tcg_gen_helper_0_0(helper_sysexit);
6310 #ifdef TARGET_X86_64
6311 case 0x105: /* syscall */
6312 /* XXX: is it usable in real mode ? */
6313 if (s->cc_op != CC_OP_DYNAMIC) {
6314 gen_op_set_cc_op(s->cc_op);
6315 s->cc_op = CC_OP_DYNAMIC;
6317 gen_jmp_im(pc_start - s->cs_base);
6318 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6321 case 0x107: /* sysret */
6323 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6325 if (s->cc_op != CC_OP_DYNAMIC) {
6326 gen_op_set_cc_op(s->cc_op);
6327 s->cc_op = CC_OP_DYNAMIC;
6329 gen_jmp_im(pc_start - s->cs_base);
6330 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6331 /* condition codes are modified only in long mode */
6333 s->cc_op = CC_OP_EFLAGS;
6338 case 0x1a2: /* cpuid */
6339 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6341 tcg_gen_helper_0_0(helper_cpuid);
6343 case 0xf4: /* hlt */
6345 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6347 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6349 if (s->cc_op != CC_OP_DYNAMIC)
6350 gen_op_set_cc_op(s->cc_op);
6351 gen_jmp_im(s->pc - s->cs_base);
6352 tcg_gen_helper_0_0(helper_hlt);
6357 modrm = ldub_code(s->pc++);
6358 mod = (modrm >> 6) & 3;
6359 op = (modrm >> 3) & 7;
6362 if (!s->pe || s->vm86)
6364 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6366 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6370 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6373 if (!s->pe || s->vm86)
6376 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6378 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6380 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6381 gen_jmp_im(pc_start - s->cs_base);
6382 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6383 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6387 if (!s->pe || s->vm86)
6389 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6391 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6395 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6398 if (!s->pe || s->vm86)
6401 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6403 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6405 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6406 gen_jmp_im(pc_start - s->cs_base);
6407 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6408 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6413 if (!s->pe || s->vm86)
6415 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6416 if (s->cc_op != CC_OP_DYNAMIC)
6417 gen_op_set_cc_op(s->cc_op);
6419 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6421 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6422 s->cc_op = CC_OP_EFLAGS;
6429 modrm = ldub_code(s->pc++);
6430 mod = (modrm >> 6) & 3;
6431 op = (modrm >> 3) & 7;
6437 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6439 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
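/* sgdt: store the 16-bit limit, then the base; outside 32/64-bit operand
   size the base is truncated to 24 bits */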
6440 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6441 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6442 gen_add_A0_im(s, 2);
6443 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6445 gen_op_andl_T0_im(0xffffff);
6446 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6451 case 0: /* monitor */
6452 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6455 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6457 gen_jmp_im(pc_start - s->cs_base);
6458 #ifdef TARGET_X86_64
6459 if (s->aflag == 2) {
6460 gen_op_movq_A0_reg(R_EAX);
6464 gen_op_movl_A0_reg(R_EAX);
6466 gen_op_andl_A0_ffff();
6468 gen_add_A0_ds_seg(s);
6469 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6472 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6475 if (s->cc_op != CC_OP_DYNAMIC) {
6476 gen_op_set_cc_op(s->cc_op);
6477 s->cc_op = CC_OP_DYNAMIC;
6479 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6481 gen_jmp_im(s->pc - s->cs_base);
6482 tcg_gen_helper_0_0(helper_mwait);
6489 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6491 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6492 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6493 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6494 gen_add_A0_im(s, 2);
6495 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6497 gen_op_andl_T0_im(0xffffff);
6498 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6506 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6508 if (s->cc_op != CC_OP_DYNAMIC)
6509 gen_op_set_cc_op(s->cc_op);
6510 gen_jmp_im(s->pc - s->cs_base);
6511 tcg_gen_helper_0_0(helper_vmrun);
6512 s->cc_op = CC_OP_EFLAGS;
6515 case 1: /* VMMCALL */
6516 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6518 /* FIXME: cause #UD if hflags & SVM */
6519 tcg_gen_helper_0_0(helper_vmmcall);
6521 case 2: /* VMLOAD */
6522 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6524 tcg_gen_helper_0_0(helper_vmload);
6526 case 3: /* VMSAVE */
6527 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6529 tcg_gen_helper_0_0(helper_vmsave);
6532 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6534 tcg_gen_helper_0_0(helper_stgi);
6537 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6539 tcg_gen_helper_0_0(helper_clgi);
6541 case 6: /* SKINIT */
6542 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6544 tcg_gen_helper_0_0(helper_skinit);
6546 case 7: /* INVLPGA */
6547 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6549 tcg_gen_helper_0_0(helper_invlpga);
6554 } else if (s->cpl != 0) {
6555 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6557 if (gen_svm_check_intercept(s, pc_start,
6558 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6560 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
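/* lgdt/lidt: load the new 16-bit limit, then the base (truncated to 24 bits
   with 16-bit operand size), and store them into gdt or idt */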
6561 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6562 gen_add_A0_im(s, 2);
6563 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6565 gen_op_andl_T0_im(0xffffff);
6567 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6568 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6570 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6571 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6576 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6578 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6579 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6583 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6585 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6587 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6588 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6589 gen_jmp_im(s->pc - s->cs_base);
6593 case 7: /* invlpg */
6595 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6598 #ifdef TARGET_X86_64
6599 if (CODE64(s) && rm == 0) {
6601 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6602 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6603 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6604 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6611 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6613 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6614 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6615 gen_jmp_im(s->pc - s->cs_base);
6624 case 0x108: /* invd */
6625 case 0x109: /* wbinvd */
6627 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6629 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6634 case 0x63: /* arpl or movslS (x86_64) */
6635 #ifdef TARGET_X86_64
6638 /* d_ot is the size of the destination */
6639 d_ot = dflag + OT_WORD;
6641 modrm = ldub_code(s->pc++);
6642 reg = ((modrm >> 3) & 7) | rex_r;
6643 mod = (modrm >> 6) & 3;
6644 rm = (modrm & 7) | REX_B(s);
6647 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6649 if (d_ot == OT_QUAD)
6650 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6651 gen_op_mov_reg_T0(d_ot, reg);
6653 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6654 if (d_ot == OT_QUAD) {
6655 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6657 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6659 gen_op_mov_reg_T0(d_ot, reg);
6665 if (!s->pe || s->vm86)
6668 modrm = ldub_code(s->pc++);
6669 reg = (modrm >> 3) & 7;
6670 mod = (modrm >> 6) & 3;
6673 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6674 gen_op_ld_T0_A0(ot + s->mem_index);
6676 gen_op_mov_TN_reg(ot, 0, rm);
6678 gen_op_mov_TN_reg(ot, 1, reg);
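/* arpl: if the destination selector's RPL (low two bits) is below the
   source's, raise it to the source RPL and set ZF; T3 records the new ZF */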
6679 tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
6680 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
6681 tcg_gen_movi_tl(cpu_T3, 0);
6682 label1 = gen_new_label();
6683 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
6684 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
6685 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
6686 tcg_gen_movi_tl(cpu_T3, CC_Z);
6687 gen_set_label(label1);
6689 gen_op_st_T0_A0(ot + s->mem_index);
6691 gen_op_mov_reg_T0(ot, rm);
6693 if (s->cc_op != CC_OP_DYNAMIC)
6694 gen_op_set_cc_op(s->cc_op);
6695 gen_compute_eflags(cpu_cc_src);
6696 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6697 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
6698 s->cc_op = CC_OP_EFLAGS;
6701 case 0x102: /* lar */
6702 case 0x103: /* lsl */
6705 if (!s->pe || s->vm86)
6707 ot = dflag ? OT_LONG : OT_WORD;
6708 modrm = ldub_code(s->pc++);
6709 reg = ((modrm >> 3) & 7) | rex_r;
6710 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6711 if (s->cc_op != CC_OP_DYNAMIC)
6712 gen_op_set_cc_op(s->cc_op);
6714 tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
6716 tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
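/* the helpers report success by setting CC_Z in cc_src; the destination
   register is only written in that case */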
6717 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6718 label1 = gen_new_label();
6719 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
6720 gen_op_mov_reg_T0(ot, reg);
6721 gen_set_label(label1);
6722 s->cc_op = CC_OP_EFLAGS;
6726 modrm = ldub_code(s->pc++);
6727 mod = (modrm >> 6) & 3;
6728 op = (modrm >> 3) & 7;
6730 case 0: /* prefetchnta */
6731 case 1: /* prefetcht0 */
6732 case 2: /* prefetcht1 */
6733 case 3: /* prefetcht2 */
6736 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6737 /* nothing more to do */
6739 default: /* nop (multi byte) */
6740 gen_nop_modrm(s, modrm);
6744 case 0x119 ... 0x11f: /* nop (multi byte) */
6745 modrm = ldub_code(s->pc++);
6746 gen_nop_modrm(s, modrm);
6748 case 0x120: /* mov reg, crN */
6749 case 0x122: /* mov crN, reg */
6751 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6753 modrm = ldub_code(s->pc++);
6754 if ((modrm & 0xc0) != 0xc0)
6756 rm = (modrm & 7) | REX_B(s);
6757 reg = ((modrm >> 3) & 7) | rex_r;
6769 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6770 gen_op_mov_TN_reg(ot, 0, rm);
6771 tcg_gen_helper_0_2(helper_movl_crN_T0,
6772 tcg_const_i32(reg), cpu_T[0]);
6773 gen_jmp_im(s->pc - s->cs_base);
6776 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6777 #if !defined(CONFIG_USER_ONLY)
6779 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6782 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[reg]));
6783 gen_op_mov_reg_T0(ot, rm);
6791 case 0x121: /* mov reg, drN */
6792 case 0x123: /* mov drN, reg */
6794 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6796 modrm = ldub_code(s->pc++);
6797 if ((modrm & 0xc0) != 0xc0)
6799 rm = (modrm & 7) | REX_B(s);
6800 reg = ((modrm >> 3) & 7) | rex_r;
6805 /* XXX: do it dynamically with CR4.DE bit */
6806 if (reg == 4 || reg == 5 || reg >= 8)
6809 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6810 gen_op_mov_TN_reg(ot, 0, rm);
6811 tcg_gen_helper_0_2(helper_movl_drN_T0,
6812 tcg_const_i32(reg), cpu_T[0]);
6813 gen_jmp_im(s->pc - s->cs_base);
6816 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6817 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6818 gen_op_mov_reg_T0(ot, rm);
6822 case 0x106: /* clts */
6824 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6826 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6827 tcg_gen_helper_0_0(helper_clts);
6828 /* abort block because static cpu state changed */
6829 gen_jmp_im(s->pc - s->cs_base);
6833 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6834 case 0x1c3: /* MOVNTI reg, mem */
6835 if (!(s->cpuid_features & CPUID_SSE2))
6837 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6838 modrm = ldub_code(s->pc++);
6839 mod = (modrm >> 6) & 3;
6842 reg = ((modrm >> 3) & 7) | rex_r;
6843 /* generate a generic store */
6844 gen_ldst_modrm(s, modrm, ot, reg, 1);
6847 modrm = ldub_code(s->pc++);
6848 mod = (modrm >> 6) & 3;
6849 op = (modrm >> 3) & 7;
6851 case 0: /* fxsave */
6852 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6853 (s->flags & HF_EM_MASK))
6855 if (s->flags & HF_TS_MASK) {
6856 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6860 if (s->cc_op != CC_OP_DYNAMIC)
6861 gen_op_set_cc_op(s->cc_op);
6862 gen_jmp_im(pc_start - s->cs_base);
6863 tcg_gen_helper_0_2(helper_fxsave,
6864 cpu_A0, tcg_const_i32((s->dflag == 2)));
6866 case 1: /* fxrstor */
6867 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6868 (s->flags & HF_EM_MASK))
6870 if (s->flags & HF_TS_MASK) {
6871 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6875 if (s->cc_op != CC_OP_DYNAMIC)
6876 gen_op_set_cc_op(s->cc_op);
6877 gen_jmp_im(pc_start - s->cs_base);
6878 tcg_gen_helper_0_2(helper_fxrstor,
6879 cpu_A0, tcg_const_i32((s->dflag == 2)));
6881 case 2: /* ldmxcsr */
6882 case 3: /* stmxcsr */
6883 if (s->flags & HF_TS_MASK) {
6884 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6887 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6890 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6892 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6893 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6895 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6896 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6899 case 5: /* lfence */
6900 case 6: /* mfence */
6901 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6904 case 7: /* sfence / clflush */
6905 if ((modrm & 0xc7) == 0xc0) {
6907 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6908 if (!(s->cpuid_features & CPUID_SSE))
6912 if (!(s->cpuid_features & CPUID_CLFLUSH))
6914 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6921 case 0x10d: /* 3DNow! prefetch(w) */
6922 modrm = ldub_code(s->pc++);
6923 mod = (modrm >> 6) & 3;
6926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6927 /* ignore for now */
6929 case 0x1aa: /* rsm */
6930 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6932 if (!(s->flags & HF_SMM_MASK))
6934 if (s->cc_op != CC_OP_DYNAMIC) {
6935 gen_op_set_cc_op(s->cc_op);
6936 s->cc_op = CC_OP_DYNAMIC;
6938 gen_jmp_im(s->pc - s->cs_base);
6939 tcg_gen_helper_0_0(helper_rsm);
6942 case 0x10e ... 0x10f:
6943 /* 3DNow! instructions, ignore prefixes */
6944 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6945 case 0x110 ... 0x117:
6946 case 0x128 ... 0x12f:
6947 case 0x150 ... 0x177:
6948 case 0x17c ... 0x17f:
6950 case 0x1c4 ... 0x1c6:
6951 case 0x1d0 ... 0x1fe:
6952 gen_sse(s, b, pc_start, rex_r);
6957 /* lock generation */
6958 if (s->prefix & PREFIX_LOCK)
6959 tcg_gen_helper_0_0(helper_unlock);
6962 if (s->prefix & PREFIX_LOCK)
6963 tcg_gen_helper_0_0(helper_unlock);
6964 /* XXX: ensure that no lock was generated */
6965 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6969 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6974 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6980 void optimize_flags_init(void)
6982 #if TCG_TARGET_REG_BITS == 32
6983 assert(sizeof(CCTable) == (1 << 3));
6985 assert(sizeof(CCTable) == (1 << 4));
6987 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6989 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
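/* T0/T1/A0 live in CPUState when the target word is wider than the host
   registers, otherwise they are mapped to fixed host registers */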
6990 #if TARGET_LONG_BITS > HOST_LONG_BITS
6991 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6992 TCG_AREG0, offsetof(CPUState, t0), "T0");
6993 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6994 TCG_AREG0, offsetof(CPUState, t1), "T1");
6995 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6996 TCG_AREG0, offsetof(CPUState, t2), "A0");
6998 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6999 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
7000 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
7002 cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
7003 TCG_AREG0, offsetof(CPUState, t3), "T3");
7004 #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
7005 /* XXX: must be removed once there are fewer fixed registers */
7006 cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
7008 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
7009 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
7010 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
7011 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
7012 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
7013 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
7016 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7017 basic block 'tb'. If search_pc is TRUE, also generate PC
7018 information for each intermediate instruction. */
7019 static inline int gen_intermediate_code_internal(CPUState *env,
7020 TranslationBlock *tb,
7023 DisasContext dc1, *dc = &dc1;
7024 target_ulong pc_ptr;
7025 uint16_t *gen_opc_end;
7028 target_ulong pc_start;
7029 target_ulong cs_base;
7031 /* generate intermediate code */
7033 cs_base = tb->cs_base;
7035 cflags = tb->cflags;
7037 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7038 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7039 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7040 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7042 dc->vm86 = (flags >> VM_SHIFT) & 1;
7043 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7044 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7045 dc->tf = (flags >> TF_SHIFT) & 1;
7046 dc->singlestep_enabled = env->singlestep_enabled;
7047 dc->cc_op = CC_OP_DYNAMIC;
7048 dc->cs_base = cs_base;
7050 dc->popl_esp_hack = 0;
7051 /* select memory access functions */
7053 if (flags & HF_SOFTMMU_MASK) {
7055 dc->mem_index = 2 * 4;
7057 dc->mem_index = 1 * 4;
7059 dc->cpuid_features = env->cpuid_features;
7060 dc->cpuid_ext_features = env->cpuid_ext_features;
7061 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7062 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7063 #ifdef TARGET_X86_64
7064 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7065 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
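/* direct block chaining is disabled when single-stepping (TF or gdbstub)
   or while irqs are inhibited */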
7068 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7069 (flags & HF_INHIBIT_IRQ_MASK)
7070 #ifndef CONFIG_SOFTMMU
7071 || (flags & HF_SOFTMMU_MASK)
7075 /* check addseg logic */
7076 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7077 printf("ERROR addseg\n");
7080 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7081 #if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
7082 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7084 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7085 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7086 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7087 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7088 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7089 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7090 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7092 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7094 dc->is_jmp = DISAS_NEXT;
7099 if (env->nb_breakpoints > 0) {
7100 for(j = 0; j < env->nb_breakpoints; j++) {
7101 if (env->breakpoints[j] == pc_ptr) {
7102 gen_debug(dc, pc_ptr - dc->cs_base);
7108 j = gen_opc_ptr - gen_opc_buf;
7112 gen_opc_instr_start[lj++] = 0;
7114 gen_opc_pc[lj] = pc_ptr;
7115 gen_opc_cc_op[lj] = dc->cc_op;
7116 gen_opc_instr_start[lj] = 1;
7118 pc_ptr = disas_insn(dc, pc_ptr);
7119 /* stop translation if indicated */
7122 /* in single-step mode, we generate only one instruction and
7123 generate an exception */
7124 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7125 the flag and abort the translation to give the irqs a
7126 chance to happen */
7127 if (dc->tf || dc->singlestep_enabled ||
7128 (flags & HF_INHIBIT_IRQ_MASK) ||
7129 (cflags & CF_SINGLE_INSN)) {
7130 gen_jmp_im(pc_ptr - dc->cs_base);
7134 /* also stop generation if the translation gets too long */
7135 if (gen_opc_ptr >= gen_opc_end ||
7136 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7137 gen_jmp_im(pc_ptr - dc->cs_base);
7142 *gen_opc_ptr = INDEX_op_end;
7143 /* don't forget to fill in the last values */
7145 j = gen_opc_ptr - gen_opc_buf;
7148 gen_opc_instr_start[lj++] = 0;
7152 if (loglevel & CPU_LOG_TB_CPU) {
7153 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7155 if (loglevel & CPU_LOG_TB_IN_ASM) {
7157 fprintf(logfile, "----------------\n");
7158 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7159 #ifdef TARGET_X86_64
7164 disas_flags = !dc->code32;
7165 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7166 fprintf(logfile, "\n");
7167 if (loglevel & CPU_LOG_TB_OP_OPT) {
7168 fprintf(logfile, "OP before opt:\n");
7169 tcg_dump_ops(&tcg_ctx, logfile);
7170 fprintf(logfile, "\n");
7176 tb->size = pc_ptr - pc_start;
7180 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7182 return gen_intermediate_code_internal(env, tb, 0);
7185 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7187 return gen_intermediate_code_internal(env, tb, 1);
7190 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7191 unsigned long searched_pc, int pc_pos, void *puc)
7195 if (loglevel & CPU_LOG_TB_OP) {
7197 fprintf(logfile, "RESTORE:\n");
7198 for(i = 0;i <= pc_pos; i++) {
7199 if (gen_opc_instr_start[i]) {
7200 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7203 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7204 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7205 (uint32_t)tb->cs_base);
7208 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7209 cc_op = gen_opc_cc_op[pc_pos];
7210 if (cc_op != CC_OP_DYNAMIC)