4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
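/* Bit flags ORed into the prefix state while the instruction's prefix bytes
   are scanned; they are tested later, e.g. gen_sse uses PREFIX_DATA /
   PREFIX_REPZ / PREFIX_REPNZ to select the SSE operand variant. */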
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
63 static TCGv cpu_T[2], cpu_T3;
64 /* local register indexes (only used inside old micro ops) */
65 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
66 static TCGv cpu_tmp5, cpu_tmp6;
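/* x86_64_hregs is set once a REX prefix has been seen: with REX, byte
   register encodings 4-7 select SPL/BPL/SIL/DIL instead of AH/CH/DH/BH,
   which is why the byte-register paths below test it. */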
69 static int x86_64_hregs;
72 typedef struct DisasContext {
73 /* current insn context */
74 int override; /* -1 if no override */
77 target_ulong pc; /* pc = eip + cs_base */
78 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
79 static state change (stop translation) */
80 /* current block context */
81 target_ulong cs_base; /* base of CS segment */
82 int pe; /* protected mode */
83 int code32; /* 32 bit code segment */
85 int lma; /* long mode active */
86 int code64; /* 64 bit code segment */
89 int ss32; /* 32 bit stack segment */
90 int cc_op; /* current CC operation */
91 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
92 int f_st; /* currently unused */
93 int vm86; /* vm86 mode */
96 int tf; /* TF cpu flag */
97 int singlestep_enabled; /* "hardware" single step enabled */
98 int jmp_opt; /* use direct block chaining for direct jumps */
99 int mem_index; /* select memory access functions */
100 uint64_t flags; /* all execution flags */
101 struct TranslationBlock *tb;
102 int popl_esp_hack; /* for correct popl with esp base handling */
103 int rip_offset; /* only used in x86_64, but left for simplicity */
105 int cpuid_ext_features;
106 int cpuid_ext2_features;
107 int cpuid_ext3_features;
110 static void gen_eob(DisasContext *s);
111 static void gen_jmp(DisasContext *s, target_ulong eip);
112 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
114 /* i386 arith/logic operations */
134 OP_SHL1, /* undocumented */
158 /* I386 int registers */
159 OR_EAX, /* MUST be even numbered */
168 OR_TMP0 = 16, /* temporary operand register */
170 OR_A0, /* temporary register used when doing address evaluation */
173 static inline void gen_op_movl_T0_0(void)
175 tcg_gen_movi_tl(cpu_T[0], 0);
178 static inline void gen_op_movl_T0_im(int32_t val)
180 tcg_gen_movi_tl(cpu_T[0], val);
183 static inline void gen_op_movl_T0_imu(uint32_t val)
185 tcg_gen_movi_tl(cpu_T[0], val);
188 static inline void gen_op_movl_T1_im(int32_t val)
190 tcg_gen_movi_tl(cpu_T[1], val);
193 static inline void gen_op_movl_T1_imu(uint32_t val)
195 tcg_gen_movi_tl(cpu_T[1], val);
198 static inline void gen_op_movl_A0_im(uint32_t val)
200 tcg_gen_movi_tl(cpu_A0, val);
204 static inline void gen_op_movq_A0_im(int64_t val)
206 tcg_gen_movi_tl(cpu_A0, val);
210 static inline void gen_movtl_T0_im(target_ulong val)
212 tcg_gen_movi_tl(cpu_T[0], val);
215 static inline void gen_movtl_T1_im(target_ulong val)
217 tcg_gen_movi_tl(cpu_T[1], val);
220 static inline void gen_op_andl_T0_ffff(void)
222 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
225 static inline void gen_op_andl_T0_im(uint32_t val)
227 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
230 static inline void gen_op_movl_T0_T1(void)
232 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
235 static inline void gen_op_andl_A0_ffff(void)
237 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
242 #define NB_OP_SIZES 4
244 #else /* !TARGET_X86_64 */
246 #define NB_OP_SIZES 3
248 #endif /* !TARGET_X86_64 */
250 #if defined(WORDS_BIGENDIAN)
251 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
252 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
253 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
254 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
255 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
257 #define REG_B_OFFSET 0
258 #define REG_H_OFFSET 1
259 #define REG_W_OFFSET 0
260 #define REG_L_OFFSET 0
261 #define REG_LH_OFFSET 4
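/* Byte offsets of the 8/16/32-bit sub-registers (AL, AH, AX, EAX) within a
   target_ulong register slot; the big-endian definitions above make sure
   the same accessors touch the correct bytes on big-endian hosts. */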
264 static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
268 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
269 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
271 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
275 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
279 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
280 /* high part of register set to zero */
281 tcg_gen_movi_tl(cpu_tmp0, 0);
282 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
286 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
291 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
297 static inline void gen_op_mov_reg_T0(int ot, int reg)
299 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
302 static inline void gen_op_mov_reg_T1(int ot, int reg)
304 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
307 static inline void gen_op_mov_reg_A0(int size, int reg)
311 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
315 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
316 /* high part of register set to zero */
317 tcg_gen_movi_tl(cpu_tmp0, 0);
318 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
322 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
327 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
333 static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
337 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
340 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
345 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
350 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
352 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
355 static inline void gen_op_movl_A0_reg(int reg)
357 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
360 static inline void gen_op_addl_A0_im(int32_t val)
362 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
364 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
369 static inline void gen_op_addq_A0_im(int64_t val)
371 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
375 static void gen_add_A0_im(DisasContext *s, int val)
379 gen_op_addq_A0_im(val);
382 gen_op_addl_A0_im(val);
385 static inline void gen_op_addl_T0_T1(void)
387 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
390 static inline void gen_op_jmp_T0(void)
392 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
395 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
399 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
400 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
401 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
404 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
405 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
409 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
413 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
414 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
415 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
421 static inline void gen_op_add_reg_T0(int size, int reg)
425 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
426 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
427 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
430 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
431 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
433 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
435 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
440 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
441 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
447 static inline void gen_op_set_cc_op(int32_t val)
449 tcg_gen_movi_i32(cpu_cc_op, val);
452 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
454 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
456 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
457 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
459 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
463 static inline void gen_op_movl_A0_seg(int reg)
465 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
468 static inline void gen_op_addl_A0_seg(int reg)
470 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
471 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
473 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
478 static inline void gen_op_movq_A0_seg(int reg)
480 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
483 static inline void gen_op_addq_A0_seg(int reg)
485 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
486 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
489 static inline void gen_op_movq_A0_reg(int reg)
491 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
494 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
496 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
498 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
499 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
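/* For the memory access helpers below, 'idx' packs the operand size
   (OT_BYTE..OT_QUAD) in its low two bits and the softmmu memory index,
   pre-shifted by two and offset by one, in the upper bits; hence the
   (idx >> 2) - 1 extraction. */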
503 static inline void gen_op_lds_T0_A0(int idx)
505 int mem_index = (idx >> 2) - 1;
508 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
511 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
515 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
520 static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
522 int mem_index = (idx >> 2) - 1;
525 tcg_gen_qemu_ld8u(t0, a0, mem_index);
528 tcg_gen_qemu_ld16u(t0, a0, mem_index);
531 tcg_gen_qemu_ld32u(t0, a0, mem_index);
535 tcg_gen_qemu_ld64(t0, a0, mem_index);
540 /* XXX: always use ldu or lds */
541 static inline void gen_op_ld_T0_A0(int idx)
543 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
546 static inline void gen_op_ldu_T0_A0(int idx)
548 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
551 static inline void gen_op_ld_T1_A0(int idx)
553 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
556 static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
558 int mem_index = (idx >> 2) - 1;
561 tcg_gen_qemu_st8(t0, a0, mem_index);
564 tcg_gen_qemu_st16(t0, a0, mem_index);
567 tcg_gen_qemu_st32(t0, a0, mem_index);
571 tcg_gen_qemu_st64(t0, a0, mem_index);
576 static inline void gen_op_st_T0_A0(int idx)
578 gen_op_st_v(idx, cpu_T[0], cpu_A0);
581 static inline void gen_op_st_T1_A0(int idx)
583 gen_op_st_v(idx, cpu_T[1], cpu_A0);
586 static inline void gen_jmp_im(target_ulong pc)
588 tcg_gen_movi_tl(cpu_tmp0, pc);
589 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
592 static inline void gen_string_movl_A0_ESI(DisasContext *s)
596 override = s->override;
600 gen_op_movq_A0_seg(override);
601 gen_op_addq_A0_reg_sN(0, R_ESI);
603 gen_op_movq_A0_reg(R_ESI);
609 if (s->addseg && override < 0)
612 gen_op_movl_A0_seg(override);
613 gen_op_addl_A0_reg_sN(0, R_ESI);
615 gen_op_movl_A0_reg(R_ESI);
618 /* 16-bit address, always override */
621 gen_op_movl_A0_reg(R_ESI);
622 gen_op_andl_A0_ffff();
623 gen_op_addl_A0_seg(override);
627 static inline void gen_string_movl_A0_EDI(DisasContext *s)
631 gen_op_movq_A0_reg(R_EDI);
636 gen_op_movl_A0_seg(R_ES);
637 gen_op_addl_A0_reg_sN(0, R_EDI);
639 gen_op_movl_A0_reg(R_EDI);
642 gen_op_movl_A0_reg(R_EDI);
643 gen_op_andl_A0_ffff();
644 gen_op_addl_A0_seg(R_ES);
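/* Load the per-iteration step for string instructions into T0: env->df holds
   +1 or -1 depending on the direction flag, so shifting it left by 'ot'
   yields plus or minus the operand size in bytes. */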
648 static inline void gen_op_movl_T0_Dshift(int ot)
650 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
651 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
654 static void gen_extu(int ot, TCGv reg)
658 tcg_gen_ext8u_tl(reg, reg);
661 tcg_gen_ext16u_tl(reg, reg);
664 tcg_gen_ext32u_tl(reg, reg);
671 static void gen_exts(int ot, TCGv reg)
675 tcg_gen_ext8s_tl(reg, reg);
678 tcg_gen_ext16s_tl(reg, reg);
681 tcg_gen_ext32s_tl(reg, reg);
688 static inline void gen_op_jnz_ecx(int size, int label1)
690 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
691 gen_extu(size + 1, cpu_tmp0);
692 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
695 static inline void gen_op_jz_ecx(int size, int label1)
697 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
698 gen_extu(size + 1, cpu_tmp0);
699 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
702 static void *helper_in_func[3] = {
708 static void *helper_out_func[3] = {
714 static void *gen_check_io_func[3] = {
720 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
724 target_ulong next_eip;
727 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
728 if (s->cc_op != CC_OP_DYNAMIC)
729 gen_op_set_cc_op(s->cc_op);
732 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
733 tcg_gen_helper_0_1(gen_check_io_func[ot],
736 if(s->flags & HF_SVMI_MASK) {
738 if (s->cc_op != CC_OP_DYNAMIC)
739 gen_op_set_cc_op(s->cc_op);
743 svm_flags |= (1 << (4 + ot));
744 next_eip = s->pc - s->cs_base;
745 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
746 tcg_gen_helper_0_3(helper_svm_check_io,
748 tcg_const_i32(svm_flags),
749 tcg_const_i32(next_eip - cur_eip));
753 static inline void gen_movs(DisasContext *s, int ot)
755 gen_string_movl_A0_ESI(s);
756 gen_op_ld_T0_A0(ot + s->mem_index);
757 gen_string_movl_A0_EDI(s);
758 gen_op_st_T0_A0(ot + s->mem_index);
759 gen_op_movl_T0_Dshift(ot);
760 gen_op_add_reg_T0(s->aflag, R_ESI);
761 gen_op_add_reg_T0(s->aflag, R_EDI);
764 static inline void gen_update_cc_op(DisasContext *s)
766 if (s->cc_op != CC_OP_DYNAMIC) {
767 gen_op_set_cc_op(s->cc_op);
768 s->cc_op = CC_OP_DYNAMIC;
772 static void gen_op_update1_cc(void)
774 tcg_gen_discard_tl(cpu_cc_src);
775 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
778 static void gen_op_update2_cc(void)
780 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
781 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
784 static inline void gen_op_cmpl_T0_T1_cc(void)
786 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
787 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
790 static inline void gen_op_testl_T0_T1_cc(void)
792 tcg_gen_discard_tl(cpu_cc_src);
793 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
796 static void gen_op_update_neg_cc(void)
798 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
799 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
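/* Condition codes are computed lazily: cc_op records which operation last
   set the flags and cc_src/cc_dst hold its operands/result. cc_table is
   indexed by cc_op and contains compute_all/compute_c function pointers;
   the two helpers below call through it indirectly (one entry is two
   pointers, hence the shift by 3 on 32-bit hosts and by 4 on 64-bit hosts). */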
802 /* compute eflags.C to reg */
803 static void gen_compute_eflags_c(TCGv reg)
805 #if TCG_TARGET_REG_BITS == 32
806 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
807 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
808 (long)cc_table + offsetof(CCTable, compute_c));
809 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
810 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
811 1, &cpu_tmp2_i32, 0, NULL);
813 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
814 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
815 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
816 (long)cc_table + offsetof(CCTable, compute_c));
817 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
818 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
819 1, &cpu_tmp2_i32, 0, NULL);
821 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
824 /* compute all eflags to 'reg' */
825 static void gen_compute_eflags(TCGv reg)
827 #if TCG_TARGET_REG_BITS == 32
828 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
829 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
830 (long)cc_table + offsetof(CCTable, compute_all));
831 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
832 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
833 1, &cpu_tmp2_i32, 0, NULL);
835 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
836 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
837 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
838 (long)cc_table + offsetof(CCTable, compute_all));
839 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
840 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
841 1, &cpu_tmp2_i32, 0, NULL);
843 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
846 static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
848 if (s->cc_op != CC_OP_DYNAMIC)
849 gen_op_set_cc_op(s->cc_op);
852 gen_compute_eflags(cpu_T[0]);
853 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
854 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
857 gen_compute_eflags_c(cpu_T[0]);
860 gen_compute_eflags(cpu_T[0]);
861 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
862 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
865 gen_compute_eflags(cpu_tmp0);
866 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
867 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
868 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
871 gen_compute_eflags(cpu_T[0]);
872 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
873 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
876 gen_compute_eflags(cpu_T[0]);
877 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
878 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
881 gen_compute_eflags(cpu_tmp0);
882 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
883 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
884 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
885 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
889 gen_compute_eflags(cpu_tmp0);
890 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
891 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
892 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
893 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
894 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
895 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
900 /* return true if setcc_slow is not needed (WARNING: must be kept in
901 sync with gen_jcc1) */
902 static int is_fast_jcc_case(DisasContext *s, int b)
905 jcc_op = (b >> 1) & 7;
907 /* we optimize the cmp/jcc case */
912 if (jcc_op == JCC_O || jcc_op == JCC_P)
916 /* some jumps are easy to compute */
941 if (jcc_op != JCC_Z && jcc_op != JCC_S)
951 /* generate a conditional jump to label 'l1' according to jump opcode
952 value 'b'. In the fast case, T0 is guaranteed not to be used. */
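/* Bit 0 of 'b' inverts the condition and bits 3:1 select the basic
   condition (jcc_op), matching the low nibble of the Jcc opcodes. */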
953 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
955 int inv, jcc_op, size, cond;
959 jcc_op = (b >> 1) & 7;
962 /* we optimize the cmp/jcc case */
968 size = cc_op - CC_OP_SUBB;
974 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
978 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
983 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
991 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
997 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
998 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1002 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1003 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1006 #ifdef TARGET_X86_64
1008 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1009 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1014 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1021 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1024 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1026 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1030 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1031 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1035 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1036 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1038 #ifdef TARGET_X86_64
1041 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1042 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1049 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1053 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1056 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1058 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1062 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1063 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1067 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1068 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1070 #ifdef TARGET_X86_64
1073 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1074 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1081 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1089 /* some jumps are easy to compute */
1131 size = (cc_op - CC_OP_ADDB) & 3;
1134 size = (cc_op - CC_OP_ADDB) & 3;
1142 gen_setcc_slow_T0(s, jcc_op);
1143 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1149 /* XXX: does not work with gdbstub "ice" single step - not a
1151 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1155 l1 = gen_new_label();
1156 l2 = gen_new_label();
1157 gen_op_jnz_ecx(s->aflag, l1);
1159 gen_jmp_tb(s, next_eip, 1);
1164 static inline void gen_stos(DisasContext *s, int ot)
1166 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1167 gen_string_movl_A0_EDI(s);
1168 gen_op_st_T0_A0(ot + s->mem_index);
1169 gen_op_movl_T0_Dshift(ot);
1170 gen_op_add_reg_T0(s->aflag, R_EDI);
1173 static inline void gen_lods(DisasContext *s, int ot)
1175 gen_string_movl_A0_ESI(s);
1176 gen_op_ld_T0_A0(ot + s->mem_index);
1177 gen_op_mov_reg_T0(ot, R_EAX);
1178 gen_op_movl_T0_Dshift(ot);
1179 gen_op_add_reg_T0(s->aflag, R_ESI);
1182 static inline void gen_scas(DisasContext *s, int ot)
1184 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1185 gen_string_movl_A0_EDI(s);
1186 gen_op_ld_T1_A0(ot + s->mem_index);
1187 gen_op_cmpl_T0_T1_cc();
1188 gen_op_movl_T0_Dshift(ot);
1189 gen_op_add_reg_T0(s->aflag, R_EDI);
1192 static inline void gen_cmps(DisasContext *s, int ot)
1194 gen_string_movl_A0_ESI(s);
1195 gen_op_ld_T0_A0(ot + s->mem_index);
1196 gen_string_movl_A0_EDI(s);
1197 gen_op_ld_T1_A0(ot + s->mem_index);
1198 gen_op_cmpl_T0_T1_cc();
1199 gen_op_movl_T0_Dshift(ot);
1200 gen_op_add_reg_T0(s->aflag, R_ESI);
1201 gen_op_add_reg_T0(s->aflag, R_EDI);
1204 static inline void gen_ins(DisasContext *s, int ot)
1206 gen_string_movl_A0_EDI(s);
1207 /* Note: we must do this dummy write first to be restartable in
1208 case of page fault. */
1210 gen_op_st_T0_A0(ot + s->mem_index);
1211 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1212 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1213 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1214 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1215 gen_op_st_T0_A0(ot + s->mem_index);
1216 gen_op_movl_T0_Dshift(ot);
1217 gen_op_add_reg_T0(s->aflag, R_EDI);
1220 static inline void gen_outs(DisasContext *s, int ot)
1222 gen_string_movl_A0_ESI(s);
1223 gen_op_ld_T0_A0(ot + s->mem_index);
1225 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1226 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1227 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1228 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1229 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1231 gen_op_movl_T0_Dshift(ot);
1232 gen_op_add_reg_T0(s->aflag, R_ESI);
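/* REP-prefixed string instructions are translated as: jump out if ECX is
   already zero, execute one iteration, decrement ECX, test ZF for
   REPZ/REPNZ variants, then jump back to the current EIP so that each
   iteration starts a new translation block. */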
1235 /* same method as Valgrind : we generate jumps to current or next
1237 #define GEN_REPZ(op) \
1238 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1239 target_ulong cur_eip, target_ulong next_eip) \
1242 gen_update_cc_op(s); \
1243 l2 = gen_jz_ecx_string(s, next_eip); \
1244 gen_ ## op(s, ot); \
1245 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1246 /* a loop would cause two single step exceptions if ECX = 1 \
1247 before rep string_insn */ \
1249 gen_op_jz_ecx(s->aflag, l2); \
1250 gen_jmp(s, cur_eip); \
1253 #define GEN_REPZ2(op) \
1254 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1255 target_ulong cur_eip, \
1256 target_ulong next_eip, \
1260 gen_update_cc_op(s); \
1261 l2 = gen_jz_ecx_string(s, next_eip); \
1262 gen_ ## op(s, ot); \
1263 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1264 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1265 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1267 gen_op_jz_ecx(s->aflag, l2); \
1268 gen_jmp(s, cur_eip); \
1279 static void *helper_fp_arith_ST0_FT0[8] = {
1280 helper_fadd_ST0_FT0,
1281 helper_fmul_ST0_FT0,
1282 helper_fcom_ST0_FT0,
1283 helper_fcom_ST0_FT0,
1284 helper_fsub_ST0_FT0,
1285 helper_fsubr_ST0_FT0,
1286 helper_fdiv_ST0_FT0,
1287 helper_fdivr_ST0_FT0,
1290 /* NOTE the exception in "r" op ordering */
1291 static void *helper_fp_arith_STN_ST0[8] = {
1292 helper_fadd_STN_ST0,
1293 helper_fmul_STN_ST0,
1296 helper_fsubr_STN_ST0,
1297 helper_fsub_STN_ST0,
1298 helper_fdivr_STN_ST0,
1299 helper_fdiv_STN_ST0,
1302 /* if d == OR_TMP0, it means memory operand (address in A0) */
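/* gen_op emits one of the eight classic ALU operations (ADD, OR, ADC, SBB,
   AND, SUB, XOR, CMP) selected by 'op', loading/storing the destination
   through T0 and leaving the lazy flag state in cc_src/cc_dst/cc_op. ADC
   and SBB need the current carry, so they force the flags to be computed
   first. */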
1303 static void gen_op(DisasContext *s1, int op, int ot, int d)
1306 gen_op_mov_TN_reg(ot, 0, d);
1308 gen_op_ld_T0_A0(ot + s1->mem_index);
1312 if (s1->cc_op != CC_OP_DYNAMIC)
1313 gen_op_set_cc_op(s1->cc_op);
1314 gen_compute_eflags_c(cpu_tmp4);
1315 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1316 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1318 gen_op_mov_reg_T0(ot, d);
1320 gen_op_st_T0_A0(ot + s1->mem_index);
1321 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1322 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1323 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1324 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1325 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1326 s1->cc_op = CC_OP_DYNAMIC;
1329 if (s1->cc_op != CC_OP_DYNAMIC)
1330 gen_op_set_cc_op(s1->cc_op);
1331 gen_compute_eflags_c(cpu_tmp4);
1332 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1333 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1335 gen_op_mov_reg_T0(ot, d);
1337 gen_op_st_T0_A0(ot + s1->mem_index);
1338 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1339 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1340 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1341 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1342 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1343 s1->cc_op = CC_OP_DYNAMIC;
1346 gen_op_addl_T0_T1();
1348 gen_op_mov_reg_T0(ot, d);
1350 gen_op_st_T0_A0(ot + s1->mem_index);
1351 gen_op_update2_cc();
1352 s1->cc_op = CC_OP_ADDB + ot;
1355 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1357 gen_op_mov_reg_T0(ot, d);
1359 gen_op_st_T0_A0(ot + s1->mem_index);
1360 gen_op_update2_cc();
1361 s1->cc_op = CC_OP_SUBB + ot;
1365 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1367 gen_op_mov_reg_T0(ot, d);
1369 gen_op_st_T0_A0(ot + s1->mem_index);
1370 gen_op_update1_cc();
1371 s1->cc_op = CC_OP_LOGICB + ot;
1374 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1376 gen_op_mov_reg_T0(ot, d);
1378 gen_op_st_T0_A0(ot + s1->mem_index);
1379 gen_op_update1_cc();
1380 s1->cc_op = CC_OP_LOGICB + ot;
1383 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1385 gen_op_mov_reg_T0(ot, d);
1387 gen_op_st_T0_A0(ot + s1->mem_index);
1388 gen_op_update1_cc();
1389 s1->cc_op = CC_OP_LOGICB + ot;
1392 gen_op_cmpl_T0_T1_cc();
1393 s1->cc_op = CC_OP_SUBB + ot;
1398 /* if d == OR_TMP0, it means memory operand (address in A0) */
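/* INC and DEC leave CF unchanged, so the current carry is saved into
   cc_src before cc_dst/cc_op are updated for the new result. */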
1399 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1402 gen_op_mov_TN_reg(ot, 0, d);
1404 gen_op_ld_T0_A0(ot + s1->mem_index);
1405 if (s1->cc_op != CC_OP_DYNAMIC)
1406 gen_op_set_cc_op(s1->cc_op);
1408 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1409 s1->cc_op = CC_OP_INCB + ot;
1411 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1412 s1->cc_op = CC_OP_DECB + ot;
1415 gen_op_mov_reg_T0(ot, d);
1417 gen_op_st_T0_A0(ot + s1->mem_index);
1418 gen_compute_eflags_c(cpu_cc_src);
1419 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1422 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1423 int is_right, int is_arith)
1436 gen_op_ld_T0_A0(ot + s->mem_index);
1438 gen_op_mov_TN_reg(ot, 0, op1);
1440 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1442 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1446 gen_exts(ot, cpu_T[0]);
1447 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1448 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1450 gen_extu(ot, cpu_T[0]);
1451 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1452 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1455 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1456 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1461 gen_op_st_T0_A0(ot + s->mem_index);
1463 gen_op_mov_reg_T0(ot, op1);
1465 /* update eflags if non zero shift */
1466 if (s->cc_op != CC_OP_DYNAMIC)
1467 gen_op_set_cc_op(s->cc_op);
1469 /* XXX: inefficient */
1470 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1471 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1473 tcg_gen_mov_tl(t0, cpu_T[0]);
1474 tcg_gen_mov_tl(t1, cpu_T3);
1476 shift_label = gen_new_label();
1477 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1479 tcg_gen_mov_tl(cpu_cc_src, t1);
1480 tcg_gen_mov_tl(cpu_cc_dst, t0);
1482 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1484 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1486 gen_set_label(shift_label);
1487 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1493 static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1494 int is_right, int is_arith)
1505 gen_op_ld_T0_A0(ot + s->mem_index);
1507 gen_op_mov_TN_reg(ot, 0, op1);
1513 gen_exts(ot, cpu_T[0]);
1514 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1515 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1517 gen_extu(ot, cpu_T[0]);
1518 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1519 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1522 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1523 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1529 gen_op_st_T0_A0(ot + s->mem_index);
1531 gen_op_mov_reg_T0(ot, op1);
1533 /* update eflags if non zero shift */
1535 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1536 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1538 s->cc_op = CC_OP_SARB + ot;
1540 s->cc_op = CC_OP_SHLB + ot;
1544 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1547 tcg_gen_shli_tl(ret, arg1, arg2);
1549 tcg_gen_shri_tl(ret, arg1, -arg2);
1552 /* XXX: add faster immediate case */
1553 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1557 int label1, label2, data_bits;
1558 TCGv t0, t1, t2, a0;
1560 /* XXX: inefficient, but we must use local temps */
1561 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1562 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1563 t2 = tcg_temp_local_new(TCG_TYPE_TL);
1564 a0 = tcg_temp_local_new(TCG_TYPE_TL);
1572 if (op1 == OR_TMP0) {
1573 tcg_gen_mov_tl(a0, cpu_A0);
1574 gen_op_ld_v(ot + s->mem_index, t0, a0);
1576 gen_op_mov_v_reg(ot, t0, op1);
1579 tcg_gen_mov_tl(t1, cpu_T[1]);
1581 tcg_gen_andi_tl(t1, t1, mask);
1583 /* Must test zero case to avoid using undefined behaviour in TCG
1585 label1 = gen_new_label();
1586 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1589 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1591 tcg_gen_mov_tl(cpu_tmp0, t1);
1594 tcg_gen_mov_tl(t2, t0);
1596 data_bits = 8 << ot;
1597 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1598 fix TCG definition) */
1600 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1601 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1602 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1604 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1605 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1606 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
1608 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1610 gen_set_label(label1);
1612 if (op1 == OR_TMP0) {
1613 gen_op_st_v(ot + s->mem_index, t0, a0);
1615 gen_op_mov_reg_v(ot, op1, t0);
1619 if (s->cc_op != CC_OP_DYNAMIC)
1620 gen_op_set_cc_op(s->cc_op);
1622 label2 = gen_new_label();
1623 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
1625 gen_compute_eflags(cpu_cc_src);
1626 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1627 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
1628 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1629 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1630 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1632 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1634 tcg_gen_andi_tl(t0, t0, CC_C);
1635 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1637 tcg_gen_discard_tl(cpu_cc_dst);
1638 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1640 gen_set_label(label2);
1641 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1649 static void *helper_rotc[8] = {
1653 X86_64_ONLY(helper_rclq),
1657 X86_64_ONLY(helper_rcrq),
1660 /* XXX: add faster immediate = 1 case */
1661 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1666 if (s->cc_op != CC_OP_DYNAMIC)
1667 gen_op_set_cc_op(s->cc_op);
1671 gen_op_ld_T0_A0(ot + s->mem_index);
1673 gen_op_mov_TN_reg(ot, 0, op1);
1675 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1676 cpu_T[0], cpu_T[0], cpu_T[1]);
1679 gen_op_st_T0_A0(ot + s->mem_index);
1681 gen_op_mov_reg_T0(ot, op1);
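/* Presumably (judging from the -1 test below) the rcl/rcr helpers leave
   cc_tmp at -1 when the rotate count was zero and the flags are unchanged;
   otherwise cc_tmp holds the resulting flags, which are copied to cc_src. */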
1684 label1 = gen_new_label();
1685 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
1687 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
1688 tcg_gen_discard_tl(cpu_cc_dst);
1689 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1691 gen_set_label(label1);
1692 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1695 /* XXX: add faster immediate case */
1696 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1699 int label1, label2, data_bits;
1701 TCGv t0, t1, t2, a0;
1703 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1704 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1705 t2 = tcg_temp_local_new(TCG_TYPE_TL);
1706 a0 = tcg_temp_local_new(TCG_TYPE_TL);
1714 if (op1 == OR_TMP0) {
1715 tcg_gen_mov_tl(a0, cpu_A0);
1716 gen_op_ld_v(ot + s->mem_index, t0, a0);
1718 gen_op_mov_v_reg(ot, t0, op1);
1721 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1723 tcg_gen_mov_tl(t1, cpu_T[1]);
1724 tcg_gen_mov_tl(t2, cpu_T3);
1726 /* Must test zero case to avoid using undefined behaviour in TCG
1728 label1 = gen_new_label();
1729 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
1731 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
1732 if (ot == OT_WORD) {
1733 /* Note: we implement the Intel behaviour for shift count > 16 */
1735 tcg_gen_andi_tl(t0, t0, 0xffff);
1736 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
1737 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1738 tcg_gen_ext32u_tl(t0, t0);
1740 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1742 /* only needed if count > 16, but a test would complicate the code */
1743 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1744 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
1746 tcg_gen_shr_tl(t0, t0, t2);
1748 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1750 /* XXX: not optimal */
1751 tcg_gen_andi_tl(t0, t0, 0xffff);
1752 tcg_gen_shli_tl(t1, t1, 16);
1753 tcg_gen_or_tl(t1, t1, t0);
1754 tcg_gen_ext32u_tl(t1, t1);
1756 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1757 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1758 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
1759 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1761 tcg_gen_shl_tl(t0, t0, t2);
1762 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1763 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1764 tcg_gen_or_tl(t0, t0, t1);
1767 data_bits = 8 << ot;
1770 tcg_gen_ext32u_tl(t0, t0);
1772 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1774 tcg_gen_shr_tl(t0, t0, t2);
1775 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1776 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
1777 tcg_gen_or_tl(t0, t0, t1);
1781 tcg_gen_ext32u_tl(t1, t1);
1783 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1785 tcg_gen_shl_tl(t0, t0, t2);
1786 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1787 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1788 tcg_gen_or_tl(t0, t0, t1);
1791 tcg_gen_mov_tl(t1, cpu_tmp4);
1793 gen_set_label(label1);
1795 if (op1 == OR_TMP0) {
1796 gen_op_st_v(ot + s->mem_index, t0, a0);
1798 gen_op_mov_reg_v(ot, op1, t0);
1802 if (s->cc_op != CC_OP_DYNAMIC)
1803 gen_op_set_cc_op(s->cc_op);
1805 label2 = gen_new_label();
1806 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
1808 tcg_gen_mov_tl(cpu_cc_src, t1);
1809 tcg_gen_mov_tl(cpu_cc_dst, t0);
1811 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1813 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1815 gen_set_label(label2);
1816 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1824 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1827 gen_op_mov_TN_reg(ot, 1, s);
1830 gen_rot_rm_T1(s1, ot, d, 0);
1833 gen_rot_rm_T1(s1, ot, d, 1);
1837 gen_shift_rm_T1(s1, ot, d, 0, 0);
1840 gen_shift_rm_T1(s1, ot, d, 1, 0);
1843 gen_shift_rm_T1(s1, ot, d, 1, 1);
1846 gen_rotc_rm_T1(s1, ot, d, 0);
1849 gen_rotc_rm_T1(s1, ot, d, 1);
1854 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1859 gen_shift_rm_im(s1, ot, d, c, 0, 0);
1862 gen_shift_rm_im(s1, ot, d, c, 1, 0);
1865 gen_shift_rm_im(s1, ot, d, c, 1, 1);
1868 /* currently not optimized */
1869 gen_op_movl_T1_im(c);
1870 gen_shift(s1, op, ot, d, OR_TMP1);
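/* gen_lea_modrm decodes the ModRM byte (plus any SIB byte and displacement)
   that follows the opcode and leaves the effective address in A0, handling
   16-bit, 32-bit and 64-bit addressing as well as segment-base additions
   when required. */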
1875 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1883 int mod, rm, code, override, must_add_seg;
1885 override = s->override;
1886 must_add_seg = s->addseg;
1889 mod = (modrm >> 6) & 3;
1901 code = ldub_code(s->pc++);
1902 scale = (code >> 6) & 3;
1903 index = ((code >> 3) & 7) | REX_X(s);
1910 if ((base & 7) == 5) {
1912 disp = (int32_t)ldl_code(s->pc);
1914 if (CODE64(s) && !havesib) {
1915 disp += s->pc + s->rip_offset;
1922 disp = (int8_t)ldub_code(s->pc++);
1926 disp = ldl_code(s->pc);
1932 /* for correct popl handling with esp */
1933 if (base == 4 && s->popl_esp_hack)
1934 disp += s->popl_esp_hack;
1935 #ifdef TARGET_X86_64
1936 if (s->aflag == 2) {
1937 gen_op_movq_A0_reg(base);
1939 gen_op_addq_A0_im(disp);
1944 gen_op_movl_A0_reg(base);
1946 gen_op_addl_A0_im(disp);
1949 #ifdef TARGET_X86_64
1950 if (s->aflag == 2) {
1951 gen_op_movq_A0_im(disp);
1955 gen_op_movl_A0_im(disp);
1958 /* XXX: index == 4 is always invalid */
1959 if (havesib && (index != 4 || scale != 0)) {
1960 #ifdef TARGET_X86_64
1961 if (s->aflag == 2) {
1962 gen_op_addq_A0_reg_sN(scale, index);
1966 gen_op_addl_A0_reg_sN(scale, index);
1971 if (base == R_EBP || base == R_ESP)
1976 #ifdef TARGET_X86_64
1977 if (s->aflag == 2) {
1978 gen_op_addq_A0_seg(override);
1982 gen_op_addl_A0_seg(override);
1989 disp = lduw_code(s->pc);
1991 gen_op_movl_A0_im(disp);
1992 rm = 0; /* avoid SS override */
1999 disp = (int8_t)ldub_code(s->pc++);
2003 disp = lduw_code(s->pc);
2009 gen_op_movl_A0_reg(R_EBX);
2010 gen_op_addl_A0_reg_sN(0, R_ESI);
2013 gen_op_movl_A0_reg(R_EBX);
2014 gen_op_addl_A0_reg_sN(0, R_EDI);
2017 gen_op_movl_A0_reg(R_EBP);
2018 gen_op_addl_A0_reg_sN(0, R_ESI);
2021 gen_op_movl_A0_reg(R_EBP);
2022 gen_op_addl_A0_reg_sN(0, R_EDI);
2025 gen_op_movl_A0_reg(R_ESI);
2028 gen_op_movl_A0_reg(R_EDI);
2031 gen_op_movl_A0_reg(R_EBP);
2035 gen_op_movl_A0_reg(R_EBX);
2039 gen_op_addl_A0_im(disp);
2040 gen_op_andl_A0_ffff();
2044 if (rm == 2 || rm == 3 || rm == 6)
2049 gen_op_addl_A0_seg(override);
2059 static void gen_nop_modrm(DisasContext *s, int modrm)
2061 int mod, rm, base, code;
2063 mod = (modrm >> 6) & 3;
2073 code = ldub_code(s->pc++);
2109 /* used for LEA and MOV AX, mem */
2110 static void gen_add_A0_ds_seg(DisasContext *s)
2112 int override, must_add_seg;
2113 must_add_seg = s->addseg;
2115 if (s->override >= 0) {
2116 override = s->override;
2122 #ifdef TARGET_X86_64
2124 gen_op_addq_A0_seg(override);
2128 gen_op_addl_A0_seg(override);
2133 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
2135 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2137 int mod, rm, opreg, disp;
2139 mod = (modrm >> 6) & 3;
2140 rm = (modrm & 7) | REX_B(s);
2144 gen_op_mov_TN_reg(ot, 0, reg);
2145 gen_op_mov_reg_T0(ot, rm);
2147 gen_op_mov_TN_reg(ot, 0, rm);
2149 gen_op_mov_reg_T0(ot, reg);
2152 gen_lea_modrm(s, modrm, &opreg, &disp);
2155 gen_op_mov_TN_reg(ot, 0, reg);
2156 gen_op_st_T0_A0(ot + s->mem_index);
2158 gen_op_ld_T0_A0(ot + s->mem_index);
2160 gen_op_mov_reg_T0(ot, reg);
2165 static inline uint32_t insn_get(DisasContext *s, int ot)
2171 ret = ldub_code(s->pc);
2175 ret = lduw_code(s->pc);
2180 ret = ldl_code(s->pc);
2187 static inline int insn_const_size(unsigned int ot)
2195 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2197 TranslationBlock *tb;
2200 pc = s->cs_base + eip;
2202 /* NOTE: we handle the case where the TB spans two pages here */
2203 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2204 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2205 /* jump to same page: we can use a direct jump */
2206 tcg_gen_goto_tb(tb_num);
2208 tcg_gen_exit_tb((long)tb + tb_num);
2210 /* jump to another page: currently not optimized */
2216 static inline void gen_jcc(DisasContext *s, int b,
2217 target_ulong val, target_ulong next_eip)
2222 if (s->cc_op != CC_OP_DYNAMIC) {
2223 gen_op_set_cc_op(s->cc_op);
2224 s->cc_op = CC_OP_DYNAMIC;
2227 l1 = gen_new_label();
2228 gen_jcc1(s, cc_op, b, l1);
2230 gen_goto_tb(s, 0, next_eip);
2233 gen_goto_tb(s, 1, val);
2237 l1 = gen_new_label();
2238 l2 = gen_new_label();
2239 gen_jcc1(s, cc_op, b, l1);
2241 gen_jmp_im(next_eip);
2251 static void gen_setcc(DisasContext *s, int b)
2253 int inv, jcc_op, l1;
2256 if (is_fast_jcc_case(s, b)) {
2257 /* nominal case: we use a jump */
2258 /* XXX: make it faster by adding new instructions in TCG */
2259 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2260 tcg_gen_movi_tl(t0, 0);
2261 l1 = gen_new_label();
2262 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2263 tcg_gen_movi_tl(t0, 1);
2265 tcg_gen_mov_tl(cpu_T[0], t0);
2268 /* slow case: it is more efficient not to generate a jump,
2269 although it is questionable whether this optimization is
2272 jcc_op = (b >> 1) & 7;
2273 gen_setcc_slow_T0(s, jcc_op);
2275 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2280 static inline void gen_op_movl_T0_seg(int seg_reg)
2282 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2283 offsetof(CPUX86State,segs[seg_reg].selector));
2286 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2288 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2289 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2290 offsetof(CPUX86State,segs[seg_reg].selector));
2291 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2292 tcg_gen_st_tl(cpu_T[0], cpu_env,
2293 offsetof(CPUX86State,segs[seg_reg].base));
2296 /* move T0 to seg_reg and compute if the CPU state may change. Never
2297 call this function with seg_reg == R_CS */
2298 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2300 if (s->pe && !s->vm86) {
2301 /* XXX: optimize by finding processor state dynamically */
2302 if (s->cc_op != CC_OP_DYNAMIC)
2303 gen_op_set_cc_op(s->cc_op);
2304 gen_jmp_im(cur_eip);
2305 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2306 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2307 /* abort translation because the addseg value may change or
2308 because ss32 may change. For R_SS, translation must always
2309 stop because special handling must be done to disable hardware
2310 interrupts for the next instruction */
2311 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2314 gen_op_movl_seg_T0_vm(seg_reg);
2315 if (seg_reg == R_SS)
2320 static inline int svm_is_rep(int prefixes)
2322 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2326 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2327 uint32_t type, uint64_t param)
2329 /* no SVM activated; fast case */
2330 if (likely(!(s->flags & HF_SVMI_MASK)))
2332 if (s->cc_op != CC_OP_DYNAMIC)
2333 gen_op_set_cc_op(s->cc_op);
2334 gen_jmp_im(pc_start - s->cs_base);
2335 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2336 tcg_const_i32(type), tcg_const_i64(param));
2340 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2342 gen_svm_check_intercept_param(s, pc_start, type, 0);
2345 static inline void gen_stack_update(DisasContext *s, int addend)
2347 #ifdef TARGET_X86_64
2349 gen_op_add_reg_im(2, R_ESP, addend);
2353 gen_op_add_reg_im(1, R_ESP, addend);
2355 gen_op_add_reg_im(0, R_ESP, addend);
2359 /* generate a push. It depends on ss32, addseg and dflag */
2360 static void gen_push_T0(DisasContext *s)
2362 #ifdef TARGET_X86_64
2364 gen_op_movq_A0_reg(R_ESP);
2366 gen_op_addq_A0_im(-8);
2367 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2369 gen_op_addq_A0_im(-2);
2370 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2372 gen_op_mov_reg_A0(2, R_ESP);
2376 gen_op_movl_A0_reg(R_ESP);
2378 gen_op_addl_A0_im(-2);
2380 gen_op_addl_A0_im(-4);
2383 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2384 gen_op_addl_A0_seg(R_SS);
2387 gen_op_andl_A0_ffff();
2388 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2389 gen_op_addl_A0_seg(R_SS);
2391 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2392 if (s->ss32 && !s->addseg)
2393 gen_op_mov_reg_A0(1, R_ESP);
2395 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2399 /* generate a push. It depends on ss32, addseg and dflag */
2400 /* slower version for T1, only used for call Ev */
2401 static void gen_push_T1(DisasContext *s)
2403 #ifdef TARGET_X86_64
2405 gen_op_movq_A0_reg(R_ESP);
2407 gen_op_addq_A0_im(-8);
2408 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2410 gen_op_addq_A0_im(-2);
2411 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2413 gen_op_mov_reg_A0(2, R_ESP);
2417 gen_op_movl_A0_reg(R_ESP);
2419 gen_op_addl_A0_im(-2);
2421 gen_op_addl_A0_im(-4);
2424 gen_op_addl_A0_seg(R_SS);
2427 gen_op_andl_A0_ffff();
2428 gen_op_addl_A0_seg(R_SS);
2430 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2432 if (s->ss32 && !s->addseg)
2433 gen_op_mov_reg_A0(1, R_ESP);
2435 gen_stack_update(s, (-2) << s->dflag);
2439 /* two step pop is necessary for precise exceptions */
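/* The value is loaded first (gen_pop_T0) and ESP is only adjusted afterwards
   (gen_pop_update), so a faulting load leaves ESP intact and the instruction
   can be restarted. */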
2440 static void gen_pop_T0(DisasContext *s)
2442 #ifdef TARGET_X86_64
2444 gen_op_movq_A0_reg(R_ESP);
2445 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2449 gen_op_movl_A0_reg(R_ESP);
2452 gen_op_addl_A0_seg(R_SS);
2454 gen_op_andl_A0_ffff();
2455 gen_op_addl_A0_seg(R_SS);
2457 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2461 static void gen_pop_update(DisasContext *s)
2463 #ifdef TARGET_X86_64
2464 if (CODE64(s) && s->dflag) {
2465 gen_stack_update(s, 8);
2469 gen_stack_update(s, 2 << s->dflag);
2473 static void gen_stack_A0(DisasContext *s)
2475 gen_op_movl_A0_reg(R_ESP);
2477 gen_op_andl_A0_ffff();
2478 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2480 gen_op_addl_A0_seg(R_SS);
2483 /* NOTE: wrap around in 16 bit not fully handled */
2484 static void gen_pusha(DisasContext *s)
2487 gen_op_movl_A0_reg(R_ESP);
2488 gen_op_addl_A0_im(-16 << s->dflag);
2490 gen_op_andl_A0_ffff();
2491 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2493 gen_op_addl_A0_seg(R_SS);
2494 for(i = 0;i < 8; i++) {
2495 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2496 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2497 gen_op_addl_A0_im(2 << s->dflag);
2499 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2502 /* NOTE: wrap around in 16 bit not fully handled */
2503 static void gen_popa(DisasContext *s)
2506 gen_op_movl_A0_reg(R_ESP);
2508 gen_op_andl_A0_ffff();
2509 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2510 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2512 gen_op_addl_A0_seg(R_SS);
2513 for(i = 0;i < 8; i++) {
2514 /* ESP is not reloaded */
2516 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2517 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2519 gen_op_addl_A0_im(2 << s->dflag);
2521 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2524 static void gen_enter(DisasContext *s, int esp_addend, int level)
2529 #ifdef TARGET_X86_64
2531 ot = s->dflag ? OT_QUAD : OT_WORD;
2534 gen_op_movl_A0_reg(R_ESP);
2535 gen_op_addq_A0_im(-opsize);
2536 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2539 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2540 gen_op_st_T0_A0(ot + s->mem_index);
2542 /* XXX: must save state */
2543 tcg_gen_helper_0_3(helper_enter64_level,
2544 tcg_const_i32(level),
2545 tcg_const_i32((ot == OT_QUAD)),
2548 gen_op_mov_reg_T1(ot, R_EBP);
2549 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2550 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2554 ot = s->dflag + OT_WORD;
2555 opsize = 2 << s->dflag;
2557 gen_op_movl_A0_reg(R_ESP);
2558 gen_op_addl_A0_im(-opsize);
2560 gen_op_andl_A0_ffff();
2561 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2563 gen_op_addl_A0_seg(R_SS);
2565 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2566 gen_op_st_T0_A0(ot + s->mem_index);
2568 /* XXX: must save state */
2569 tcg_gen_helper_0_3(helper_enter_level,
2570 tcg_const_i32(level),
2571 tcg_const_i32(s->dflag),
2574 gen_op_mov_reg_T1(ot, R_EBP);
2575 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2576 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2580 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2582 if (s->cc_op != CC_OP_DYNAMIC)
2583 gen_op_set_cc_op(s->cc_op);
2584 gen_jmp_im(cur_eip);
2585 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2589 /* an interrupt is different from an exception because of the
2591 static void gen_interrupt(DisasContext *s, int intno,
2592 target_ulong cur_eip, target_ulong next_eip)
2594 if (s->cc_op != CC_OP_DYNAMIC)
2595 gen_op_set_cc_op(s->cc_op);
2596 gen_jmp_im(cur_eip);
2597 tcg_gen_helper_0_2(helper_raise_interrupt,
2598 tcg_const_i32(intno),
2599 tcg_const_i32(next_eip - cur_eip));
2603 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2605 if (s->cc_op != CC_OP_DYNAMIC)
2606 gen_op_set_cc_op(s->cc_op);
2607 gen_jmp_im(cur_eip);
2608 tcg_gen_helper_0_0(helper_debug);
2612 /* generate a generic end of block. Trace exception is also generated
2614 static void gen_eob(DisasContext *s)
2616 if (s->cc_op != CC_OP_DYNAMIC)
2617 gen_op_set_cc_op(s->cc_op);
2618 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2619 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2621 if (s->singlestep_enabled) {
2622 tcg_gen_helper_0_0(helper_debug);
2624 tcg_gen_helper_0_0(helper_single_step);
2631 /* generate a jump to eip. No segment change must happen before as a
2632 direct call to the next block may occur */
2633 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2636 if (s->cc_op != CC_OP_DYNAMIC) {
2637 gen_op_set_cc_op(s->cc_op);
2638 s->cc_op = CC_OP_DYNAMIC;
2640 gen_goto_tb(s, tb_num, eip);
2648 static void gen_jmp(DisasContext *s, target_ulong eip)
2650 gen_jmp_tb(s, eip, 0);
2653 static inline void gen_ldq_env_A0(int idx, int offset)
2655 int mem_index = (idx >> 2) - 1;
2656 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2657 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2660 static inline void gen_stq_env_A0(int idx, int offset)
2662 int mem_index = (idx >> 2) - 1;
2663 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2664 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2667 static inline void gen_ldo_env_A0(int idx, int offset)
2669 int mem_index = (idx >> 2) - 1;
2670 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2671 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2672 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2673 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2674 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2677 static inline void gen_sto_env_A0(int idx, int offset)
2679 int mem_index = (idx >> 2) - 1;
2680 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2681 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2682 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2683 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2684 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2687 static inline void gen_op_movo(int d_offset, int s_offset)
2689 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2690 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2691 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2692 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2695 static inline void gen_op_movq(int d_offset, int s_offset)
2697 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2698 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2701 static inline void gen_op_movl(int d_offset, int s_offset)
2703 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2704 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2707 static inline void gen_op_movq_env_0(int d_offset)
2709 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2710 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2713 #define SSE_SPECIAL ((void *)1)
2714 #define SSE_DUMMY ((void *)2)
2716 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2717 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2718 helper_ ## x ## ss, helper_ ## x ## sd, }
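/* sse_op_table1 is indexed by the second opcode byte and by the prefix in
   effect: column 0 = no prefix (MMX or packed single), 1 = 0x66 (packed
   double), 2 = 0xF3 (scalar single), 3 = 0xF2 (scalar double). SSE_SPECIAL
   marks opcodes handled inline in gen_sse; SSE_DUMMY marks placeholders for
   femms/emms and the 3DNow! group. */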
2720 static void *sse_op_table1[256][4] = {
2721 /* 3DNow! extensions */
2722 [0x0e] = { SSE_DUMMY }, /* femms */
2723 [0x0f] = { SSE_DUMMY }, /* pf... */
2724 /* pure SSE operations */
2725 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2726 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2727 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2728 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2729 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2730 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2731 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2732 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2734 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2735 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2736 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2737 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2738 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2739 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2740 [0x2e] = { helper_ucomiss, helper_ucomisd },
2741 [0x2f] = { helper_comiss, helper_comisd },
2742 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2743 [0x51] = SSE_FOP(sqrt),
2744 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2745 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2746 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2747 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2748 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2749 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2750 [0x58] = SSE_FOP(add),
2751 [0x59] = SSE_FOP(mul),
2752 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2753 helper_cvtss2sd, helper_cvtsd2ss },
2754 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2755 [0x5c] = SSE_FOP(sub),
2756 [0x5d] = SSE_FOP(min),
2757 [0x5e] = SSE_FOP(div),
2758 [0x5f] = SSE_FOP(max),
2760 [0xc2] = SSE_FOP(cmpeq),
2761 [0xc6] = { helper_shufps, helper_shufpd },
2763 /* MMX ops and their SSE extensions */
2764 [0x60] = MMX_OP2(punpcklbw),
2765 [0x61] = MMX_OP2(punpcklwd),
2766 [0x62] = MMX_OP2(punpckldq),
2767 [0x63] = MMX_OP2(packsswb),
2768 [0x64] = MMX_OP2(pcmpgtb),
2769 [0x65] = MMX_OP2(pcmpgtw),
2770 [0x66] = MMX_OP2(pcmpgtl),
2771 [0x67] = MMX_OP2(packuswb),
2772 [0x68] = MMX_OP2(punpckhbw),
2773 [0x69] = MMX_OP2(punpckhwd),
2774 [0x6a] = MMX_OP2(punpckhdq),
2775 [0x6b] = MMX_OP2(packssdw),
2776 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2777 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2778 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2779 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2780 [0x70] = { helper_pshufw_mmx,
2783 helper_pshuflw_xmm },
2784 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2785 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2786 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2787 [0x74] = MMX_OP2(pcmpeqb),
2788 [0x75] = MMX_OP2(pcmpeqw),
2789 [0x76] = MMX_OP2(pcmpeql),
2790 [0x77] = { SSE_DUMMY }, /* emms */
2791 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2792 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2793 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2794 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2795 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2796 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2797 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2798 [0xd1] = MMX_OP2(psrlw),
2799 [0xd2] = MMX_OP2(psrld),
2800 [0xd3] = MMX_OP2(psrlq),
2801 [0xd4] = MMX_OP2(paddq),
2802 [0xd5] = MMX_OP2(pmullw),
2803 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2804 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2805 [0xd8] = MMX_OP2(psubusb),
2806 [0xd9] = MMX_OP2(psubusw),
2807 [0xda] = MMX_OP2(pminub),
2808 [0xdb] = MMX_OP2(pand),
2809 [0xdc] = MMX_OP2(paddusb),
2810 [0xdd] = MMX_OP2(paddusw),
2811 [0xde] = MMX_OP2(pmaxub),
2812 [0xdf] = MMX_OP2(pandn),
2813 [0xe0] = MMX_OP2(pavgb),
2814 [0xe1] = MMX_OP2(psraw),
2815 [0xe2] = MMX_OP2(psrad),
2816 [0xe3] = MMX_OP2(pavgw),
2817 [0xe4] = MMX_OP2(pmulhuw),
2818 [0xe5] = MMX_OP2(pmulhw),
2819 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2820 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2821 [0xe8] = MMX_OP2(psubsb),
2822 [0xe9] = MMX_OP2(psubsw),
2823 [0xea] = MMX_OP2(pminsw),
2824 [0xeb] = MMX_OP2(por),
2825 [0xec] = MMX_OP2(paddsb),
2826 [0xed] = MMX_OP2(paddsw),
2827 [0xee] = MMX_OP2(pmaxsw),
2828 [0xef] = MMX_OP2(pxor),
2829 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2830 [0xf1] = MMX_OP2(psllw),
2831 [0xf2] = MMX_OP2(pslld),
2832 [0xf3] = MMX_OP2(psllq),
2833 [0xf4] = MMX_OP2(pmuludq),
2834 [0xf5] = MMX_OP2(pmaddwd),
2835 [0xf6] = MMX_OP2(psadbw),
2836 [0xf7] = MMX_OP2(maskmov),
2837 [0xf8] = MMX_OP2(psubb),
2838 [0xf9] = MMX_OP2(psubw),
2839 [0xfa] = MMX_OP2(psubl),
2840 [0xfb] = MMX_OP2(psubq),
2841 [0xfc] = MMX_OP2(paddb),
2842 [0xfd] = MMX_OP2(paddw),
2843 [0xfe] = MMX_OP2(paddl),
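/* sse_op_table2 handles the 0x71/0x72/0x73 shift-by-immediate groups.  It is
   indexed as [group * 8 + ModRM.reg][b1]: group 0 = 0x71 (word shifts),
   group 1 = 0x72 (dword shifts), group 2 = 0x73 (qword/dqword shifts);
   b1 = 0 selects the MMX helper, b1 = 1 the 0x66-prefixed XMM helper. */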
2846 static void *sse_op_table2[3 * 8][2] = {
2847 [0 + 2] = MMX_OP2(psrlw),
2848 [0 + 4] = MMX_OP2(psraw),
2849 [0 + 6] = MMX_OP2(psllw),
2850 [8 + 2] = MMX_OP2(psrld),
2851 [8 + 4] = MMX_OP2(psrad),
2852 [8 + 6] = MMX_OP2(pslld),
2853 [16 + 2] = MMX_OP2(psrlq),
2854 [16 + 3] = { NULL, helper_psrldq_xmm },
2855 [16 + 6] = MMX_OP2(psllq),
2856 [16 + 7] = { NULL, helper_pslldq_xmm },
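/* sse_op_table3 holds the scalar integer<->float conversion helpers in three
   groups of four: cvtsi2ss/sd at index 0, cvttss/sd2si at +4, cvtss/sd2si at
   +8.  Within a group the index is (s->dflag == 2) * 2 + ((b >> 8) - 2),
   i.e. { 32-bit ss, 32-bit sd, 64-bit ss, 64-bit sd }. */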
2859 static void *sse_op_table3[4 * 3] = {
2862 X86_64_ONLY(helper_cvtsq2ss),
2863 X86_64_ONLY(helper_cvtsq2sd),
2867 X86_64_ONLY(helper_cvttss2sq),
2868 X86_64_ONLY(helper_cvttsd2sq),
2872 X86_64_ONLY(helper_cvtss2sq),
2873 X86_64_ONLY(helper_cvtsd2sq),
2876 static void *sse_op_table4[8][4] = {
2887 static void *sse_op_table5[256] = {
2888 [0x0c] = helper_pi2fw,
2889 [0x0d] = helper_pi2fd,
2890 [0x1c] = helper_pf2iw,
2891 [0x1d] = helper_pf2id,
2892 [0x8a] = helper_pfnacc,
2893 [0x8e] = helper_pfpnacc,
2894 [0x90] = helper_pfcmpge,
2895 [0x94] = helper_pfmin,
2896 [0x96] = helper_pfrcp,
2897 [0x97] = helper_pfrsqrt,
2898 [0x9a] = helper_pfsub,
2899 [0x9e] = helper_pfadd,
2900 [0xa0] = helper_pfcmpgt,
2901 [0xa4] = helper_pfmax,
2902 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2903 [0xa7] = helper_movq, /* pfrsqit1 */
2904 [0xaa] = helper_pfsubr,
2905 [0xae] = helper_pfacc,
2906 [0xb0] = helper_pfcmpeq,
2907 [0xb4] = helper_pfmul,
2908 [0xb6] = helper_movq, /* pfrcpit2 */
2909 [0xb7] = helper_pmulhrw_mmx,
2910 [0xbb] = helper_pswapd,
2911 [0xbf] = helper_pavgb_mmx /* pavgusb */
2914 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2916 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2917 int modrm, mod, rm, reg, reg_addr, offset_addr;
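/* b1 selects the sse_op_table1 column: 0 = no prefix (MMX/ps), 1 = 0x66 (pd),
   2 = 0xF3 (ss), 3 = 0xF2 (sd). */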
2921 if (s->prefix & PREFIX_DATA)
2923 else if (s->prefix & PREFIX_REPZ)
2925 else if (s->prefix & PREFIX_REPNZ)
2929 sse_op2 = sse_op_table1[b][b1];
2932 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2942 /* simple MMX/SSE operation */
2943 if (s->flags & HF_TS_MASK) {
2944 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2947 if (s->flags & HF_EM_MASK) {
2949 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2952 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2955 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2958 tcg_gen_helper_0_0(helper_emms);
2963 tcg_gen_helper_0_0(helper_emms);
2966 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2967 the static cpu state) */
2969 tcg_gen_helper_0_0(helper_enter_mmx);
2972 modrm = ldub_code(s->pc++);
2973 reg = ((modrm >> 3) & 7);
2976 mod = (modrm >> 6) & 3;
2977 if (sse_op2 == SSE_SPECIAL) {
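/* in the case values below, the high byte encodes the mandatory prefix:
   0x0xx = none (MMX/ps), 0x1xx = 0x66 (pd), 0x2xx = 0xF3 (ss), 0x3xx = 0xF2 (sd) */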
2980 case 0x0e7: /* movntq */
2983 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2984 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2986 case 0x1e7: /* movntdq */
2987 case 0x02b: /* movntps */
2988 case 0x12b: /* movntpd */
2989 case 0x3f0: /* lddqu */
2992 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2993 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2995 case 0x6e: /* movd mm, ea */
2996 #ifdef TARGET_X86_64
2997 if (s->dflag == 2) {
2998 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2999 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3003 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3004 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3005 offsetof(CPUX86State,fpregs[reg].mmx));
3006 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3009 case 0x16e: /* movd xmm, ea */
3010 #ifdef TARGET_X86_64
3011 if (s->dflag == 2) {
3012 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3013 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3014 offsetof(CPUX86State,xmm_regs[reg]));
3015 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3019 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3020 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3021 offsetof(CPUX86State,xmm_regs[reg]));
3022 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3023 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3026 case 0x6f: /* movq mm, ea */
3028 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3029 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3032 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3033 offsetof(CPUX86State,fpregs[rm].mmx));
3034 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3035 offsetof(CPUX86State,fpregs[reg].mmx));
3038 case 0x010: /* movups */
3039 case 0x110: /* movupd */
3040 case 0x028: /* movaps */
3041 case 0x128: /* movapd */
3042 case 0x16f: /* movdqa xmm, ea */
3043 case 0x26f: /* movdqu xmm, ea */
3045 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3046 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3048 rm = (modrm & 7) | REX_B(s);
3049 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3050 offsetof(CPUX86State,xmm_regs[rm]));
3053 case 0x210: /* movss xmm, ea */
3055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3056 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3057 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3059 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3060 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3061 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3063 rm = (modrm & 7) | REX_B(s);
3064 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3065 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3068 case 0x310: /* movsd xmm, ea */
3070 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3071 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3073 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3074 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3076 rm = (modrm & 7) | REX_B(s);
3077 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3078 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3081 case 0x012: /* movlps */
3082 case 0x112: /* movlpd */
3084 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3085 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3088 rm = (modrm & 7) | REX_B(s);
3089 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3090 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3093 case 0x212: /* movsldup */
3095 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3096 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3098 rm = (modrm & 7) | REX_B(s);
3099 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3100 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3101 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3102 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3104 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3105 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3106 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3107 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3109 case 0x312: /* movddup */
3111 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3112 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3114 rm = (modrm & 7) | REX_B(s);
3115 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3116 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3118 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3119 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3121 case 0x016: /* movhps */
3122 case 0x116: /* movhpd */
3124 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3128 rm = (modrm & 7) | REX_B(s);
3129 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3130 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3133 case 0x216: /* movshdup */
3135 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3136 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3138 rm = (modrm & 7) | REX_B(s);
3139 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3140 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3141 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3142 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3144 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3145 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3146 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3147 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3149 case 0x7e: /* movd ea, mm */
3150 #ifdef TARGET_X86_64
3151 if (s->dflag == 2) {
3152 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3153 offsetof(CPUX86State,fpregs[reg].mmx));
3154 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3158 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3159 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3160 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3163 case 0x17e: /* movd ea, xmm */
3164 #ifdef TARGET_X86_64
3165 if (s->dflag == 2) {
3166 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3167 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3168 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3172 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3173 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3174 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3177 case 0x27e: /* movq xmm, ea */
3179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3180 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3182 rm = (modrm & 7) | REX_B(s);
3183 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3184 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3186 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3188 case 0x7f: /* movq ea, mm */
3190 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3191 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3194 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3195 offsetof(CPUX86State,fpregs[reg].mmx));
3198 case 0x011: /* movups */
3199 case 0x111: /* movupd */
3200 case 0x029: /* movaps */
3201 case 0x129: /* movapd */
3202 case 0x17f: /* movdqa ea, xmm */
3203 case 0x27f: /* movdqu ea, xmm */
3205 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3206 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3208 rm = (modrm & 7) | REX_B(s);
3209 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3210 offsetof(CPUX86State,xmm_regs[reg]));
3213 case 0x211: /* movss ea, xmm */
3215 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3217 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3219 rm = (modrm & 7) | REX_B(s);
3220 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3221 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3224 case 0x311: /* movsd ea, xmm */
3226 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3227 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3229 rm = (modrm & 7) | REX_B(s);
3230 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3231 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3234 case 0x013: /* movlps */
3235 case 0x113: /* movlpd */
3237 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3238 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3243 case 0x017: /* movhps */
3244 case 0x117: /* movhpd */
3246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3247 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3252 case 0x71: /* shift mm, im */
3255 case 0x171: /* shift xmm, im */
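/* the immediate shift count is materialized in xmm_t0/mmx_t0 so that the
   regular two-operand shift helpers from sse_op_table2 can be reused */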
3258 val = ldub_code(s->pc++);
3260 gen_op_movl_T0_im(val);
3261 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3263 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3264 op1_offset = offsetof(CPUX86State,xmm_t0);
3266 gen_op_movl_T0_im(val);
3267 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3269 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3270 op1_offset = offsetof(CPUX86State,mmx_t0);
3272 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3276 rm = (modrm & 7) | REX_B(s);
3277 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3280 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3282 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3283 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3284 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3286 case 0x050: /* movmskps */
3287 rm = (modrm & 7) | REX_B(s);
3288 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3289 offsetof(CPUX86State,xmm_regs[rm]));
3290 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3291 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3292 gen_op_mov_reg_T0(OT_LONG, reg);
3294 case 0x150: /* movmskpd */
3295 rm = (modrm & 7) | REX_B(s);
3296 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3297 offsetof(CPUX86State,xmm_regs[rm]));
3298 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3299 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3300 gen_op_mov_reg_T0(OT_LONG, reg);
3302 case 0x02a: /* cvtpi2ps */
3303 case 0x12a: /* cvtpi2pd */
3304 tcg_gen_helper_0_0(helper_enter_mmx);
3306 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3307 op2_offset = offsetof(CPUX86State,mmx_t0);
3308 gen_ldq_env_A0(s->mem_index, op2_offset);
3311 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3313 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3314 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3315 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3318 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3322 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3326 case 0x22a: /* cvtsi2ss */
3327 case 0x32a: /* cvtsi2sd */
3328 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3329 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3330 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3331 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3332 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3333 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3334 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3336 case 0x02c: /* cvttps2pi */
3337 case 0x12c: /* cvttpd2pi */
3338 case 0x02d: /* cvtps2pi */
3339 case 0x12d: /* cvtpd2pi */
3340 tcg_gen_helper_0_0(helper_enter_mmx);
3342 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3343 op2_offset = offsetof(CPUX86State,xmm_t0);
3344 gen_ldo_env_A0(s->mem_index, op2_offset);
3346 rm = (modrm & 7) | REX_B(s);
3347 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3349 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3350 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3351 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3354 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3357 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3360 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3363 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3367 case 0x22c: /* cvttss2si */
3368 case 0x32c: /* cvttsd2si */
3369 case 0x22d: /* cvtss2si */
3370 case 0x32d: /* cvtsd2si */
3371 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3373 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3375 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3377 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3378 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3380 op2_offset = offsetof(CPUX86State,xmm_t0);
3382 rm = (modrm & 7) | REX_B(s);
3383 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3385 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3387 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3388 if (ot == OT_LONG) {
3389 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3390 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3392 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3394 gen_op_mov_reg_T0(ot, reg);
3396 case 0xc4: /* pinsrw */
3399 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3400 val = ldub_code(s->pc++);
3403 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3404 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3407 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3408 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3411 case 0xc5: /* pextrw */
3415 val = ldub_code(s->pc++);
3418 rm = (modrm & 7) | REX_B(s);
3419 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3420 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3424 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3425 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3427 reg = ((modrm >> 3) & 7) | rex_r;
3428 gen_op_mov_reg_T0(OT_LONG, reg);
3430 case 0x1d6: /* movq ea, xmm */
3432 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3433 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3435 rm = (modrm & 7) | REX_B(s);
3436 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3437 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3438 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3441 case 0x2d6: /* movq2dq */
3442 tcg_gen_helper_0_0(helper_enter_mmx);
3444 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3445 offsetof(CPUX86State,fpregs[rm].mmx));
3446 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3448 case 0x3d6: /* movdq2q */
3449 tcg_gen_helper_0_0(helper_enter_mmx);
3450 rm = (modrm & 7) | REX_B(s);
3451 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3452 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3454 case 0xd7: /* pmovmskb */
3459 rm = (modrm & 7) | REX_B(s);
3460 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3461 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3464 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3465 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3467 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3468 reg = ((modrm >> 3) & 7) | rex_r;
3469 gen_op_mov_reg_T0(OT_LONG, reg);
3475 /* generic MMX or SSE operation */
3477 case 0x70: /* pshufx insn */
3478 case 0xc6: /* pshufx insn */
3479 case 0xc2: /* compare insns */
3486 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3488 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3489 op2_offset = offsetof(CPUX86State,xmm_t0);
3490 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3492 /* specific case for scalar SSE instructions: only 32 or 64 bits are loaded from memory */
3495 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3496 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3499 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3502 gen_ldo_env_A0(s->mem_index, op2_offset);
3505 rm = (modrm & 7) | REX_B(s);
3506 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3509 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3511 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3512 op2_offset = offsetof(CPUX86State,mmx_t0);
3513 gen_ldq_env_A0(s->mem_index, op2_offset);
3516 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3520 case 0x0f: /* 3DNow! data insns */
3521 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3523 val = ldub_code(s->pc++);
3524 sse_op2 = sse_op_table5[val];
3527 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3528 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3529 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3531 case 0x70: /* pshufx insn */
3532 case 0xc6: /* pshufx insn */
3533 val = ldub_code(s->pc++);
3534 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3535 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3536 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3540 val = ldub_code(s->pc++);
3543 sse_op2 = sse_op_table4[val][b1];
3544 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3545 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3546 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3549 /* maskmovq/maskmovdqu: we must prepare A0 (DS-based rDI) as the store address */
3552 #ifdef TARGET_X86_64
3553 if (s->aflag == 2) {
3554 gen_op_movq_A0_reg(R_EDI);
3558 gen_op_movl_A0_reg(R_EDI);
3560 gen_op_andl_A0_ffff();
3562 gen_add_A0_ds_seg(s);
3564 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3565 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3566 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3569 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3570 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3571 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3574 if (b == 0x2e || b == 0x2f) {
3575 s->cc_op = CC_OP_EFLAGS;
3580 /* convert one instruction. s->is_jmp is set if the translation must
3581 be stopped. Return the next pc value */
3582 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3584 int b, prefixes, aflag, dflag;
3586 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3587 target_ulong next_eip, tval;
3590 if (unlikely(loglevel & CPU_LOG_TB_OP))
3591 tcg_gen_debug_insn_start(pc_start);
3599 #ifdef TARGET_X86_64
3604 s->rip_offset = 0; /* for relative ip address */
3606 b = ldub_code(s->pc);
3608 /* check prefixes */
3609 #ifdef TARGET_X86_64
3613 prefixes |= PREFIX_REPZ;
3616 prefixes |= PREFIX_REPNZ;
3619 prefixes |= PREFIX_LOCK;
3640 prefixes |= PREFIX_DATA;
3643 prefixes |= PREFIX_ADR;
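/* REX prefix: W selects the 64 bit operand size; the R, X and B bits are
   pre-shifted to bit position 3 so they can be OR-ed directly onto the 3-bit
   reg, index and base fields decoded from ModRM/SIB */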
3647 rex_w = (b >> 3) & 1;
3648 rex_r = (b & 0x4) << 1;
3649 s->rex_x = (b & 0x2) << 2;
3650 REX_B(s) = (b & 0x1) << 3;
3651 x86_64_hregs = 1; /* select uniform byte register addressing */
3655 /* 0x66 is ignored if rex.w is set */
3658 if (prefixes & PREFIX_DATA)
3661 if (!(prefixes & PREFIX_ADR))
3668 prefixes |= PREFIX_REPZ;
3671 prefixes |= PREFIX_REPNZ;
3674 prefixes |= PREFIX_LOCK;
3695 prefixes |= PREFIX_DATA;
3698 prefixes |= PREFIX_ADR;
3701 if (prefixes & PREFIX_DATA)
3703 if (prefixes & PREFIX_ADR)
3707 s->prefix = prefixes;
3711 /* lock generation */
3712 if (prefixes & PREFIX_LOCK)
3713 tcg_gen_helper_0_0(helper_lock);
3715 /* now check op code */
3719 /**************************/
3720 /* extended op code */
3721 b = ldub_code(s->pc++) | 0x100;
3724 /**************************/
3742 ot = dflag + OT_WORD;
3745 case 0: /* OP Ev, Gv */
3746 modrm = ldub_code(s->pc++);
3747 reg = ((modrm >> 3) & 7) | rex_r;
3748 mod = (modrm >> 6) & 3;
3749 rm = (modrm & 7) | REX_B(s);
3751 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3753 } else if (op == OP_XORL && rm == reg) {
3755 /* xor reg, reg optimisation */
3757 s->cc_op = CC_OP_LOGICB + ot;
3758 gen_op_mov_reg_T0(ot, reg);
3759 gen_op_update1_cc();
3764 gen_op_mov_TN_reg(ot, 1, reg);
3765 gen_op(s, op, ot, opreg);
3767 case 1: /* OP Gv, Ev */
3768 modrm = ldub_code(s->pc++);
3769 mod = (modrm >> 6) & 3;
3770 reg = ((modrm >> 3) & 7) | rex_r;
3771 rm = (modrm & 7) | REX_B(s);
3773 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3774 gen_op_ld_T1_A0(ot + s->mem_index);
3775 } else if (op == OP_XORL && rm == reg) {
3778 gen_op_mov_TN_reg(ot, 1, rm);
3780 gen_op(s, op, ot, reg);
3782 case 2: /* OP A, Iv */
3783 val = insn_get(s, ot);
3784 gen_op_movl_T1_im(val);
3785 gen_op(s, op, ot, OR_EAX);
3794 case 0x80: /* GRP1 */
3803 ot = dflag + OT_WORD;
3805 modrm = ldub_code(s->pc++);
3806 mod = (modrm >> 6) & 3;
3807 rm = (modrm & 7) | REX_B(s);
3808 op = (modrm >> 3) & 7;
3814 s->rip_offset = insn_const_size(ot);
3815 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3826 val = insn_get(s, ot);
3829 val = (int8_t)insn_get(s, OT_BYTE);
3832 gen_op_movl_T1_im(val);
3833 gen_op(s, op, ot, opreg);
3837 /**************************/
3838 /* inc, dec, and other misc arith */
3839 case 0x40 ... 0x47: /* inc Gv */
3840 ot = dflag ? OT_LONG : OT_WORD;
3841 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3843 case 0x48 ... 0x4f: /* dec Gv */
3844 ot = dflag ? OT_LONG : OT_WORD;
3845 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3847 case 0xf6: /* GRP3 */
3852 ot = dflag + OT_WORD;
3854 modrm = ldub_code(s->pc++);
3855 mod = (modrm >> 6) & 3;
3856 rm = (modrm & 7) | REX_B(s);
3857 op = (modrm >> 3) & 7;
3860 s->rip_offset = insn_const_size(ot);
3861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3862 gen_op_ld_T0_A0(ot + s->mem_index);
3864 gen_op_mov_TN_reg(ot, 0, rm);
3869 val = insn_get(s, ot);
3870 gen_op_movl_T1_im(val);
3871 gen_op_testl_T0_T1_cc();
3872 s->cc_op = CC_OP_LOGICB + ot;
3875 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3877 gen_op_st_T0_A0(ot + s->mem_index);
3879 gen_op_mov_reg_T0(ot, rm);
3883 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3885 gen_op_st_T0_A0(ot + s->mem_index);
3887 gen_op_mov_reg_T0(ot, rm);
3889 gen_op_update_neg_cc();
3890 s->cc_op = CC_OP_SUBB + ot;
3895 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3896 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3897 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3898 /* XXX: use 32 bit mul which could be faster */
3899 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3900 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3901 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3902 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3903 s->cc_op = CC_OP_MULB;
3906 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3907 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3908 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3909 /* XXX: use 32 bit mul which could be faster */
3910 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3911 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3912 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3913 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3914 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3915 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3916 s->cc_op = CC_OP_MULW;
3920 #ifdef TARGET_X86_64
3921 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3922 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3923 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3924 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3925 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3926 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3927 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3928 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3929 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
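/* when target_ulong is only 32 bits wide, do the widening multiply in
   explicit 64-bit temporaries and split the product between EAX (low half)
   and EDX (high half) */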
3933 t0 = tcg_temp_new(TCG_TYPE_I64);
3934 t1 = tcg_temp_new(TCG_TYPE_I64);
3935 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3936 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3937 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3938 tcg_gen_mul_i64(t0, t0, t1);
3939 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3940 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3941 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3942 tcg_gen_shri_i64(t0, t0, 32);
3943 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3944 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3945 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3948 s->cc_op = CC_OP_MULL;
3950 #ifdef TARGET_X86_64
3952 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3953 s->cc_op = CC_OP_MULQ;
3961 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3962 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3963 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3964 /* XXX: use 32 bit mul which could be faster */
3965 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3966 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3967 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3968 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3969 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3970 s->cc_op = CC_OP_MULB;
3973 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3974 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3975 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3976 /* XXX: use 32 bit mul which could be faster */
3977 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3978 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3979 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3980 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3981 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3982 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3983 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3984 s->cc_op = CC_OP_MULW;
3988 #ifdef TARGET_X86_64
3989 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3990 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3991 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3992 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3993 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3994 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3995 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3996 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3997 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3998 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4002 t0 = tcg_temp_new(TCG_TYPE_I64);
4003 t1 = tcg_temp_new(TCG_TYPE_I64);
4004 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4005 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4006 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4007 tcg_gen_mul_i64(t0, t0, t1);
4008 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4009 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4010 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4011 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4012 tcg_gen_shri_i64(t0, t0, 32);
4013 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4014 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4015 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4018 s->cc_op = CC_OP_MULL;
4020 #ifdef TARGET_X86_64
4022 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4023 s->cc_op = CC_OP_MULQ;
4031 gen_jmp_im(pc_start - s->cs_base);
4032 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4035 gen_jmp_im(pc_start - s->cs_base);
4036 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4040 gen_jmp_im(pc_start - s->cs_base);
4041 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4043 #ifdef TARGET_X86_64
4045 gen_jmp_im(pc_start - s->cs_base);
4046 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4054 gen_jmp_im(pc_start - s->cs_base);
4055 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4058 gen_jmp_im(pc_start - s->cs_base);
4059 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4063 gen_jmp_im(pc_start - s->cs_base);
4064 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4066 #ifdef TARGET_X86_64
4068 gen_jmp_im(pc_start - s->cs_base);
4069 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4079 case 0xfe: /* GRP4 */
4080 case 0xff: /* GRP5 */
4084 ot = dflag + OT_WORD;
4086 modrm = ldub_code(s->pc++);
4087 mod = (modrm >> 6) & 3;
4088 rm = (modrm & 7) | REX_B(s);
4089 op = (modrm >> 3) & 7;
4090 if (op >= 2 && b == 0xfe) {
4094 if (op == 2 || op == 4) {
4095 /* operand size for jumps is 64 bit */
4097 } else if (op == 3 || op == 5) {
4098 /* for lcall/ljmp, the operand is 16 or 32 bit, even in long mode */
4100 ot = dflag ? OT_LONG : OT_WORD;
4101 } else if (op == 6) {
4102 /* default push size is 64 bit */
4103 ot = dflag ? OT_QUAD : OT_WORD;
4107 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4108 if (op >= 2 && op != 3 && op != 5)
4109 gen_op_ld_T0_A0(ot + s->mem_index);
4111 gen_op_mov_TN_reg(ot, 0, rm);
4115 case 0: /* inc Ev */
4120 gen_inc(s, ot, opreg, 1);
4122 case 1: /* dec Ev */
4127 gen_inc(s, ot, opreg, -1);
4129 case 2: /* call Ev */
4130 /* XXX: optimize if memory (no 'and' is necessary) */
4132 gen_op_andl_T0_ffff();
4133 next_eip = s->pc - s->cs_base;
4134 gen_movtl_T1_im(next_eip);
4139 case 3: /* lcall Ev */
4140 gen_op_ld_T1_A0(ot + s->mem_index);
4141 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4142 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4144 if (s->pe && !s->vm86) {
4145 if (s->cc_op != CC_OP_DYNAMIC)
4146 gen_op_set_cc_op(s->cc_op);
4147 gen_jmp_im(pc_start - s->cs_base);
4148 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4149 tcg_gen_helper_0_4(helper_lcall_protected,
4150 cpu_tmp2_i32, cpu_T[1],
4151 tcg_const_i32(dflag),
4152 tcg_const_i32(s->pc - pc_start));
4154 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4155 tcg_gen_helper_0_4(helper_lcall_real,
4156 cpu_tmp2_i32, cpu_T[1],
4157 tcg_const_i32(dflag),
4158 tcg_const_i32(s->pc - s->cs_base));
4162 case 4: /* jmp Ev */
4164 gen_op_andl_T0_ffff();
4168 case 5: /* ljmp Ev */
4169 gen_op_ld_T1_A0(ot + s->mem_index);
4170 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4171 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4173 if (s->pe && !s->vm86) {
4174 if (s->cc_op != CC_OP_DYNAMIC)
4175 gen_op_set_cc_op(s->cc_op);
4176 gen_jmp_im(pc_start - s->cs_base);
4177 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4178 tcg_gen_helper_0_3(helper_ljmp_protected,
4181 tcg_const_i32(s->pc - pc_start));
4183 gen_op_movl_seg_T0_vm(R_CS);
4184 gen_op_movl_T0_T1();
4189 case 6: /* push Ev */
4197 case 0x84: /* test Ev, Gv */
4202 ot = dflag + OT_WORD;
4204 modrm = ldub_code(s->pc++);
4205 mod = (modrm >> 6) & 3;
4206 rm = (modrm & 7) | REX_B(s);
4207 reg = ((modrm >> 3) & 7) | rex_r;
4209 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4210 gen_op_mov_TN_reg(ot, 1, reg);
4211 gen_op_testl_T0_T1_cc();
4212 s->cc_op = CC_OP_LOGICB + ot;
4215 case 0xa8: /* test eAX, Iv */
4220 ot = dflag + OT_WORD;
4221 val = insn_get(s, ot);
4223 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4224 gen_op_movl_T1_im(val);
4225 gen_op_testl_T0_T1_cc();
4226 s->cc_op = CC_OP_LOGICB + ot;
4229 case 0x98: /* CWDE/CBW */
4230 #ifdef TARGET_X86_64
4232 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4233 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4234 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4238 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4239 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4240 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4242 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4243 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4244 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4247 case 0x99: /* CDQ/CWD */
4248 #ifdef TARGET_X86_64
4250 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4251 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4252 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4256 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4257 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4258 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4259 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4261 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4262 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4263 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4264 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4267 case 0x1af: /* imul Gv, Ev */
4268 case 0x69: /* imul Gv, Ev, I */
4270 ot = dflag + OT_WORD;
4271 modrm = ldub_code(s->pc++);
4272 reg = ((modrm >> 3) & 7) | rex_r;
4274 s->rip_offset = insn_const_size(ot);
4277 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4279 val = insn_get(s, ot);
4280 gen_op_movl_T1_im(val);
4281 } else if (b == 0x6b) {
4282 val = (int8_t)insn_get(s, OT_BYTE);
4283 gen_op_movl_T1_im(val);
4285 gen_op_mov_TN_reg(ot, 1, reg);
4288 #ifdef TARGET_X86_64
4289 if (ot == OT_QUAD) {
4290 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4293 if (ot == OT_LONG) {
4294 #ifdef TARGET_X86_64
4295 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4296 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4297 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4298 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4299 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4300 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4304 t0 = tcg_temp_new(TCG_TYPE_I64);
4305 t1 = tcg_temp_new(TCG_TYPE_I64);
4306 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4307 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4308 tcg_gen_mul_i64(t0, t0, t1);
4309 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4310 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4311 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4312 tcg_gen_shri_i64(t0, t0, 32);
4313 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4314 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4318 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4319 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4320 /* XXX: use 32 bit mul which could be faster */
4321 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4322 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4323 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4324 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4326 gen_op_mov_reg_T0(ot, reg);
4327 s->cc_op = CC_OP_MULB + ot;
4330 case 0x1c1: /* xadd Ev, Gv */
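/* xadd: the destination ends up with the sum while the source register
   receives the previous destination value; flags are set as for an add */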
4334 ot = dflag + OT_WORD;
4335 modrm = ldub_code(s->pc++);
4336 reg = ((modrm >> 3) & 7) | rex_r;
4337 mod = (modrm >> 6) & 3;
4339 rm = (modrm & 7) | REX_B(s);
4340 gen_op_mov_TN_reg(ot, 0, reg);
4341 gen_op_mov_TN_reg(ot, 1, rm);
4342 gen_op_addl_T0_T1();
4343 gen_op_mov_reg_T1(ot, reg);
4344 gen_op_mov_reg_T0(ot, rm);
4346 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4347 gen_op_mov_TN_reg(ot, 0, reg);
4348 gen_op_ld_T1_A0(ot + s->mem_index);
4349 gen_op_addl_T0_T1();
4350 gen_op_st_T0_A0(ot + s->mem_index);
4351 gen_op_mov_reg_T1(ot, reg);
4353 gen_op_update2_cc();
4354 s->cc_op = CC_OP_ADDB + ot;
4357 case 0x1b1: /* cmpxchg Ev, Gv */
4360 TCGv t0, t1, t2, a0;
4365 ot = dflag + OT_WORD;
4366 modrm = ldub_code(s->pc++);
4367 reg = ((modrm >> 3) & 7) | rex_r;
4368 mod = (modrm >> 6) & 3;
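/* cmpxchg: compare EAX with the destination operand; if they are equal the
   destination is replaced by the source register, otherwise EAX is loaded
   with the destination value.  Flags are set as for a sub. */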
4369 t0 = tcg_temp_local_new(TCG_TYPE_TL);
4370 t1 = tcg_temp_local_new(TCG_TYPE_TL);
4371 t2 = tcg_temp_local_new(TCG_TYPE_TL);
4372 a0 = tcg_temp_local_new(TCG_TYPE_TL);
4373 gen_op_mov_v_reg(ot, t1, reg);
4375 rm = (modrm & 7) | REX_B(s);
4376 gen_op_mov_v_reg(ot, t0, rm);
4378 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4379 tcg_gen_mov_tl(a0, cpu_A0);
4380 gen_op_ld_v(ot + s->mem_index, t0, a0);
4381 rm = 0; /* avoid warning */
4383 label1 = gen_new_label();
4384 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4385 tcg_gen_sub_tl(t2, t2, t0);
4387 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4389 label2 = gen_new_label();
4390 gen_op_mov_reg_v(ot, R_EAX, t0);
4392 gen_set_label(label1);
4393 gen_op_mov_reg_v(ot, rm, t1);
4394 gen_set_label(label2);
4396 tcg_gen_mov_tl(t1, t0);
4397 gen_op_mov_reg_v(ot, R_EAX, t0);
4398 gen_set_label(label1);
4400 gen_op_st_v(ot + s->mem_index, t1, a0);
4402 tcg_gen_mov_tl(cpu_cc_src, t0);
4403 tcg_gen_mov_tl(cpu_cc_dst, t2);
4404 s->cc_op = CC_OP_SUBB + ot;
4411 case 0x1c7: /* cmpxchg8b */
4412 modrm = ldub_code(s->pc++);
4413 mod = (modrm >> 6) & 3;
4414 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4416 #ifdef TARGET_X86_64
4418 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4420 gen_jmp_im(pc_start - s->cs_base);
4421 if (s->cc_op != CC_OP_DYNAMIC)
4422 gen_op_set_cc_op(s->cc_op);
4423 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4424 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4428 if (!(s->cpuid_features & CPUID_CX8))
4430 gen_jmp_im(pc_start - s->cs_base);
4431 if (s->cc_op != CC_OP_DYNAMIC)
4432 gen_op_set_cc_op(s->cc_op);
4433 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4434 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4436 s->cc_op = CC_OP_EFLAGS;
4439 /**************************/
4441 case 0x50 ... 0x57: /* push */
4442 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4445 case 0x58 ... 0x5f: /* pop */
4447 ot = dflag ? OT_QUAD : OT_WORD;
4449 ot = dflag + OT_WORD;
4452 /* NOTE: order is important for pop %sp */
4454 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4456 case 0x60: /* pusha */
4461 case 0x61: /* popa */
4466 case 0x68: /* push Iv */
4469 ot = dflag ? OT_QUAD : OT_WORD;
4471 ot = dflag + OT_WORD;
4474 val = insn_get(s, ot);
4476 val = (int8_t)insn_get(s, OT_BYTE);
4477 gen_op_movl_T0_im(val);
4480 case 0x8f: /* pop Ev */
4482 ot = dflag ? OT_QUAD : OT_WORD;
4484 ot = dflag + OT_WORD;
4486 modrm = ldub_code(s->pc++);
4487 mod = (modrm >> 6) & 3;
4490 /* NOTE: order is important for pop %sp */
4492 rm = (modrm & 7) | REX_B(s);
4493 gen_op_mov_reg_T0(ot, rm);
4495 /* NOTE: order is important too for MMU exceptions */
4496 s->popl_esp_hack = 1 << ot;
4497 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4498 s->popl_esp_hack = 0;
4502 case 0xc8: /* enter */
4505 val = lduw_code(s->pc);
4507 level = ldub_code(s->pc++);
4508 gen_enter(s, val, level);
4511 case 0xc9: /* leave */
4512 /* XXX: exception not precise (ESP is updated before potential exception) */
4514 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4515 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4516 } else if (s->ss32) {
4517 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4518 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4520 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4521 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4525 ot = dflag ? OT_QUAD : OT_WORD;
4527 ot = dflag + OT_WORD;
4529 gen_op_mov_reg_T0(ot, R_EBP);
4532 case 0x06: /* push es */
4533 case 0x0e: /* push cs */
4534 case 0x16: /* push ss */
4535 case 0x1e: /* push ds */
4538 gen_op_movl_T0_seg(b >> 3);
4541 case 0x1a0: /* push fs */
4542 case 0x1a8: /* push gs */
4543 gen_op_movl_T0_seg((b >> 3) & 7);
4546 case 0x07: /* pop es */
4547 case 0x17: /* pop ss */
4548 case 0x1f: /* pop ds */
4553 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4556 /* if reg == SS, inhibit interrupts/trace. */
4557 /* If several instructions disable interrupts, only the first one counts */
4559 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4560 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4564 gen_jmp_im(s->pc - s->cs_base);
4568 case 0x1a1: /* pop fs */
4569 case 0x1a9: /* pop gs */
4571 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4574 gen_jmp_im(s->pc - s->cs_base);
4579 /**************************/
4582 case 0x89: /* mov Gv, Ev */
4586 ot = dflag + OT_WORD;
4587 modrm = ldub_code(s->pc++);
4588 reg = ((modrm >> 3) & 7) | rex_r;
4590 /* generate a generic store */
4591 gen_ldst_modrm(s, modrm, ot, reg, 1);
4594 case 0xc7: /* mov Ev, Iv */
4598 ot = dflag + OT_WORD;
4599 modrm = ldub_code(s->pc++);
4600 mod = (modrm >> 6) & 3;
4602 s->rip_offset = insn_const_size(ot);
4603 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4605 val = insn_get(s, ot);
4606 gen_op_movl_T0_im(val);
4608 gen_op_st_T0_A0(ot + s->mem_index);
4610 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4613 case 0x8b: /* mov Ev, Gv */
4617 ot = OT_WORD + dflag;
4618 modrm = ldub_code(s->pc++);
4619 reg = ((modrm >> 3) & 7) | rex_r;
4621 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4622 gen_op_mov_reg_T0(ot, reg);
4624 case 0x8e: /* mov seg, Gv */
4625 modrm = ldub_code(s->pc++);
4626 reg = (modrm >> 3) & 7;
4627 if (reg >= 6 || reg == R_CS)
4629 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4630 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4632 /* if reg == SS, inhibit interrupts/trace */
4633 /* If several instructions disable interrupts, only the first one counts */
4635 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4636 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4640 gen_jmp_im(s->pc - s->cs_base);
4644 case 0x8c: /* mov Gv, seg */
4645 modrm = ldub_code(s->pc++);
4646 reg = (modrm >> 3) & 7;
4647 mod = (modrm >> 6) & 3;
4650 gen_op_movl_T0_seg(reg);
4652 ot = OT_WORD + dflag;
4655 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4658 case 0x1b6: /* movzbS Gv, Eb */
4659 case 0x1b7: /* movzwS Gv, Eb */
4660 case 0x1be: /* movsbS Gv, Eb */
4661 case 0x1bf: /* movswS Gv, Eb */
4664 /* d_ot is the size of the destination */
4665 d_ot = dflag + OT_WORD;
4666 /* ot is the size of the source */
4667 ot = (b & 1) + OT_BYTE;
4668 modrm = ldub_code(s->pc++);
4669 reg = ((modrm >> 3) & 7) | rex_r;
4670 mod = (modrm >> 6) & 3;
4671 rm = (modrm & 7) | REX_B(s);
4674 gen_op_mov_TN_reg(ot, 0, rm);
4675 switch(ot | (b & 8)) {
4677 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4680 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4683 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4687 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4690 gen_op_mov_reg_T0(d_ot, reg);
4692 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4694 gen_op_lds_T0_A0(ot + s->mem_index);
4696 gen_op_ldu_T0_A0(ot + s->mem_index);
4698 gen_op_mov_reg_T0(d_ot, reg);
4703 case 0x8d: /* lea */
4704 ot = dflag + OT_WORD;
4705 modrm = ldub_code(s->pc++);
4706 mod = (modrm >> 6) & 3;
4709 reg = ((modrm >> 3) & 7) | rex_r;
4710 /* we must ensure that no segment is added */
4714 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4716 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4719 case 0xa0: /* mov EAX, Ov */
4721 case 0xa2: /* mov Ov, EAX */
4724 target_ulong offset_addr;
4729 ot = dflag + OT_WORD;
4730 #ifdef TARGET_X86_64
4731 if (s->aflag == 2) {
4732 offset_addr = ldq_code(s->pc);
4734 gen_op_movq_A0_im(offset_addr);
4739 offset_addr = insn_get(s, OT_LONG);
4741 offset_addr = insn_get(s, OT_WORD);
4743 gen_op_movl_A0_im(offset_addr);
4745 gen_add_A0_ds_seg(s);
4747 gen_op_ld_T0_A0(ot + s->mem_index);
4748 gen_op_mov_reg_T0(ot, R_EAX);
4750 gen_op_mov_TN_reg(ot, 0, R_EAX);
4751 gen_op_st_T0_A0(ot + s->mem_index);
4755 case 0xd7: /* xlat */
4756 #ifdef TARGET_X86_64
4757 if (s->aflag == 2) {
4758 gen_op_movq_A0_reg(R_EBX);
4759 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4760 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4761 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4765 gen_op_movl_A0_reg(R_EBX);
4766 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4767 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4768 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4770 gen_op_andl_A0_ffff();
4772 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4774 gen_add_A0_ds_seg(s);
4775 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4776 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4778 case 0xb0 ... 0xb7: /* mov R, Ib */
4779 val = insn_get(s, OT_BYTE);
4780 gen_op_movl_T0_im(val);
4781 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4783 case 0xb8 ... 0xbf: /* mov R, Iv */
4784 #ifdef TARGET_X86_64
4788 tmp = ldq_code(s->pc);
4790 reg = (b & 7) | REX_B(s);
4791 gen_movtl_T0_im(tmp);
4792 gen_op_mov_reg_T0(OT_QUAD, reg);
4796 ot = dflag ? OT_LONG : OT_WORD;
4797 val = insn_get(s, ot);
4798 reg = (b & 7) | REX_B(s);
4799 gen_op_movl_T0_im(val);
4800 gen_op_mov_reg_T0(ot, reg);
4804 case 0x91 ... 0x97: /* xchg R, EAX */
4805 ot = dflag + OT_WORD;
4806 reg = (b & 7) | REX_B(s);
4810 case 0x87: /* xchg Ev, Gv */
4814 ot = dflag + OT_WORD;
4815 modrm = ldub_code(s->pc++);
4816 reg = ((modrm >> 3) & 7) | rex_r;
4817 mod = (modrm >> 6) & 3;
4819 rm = (modrm & 7) | REX_B(s);
4821 gen_op_mov_TN_reg(ot, 0, reg);
4822 gen_op_mov_TN_reg(ot, 1, rm);
4823 gen_op_mov_reg_T0(ot, rm);
4824 gen_op_mov_reg_T1(ot, reg);
4826 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4827 gen_op_mov_TN_reg(ot, 0, reg);
4828 /* for xchg, lock is implicit */
4829 if (!(prefixes & PREFIX_LOCK))
4830 tcg_gen_helper_0_0(helper_lock);
4831 gen_op_ld_T1_A0(ot + s->mem_index);
4832 gen_op_st_T0_A0(ot + s->mem_index);
4833 if (!(prefixes & PREFIX_LOCK))
4834 tcg_gen_helper_0_0(helper_unlock);
4835 gen_op_mov_reg_T1(ot, reg);
4838 case 0xc4: /* les Gv */
4843 case 0xc5: /* lds Gv */
4848 case 0x1b2: /* lss Gv */
4851 case 0x1b4: /* lfs Gv */
4854 case 0x1b5: /* lgs Gv */
4857 ot = dflag ? OT_LONG : OT_WORD;
4858 modrm = ldub_code(s->pc++);
4859 reg = ((modrm >> 3) & 7) | rex_r;
4860 mod = (modrm >> 6) & 3;
4863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4864 gen_op_ld_T1_A0(ot + s->mem_index);
4865 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4866 /* load the segment first to handle exceptions properly */
4867 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4868 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4869 /* then put the data */
4870 gen_op_mov_reg_T1(ot, reg);
4872 gen_jmp_im(s->pc - s->cs_base);
4877 /************************/
4888 ot = dflag + OT_WORD;
4890 modrm = ldub_code(s->pc++);
4891 mod = (modrm >> 6) & 3;
4892 op = (modrm >> 3) & 7;
4898 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4901 opreg = (modrm & 7) | REX_B(s);
4906 gen_shift(s, op, ot, opreg, OR_ECX);
4909 shift = ldub_code(s->pc++);
4911 gen_shifti(s, op, ot, opreg, shift);
4926 case 0x1a4: /* shld imm */
4930 case 0x1a5: /* shld cl */
4934 case 0x1ac: /* shrd imm */
4938 case 0x1ad: /* shrd cl */
4942 ot = dflag + OT_WORD;
4943 modrm = ldub_code(s->pc++);
4944 mod = (modrm >> 6) & 3;
4945 rm = (modrm & 7) | REX_B(s);
4946 reg = ((modrm >> 3) & 7) | rex_r;
4948 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4953 gen_op_mov_TN_reg(ot, 1, reg);
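/* the shift count (immediate byte or CL) is placed in T3 for
   gen_shiftd_rm_T1_T3 */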
4956 val = ldub_code(s->pc++);
4957 tcg_gen_movi_tl(cpu_T3, val);
4959 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4961 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4964 /************************/
4967 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4968 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4969 /* XXX: what to do if illegal op ? */
4970 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4973 modrm = ldub_code(s->pc++);
4974 mod = (modrm >> 6) & 3;
4976 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4979 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4981 case 0x00 ... 0x07: /* fxxxs */
4982 case 0x10 ... 0x17: /* fixxxl */
4983 case 0x20 ... 0x27: /* fxxxl */
4984 case 0x30 ... 0x37: /* fixxx */
4991 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4992 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4993 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4996 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4997 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4998 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5001 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5002 (s->mem_index >> 2) - 1);
5003 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5007 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5008 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5009 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5013 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5015 /* fcomp needs pop */
5016 tcg_gen_helper_0_0(helper_fpop);
5020 case 0x08: /* flds */
5021 case 0x0a: /* fsts */
5022 case 0x0b: /* fstps */
5023 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5024 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5025 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5030 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5031 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5032 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5035 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5036 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5037 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5040 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5041 (s->mem_index >> 2) - 1);
5042 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5046 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5047 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5048 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5053 /* XXX: the corresponding CPUID bit must be tested ! */
5056 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5057 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5058 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5061 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5062 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5063 (s->mem_index >> 2) - 1);
5067 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5068 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5069 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5072 tcg_gen_helper_0_0(helper_fpop);
5077 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5078 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5079 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5082 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5083 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5084 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5087 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5088 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5089 (s->mem_index >> 2) - 1);
5093 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5094 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5095 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5099 tcg_gen_helper_0_0(helper_fpop);
5103 case 0x0c: /* fldenv mem */
5104 if (s->cc_op != CC_OP_DYNAMIC)
5105 gen_op_set_cc_op(s->cc_op);
5106 gen_jmp_im(pc_start - s->cs_base);
5107 tcg_gen_helper_0_2(helper_fldenv,
5108 cpu_A0, tcg_const_i32(s->dflag));
5110 case 0x0d: /* fldcw mem */
5111 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5112 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5113 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5115 case 0x0e: /* fnstenv mem */
5116 if (s->cc_op != CC_OP_DYNAMIC)
5117 gen_op_set_cc_op(s->cc_op);
5118 gen_jmp_im(pc_start - s->cs_base);
5119 tcg_gen_helper_0_2(helper_fstenv,
5120 cpu_A0, tcg_const_i32(s->dflag));
5122 case 0x0f: /* fnstcw mem */
5123 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5124 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5125 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5127 case 0x1d: /* fldt mem */
5128 if (s->cc_op != CC_OP_DYNAMIC)
5129 gen_op_set_cc_op(s->cc_op);
5130 gen_jmp_im(pc_start - s->cs_base);
5131 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5133 case 0x1f: /* fstpt mem */
5134 if (s->cc_op != CC_OP_DYNAMIC)
5135 gen_op_set_cc_op(s->cc_op);
5136 gen_jmp_im(pc_start - s->cs_base);
5137 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5138 tcg_gen_helper_0_0(helper_fpop);
5140 case 0x2c: /* frstor mem */
5141 if (s->cc_op != CC_OP_DYNAMIC)
5142 gen_op_set_cc_op(s->cc_op);
5143 gen_jmp_im(pc_start - s->cs_base);
5144 tcg_gen_helper_0_2(helper_frstor,
5145 cpu_A0, tcg_const_i32(s->dflag));
5147 case 0x2e: /* fnsave mem */
5148 if (s->cc_op != CC_OP_DYNAMIC)
5149 gen_op_set_cc_op(s->cc_op);
5150 gen_jmp_im(pc_start - s->cs_base);
5151 tcg_gen_helper_0_2(helper_fsave,
5152 cpu_A0, tcg_const_i32(s->dflag));
5154 case 0x2f: /* fnstsw mem */
5155 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5156 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5157 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5159 case 0x3c: /* fbld */
5160 if (s->cc_op != CC_OP_DYNAMIC)
5161 gen_op_set_cc_op(s->cc_op);
5162 gen_jmp_im(pc_start - s->cs_base);
5163 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5165 case 0x3e: /* fbstp */
5166 if (s->cc_op != CC_OP_DYNAMIC)
5167 gen_op_set_cc_op(s->cc_op);
5168 gen_jmp_im(pc_start - s->cs_base);
5169 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5170 tcg_gen_helper_0_0(helper_fpop);
5172 case 0x3d: /* fildll */
5173 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5174 (s->mem_index >> 2) - 1);
5175 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5177 case 0x3f: /* fistpll */
5178 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5179 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5180 (s->mem_index >> 2) - 1);
5181 tcg_gen_helper_0_0(helper_fpop);
5187 /* register float ops */
5191 case 0x08: /* fld sti */
5192 tcg_gen_helper_0_0(helper_fpush);
5193 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5195 case 0x09: /* fxchg sti */
5196 case 0x29: /* fxchg4 sti, undocumented op */
5197 case 0x39: /* fxchg7 sti, undocumented op */
5198 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5200 case 0x0a: /* grp d9/2 */
5203 /* check exceptions (FreeBSD FPU probe) */
5204 if (s->cc_op != CC_OP_DYNAMIC)
5205 gen_op_set_cc_op(s->cc_op);
5206 gen_jmp_im(pc_start - s->cs_base);
5207 tcg_gen_helper_0_0(helper_fwait);
5213 case 0x0c: /* grp d9/4 */
5216 tcg_gen_helper_0_0(helper_fchs_ST0);
5219 tcg_gen_helper_0_0(helper_fabs_ST0);
5222 tcg_gen_helper_0_0(helper_fldz_FT0);
5223 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5226 tcg_gen_helper_0_0(helper_fxam_ST0);
5232 case 0x0d: /* grp d9/5 */
5236 tcg_gen_helper_0_0(helper_fpush);
5237 tcg_gen_helper_0_0(helper_fld1_ST0);
5240 tcg_gen_helper_0_0(helper_fpush);
5241 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5244 tcg_gen_helper_0_0(helper_fpush);
5245 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5248 tcg_gen_helper_0_0(helper_fpush);
5249 tcg_gen_helper_0_0(helper_fldpi_ST0);
5252 tcg_gen_helper_0_0(helper_fpush);
5253 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5256 tcg_gen_helper_0_0(helper_fpush);
5257 tcg_gen_helper_0_0(helper_fldln2_ST0);
5260 tcg_gen_helper_0_0(helper_fpush);
5261 tcg_gen_helper_0_0(helper_fldz_ST0);
5268 case 0x0e: /* grp d9/6 */
5271 tcg_gen_helper_0_0(helper_f2xm1);
5274 tcg_gen_helper_0_0(helper_fyl2x);
5277 tcg_gen_helper_0_0(helper_fptan);
5279 case 3: /* fpatan */
5280 tcg_gen_helper_0_0(helper_fpatan);
5282 case 4: /* fxtract */
5283 tcg_gen_helper_0_0(helper_fxtract);
5285 case 5: /* fprem1 */
5286 tcg_gen_helper_0_0(helper_fprem1);
5288 case 6: /* fdecstp */
5289 tcg_gen_helper_0_0(helper_fdecstp);
5292 case 7: /* fincstp */
5293 tcg_gen_helper_0_0(helper_fincstp);
5297 case 0x0f: /* grp d9/7 */
5300 tcg_gen_helper_0_0(helper_fprem);
5302 case 1: /* fyl2xp1 */
5303 tcg_gen_helper_0_0(helper_fyl2xp1);
5306 tcg_gen_helper_0_0(helper_fsqrt);
5308 case 3: /* fsincos */
5309 tcg_gen_helper_0_0(helper_fsincos);
5311 case 5: /* fscale */
5312 tcg_gen_helper_0_0(helper_fscale);
5314 case 4: /* frndint */
5315 tcg_gen_helper_0_0(helper_frndint);
5318 tcg_gen_helper_0_0(helper_fsin);
5322 tcg_gen_helper_0_0(helper_fcos);
5326 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5327 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5328 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5334 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5336 tcg_gen_helper_0_0(helper_fpop);
5338 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5339 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5343 case 0x02: /* fcom */
5344 case 0x22: /* fcom2, undocumented op */
5345 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5346 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5348 case 0x03: /* fcomp */
5349 case 0x23: /* fcomp3, undocumented op */
5350 case 0x32: /* fcomp5, undocumented op */
5351 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5352 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5353 tcg_gen_helper_0_0(helper_fpop);
5355 case 0x15: /* da/5 */
5357 case 1: /* fucompp */
5358 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5359 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5360 tcg_gen_helper_0_0(helper_fpop);
5361 tcg_gen_helper_0_0(helper_fpop);
5369 case 0: /* feni (287 only, just do nop here) */
5371 case 1: /* fdisi (287 only, just do nop here) */
5374 tcg_gen_helper_0_0(helper_fclex);
5376 case 3: /* fninit */
5377 tcg_gen_helper_0_0(helper_fninit);
5379 case 4: /* fsetpm (287 only, just do nop here) */
5385 case 0x1d: /* fucomi */
5386 if (s->cc_op != CC_OP_DYNAMIC)
5387 gen_op_set_cc_op(s->cc_op);
5388 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5389 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5390 s->cc_op = CC_OP_EFLAGS;
5392 case 0x1e: /* fcomi */
5393 if (s->cc_op != CC_OP_DYNAMIC)
5394 gen_op_set_cc_op(s->cc_op);
5395 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5396 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5397 s->cc_op = CC_OP_EFLAGS;
5399 case 0x28: /* ffree sti */
5400 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5402 case 0x2a: /* fst sti */
5403 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5405 case 0x2b: /* fstp sti */
5406 case 0x0b: /* fstp1 sti, undocumented op */
5407 case 0x3a: /* fstp8 sti, undocumented op */
5408 case 0x3b: /* fstp9 sti, undocumented op */
5409 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5410 tcg_gen_helper_0_0(helper_fpop);
5412 case 0x2c: /* fucom st(i) */
5413 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5414 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5416 case 0x2d: /* fucomp st(i) */
5417 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5418 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5419 tcg_gen_helper_0_0(helper_fpop);
5421 case 0x33: /* de/3 */
5423 case 1: /* fcompp */
5424 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5425 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5426 tcg_gen_helper_0_0(helper_fpop);
5427 tcg_gen_helper_0_0(helper_fpop);
5433 case 0x38: /* ffreep sti, undocumented op */
5434 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5435 tcg_gen_helper_0_0(helper_fpop);
5437 case 0x3c: /* df/4 */
5440 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5441 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5442 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5448 case 0x3d: /* fucomip */
5449 if (s->cc_op != CC_OP_DYNAMIC)
5450 gen_op_set_cc_op(s->cc_op);
5451 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5452 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5453 tcg_gen_helper_0_0(helper_fpop);
5454 s->cc_op = CC_OP_EFLAGS;
5456 case 0x3e: /* fcomip */
5457 if (s->cc_op != CC_OP_DYNAMIC)
5458 gen_op_set_cc_op(s->cc_op);
5459 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5460 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5461 tcg_gen_helper_0_0(helper_fpop);
5462 s->cc_op = CC_OP_EFLAGS;
5464 case 0x10 ... 0x13: /* fcmovxx */
5468 static const uint8_t fcmov_cc[8] = {
5474 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
5475 l1 = gen_new_label();
5476 gen_jcc1(s, s->cc_op, op1, l1);
5477 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
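/* FCMOVcc is implemented as a branch around the move: op1 encodes the
   negation of the fcmov condition, so gen_jcc1() branches past the
   helper_fmov_ST0_STN call when the condition is false and ST0 <- ST(i)
   only happens when it holds. */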
5486 /************************/
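/* string ops */
/* movs/stos/lods/scas/cmps (and ins/outs below): with a REP/REPNZ
   prefix the gen_repz_* generators emit the ECX-counted loop,
   otherwise a single iteration is generated inline. */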
5489 case 0xa4: /* movsS */
5494 ot = dflag + OT_WORD;
5496 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5497 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5503 case 0xaa: /* stosS */
5508 ot = dflag + OT_WORD;
5510 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5511 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5516 case 0xac: /* lodsS */
5521 ot = dflag + OT_WORD;
5522 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5523 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5528 case 0xae: /* scasS */
5533 ot = dflag + OT_WORD;
5534 if (prefixes & PREFIX_REPNZ) {
5535 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5536 } else if (prefixes & PREFIX_REPZ) {
5537 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5540 s->cc_op = CC_OP_SUBB + ot;
5544 case 0xa6: /* cmpsS */
5549 ot = dflag + OT_WORD;
5550 if (prefixes & PREFIX_REPNZ) {
5551 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5552 } else if (prefixes & PREFIX_REPZ) {
5553 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5556 s->cc_op = CC_OP_SUBB + ot;
5559 case 0x6c: /* insS */
5564 ot = dflag ? OT_LONG : OT_WORD;
5565 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5566 gen_op_andl_T0_ffff();
5567 gen_check_io(s, ot, pc_start - s->cs_base,
5568 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5569 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5570 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5575 case 0x6e: /* outsS */
5580 ot = dflag ? OT_LONG : OT_WORD;
5581 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5582 gen_op_andl_T0_ffff();
5583 gen_check_io(s, ot, pc_start - s->cs_base,
5584 svm_is_rep(prefixes) | 4);
5585 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5586 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5592 /************************/
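/* port I/O */
/* in/out (immediate and DX forms) go through gen_check_io(), which
   validates the access against IOPL / the TSS I/O permission bitmap
   and the SVM I/O intercept bitmap before any port access is emitted. */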
5600 ot = dflag ? OT_LONG : OT_WORD;
5601 val = ldub_code(s->pc++);
5602 gen_op_movl_T0_im(val);
5603 gen_check_io(s, ot, pc_start - s->cs_base,
5604 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5605 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5606 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5607 gen_op_mov_reg_T1(ot, R_EAX);
5614 ot = dflag ? OT_LONG : OT_WORD;
5615 val = ldub_code(s->pc++);
5616 gen_op_movl_T0_im(val);
5617 gen_check_io(s, ot, pc_start - s->cs_base,
5618 svm_is_rep(prefixes));
5619 gen_op_mov_TN_reg(ot, 1, R_EAX);
5621 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5622 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5623 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5624 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5631 ot = dflag ? OT_LONG : OT_WORD;
5632 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5633 gen_op_andl_T0_ffff();
5634 gen_check_io(s, ot, pc_start - s->cs_base,
5635 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5636 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5637 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5638 gen_op_mov_reg_T1(ot, R_EAX);
5645 ot = dflag ? OT_LONG : OT_WORD;
5646 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5647 gen_op_andl_T0_ffff();
5648 gen_check_io(s, ot, pc_start - s->cs_base,
5649 svm_is_rep(prefixes));
5650 gen_op_mov_TN_reg(ot, 1, R_EAX);
5652 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5653 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5654 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5655 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5658 /************************/
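/* control */
/* near/far returns, iret, call, jmp and conditional transfers. */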
5660 case 0xc2: /* ret im */
5661 val = ldsw_code(s->pc);
5664 if (CODE64(s) && s->dflag)
5666 gen_stack_update(s, val + (2 << s->dflag));
5668 gen_op_andl_T0_ffff();
5672 case 0xc3: /* ret */
5676 gen_op_andl_T0_ffff();
5680 case 0xca: /* lret im */
5681 val = ldsw_code(s->pc);
5684 if (s->pe && !s->vm86) {
5685 if (s->cc_op != CC_OP_DYNAMIC)
5686 gen_op_set_cc_op(s->cc_op);
5687 gen_jmp_im(pc_start - s->cs_base);
5688 tcg_gen_helper_0_2(helper_lret_protected,
5689 tcg_const_i32(s->dflag),
5690 tcg_const_i32(val));
5694 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5696 gen_op_andl_T0_ffff();
5697 /* NOTE: keeping EIP updated is not a problem in case of
5701 gen_op_addl_A0_im(2 << s->dflag);
5702 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5703 gen_op_movl_seg_T0_vm(R_CS);
5704 /* add stack offset */
5705 gen_stack_update(s, val + (4 << s->dflag));
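/* real mode / vm86 lret: the new EIP is loaded from [SP] and the new
   CS selector from [SP + (2 << dflag)]; the stack pointer is then
   advanced by (4 << dflag) + imm16, e.g. 8 + imm16 for a 32-bit
   operand size. */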
5709 case 0xcb: /* lret */
5712 case 0xcf: /* iret */
5713 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
5716 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5717 s->cc_op = CC_OP_EFLAGS;
5718 } else if (s->vm86) {
5720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5722 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5723 s->cc_op = CC_OP_EFLAGS;
5726 if (s->cc_op != CC_OP_DYNAMIC)
5727 gen_op_set_cc_op(s->cc_op);
5728 gen_jmp_im(pc_start - s->cs_base);
5729 tcg_gen_helper_0_2(helper_iret_protected,
5730 tcg_const_i32(s->dflag),
5731 tcg_const_i32(s->pc - s->cs_base));
5732 s->cc_op = CC_OP_EFLAGS;
5736 case 0xe8: /* call im */
5739 tval = (int32_t)insn_get(s, OT_LONG);
5741 tval = (int16_t)insn_get(s, OT_WORD);
5742 next_eip = s->pc - s->cs_base;
5746 gen_movtl_T0_im(next_eip);
5751 case 0x9a: /* lcall im */
5753 unsigned int selector, offset;
5757 ot = dflag ? OT_LONG : OT_WORD;
5758 offset = insn_get(s, ot);
5759 selector = insn_get(s, OT_WORD);
5761 gen_op_movl_T0_im(selector);
5762 gen_op_movl_T1_imu(offset);
5765 case 0xe9: /* jmp im */
5767 tval = (int32_t)insn_get(s, OT_LONG);
5769 tval = (int16_t)insn_get(s, OT_WORD);
5770 tval += s->pc - s->cs_base;
5775 case 0xea: /* ljmp im */
5777 unsigned int selector, offset;
5781 ot = dflag ? OT_LONG : OT_WORD;
5782 offset = insn_get(s, ot);
5783 selector = insn_get(s, OT_WORD);
5785 gen_op_movl_T0_im(selector);
5786 gen_op_movl_T1_imu(offset);
5789 case 0xeb: /* jmp Jb */
5790 tval = (int8_t)insn_get(s, OT_BYTE);
5791 tval += s->pc - s->cs_base;
5796 case 0x70 ... 0x7f: /* jcc Jb */
5797 tval = (int8_t)insn_get(s, OT_BYTE);
5799 case 0x180 ... 0x18f: /* jcc Jv */
5801 tval = (int32_t)insn_get(s, OT_LONG);
5803 tval = (int16_t)insn_get(s, OT_WORD);
5806 next_eip = s->pc - s->cs_base;
5810 gen_jcc(s, b, tval, next_eip);
5813 case 0x190 ... 0x19f: /* setcc Gv */
5814 modrm = ldub_code(s->pc++);
5816 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5818 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5823 ot = dflag + OT_WORD;
5824 modrm = ldub_code(s->pc++);
5825 reg = ((modrm >> 3) & 7) | rex_r;
5826 mod = (modrm >> 6) & 3;
5827 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5829 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5830 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
5832 rm = (modrm & 7) | REX_B(s);
5833 gen_op_mov_v_reg(ot, t0, rm);
5835 #ifdef TARGET_X86_64
5836 if (ot == OT_LONG) {
5837 /* XXX: specific Intel behaviour ? */
5838 l1 = gen_new_label();
5839 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5840 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5842 tcg_gen_movi_tl(cpu_tmp0, 0);
5843 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5847 l1 = gen_new_label();
5848 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5849 gen_op_mov_reg_v(ot, reg, t0);
5856 /************************/
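/* flags */
/* pushf/popf, sahf/lahf and the carry/direction flag instructions. */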
5858 case 0x9c: /* pushf */
5859 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
5860 if (s->vm86 && s->iopl != 3) {
5861 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5863 if (s->cc_op != CC_OP_DYNAMIC)
5864 gen_op_set_cc_op(s->cc_op);
5865 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5869 case 0x9d: /* popf */
5870 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
5871 if (s->vm86 && s->iopl != 3) {
5872 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5877 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5878 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5880 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5881 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5884 if (s->cpl <= s->iopl) {
5886 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5887 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5889 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5890 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5894 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5895 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5897 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5898 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5903 s->cc_op = CC_OP_EFLAGS;
5904 /* abort translation because TF flag may change */
5905 gen_jmp_im(s->pc - s->cs_base);
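/* the mask passed to helper_write_eflags above depends on privilege:
   at CPL 0 both IOPL and IF are writable, with CPL <= IOPL only IF is,
   and otherwise neither; the 16-bit forms further restrict the mask to
   the low 16 bits. */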
5909 case 0x9e: /* sahf */
5910 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5912 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5913 if (s->cc_op != CC_OP_DYNAMIC)
5914 gen_op_set_cc_op(s->cc_op);
5915 gen_compute_eflags(cpu_cc_src);
5916 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5917 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5918 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5919 s->cc_op = CC_OP_EFLAGS;
5921 case 0x9f: /* lahf */
5922 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5924 if (s->cc_op != CC_OP_DYNAMIC)
5925 gen_op_set_cc_op(s->cc_op);
5926 gen_compute_eflags(cpu_T[0]);
5927 /* Note: gen_compute_eflags() only gives the condition codes */
5928 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5929 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5931 case 0xf5: /* cmc */
5932 if (s->cc_op != CC_OP_DYNAMIC)
5933 gen_op_set_cc_op(s->cc_op);
5934 gen_compute_eflags(cpu_cc_src);
5935 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5936 s->cc_op = CC_OP_EFLAGS;
5938 case 0xf8: /* clc */
5939 if (s->cc_op != CC_OP_DYNAMIC)
5940 gen_op_set_cc_op(s->cc_op);
5941 gen_compute_eflags(cpu_cc_src);
5942 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5943 s->cc_op = CC_OP_EFLAGS;
5945 case 0xf9: /* stc */
5946 if (s->cc_op != CC_OP_DYNAMIC)
5947 gen_op_set_cc_op(s->cc_op);
5948 gen_compute_eflags(cpu_cc_src);
5949 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5950 s->cc_op = CC_OP_EFLAGS;
5952 case 0xfc: /* cld */
5953 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5954 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5956 case 0xfd: /* std */
5957 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5958 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
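/* the direction flag is stored in env->df as +1 / -1 rather than as a
   flag bit, so the string op generators can shift it by the operand
   size to obtain the per-element address increment directly. */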
5961 /************************/
5962 /* bit operations */
5963 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5964 ot = dflag + OT_WORD;
5965 modrm = ldub_code(s->pc++);
5966 op = (modrm >> 3) & 7;
5967 mod = (modrm >> 6) & 3;
5968 rm = (modrm & 7) | REX_B(s);
5971 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5972 gen_op_ld_T0_A0(ot + s->mem_index);
5974 gen_op_mov_TN_reg(ot, 0, rm);
5977 val = ldub_code(s->pc++);
5978 gen_op_movl_T1_im(val);
5983 case 0x1a3: /* bt Gv, Ev */
5986 case 0x1ab: /* bts */
5989 case 0x1b3: /* btr */
5992 case 0x1bb: /* btc */
5995 ot = dflag + OT_WORD;
5996 modrm = ldub_code(s->pc++);
5997 reg = ((modrm >> 3) & 7) | rex_r;
5998 mod = (modrm >> 6) & 3;
5999 rm = (modrm & 7) | REX_B(s);
6000 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6002 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6003 /* specific case: we need to add a displacement */
6004 gen_exts(ot, cpu_T[1]);
6005 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6006 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6007 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
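/* for bt/bts/btr/btc with a memory operand the bit offset may exceed
   the operand width, so the address is first displaced by
   (offset >> (3 + ot)) << ot bytes; e.g. "bt dword [m], 35" accesses
   m + 4 and then tests bit 35 & 31 = 3. */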
6008 gen_op_ld_T0_A0(ot + s->mem_index);
6010 gen_op_mov_TN_reg(ot, 0, rm);
6013 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6016 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6017 tcg_gen_movi_tl(cpu_cc_dst, 0);
6020 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6021 tcg_gen_movi_tl(cpu_tmp0, 1);
6022 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6023 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6026 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6027 tcg_gen_movi_tl(cpu_tmp0, 1);
6028 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6029 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6030 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6034 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6035 tcg_gen_movi_tl(cpu_tmp0, 1);
6036 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6037 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6040 s->cc_op = CC_OP_SARB + ot;
6043 gen_op_st_T0_A0(ot + s->mem_index);
6045 gen_op_mov_reg_T0(ot, rm);
6046 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6047 tcg_gen_movi_tl(cpu_cc_dst, 0);
6050 case 0x1bc: /* bsf */
6051 case 0x1bd: /* bsr */
6056 ot = dflag + OT_WORD;
6057 modrm = ldub_code(s->pc++);
6058 reg = ((modrm >> 3) & 7) | rex_r;
6059 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6060 gen_extu(ot, cpu_T[0]);
6061 label1 = gen_new_label();
6062 tcg_gen_movi_tl(cpu_cc_dst, 0);
6063 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6064 tcg_gen_mov_tl(t0, cpu_T[0]);
6065 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6067 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
6069 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
6071 gen_op_mov_reg_T0(ot, reg);
6072 tcg_gen_movi_tl(cpu_cc_dst, 1);
6073 gen_set_label(label1);
6074 tcg_gen_discard_tl(cpu_cc_src);
6075 s->cc_op = CC_OP_LOGICB + ot;
6079 /************************/
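/* bcd */
/* daa/das/aaa/aas/aam/aad are handled entirely by helpers. */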
6081 case 0x27: /* daa */
6084 if (s->cc_op != CC_OP_DYNAMIC)
6085 gen_op_set_cc_op(s->cc_op);
6086 tcg_gen_helper_0_0(helper_daa);
6087 s->cc_op = CC_OP_EFLAGS;
6089 case 0x2f: /* das */
6092 if (s->cc_op != CC_OP_DYNAMIC)
6093 gen_op_set_cc_op(s->cc_op);
6094 tcg_gen_helper_0_0(helper_das);
6095 s->cc_op = CC_OP_EFLAGS;
6097 case 0x37: /* aaa */
6100 if (s->cc_op != CC_OP_DYNAMIC)
6101 gen_op_set_cc_op(s->cc_op);
6102 tcg_gen_helper_0_0(helper_aaa);
6103 s->cc_op = CC_OP_EFLAGS;
6105 case 0x3f: /* aas */
6108 if (s->cc_op != CC_OP_DYNAMIC)
6109 gen_op_set_cc_op(s->cc_op);
6110 tcg_gen_helper_0_0(helper_aas);
6111 s->cc_op = CC_OP_EFLAGS;
6113 case 0xd4: /* aam */
6116 val = ldub_code(s->pc++);
6118 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6120 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6121 s->cc_op = CC_OP_LOGICB;
6124 case 0xd5: /* aad */
6127 val = ldub_code(s->pc++);
6128 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6129 s->cc_op = CC_OP_LOGICB;
6131 /************************/
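/* misc */
/* nop/pause, fwait, software interrupts, cli/sti, bound, bswap, salc,
   loop/jecxz, MSR access and the system instructions follow. */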
6133 case 0x90: /* nop */
6134 /* XXX: xchg + rex handling */
6135 /* XXX: correct lock test for all insn */
6136 if (prefixes & PREFIX_LOCK)
6138 if (prefixes & PREFIX_REPZ) {
6139 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6142 case 0x9b: /* fwait */
6143 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6144 (HF_MP_MASK | HF_TS_MASK)) {
6145 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6147 if (s->cc_op != CC_OP_DYNAMIC)
6148 gen_op_set_cc_op(s->cc_op);
6149 gen_jmp_im(pc_start - s->cs_base);
6150 tcg_gen_helper_0_0(helper_fwait);
6153 case 0xcc: /* int3 */
6154 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6156 case 0xcd: /* int N */
6157 val = ldub_code(s->pc++);
6158 if (s->vm86 && s->iopl != 3) {
6159 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6161 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6164 case 0xce: /* into */
6167 if (s->cc_op != CC_OP_DYNAMIC)
6168 gen_op_set_cc_op(s->cc_op);
6169 gen_jmp_im(pc_start - s->cs_base);
6170 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6172 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6173 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6175 gen_debug(s, pc_start - s->cs_base);
6178 tb_flush(cpu_single_env);
6179 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6182 case 0xfa: /* cli */
6184 if (s->cpl <= s->iopl) {
6185 tcg_gen_helper_0_0(helper_cli);
6187 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6191 tcg_gen_helper_0_0(helper_cli);
6193 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6197 case 0xfb: /* sti */
6199 if (s->cpl <= s->iopl) {
6201 tcg_gen_helper_0_0(helper_sti);
6202 /* interrupts are not taken until after the first insn following sti */
6203 /* if several instructions disable interrupts, only the first one does it */
6205 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6206 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6207 /* give a chance to handle pending irqs */
6208 gen_jmp_im(s->pc - s->cs_base);
6211 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6217 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6221 case 0x62: /* bound */
6224 ot = dflag ? OT_LONG : OT_WORD;
6225 modrm = ldub_code(s->pc++);
6226 reg = (modrm >> 3) & 7;
6227 mod = (modrm >> 6) & 3;
6230 gen_op_mov_TN_reg(ot, 0, reg);
6231 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6232 gen_jmp_im(pc_start - s->cs_base);
6233 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6235 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6237 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6239 case 0x1c8 ... 0x1cf: /* bswap reg */
6240 reg = (b & 7) | REX_B(s);
6241 #ifdef TARGET_X86_64
6243 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6244 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6245 gen_op_mov_reg_T0(OT_QUAD, reg);
6249 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6251 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6252 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6253 tcg_gen_bswap_i32(tmp0, tmp0);
6254 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6255 gen_op_mov_reg_T0(OT_LONG, reg);
6259 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6260 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6261 gen_op_mov_reg_T0(OT_LONG, reg);
6265 case 0xd6: /* salc */
6268 if (s->cc_op != CC_OP_DYNAMIC)
6269 gen_op_set_cc_op(s->cc_op);
6270 gen_compute_eflags_c(cpu_T[0]);
6271 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6272 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6274 case 0xe0: /* loopnz */
6275 case 0xe1: /* loopz */
6276 case 0xe2: /* loop */
6277 case 0xe3: /* jecxz */
6281 tval = (int8_t)insn_get(s, OT_BYTE);
6282 next_eip = s->pc - s->cs_base;
6287 l1 = gen_new_label();
6288 l2 = gen_new_label();
6289 l3 = gen_new_label();
6292 case 0: /* loopnz */
6294 if (s->cc_op != CC_OP_DYNAMIC)
6295 gen_op_set_cc_op(s->cc_op);
6296 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6297 gen_op_jz_ecx(s->aflag, l3);
6298 gen_compute_eflags(cpu_tmp0);
6299 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6301 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6303 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6307 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6308 gen_op_jnz_ecx(s->aflag, l1);
6312 gen_op_jz_ecx(s->aflag, l1);
6317 gen_jmp_im(next_eip);
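/* loopnz/loopz decrement ECX and additionally test ZF, loop only
   decrements ECX, and jecxz only tests ECX for zero; in each case the
   taken path jumps to the displacement target (tval) and the not-taken
   path falls through to next_eip. */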
6326 case 0x130: /* wrmsr */
6327 case 0x132: /* rdmsr */
6329 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6331 if (s->cc_op != CC_OP_DYNAMIC)
6332 gen_op_set_cc_op(s->cc_op);
6333 gen_jmp_im(pc_start - s->cs_base);
6335 tcg_gen_helper_0_0(helper_rdmsr);
6337 tcg_gen_helper_0_0(helper_wrmsr);
6341 case 0x131: /* rdtsc */
6342 if (s->cc_op != CC_OP_DYNAMIC)
6343 gen_op_set_cc_op(s->cc_op);
6344 gen_jmp_im(pc_start - s->cs_base);
6345 tcg_gen_helper_0_0(helper_rdtsc);
6347 case 0x133: /* rdpmc */
6348 if (s->cc_op != CC_OP_DYNAMIC)
6349 gen_op_set_cc_op(s->cc_op);
6350 gen_jmp_im(pc_start - s->cs_base);
6351 tcg_gen_helper_0_0(helper_rdpmc);
6353 case 0x134: /* sysenter */
6357 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6359 if (s->cc_op != CC_OP_DYNAMIC) {
6360 gen_op_set_cc_op(s->cc_op);
6361 s->cc_op = CC_OP_DYNAMIC;
6363 gen_jmp_im(pc_start - s->cs_base);
6364 tcg_gen_helper_0_0(helper_sysenter);
6368 case 0x135: /* sysexit */
6372 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6374 if (s->cc_op != CC_OP_DYNAMIC) {
6375 gen_op_set_cc_op(s->cc_op);
6376 s->cc_op = CC_OP_DYNAMIC;
6378 gen_jmp_im(pc_start - s->cs_base);
6379 tcg_gen_helper_0_0(helper_sysexit);
6383 #ifdef TARGET_X86_64
6384 case 0x105: /* syscall */
6385 /* XXX: is it usable in real mode ? */
6386 if (s->cc_op != CC_OP_DYNAMIC) {
6387 gen_op_set_cc_op(s->cc_op);
6388 s->cc_op = CC_OP_DYNAMIC;
6390 gen_jmp_im(pc_start - s->cs_base);
6391 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6394 case 0x107: /* sysret */
6396 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6398 if (s->cc_op != CC_OP_DYNAMIC) {
6399 gen_op_set_cc_op(s->cc_op);
6400 s->cc_op = CC_OP_DYNAMIC;
6402 gen_jmp_im(pc_start - s->cs_base);
6403 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6404 /* condition codes are modified only in long mode */
6406 s->cc_op = CC_OP_EFLAGS;
6411 case 0x1a2: /* cpuid */
6412 if (s->cc_op != CC_OP_DYNAMIC)
6413 gen_op_set_cc_op(s->cc_op);
6414 gen_jmp_im(pc_start - s->cs_base);
6415 tcg_gen_helper_0_0(helper_cpuid);
6417 case 0xf4: /* hlt */
6419 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6421 if (s->cc_op != CC_OP_DYNAMIC)
6422 gen_op_set_cc_op(s->cc_op);
6423 gen_jmp_im(pc_start - s->cs_base);
6424 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
6429 modrm = ldub_code(s->pc++);
6430 mod = (modrm >> 6) & 3;
6431 op = (modrm >> 3) & 7;
6434 if (!s->pe || s->vm86)
6436 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6437 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6441 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6444 if (!s->pe || s->vm86)
6447 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6449 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6450 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6451 gen_jmp_im(pc_start - s->cs_base);
6452 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6453 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6457 if (!s->pe || s->vm86)
6459 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6460 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6464 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6467 if (!s->pe || s->vm86)
6470 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6472 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6473 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6474 gen_jmp_im(pc_start - s->cs_base);
6475 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6476 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6481 if (!s->pe || s->vm86)
6483 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6484 if (s->cc_op != CC_OP_DYNAMIC)
6485 gen_op_set_cc_op(s->cc_op);
6487 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6489 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6490 s->cc_op = CC_OP_EFLAGS;
6497 modrm = ldub_code(s->pc++);
6498 mod = (modrm >> 6) & 3;
6499 op = (modrm >> 3) & 7;
6505 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
6506 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6507 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6508 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6509 gen_add_A0_im(s, 2);
6510 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6512 gen_op_andl_T0_im(0xffffff);
6513 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6518 case 0: /* monitor */
6519 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6522 if (s->cc_op != CC_OP_DYNAMIC)
6523 gen_op_set_cc_op(s->cc_op);
6524 gen_jmp_im(pc_start - s->cs_base);
6525 #ifdef TARGET_X86_64
6526 if (s->aflag == 2) {
6527 gen_op_movq_A0_reg(R_EAX);
6531 gen_op_movl_A0_reg(R_EAX);
6533 gen_op_andl_A0_ffff();
6535 gen_add_A0_ds_seg(s);
6536 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6539 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6542 if (s->cc_op != CC_OP_DYNAMIC) {
6543 gen_op_set_cc_op(s->cc_op);
6544 s->cc_op = CC_OP_DYNAMIC;
6546 gen_jmp_im(pc_start - s->cs_base);
6547 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
6554 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
6555 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6556 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6557 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6558 gen_add_A0_im(s, 2);
6559 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6561 gen_op_andl_T0_im(0xffffff);
6562 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6568 if (s->cc_op != CC_OP_DYNAMIC)
6569 gen_op_set_cc_op(s->cc_op);
6570 gen_jmp_im(pc_start - s->cs_base);
6573 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6576 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6579 tcg_gen_helper_0_2(helper_vmrun,
6580 tcg_const_i32(s->aflag),
6581 tcg_const_i32(s->pc - pc_start));
6586 case 1: /* VMMCALL */
6587 if (!(s->flags & HF_SVME_MASK))
6589 tcg_gen_helper_0_0(helper_vmmcall);
6591 case 2: /* VMLOAD */
6592 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6595 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6598 tcg_gen_helper_0_1(helper_vmload,
6599 tcg_const_i32(s->aflag));
6602 case 3: /* VMSAVE */
6603 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6606 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6609 tcg_gen_helper_0_1(helper_vmsave,
6610 tcg_const_i32(s->aflag));
6614 if ((!(s->flags & HF_SVME_MASK) &&
6615 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
6619 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6622 tcg_gen_helper_0_0(helper_stgi);
6626 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6629 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6632 tcg_gen_helper_0_0(helper_clgi);
6635 case 6: /* SKINIT */
6636 if ((!(s->flags & HF_SVME_MASK) &&
6637 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
6640 tcg_gen_helper_0_0(helper_skinit);
6642 case 7: /* INVLPGA */
6643 if (!(s->flags & HF_SVME_MASK) || !s->pe)
6646 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6649 tcg_gen_helper_0_1(helper_invlpga,
6650 tcg_const_i32(s->aflag));
6656 } else if (s->cpl != 0) {
6657 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6659 gen_svm_check_intercept(s, pc_start,
6660 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
6661 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6662 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6663 gen_add_A0_im(s, 2);
6664 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6666 gen_op_andl_T0_im(0xffffff);
6668 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6669 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6671 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6672 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6677 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
6678 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6679 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6683 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6685 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6686 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6687 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6688 gen_jmp_im(s->pc - s->cs_base);
6692 case 7: /* invlpg */
6694 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6697 #ifdef TARGET_X86_64
6698 if (CODE64(s) && rm == 0) {
6700 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6701 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6702 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6703 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6710 if (s->cc_op != CC_OP_DYNAMIC)
6711 gen_op_set_cc_op(s->cc_op);
6712 gen_jmp_im(pc_start - s->cs_base);
6713 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6714 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6715 gen_jmp_im(s->pc - s->cs_base);
6724 case 0x108: /* invd */
6725 case 0x109: /* wbinvd */
6727 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6729 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
6733 case 0x63: /* arpl or movslS (x86_64) */
6734 #ifdef TARGET_X86_64
6737 /* d_ot is the size of destination */
6738 d_ot = dflag + OT_WORD;
6740 modrm = ldub_code(s->pc++);
6741 reg = ((modrm >> 3) & 7) | rex_r;
6742 mod = (modrm >> 6) & 3;
6743 rm = (modrm & 7) | REX_B(s);
6746 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6748 if (d_ot == OT_QUAD)
6749 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6750 gen_op_mov_reg_T0(d_ot, reg);
6752 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6753 if (d_ot == OT_QUAD) {
6754 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6756 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6758 gen_op_mov_reg_T0(d_ot, reg);
6766 if (!s->pe || s->vm86)
6768 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6769 t1 = tcg_temp_local_new(TCG_TYPE_TL);
6770 t2 = tcg_temp_local_new(TCG_TYPE_TL);
6772 modrm = ldub_code(s->pc++);
6773 reg = (modrm >> 3) & 7;
6774 mod = (modrm >> 6) & 3;
6777 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6778 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6780 gen_op_mov_v_reg(ot, t0, rm);
6782 gen_op_mov_v_reg(ot, t1, reg);
6783 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
6784 tcg_gen_andi_tl(t1, t1, 3);
6785 tcg_gen_movi_tl(t2, 0);
6786 label1 = gen_new_label();
6787 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
6788 tcg_gen_andi_tl(t0, t0, ~3);
6789 tcg_gen_or_tl(t0, t0, t1);
6790 tcg_gen_movi_tl(t2, CC_Z);
6791 gen_set_label(label1);
6793 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
6795 gen_op_mov_reg_v(ot, rm, t0);
6797 if (s->cc_op != CC_OP_DYNAMIC)
6798 gen_op_set_cc_op(s->cc_op);
6799 gen_compute_eflags(cpu_cc_src);
6800 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6801 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
6802 s->cc_op = CC_OP_EFLAGS;
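/* arpl: when the destination selector's RPL (low two bits, t0 & 3) is
   below the source RPL (t1), it is raised to the source RPL and t2
   becomes CC_Z; otherwise the branch above keeps t2 at 0, so ZF is set
   exactly when an adjustment was made. */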
6808 case 0x102: /* lar */
6809 case 0x103: /* lsl */
6813 if (!s->pe || s->vm86)
6815 ot = dflag ? OT_LONG : OT_WORD;
6816 modrm = ldub_code(s->pc++);
6817 reg = ((modrm >> 3) & 7) | rex_r;
6818 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6819 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6820 if (s->cc_op != CC_OP_DYNAMIC)
6821 gen_op_set_cc_op(s->cc_op);
6823 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
6825 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
6826 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6827 label1 = gen_new_label();
6828 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6829 gen_op_mov_reg_v(ot, reg, t0);
6830 gen_set_label(label1);
6831 s->cc_op = CC_OP_EFLAGS;
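/* helper_lar / helper_lsl leave ZF in cc_src; the result is written
   back to the destination register only when ZF is set, i.e. only when
   the selector was valid and its descriptor accessible. */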
6836 modrm = ldub_code(s->pc++);
6837 mod = (modrm >> 6) & 3;
6838 op = (modrm >> 3) & 7;
6840 case 0: /* prefetchnta */
6841 case 1: /* prefetcht0 */
6842 case 2: /* prefetcht1 */
6843 case 3: /* prefetcht2 */
6846 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6847 /* nothing more to do */
6849 default: /* nop (multi byte) */
6850 gen_nop_modrm(s, modrm);
6854 case 0x119 ... 0x11f: /* nop (multi byte) */
6855 modrm = ldub_code(s->pc++);
6856 gen_nop_modrm(s, modrm);
6858 case 0x120: /* mov reg, crN */
6859 case 0x122: /* mov crN, reg */
6861 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6863 modrm = ldub_code(s->pc++);
6864 if ((modrm & 0xc0) != 0xc0)
6866 rm = (modrm & 7) | REX_B(s);
6867 reg = ((modrm >> 3) & 7) | rex_r;
6878 if (s->cc_op != CC_OP_DYNAMIC)
6879 gen_op_set_cc_op(s->cc_op);
6880 gen_jmp_im(pc_start - s->cs_base);
6882 gen_op_mov_TN_reg(ot, 0, rm);
6883 tcg_gen_helper_0_2(helper_write_crN,
6884 tcg_const_i32(reg), cpu_T[0]);
6885 gen_jmp_im(s->pc - s->cs_base);
6888 tcg_gen_helper_1_1(helper_read_crN,
6889 cpu_T[0], tcg_const_i32(reg));
6890 gen_op_mov_reg_T0(ot, rm);
6898 case 0x121: /* mov reg, drN */
6899 case 0x123: /* mov drN, reg */
6901 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6903 modrm = ldub_code(s->pc++);
6904 if ((modrm & 0xc0) != 0xc0)
6906 rm = (modrm & 7) | REX_B(s);
6907 reg = ((modrm >> 3) & 7) | rex_r;
6912 /* XXX: do it dynamically with CR4.DE bit */
6913 if (reg == 4 || reg == 5 || reg >= 8)
6916 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6917 gen_op_mov_TN_reg(ot, 0, rm);
6918 tcg_gen_helper_0_2(helper_movl_drN_T0,
6919 tcg_const_i32(reg), cpu_T[0]);
6920 gen_jmp_im(s->pc - s->cs_base);
6923 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6924 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6925 gen_op_mov_reg_T0(ot, rm);
6929 case 0x106: /* clts */
6931 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6933 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6934 tcg_gen_helper_0_0(helper_clts);
6935 /* abort block because static cpu state changed */
6936 gen_jmp_im(s->pc - s->cs_base);
6940 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6941 case 0x1c3: /* MOVNTI reg, mem */
6942 if (!(s->cpuid_features & CPUID_SSE2))
6944 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6945 modrm = ldub_code(s->pc++);
6946 mod = (modrm >> 6) & 3;
6949 reg = ((modrm >> 3) & 7) | rex_r;
6950 /* generate a generic store */
6951 gen_ldst_modrm(s, modrm, ot, reg, 1);
6954 modrm = ldub_code(s->pc++);
6955 mod = (modrm >> 6) & 3;
6956 op = (modrm >> 3) & 7;
6958 case 0: /* fxsave */
6959 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6960 (s->flags & HF_EM_MASK))
6962 if (s->flags & HF_TS_MASK) {
6963 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6967 if (s->cc_op != CC_OP_DYNAMIC)
6968 gen_op_set_cc_op(s->cc_op);
6969 gen_jmp_im(pc_start - s->cs_base);
6970 tcg_gen_helper_0_2(helper_fxsave,
6971 cpu_A0, tcg_const_i32((s->dflag == 2)));
6973 case 1: /* fxrstor */
6974 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6975 (s->flags & HF_EM_MASK))
6977 if (s->flags & HF_TS_MASK) {
6978 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6981 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6982 if (s->cc_op != CC_OP_DYNAMIC)
6983 gen_op_set_cc_op(s->cc_op);
6984 gen_jmp_im(pc_start - s->cs_base);
6985 tcg_gen_helper_0_2(helper_fxrstor,
6986 cpu_A0, tcg_const_i32((s->dflag == 2)));
6988 case 2: /* ldmxcsr */
6989 case 3: /* stmxcsr */
6990 if (s->flags & HF_TS_MASK) {
6991 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6994 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6997 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6999 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7000 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7002 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7003 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7006 case 5: /* lfence */
7007 case 6: /* mfence */
7008 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7011 case 7: /* sfence / clflush */
7012 if ((modrm & 0xc7) == 0xc0) {
7014 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7015 if (!(s->cpuid_features & CPUID_SSE))
7019 if (!(s->cpuid_features & CPUID_CLFLUSH))
7021 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7028 case 0x10d: /* 3DNow! prefetch(w) */
7029 modrm = ldub_code(s->pc++);
7030 mod = (modrm >> 6) & 3;
7033 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7034 /* ignore for now */
7036 case 0x1aa: /* rsm */
7037 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7038 if (!(s->flags & HF_SMM_MASK))
7040 if (s->cc_op != CC_OP_DYNAMIC) {
7041 gen_op_set_cc_op(s->cc_op);
7042 s->cc_op = CC_OP_DYNAMIC;
7044 gen_jmp_im(s->pc - s->cs_base);
7045 tcg_gen_helper_0_0(helper_rsm);
7048 case 0x10e ... 0x10f:
7049 /* 3DNow! instructions, ignore prefixes */
7050 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7051 case 0x110 ... 0x117:
7052 case 0x128 ... 0x12f:
7053 case 0x150 ... 0x177:
7054 case 0x17c ... 0x17f:
7056 case 0x1c4 ... 0x1c6:
7057 case 0x1d0 ... 0x1fe:
7058 gen_sse(s, b, pc_start, rex_r);
7063 /* lock generation */
7064 if (s->prefix & PREFIX_LOCK)
7065 tcg_gen_helper_0_0(helper_unlock);
7068 if (s->prefix & PREFIX_LOCK)
7069 tcg_gen_helper_0_0(helper_unlock);
7070 /* XXX: ensure that no lock was generated */
7071 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7075 void optimize_flags_init(void)
7077 #if TCG_TARGET_REG_BITS == 32
7078 assert(sizeof(CCTable) == (1 << 3));
7080 assert(sizeof(CCTable) == (1 << 4));
7082 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
7083 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
7084 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
7085 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
7086 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
7087 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
7088 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
7089 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
7090 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
7092 /* register helpers */
7094 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
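/* redefining DEF_HELPER so that each entry in the helper list expands
   to a tcg_register_helper() call registers every helper with TCG by
   name, making them callable from generated code. */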
7098 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7099 basic block 'tb'. If search_pc is TRUE, also generate PC
7100 information for each intermediate instruction. */
7101 static inline int gen_intermediate_code_internal(CPUState *env,
7102 TranslationBlock *tb,
7105 DisasContext dc1, *dc = &dc1;
7106 target_ulong pc_ptr;
7107 uint16_t *gen_opc_end;
7110 target_ulong pc_start;
7111 target_ulong cs_base;
7113 /* generate intermediate code */
7115 cs_base = tb->cs_base;
7117 cflags = tb->cflags;
7119 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7120 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7121 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7122 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7124 dc->vm86 = (flags >> VM_SHIFT) & 1;
7125 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7126 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7127 dc->tf = (flags >> TF_SHIFT) & 1;
7128 dc->singlestep_enabled = env->singlestep_enabled;
7129 dc->cc_op = CC_OP_DYNAMIC;
7130 dc->cs_base = cs_base;
7132 dc->popl_esp_hack = 0;
7133 /* select memory access functions */
7135 if (flags & HF_SOFTMMU_MASK) {
7137 dc->mem_index = 2 * 4;
7139 dc->mem_index = 1 * 4;
7141 dc->cpuid_features = env->cpuid_features;
7142 dc->cpuid_ext_features = env->cpuid_ext_features;
7143 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7144 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7145 #ifdef TARGET_X86_64
7146 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7147 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7150 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7151 (flags & HF_INHIBIT_IRQ_MASK)
7152 #ifndef CONFIG_SOFTMMU
7153 || (flags & HF_SOFTMMU_MASK)
7157 /* check addseg logic */
7158 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7159 printf("ERROR addseg\n");
7162 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
7163 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
7164 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
7165 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
7167 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7168 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7169 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7170 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7171 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7172 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7173 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7174 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7175 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7177 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7179 dc->is_jmp = DISAS_NEXT;
7184 if (env->nb_breakpoints > 0) {
7185 for(j = 0; j < env->nb_breakpoints; j++) {
7186 if (env->breakpoints[j] == pc_ptr) {
7187 gen_debug(dc, pc_ptr - dc->cs_base);
7193 j = gen_opc_ptr - gen_opc_buf;
7197 gen_opc_instr_start[lj++] = 0;
7199 gen_opc_pc[lj] = pc_ptr;
7200 gen_opc_cc_op[lj] = dc->cc_op;
7201 gen_opc_instr_start[lj] = 1;
7203 pc_ptr = disas_insn(dc, pc_ptr);
7204 /* stop translation if indicated */
7207 /* in single step mode, we generate only one instruction and
7208 generate an exception */
7209 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7210 the flag and abort the translation to give the irqs a
7211 chance to happen */
7212 if (dc->tf || dc->singlestep_enabled ||
7213 (flags & HF_INHIBIT_IRQ_MASK) ||
7214 (cflags & CF_SINGLE_INSN)) {
7215 gen_jmp_im(pc_ptr - dc->cs_base);
7219 /* if the translation gets too long, stop generation as well */
7220 if (gen_opc_ptr >= gen_opc_end ||
7221 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7222 gen_jmp_im(pc_ptr - dc->cs_base);
7227 *gen_opc_ptr = INDEX_op_end;
7228 /* don't forget to fill in the last values */
7230 j = gen_opc_ptr - gen_opc_buf;
7233 gen_opc_instr_start[lj++] = 0;
7237 if (loglevel & CPU_LOG_TB_CPU) {
7238 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7240 if (loglevel & CPU_LOG_TB_IN_ASM) {
7242 fprintf(logfile, "----------------\n");
7243 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7244 #ifdef TARGET_X86_64
7249 disas_flags = !dc->code32;
7250 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7251 fprintf(logfile, "\n");
7256 tb->size = pc_ptr - pc_start;
7260 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7262 return gen_intermediate_code_internal(env, tb, 0);
7265 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7267 return gen_intermediate_code_internal(env, tb, 1);
7270 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7271 unsigned long searched_pc, int pc_pos, void *puc)
7275 if (loglevel & CPU_LOG_TB_OP) {
7277 fprintf(logfile, "RESTORE:\n");
7278 for(i = 0;i <= pc_pos; i++) {
7279 if (gen_opc_instr_start[i]) {
7280 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7283 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7284 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7285 (uint32_t)tb->cs_base);
7288 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7289 cc_op = gen_opc_cc_op[pc_pos];
7290 if (cc_op != CC_OP_DYNAMIC)
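/* gen_pc_load() is used when a fault occurs inside a translation
   block: it restores env->eip from gen_opc_pc[] for the instruction
   containing searched_pc and, if the recorded cc_op is not dynamic,
   restores the condition-code state as well. */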