4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
63 static TCGv cpu_T[2], cpu_T3;
64 /* local register indexes (only used inside old micro ops) */
65 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
66 static TCGv cpu_tmp5, cpu_tmp6;
69 static int x86_64_hregs;
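/* x86_64_hregs is set while translating an instruction that carries a
   REX prefix: byte register encodings 4..7 then select SPL/BPL/SIL/DIL
   instead of AH/CH/DH/BH, which is why the byte register moves below
   test "reg < 4 || reg >= 8 || x86_64_hregs". */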
72 typedef struct DisasContext {
73 /* current insn context */
74 int override; /* -1 if no override */
77 target_ulong pc; /* pc = eip + cs_base */
78 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
79 static state change (stop translation) */
80 /* current block context */
81 target_ulong cs_base; /* base of CS segment */
82 int pe; /* protected mode */
83 int code32; /* 32 bit code segment */
85 int lma; /* long mode active */
86 int code64; /* 64 bit code segment */
89 int ss32; /* 32 bit stack segment */
90 int cc_op; /* current CC operation */
91 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
92 int f_st; /* currently unused */
93 int vm86; /* vm86 mode */
96 int tf; /* TF cpu flag */
97 int singlestep_enabled; /* "hardware" single step enabled */
98 int jmp_opt; /* use direct block chaining for direct jumps */
99 int mem_index; /* select memory access functions */
100 uint64_t flags; /* all execution flags */
101 struct TranslationBlock *tb;
102 int popl_esp_hack; /* for correct popl with esp base handling */
103 int rip_offset; /* only used in x86_64, but left for simplicity */
105 int cpuid_ext_features;
106 int cpuid_ext2_features;
107 int cpuid_ext3_features;
110 static void gen_eob(DisasContext *s);
111 static void gen_jmp(DisasContext *s, target_ulong eip);
112 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
114 /* i386 arith/logic operations */
134 OP_SHL1, /* undocumented */
158 /* I386 int registers */
159 OR_EAX, /* MUST be even numbered */
168 OR_TMP0 = 16, /* temporary operand register */
170 OR_A0, /* temporary register used when doing address evaluation */
173 static inline void gen_op_movl_T0_0(void)
175 tcg_gen_movi_tl(cpu_T[0], 0);
178 static inline void gen_op_movl_T0_im(int32_t val)
180 tcg_gen_movi_tl(cpu_T[0], val);
183 static inline void gen_op_movl_T0_imu(uint32_t val)
185 tcg_gen_movi_tl(cpu_T[0], val);
188 static inline void gen_op_movl_T1_im(int32_t val)
190 tcg_gen_movi_tl(cpu_T[1], val);
193 static inline void gen_op_movl_T1_imu(uint32_t val)
195 tcg_gen_movi_tl(cpu_T[1], val);
198 static inline void gen_op_movl_A0_im(uint32_t val)
200 tcg_gen_movi_tl(cpu_A0, val);
204 static inline void gen_op_movq_A0_im(int64_t val)
206 tcg_gen_movi_tl(cpu_A0, val);
210 static inline void gen_movtl_T0_im(target_ulong val)
212 tcg_gen_movi_tl(cpu_T[0], val);
215 static inline void gen_movtl_T1_im(target_ulong val)
217 tcg_gen_movi_tl(cpu_T[1], val);
220 static inline void gen_op_andl_T0_ffff(void)
222 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
225 static inline void gen_op_andl_T0_im(uint32_t val)
227 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
230 static inline void gen_op_movl_T0_T1(void)
232 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
235 static inline void gen_op_andl_A0_ffff(void)
237 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
242 #define NB_OP_SIZES 4
244 #else /* !TARGET_X86_64 */
246 #define NB_OP_SIZES 3
248 #endif /* !TARGET_X86_64 */
250 #if defined(WORDS_BIGENDIAN)
251 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
252 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
253 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
254 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
255 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
257 #define REG_B_OFFSET 0
258 #define REG_H_OFFSET 1
259 #define REG_W_OFFSET 0
260 #define REG_L_OFFSET 0
261 #define REG_LH_OFFSET 4
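/* The REG_*_OFFSET constants locate the byte/word/dword parts of a
   target_ulong register slot for both endiannesses.  Partial-register
   writes follow the x86 rules: 8 and 16 bit stores leave the rest of
   the register untouched, while a 32 bit store also clears the upper
   half in 64 bit mode (hence the explicit zero store to REG_LH_OFFSET
   below). */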
264 static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
268 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
269 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
271 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
275 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
279 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
280 /* high part of register set to zero */
281 tcg_gen_movi_tl(cpu_tmp0, 0);
282 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
286 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
291 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
297 static inline void gen_op_mov_reg_T0(int ot, int reg)
299 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
302 static inline void gen_op_mov_reg_T1(int ot, int reg)
304 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
307 static inline void gen_op_mov_reg_A0(int size, int reg)
311 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
315 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
316 /* high part of register set to zero */
317 tcg_gen_movi_tl(cpu_tmp0, 0);
318 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
322 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
327 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
333 static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
337 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
340 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
345 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
350 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
352 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
355 static inline void gen_op_movl_A0_reg(int reg)
357 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
360 static inline void gen_op_addl_A0_im(int32_t val)
362 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
364 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
369 static inline void gen_op_addq_A0_im(int64_t val)
371 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
375 static void gen_add_A0_im(DisasContext *s, int val)
379 gen_op_addq_A0_im(val);
382 gen_op_addl_A0_im(val);
385 static inline void gen_op_addl_T0_T1(void)
387 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
390 static inline void gen_op_jmp_T0(void)
392 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
395 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
399 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
400 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
401 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
404 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
405 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
409 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
413 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
414 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
415 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
421 static inline void gen_op_add_reg_T0(int size, int reg)
425 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
426 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
427 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
430 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
431 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
433 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
435 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
440 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
441 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
447 static inline void gen_op_set_cc_op(int32_t val)
449 tcg_gen_movi_i32(cpu_cc_op, val);
452 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
454 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
456 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
457 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
459 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
463 static inline void gen_op_movl_A0_seg(int reg)
465 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
468 static inline void gen_op_addl_A0_seg(int reg)
470 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
471 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
473 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
478 static inline void gen_op_movq_A0_seg(int reg)
480 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
483 static inline void gen_op_addq_A0_seg(int reg)
485 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
486 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
489 static inline void gen_op_movq_A0_reg(int reg)
491 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
494 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
496 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
498 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
499 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
503 static inline void gen_op_lds_T0_A0(int idx)
505 int mem_index = (idx >> 2) - 1;
508 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
511 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
515 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
520 static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
522 int mem_index = (idx >> 2) - 1;
525 tcg_gen_qemu_ld8u(t0, a0, mem_index);
528 tcg_gen_qemu_ld16u(t0, a0, mem_index);
531 tcg_gen_qemu_ld32u(t0, a0, mem_index);
535 tcg_gen_qemu_ld64(t0, a0, mem_index);
540 /* XXX: always use ldu or lds */
541 static inline void gen_op_ld_T0_A0(int idx)
543 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
546 static inline void gen_op_ldu_T0_A0(int idx)
548 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
551 static inline void gen_op_ld_T1_A0(int idx)
553 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
556 static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
558 int mem_index = (idx >> 2) - 1;
561 tcg_gen_qemu_st8(t0, a0, mem_index);
564 tcg_gen_qemu_st16(t0, a0, mem_index);
567 tcg_gen_qemu_st32(t0, a0, mem_index);
571 tcg_gen_qemu_st64(t0, a0, mem_index);
576 static inline void gen_op_st_T0_A0(int idx)
578 gen_op_st_v(idx, cpu_T[0], cpu_A0);
581 static inline void gen_op_st_T1_A0(int idx)
583 gen_op_st_v(idx, cpu_T[1], cpu_A0);
586 static inline void gen_jmp_im(target_ulong pc)
588 tcg_gen_movi_tl(cpu_tmp0, pc);
589 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
592 static inline void gen_string_movl_A0_ESI(DisasContext *s)
596 override = s->override;
600 gen_op_movq_A0_seg(override);
601 gen_op_addq_A0_reg_sN(0, R_ESI);
603 gen_op_movq_A0_reg(R_ESI);
609 if (s->addseg && override < 0)
612 gen_op_movl_A0_seg(override);
613 gen_op_addl_A0_reg_sN(0, R_ESI);
615 gen_op_movl_A0_reg(R_ESI);
618 /* 16-bit address, always override */
621 gen_op_movl_A0_reg(R_ESI);
622 gen_op_andl_A0_ffff();
623 gen_op_addl_A0_seg(override);
627 static inline void gen_string_movl_A0_EDI(DisasContext *s)
631 gen_op_movq_A0_reg(R_EDI);
636 gen_op_movl_A0_seg(R_ES);
637 gen_op_addl_A0_reg_sN(0, R_EDI);
639 gen_op_movl_A0_reg(R_EDI);
642 gen_op_movl_A0_reg(R_EDI);
643 gen_op_andl_A0_ffff();
644 gen_op_addl_A0_seg(R_ES);
648 static inline void gen_op_movl_T0_Dshift(int ot)
650 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
651 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
654 static void gen_extu(int ot, TCGv reg)
658 tcg_gen_ext8u_tl(reg, reg);
661 tcg_gen_ext16u_tl(reg, reg);
664 tcg_gen_ext32u_tl(reg, reg);
671 static void gen_exts(int ot, TCGv reg)
675 tcg_gen_ext8s_tl(reg, reg);
678 tcg_gen_ext16s_tl(reg, reg);
681 tcg_gen_ext32s_tl(reg, reg);
688 static inline void gen_op_jnz_ecx(int size, int label1)
690 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
691 gen_extu(size + 1, cpu_tmp0);
692 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
695 static inline void gen_op_jz_ecx(int size, int label1)
697 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
698 gen_extu(size + 1, cpu_tmp0);
699 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
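/* helper_in_func/helper_out_func are indexed by the operand size
   (OT_BYTE..OT_LONG) and perform the actual port access;
   gen_check_io_func holds the matching I/O permission bitmap checks,
   called below in protected mode when CPL > IOPL or in vm86 mode. */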
702 static void *helper_in_func[3] = {
708 static void *helper_out_func[3] = {
714 static void *gen_check_io_func[3] = {
720 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
724 target_ulong next_eip;
727 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
728 if (s->cc_op != CC_OP_DYNAMIC)
729 gen_op_set_cc_op(s->cc_op);
732 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
733 tcg_gen_helper_0_1(gen_check_io_func[ot],
736 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
738 if (s->cc_op != CC_OP_DYNAMIC)
739 gen_op_set_cc_op(s->cc_op);
743 svm_flags |= (1 << (4 + ot));
744 next_eip = s->pc - s->cs_base;
745 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
746 tcg_gen_helper_0_3(helper_svm_check_io,
748 tcg_const_i32(svm_flags),
749 tcg_const_i32(next_eip - cur_eip));
753 static inline void gen_movs(DisasContext *s, int ot)
755 gen_string_movl_A0_ESI(s);
756 gen_op_ld_T0_A0(ot + s->mem_index);
757 gen_string_movl_A0_EDI(s);
758 gen_op_st_T0_A0(ot + s->mem_index);
759 gen_op_movl_T0_Dshift(ot);
760 gen_op_add_reg_T0(s->aflag, R_ESI);
761 gen_op_add_reg_T0(s->aflag, R_EDI);
764 static inline void gen_update_cc_op(DisasContext *s)
766 if (s->cc_op != CC_OP_DYNAMIC) {
767 gen_op_set_cc_op(s->cc_op);
768 s->cc_op = CC_OP_DYNAMIC;
772 static void gen_op_update1_cc(void)
774 tcg_gen_discard_tl(cpu_cc_src);
775 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
778 static void gen_op_update2_cc(void)
780 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
781 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
784 static inline void gen_op_cmpl_T0_T1_cc(void)
786 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
787 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
790 static inline void gen_op_testl_T0_T1_cc(void)
792 tcg_gen_discard_tl(cpu_cc_src);
793 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
796 static void gen_op_update_neg_cc(void)
798 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
799 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
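/* Condition codes are computed lazily: translated code only records the
   last flag-setting operation in cpu_cc_op and its operands/result in
   cpu_cc_src/cpu_cc_dst.  The two functions below materialize EFLAGS on
   demand by loading the compute_c/compute_all function pointer from
   cc_table[cc_op] and calling it. */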
802 /* compute eflags.C to reg */
803 static void gen_compute_eflags_c(TCGv reg)
805 #if TCG_TARGET_REG_BITS == 32
806 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
807 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
808 (long)cc_table + offsetof(CCTable, compute_c));
809 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
810 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
811 1, &cpu_tmp2_i32, 0, NULL);
813 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
814 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
815 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
816 (long)cc_table + offsetof(CCTable, compute_c));
817 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
818 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
819 1, &cpu_tmp2_i32, 0, NULL);
821 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
824 /* compute all eflags to 'reg' */
825 static void gen_compute_eflags(TCGv reg)
827 #if TCG_TARGET_REG_BITS == 32
828 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
829 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
830 (long)cc_table + offsetof(CCTable, compute_all));
831 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
832 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
833 1, &cpu_tmp2_i32, 0, NULL);
835 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
836 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
837 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
838 (long)cc_table + offsetof(CCTable, compute_all));
839 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
840 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
841 1, &cpu_tmp2_i32, 0, NULL);
843 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
846 static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
848 if (s->cc_op != CC_OP_DYNAMIC)
849 gen_op_set_cc_op(s->cc_op);
852 gen_compute_eflags(cpu_T[0]);
853 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
854 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
857 gen_compute_eflags_c(cpu_T[0]);
860 gen_compute_eflags(cpu_T[0]);
861 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
862 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
865 gen_compute_eflags(cpu_tmp0);
866 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
867 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
868 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
871 gen_compute_eflags(cpu_T[0]);
872 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
873 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
876 gen_compute_eflags(cpu_T[0]);
877 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
878 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
881 gen_compute_eflags(cpu_tmp0);
882 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
883 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
884 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
885 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
889 gen_compute_eflags(cpu_tmp0);
890 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
891 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
892 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
893 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
894 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
895 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
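/* Jump condition encoding used below: bit 0 of 'b' inverts the test and
   (b >> 1) & 7 selects one of the eight x86 conditions
   (O, B, Z, BE, S, P, L, LE).  For example b = 0x5 (JNZ) gives
   jcc_op = JCC_Z with the inversion bit set. */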
900 /* return true if setcc_slow is not needed (WARNING: must be kept in
901 sync with gen_jcc1) */
902 static int is_fast_jcc_case(DisasContext *s, int b)
905 jcc_op = (b >> 1) & 7;
907 /* we optimize the cmp/jcc case */
912 if (jcc_op == JCC_O || jcc_op == JCC_P)
916 /* some jumps are easy to compute */
941 if (jcc_op != JCC_Z && jcc_op != JCC_S)
951 /* generate a conditional jump to label 'l1' according to jump opcode
952 value 'b'. In the fast case, T0 is guaranteed not to be used. */
953 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
955 int inv, jcc_op, size, cond;
959 jcc_op = (b >> 1) & 7;
962 /* we optimize the cmp/jcc case */
968 size = cc_op - CC_OP_SUBB;
974 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
978 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
983 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
991 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
997 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
998 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1002 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1003 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1006 #ifdef TARGET_X86_64
1008 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1009 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1014 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1021 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1024 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1026 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1030 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1031 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1035 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1036 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1038 #ifdef TARGET_X86_64
1041 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1042 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1049 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1053 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1056 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1058 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1062 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1063 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1067 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1068 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1070 #ifdef TARGET_X86_64
1073 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1074 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1081 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1089 /* some jumps are easy to compute */
1131 size = (cc_op - CC_OP_ADDB) & 3;
1134 size = (cc_op - CC_OP_ADDB) & 3;
1142 gen_setcc_slow_T0(s, jcc_op);
1143 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1149 /* XXX: does not work with gdbstub "ice" single step - not a
1151 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1155 l1 = gen_new_label();
1156 l2 = gen_new_label();
1157 gen_op_jnz_ecx(s->aflag, l1);
1159 gen_jmp_tb(s, next_eip, 1);
1164 static inline void gen_stos(DisasContext *s, int ot)
1166 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1167 gen_string_movl_A0_EDI(s);
1168 gen_op_st_T0_A0(ot + s->mem_index);
1169 gen_op_movl_T0_Dshift(ot);
1170 gen_op_add_reg_T0(s->aflag, R_EDI);
1173 static inline void gen_lods(DisasContext *s, int ot)
1175 gen_string_movl_A0_ESI(s);
1176 gen_op_ld_T0_A0(ot + s->mem_index);
1177 gen_op_mov_reg_T0(ot, R_EAX);
1178 gen_op_movl_T0_Dshift(ot);
1179 gen_op_add_reg_T0(s->aflag, R_ESI);
1182 static inline void gen_scas(DisasContext *s, int ot)
1184 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1185 gen_string_movl_A0_EDI(s);
1186 gen_op_ld_T1_A0(ot + s->mem_index);
1187 gen_op_cmpl_T0_T1_cc();
1188 gen_op_movl_T0_Dshift(ot);
1189 gen_op_add_reg_T0(s->aflag, R_EDI);
1192 static inline void gen_cmps(DisasContext *s, int ot)
1194 gen_string_movl_A0_ESI(s);
1195 gen_op_ld_T0_A0(ot + s->mem_index);
1196 gen_string_movl_A0_EDI(s);
1197 gen_op_ld_T1_A0(ot + s->mem_index);
1198 gen_op_cmpl_T0_T1_cc();
1199 gen_op_movl_T0_Dshift(ot);
1200 gen_op_add_reg_T0(s->aflag, R_ESI);
1201 gen_op_add_reg_T0(s->aflag, R_EDI);
1204 static inline void gen_ins(DisasContext *s, int ot)
1206 gen_string_movl_A0_EDI(s);
1207 /* Note: we must do this dummy write first to be restartable in
1208 case of page fault. */
1210 gen_op_st_T0_A0(ot + s->mem_index);
1211 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1212 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1213 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1214 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1215 gen_op_st_T0_A0(ot + s->mem_index);
1216 gen_op_movl_T0_Dshift(ot);
1217 gen_op_add_reg_T0(s->aflag, R_EDI);
1220 static inline void gen_outs(DisasContext *s, int ot)
1222 gen_string_movl_A0_ESI(s);
1223 gen_op_ld_T0_A0(ot + s->mem_index);
1225 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1226 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1227 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1228 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1229 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1231 gen_op_movl_T0_Dshift(ot);
1232 gen_op_add_reg_T0(s->aflag, R_ESI);
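/* REP-prefixed string instructions are expanded one iteration at a time:
   test ECX for zero, perform the string operation once, decrement ECX,
   optionally test ZF (REPZ/REPNZ with CMPS/SCAS), then jump back to the
   current EIP.  Roughly:

       if (ECX == 0) goto after_insn;
       <one string operation>;  ECX--;
       [REPZ/REPNZ: if ZF says stop, goto after_insn;]
       goto current_insn;      /* re-test and iterate */

   This keeps interrupts and single stepping precise at the cost of
   looping through the block once per iteration. */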
1235 /* same method as Valgrind : we generate jumps to current or next
1237 #define GEN_REPZ(op) \
1238 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1239 target_ulong cur_eip, target_ulong next_eip) \
1242 gen_update_cc_op(s); \
1243 l2 = gen_jz_ecx_string(s, next_eip); \
1244 gen_ ## op(s, ot); \
1245 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1246 /* a loop would cause two single step exceptions if ECX = 1 \
1247 before rep string_insn */ \
1249 gen_op_jz_ecx(s->aflag, l2); \
1250 gen_jmp(s, cur_eip); \
1253 #define GEN_REPZ2(op) \
1254 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1255 target_ulong cur_eip, \
1256 target_ulong next_eip, \
1260 gen_update_cc_op(s); \
1261 l2 = gen_jz_ecx_string(s, next_eip); \
1262 gen_ ## op(s, ot); \
1263 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1264 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1265 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1267 gen_op_jz_ecx(s->aflag, l2); \
1268 gen_jmp(s, cur_eip); \
1279 static void *helper_fp_arith_ST0_FT0[8] = {
1280 helper_fadd_ST0_FT0,
1281 helper_fmul_ST0_FT0,
1282 helper_fcom_ST0_FT0,
1283 helper_fcom_ST0_FT0,
1284 helper_fsub_ST0_FT0,
1285 helper_fsubr_ST0_FT0,
1286 helper_fdiv_ST0_FT0,
1287 helper_fdivr_ST0_FT0,
1290 /* NOTE the exception in "r" op ordering */
1291 static void *helper_fp_arith_STN_ST0[8] = {
1292 helper_fadd_STN_ST0,
1293 helper_fmul_STN_ST0,
1296 helper_fsubr_STN_ST0,
1297 helper_fsub_STN_ST0,
1298 helper_fdivr_STN_ST0,
1299 helper_fdiv_STN_ST0,
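/* gen_op() implements the eight basic ALU operations.  For ADC/SBB the
   current carry is materialized with gen_compute_eflags_c() and folded
   into the result; cc_op is then set at run time to
   CC_OP_ADDB/CC_OP_SUBB + ot plus (carry << 2), selecting the ADC/SBB
   flavour when the carry was set, so the translator must treat cc_op as
   dynamic afterwards. */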
1302 /* if d == OR_TMP0, it means memory operand (address in A0) */
1303 static void gen_op(DisasContext *s1, int op, int ot, int d)
1306 gen_op_mov_TN_reg(ot, 0, d);
1308 gen_op_ld_T0_A0(ot + s1->mem_index);
1312 if (s1->cc_op != CC_OP_DYNAMIC)
1313 gen_op_set_cc_op(s1->cc_op);
1314 gen_compute_eflags_c(cpu_tmp4);
1315 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1316 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1318 gen_op_mov_reg_T0(ot, d);
1320 gen_op_st_T0_A0(ot + s1->mem_index);
1321 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1322 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1323 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1324 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1325 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1326 s1->cc_op = CC_OP_DYNAMIC;
1329 if (s1->cc_op != CC_OP_DYNAMIC)
1330 gen_op_set_cc_op(s1->cc_op);
1331 gen_compute_eflags_c(cpu_tmp4);
1332 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1333 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1335 gen_op_mov_reg_T0(ot, d);
1337 gen_op_st_T0_A0(ot + s1->mem_index);
1338 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1339 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1340 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1341 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1342 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1343 s1->cc_op = CC_OP_DYNAMIC;
1346 gen_op_addl_T0_T1();
1348 gen_op_mov_reg_T0(ot, d);
1350 gen_op_st_T0_A0(ot + s1->mem_index);
1351 gen_op_update2_cc();
1352 s1->cc_op = CC_OP_ADDB + ot;
1355 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1357 gen_op_mov_reg_T0(ot, d);
1359 gen_op_st_T0_A0(ot + s1->mem_index);
1360 gen_op_update2_cc();
1361 s1->cc_op = CC_OP_SUBB + ot;
1365 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1367 gen_op_mov_reg_T0(ot, d);
1369 gen_op_st_T0_A0(ot + s1->mem_index);
1370 gen_op_update1_cc();
1371 s1->cc_op = CC_OP_LOGICB + ot;
1374 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1376 gen_op_mov_reg_T0(ot, d);
1378 gen_op_st_T0_A0(ot + s1->mem_index);
1379 gen_op_update1_cc();
1380 s1->cc_op = CC_OP_LOGICB + ot;
1383 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1385 gen_op_mov_reg_T0(ot, d);
1387 gen_op_st_T0_A0(ot + s1->mem_index);
1388 gen_op_update1_cc();
1389 s1->cc_op = CC_OP_LOGICB + ot;
1392 gen_op_cmpl_T0_T1_cc();
1393 s1->cc_op = CC_OP_SUBB + ot;
1398 /* if d == OR_TMP0, it means memory operand (address in A0) */
1399 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1402 gen_op_mov_TN_reg(ot, 0, d);
1404 gen_op_ld_T0_A0(ot + s1->mem_index);
1405 if (s1->cc_op != CC_OP_DYNAMIC)
1406 gen_op_set_cc_op(s1->cc_op);
1408 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1409 s1->cc_op = CC_OP_INCB + ot;
1411 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1412 s1->cc_op = CC_OP_DECB + ot;
1415 gen_op_mov_reg_T0(ot, d);
1417 gen_op_st_T0_A0(ot + s1->mem_index);
1418 gen_compute_eflags_c(cpu_cc_src);
1419 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
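/* Variable-count shifts (SHL/SHR/SAR with a register count): the count
   is masked first (0x1f, or 0x3f for 64 bit operands), the value shifted
   by count-1 is kept in cpu_T3 so that CF can be derived from it, and
   the flags are only committed if the count turns out to be non zero,
   which leaves cc_op dynamic. */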
1422 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1423 int is_right, int is_arith)
1436 gen_op_ld_T0_A0(ot + s->mem_index);
1438 gen_op_mov_TN_reg(ot, 0, op1);
1440 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1442 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1446 gen_exts(ot, cpu_T[0]);
1447 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1448 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1450 gen_extu(ot, cpu_T[0]);
1451 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1452 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1455 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1456 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1461 gen_op_st_T0_A0(ot + s->mem_index);
1463 gen_op_mov_reg_T0(ot, op1);
1465 /* update eflags if non zero shift */
1466 if (s->cc_op != CC_OP_DYNAMIC)
1467 gen_op_set_cc_op(s->cc_op);
1469 /* XXX: inefficient */
1470 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1471 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1473 tcg_gen_mov_tl(t0, cpu_T[0]);
1474 tcg_gen_mov_tl(t1, cpu_T3);
1476 shift_label = gen_new_label();
1477 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1479 tcg_gen_mov_tl(cpu_cc_src, t1);
1480 tcg_gen_mov_tl(cpu_cc_dst, t0);
1482 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1484 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1486 gen_set_label(shift_label);
1487 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1493 static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1494 int is_right, int is_arith)
1505 gen_op_ld_T0_A0(ot + s->mem_index);
1507 gen_op_mov_TN_reg(ot, 0, op1);
1513 gen_exts(ot, cpu_T[0]);
1514 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1515 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1517 gen_extu(ot, cpu_T[0]);
1518 tcg_gen_shri_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1519 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1522 tcg_gen_shli_tl(cpu_tmp0, cpu_T[0], op2 - 1);
1523 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1529 gen_op_st_T0_A0(ot + s->mem_index);
1531 gen_op_mov_reg_T0(ot, op1);
1533 /* update eflags if non zero shift */
1535 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp0);
1536 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1538 s->cc_op = CC_OP_SARB + ot;
1540 s->cc_op = CC_OP_SHLB + ot;
1544 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1547 tcg_gen_shli_tl(ret, arg1, arg2);
1549 tcg_gen_shri_tl(ret, arg1, -arg2);
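/* Rotates without carry: the result is built from two opposite shifts
   OR'ed together; the zero-count case is branched around because a TCG
   shift by the full operand width would be undefined.  CF and OF are
   only rewritten when the masked count is non zero, so cc_op again
   becomes dynamic. */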
1552 /* XXX: add faster immediate case */
1553 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1557 int label1, label2, data_bits;
1558 TCGv t0, t1, t2, a0;
1560 /* XXX: inefficient, but we must use local temps */
1561 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1562 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1563 t2 = tcg_temp_local_new(TCG_TYPE_TL);
1564 a0 = tcg_temp_local_new(TCG_TYPE_TL);
1572 if (op1 == OR_TMP0) {
1573 tcg_gen_mov_tl(a0, cpu_A0);
1574 gen_op_ld_v(ot + s->mem_index, t0, a0);
1576 gen_op_mov_v_reg(ot, t0, op1);
1579 tcg_gen_mov_tl(t1, cpu_T[1]);
1581 tcg_gen_andi_tl(t1, t1, mask);
1583 /* Must test zero case to avoid using undefined behaviour in TCG
1585 label1 = gen_new_label();
1586 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1589 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1591 tcg_gen_mov_tl(cpu_tmp0, t1);
1594 tcg_gen_mov_tl(t2, t0);
1596 data_bits = 8 << ot;
1597 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1598 fix TCG definition) */
1600 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1601 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1602 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1604 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1605 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1606 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
1608 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1610 gen_set_label(label1);
1612 if (op1 == OR_TMP0) {
1613 gen_op_st_v(ot + s->mem_index, t0, a0);
1615 gen_op_mov_reg_v(ot, op1, t0);
1619 if (s->cc_op != CC_OP_DYNAMIC)
1620 gen_op_set_cc_op(s->cc_op);
1622 label2 = gen_new_label();
1623 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
1625 gen_compute_eflags(cpu_cc_src);
1626 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1627 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
1628 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1629 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1630 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1632 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1634 tcg_gen_andi_tl(t0, t0, CC_C);
1635 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1637 tcg_gen_discard_tl(cpu_cc_dst);
1638 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1640 gen_set_label(label2);
1641 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1649 static void *helper_rotc[8] = {
1653 X86_64_ONLY(helper_rclq),
1657 X86_64_ONLY(helper_rcrq),
1660 /* XXX: add faster immediate = 1 case */
1661 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1666 if (s->cc_op != CC_OP_DYNAMIC)
1667 gen_op_set_cc_op(s->cc_op);
1671 gen_op_ld_T0_A0(ot + s->mem_index);
1673 gen_op_mov_TN_reg(ot, 0, op1);
1675 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1676 cpu_T[0], cpu_T[0], cpu_T[1]);
1679 gen_op_st_T0_A0(ot + s->mem_index);
1681 gen_op_mov_reg_T0(ot, op1);
1684 label1 = gen_new_label();
1685 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
1687 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
1688 tcg_gen_discard_tl(cpu_cc_dst);
1689 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1691 gen_set_label(label1);
1692 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
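/* SHLD/SHRD: the two operands are combined into a double-width quantity
   and shifted as one (for 16 bit operands the value is first widened to
   32 bits, implementing the Intel behaviour for counts > 16); as with
   the other shifts, the flags are only updated when the count is non
   zero. */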
1695 /* XXX: add faster immediate case */
1696 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1699 int label1, label2, data_bits;
1701 TCGv t0, t1, t2, a0;
1703 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1704 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1705 t2 = tcg_temp_local_new(TCG_TYPE_TL);
1706 a0 = tcg_temp_local_new(TCG_TYPE_TL);
1714 if (op1 == OR_TMP0) {
1715 tcg_gen_mov_tl(a0, cpu_A0);
1716 gen_op_ld_v(ot + s->mem_index, t0, a0);
1718 gen_op_mov_v_reg(ot, t0, op1);
1721 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1723 tcg_gen_mov_tl(t1, cpu_T[1]);
1724 tcg_gen_mov_tl(t2, cpu_T3);
1726 /* Must test zero case to avoid using undefined behaviour in TCG
1728 label1 = gen_new_label();
1729 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
1731 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
1732 if (ot == OT_WORD) {
1733 /* Note: we implement the Intel behaviour for shift count > 16 */
1735 tcg_gen_andi_tl(t0, t0, 0xffff);
1736 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
1737 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1738 tcg_gen_ext32u_tl(t0, t0);
1740 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1742 /* only needed if count > 16, but a test would complicate the code */
1743 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1744 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
1746 tcg_gen_shr_tl(t0, t0, t2);
1748 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1750 /* XXX: not optimal */
1751 tcg_gen_andi_tl(t0, t0, 0xffff);
1752 tcg_gen_shli_tl(t1, t1, 16);
1753 tcg_gen_or_tl(t1, t1, t0);
1754 tcg_gen_ext32u_tl(t1, t1);
1756 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1757 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1758 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
1759 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1761 tcg_gen_shl_tl(t0, t0, t2);
1762 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1763 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1764 tcg_gen_or_tl(t0, t0, t1);
1767 data_bits = 8 << ot;
1770 tcg_gen_ext32u_tl(t0, t0);
1772 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1774 tcg_gen_shr_tl(t0, t0, t2);
1775 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1776 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
1777 tcg_gen_or_tl(t0, t0, t1);
1781 tcg_gen_ext32u_tl(t1, t1);
1783 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1785 tcg_gen_shl_tl(t0, t0, t2);
1786 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1787 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1788 tcg_gen_or_tl(t0, t0, t1);
1791 tcg_gen_mov_tl(t1, cpu_tmp4);
1793 gen_set_label(label1);
1795 if (op1 == OR_TMP0) {
1796 gen_op_st_v(ot + s->mem_index, t0, a0);
1798 gen_op_mov_reg_v(ot, op1, t0);
1802 if (s->cc_op != CC_OP_DYNAMIC)
1803 gen_op_set_cc_op(s->cc_op);
1805 label2 = gen_new_label();
1806 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
1808 tcg_gen_mov_tl(cpu_cc_src, t1);
1809 tcg_gen_mov_tl(cpu_cc_dst, t0);
1811 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1813 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1815 gen_set_label(label2);
1816 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1824 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1827 gen_op_mov_TN_reg(ot, 1, s);
1830 gen_rot_rm_T1(s1, ot, d, 0);
1833 gen_rot_rm_T1(s1, ot, d, 1);
1837 gen_shift_rm_T1(s1, ot, d, 0, 0);
1840 gen_shift_rm_T1(s1, ot, d, 1, 0);
1843 gen_shift_rm_T1(s1, ot, d, 1, 1);
1846 gen_rotc_rm_T1(s1, ot, d, 0);
1849 gen_rotc_rm_T1(s1, ot, d, 1);
1854 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1859 gen_shift_rm_im(s1, ot, d, c, 0, 0);
1862 gen_shift_rm_im(s1, ot, d, c, 1, 0);
1865 gen_shift_rm_im(s1, ot, d, c, 1, 1);
1868 /* currently not optimized */
1869 gen_op_movl_T1_im(c);
1870 gen_shift(s1, op, ot, d, OR_TMP1);
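/* gen_lea_modrm() decodes the ModRM byte that follows the opcode,
   together with an optional SIB byte and displacement, and leaves the
   resulting effective address in cpu_A0, adding the segment base when
   addseg or an explicit segment override requires it. */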
1875 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1883 int mod, rm, code, override, must_add_seg;
1885 override = s->override;
1886 must_add_seg = s->addseg;
1889 mod = (modrm >> 6) & 3;
1901 code = ldub_code(s->pc++);
1902 scale = (code >> 6) & 3;
1903 index = ((code >> 3) & 7) | REX_X(s);
1910 if ((base & 7) == 5) {
1912 disp = (int32_t)ldl_code(s->pc);
1914 if (CODE64(s) && !havesib) {
1915 disp += s->pc + s->rip_offset;
1922 disp = (int8_t)ldub_code(s->pc++);
1926 disp = ldl_code(s->pc);
1932 /* for correct popl handling with esp */
1933 if (base == 4 && s->popl_esp_hack)
1934 disp += s->popl_esp_hack;
1935 #ifdef TARGET_X86_64
1936 if (s->aflag == 2) {
1937 gen_op_movq_A0_reg(base);
1939 gen_op_addq_A0_im(disp);
1944 gen_op_movl_A0_reg(base);
1946 gen_op_addl_A0_im(disp);
1949 #ifdef TARGET_X86_64
1950 if (s->aflag == 2) {
1951 gen_op_movq_A0_im(disp);
1955 gen_op_movl_A0_im(disp);
1958 /* XXX: index == 4 is always invalid */
1959 if (havesib && (index != 4 || scale != 0)) {
1960 #ifdef TARGET_X86_64
1961 if (s->aflag == 2) {
1962 gen_op_addq_A0_reg_sN(scale, index);
1966 gen_op_addl_A0_reg_sN(scale, index);
1971 if (base == R_EBP || base == R_ESP)
1976 #ifdef TARGET_X86_64
1977 if (s->aflag == 2) {
1978 gen_op_addq_A0_seg(override);
1982 gen_op_addl_A0_seg(override);
1989 disp = lduw_code(s->pc);
1991 gen_op_movl_A0_im(disp);
1992 rm = 0; /* avoid SS override */
1999 disp = (int8_t)ldub_code(s->pc++);
2003 disp = lduw_code(s->pc);
2009 gen_op_movl_A0_reg(R_EBX);
2010 gen_op_addl_A0_reg_sN(0, R_ESI);
2013 gen_op_movl_A0_reg(R_EBX);
2014 gen_op_addl_A0_reg_sN(0, R_EDI);
2017 gen_op_movl_A0_reg(R_EBP);
2018 gen_op_addl_A0_reg_sN(0, R_ESI);
2021 gen_op_movl_A0_reg(R_EBP);
2022 gen_op_addl_A0_reg_sN(0, R_EDI);
2025 gen_op_movl_A0_reg(R_ESI);
2028 gen_op_movl_A0_reg(R_EDI);
2031 gen_op_movl_A0_reg(R_EBP);
2035 gen_op_movl_A0_reg(R_EBX);
2039 gen_op_addl_A0_im(disp);
2040 gen_op_andl_A0_ffff();
2044 if (rm == 2 || rm == 3 || rm == 6)
2049 gen_op_addl_A0_seg(override);
2059 static void gen_nop_modrm(DisasContext *s, int modrm)
2061 int mod, rm, base, code;
2063 mod = (modrm >> 6) & 3;
2073 code = ldub_code(s->pc++);
2109 /* used for LEA and MOV AX, mem */
2110 static void gen_add_A0_ds_seg(DisasContext *s)
2112 int override, must_add_seg;
2113 must_add_seg = s->addseg;
2115 if (s->override >= 0) {
2116 override = s->override;
2122 #ifdef TARGET_X86_64
2124 gen_op_addq_A0_seg(override);
2128 gen_op_addl_A0_seg(override);
2133 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
2135 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2137 int mod, rm, opreg, disp;
2139 mod = (modrm >> 6) & 3;
2140 rm = (modrm & 7) | REX_B(s);
2144 gen_op_mov_TN_reg(ot, 0, reg);
2145 gen_op_mov_reg_T0(ot, rm);
2147 gen_op_mov_TN_reg(ot, 0, rm);
2149 gen_op_mov_reg_T0(ot, reg);
2152 gen_lea_modrm(s, modrm, &opreg, &disp);
2155 gen_op_mov_TN_reg(ot, 0, reg);
2156 gen_op_st_T0_A0(ot + s->mem_index);
2158 gen_op_ld_T0_A0(ot + s->mem_index);
2160 gen_op_mov_reg_T0(ot, reg);
2165 static inline uint32_t insn_get(DisasContext *s, int ot)
2171 ret = ldub_code(s->pc);
2175 ret = lduw_code(s->pc);
2180 ret = ldl_code(s->pc);
2187 static inline int insn_const_size(unsigned int ot)
2195 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2197 TranslationBlock *tb;
2200 pc = s->cs_base + eip;
2202 /* NOTE: we handle the case where the TB spans two pages here */
2203 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2204 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2205 /* jump to same page: we can use a direct jump */
2206 tcg_gen_goto_tb(tb_num);
2208 tcg_gen_exit_tb((long)tb + tb_num);
2210 /* jump to another page: currently not optimized */
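/* Conditional jumps: when direct block chaining is possible (s->jmp_opt)
   both the taken and the fall-through targets are emitted with
   gen_goto_tb() so the two successor blocks can be linked directly;
   otherwise EIP is written explicitly and the block ends with
   gen_eob(). */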
2216 static inline void gen_jcc(DisasContext *s, int b,
2217 target_ulong val, target_ulong next_eip)
2222 if (s->cc_op != CC_OP_DYNAMIC) {
2223 gen_op_set_cc_op(s->cc_op);
2224 s->cc_op = CC_OP_DYNAMIC;
2227 l1 = gen_new_label();
2228 gen_jcc1(s, cc_op, b, l1);
2230 gen_goto_tb(s, 0, next_eip);
2233 gen_goto_tb(s, 1, val);
2237 l1 = gen_new_label();
2238 l2 = gen_new_label();
2239 gen_jcc1(s, cc_op, b, l1);
2241 gen_jmp_im(next_eip);
2251 static void gen_setcc(DisasContext *s, int b)
2253 int inv, jcc_op, l1;
2256 if (is_fast_jcc_case(s, b)) {
2257 /* nominal case: we use a jump */
2258 /* XXX: make it faster by adding new instructions in TCG */
2259 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2260 tcg_gen_movi_tl(t0, 0);
2261 l1 = gen_new_label();
2262 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2263 tcg_gen_movi_tl(t0, 1);
2265 tcg_gen_mov_tl(cpu_T[0], t0);
2268 /* slow case: it is more efficient not to generate a jump,
2269 although it is questionable whether this optimization is
2272 jcc_op = (b >> 1) & 7;
2273 gen_setcc_slow_T0(s, jcc_op);
2275 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2280 static inline void gen_op_movl_T0_seg(int seg_reg)
2282 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2283 offsetof(CPUX86State,segs[seg_reg].selector));
2286 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2288 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2289 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2290 offsetof(CPUX86State,segs[seg_reg].selector));
2291 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2292 tcg_gen_st_tl(cpu_T[0], cpu_env,
2293 offsetof(CPUX86State,segs[seg_reg].base));
2296 /* move T0 to seg_reg and compute if the CPU state may change. Never
2297 call this function with seg_reg == R_CS */
2298 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2300 if (s->pe && !s->vm86) {
2301 /* XXX: optimize by finding processor state dynamically */
2302 if (s->cc_op != CC_OP_DYNAMIC)
2303 gen_op_set_cc_op(s->cc_op);
2304 gen_jmp_im(cur_eip);
2305 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2306 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2307 /* abort translation because the addseg value may change or
2308 because ss32 may change. For R_SS, translation must always
2309 stop, since special handling is needed to disable hardware
2310 interrupts for the next instruction */
2311 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2314 gen_op_movl_seg_T0_vm(seg_reg);
2315 if (seg_reg == R_SS)
2320 static inline int svm_is_rep(int prefixes)
2322 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
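/* SVM intercept checks: if the guest enabled the corresponding
   intercept, a helper is called before the instruction executes so that
   a #VMEXIT can be taken with a consistent state; since the translator
   cannot know whether the intercept will actually fire, cc_op and EIP
   are synchronized first and translation simply continues. */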
2326 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2327 uint32_t type, uint64_t param)
2329 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2330 /* no SVM activated */
2333 /* CRx and DRx reads/writes */
2334 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2335 if (s->cc_op != CC_OP_DYNAMIC) {
2336 gen_op_set_cc_op(s->cc_op);
2338 gen_jmp_im(pc_start - s->cs_base);
2339 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2340 tcg_const_i32(type), tcg_const_i64(param));
2341 /* this is a special case as we do not know if the interception occurs
2342 so we assume there was none */
2345 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2346 if (s->cc_op != CC_OP_DYNAMIC) {
2347 gen_op_set_cc_op(s->cc_op);
2349 gen_jmp_im(pc_start - s->cs_base);
2350 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2351 tcg_const_i32(type), tcg_const_i64(param));
2352 /* this is a special case as we do not know if the interception occurs
2353 so we assume there was none */
2358 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2359 if (s->cc_op != CC_OP_DYNAMIC) {
2360 gen_op_set_cc_op(s->cc_op);
2362 gen_jmp_im(pc_start - s->cs_base);
2363 tcg_gen_helper_0_2(helper_vmexit,
2364 tcg_const_i32(type), tcg_const_i64(param));
2365 /* we can optimize this one so TBs don't get longer
2366 than up to vmexit */
2375 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2377 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2380 static inline void gen_stack_update(DisasContext *s, int addend)
2382 #ifdef TARGET_X86_64
2384 gen_op_add_reg_im(2, R_ESP, addend);
2388 gen_op_add_reg_im(1, R_ESP, addend);
2390 gen_op_add_reg_im(0, R_ESP, addend);
2394 /* generate a push. It depends on ss32, addseg and dflag */
2395 static void gen_push_T0(DisasContext *s)
2397 #ifdef TARGET_X86_64
2399 gen_op_movq_A0_reg(R_ESP);
2401 gen_op_addq_A0_im(-8);
2402 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2404 gen_op_addq_A0_im(-2);
2405 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2407 gen_op_mov_reg_A0(2, R_ESP);
2411 gen_op_movl_A0_reg(R_ESP);
2413 gen_op_addl_A0_im(-2);
2415 gen_op_addl_A0_im(-4);
2418 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2419 gen_op_addl_A0_seg(R_SS);
2422 gen_op_andl_A0_ffff();
2423 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2424 gen_op_addl_A0_seg(R_SS);
2426 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2427 if (s->ss32 && !s->addseg)
2428 gen_op_mov_reg_A0(1, R_ESP);
2430 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2434 /* generate a push. It depends on ss32, addseg and dflag */
2435 /* slower version for T1, only used for call Ev */
2436 static void gen_push_T1(DisasContext *s)
2438 #ifdef TARGET_X86_64
2440 gen_op_movq_A0_reg(R_ESP);
2442 gen_op_addq_A0_im(-8);
2443 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2445 gen_op_addq_A0_im(-2);
2446 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2448 gen_op_mov_reg_A0(2, R_ESP);
2452 gen_op_movl_A0_reg(R_ESP);
2454 gen_op_addl_A0_im(-2);
2456 gen_op_addl_A0_im(-4);
2459 gen_op_addl_A0_seg(R_SS);
2462 gen_op_andl_A0_ffff();
2463 gen_op_addl_A0_seg(R_SS);
2465 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2467 if (s->ss32 && !s->addseg)
2468 gen_op_mov_reg_A0(1, R_ESP);
2470 gen_stack_update(s, (-2) << s->dflag);
2474 /* two step pop is necessary for precise exceptions */
2475 static void gen_pop_T0(DisasContext *s)
2477 #ifdef TARGET_X86_64
2479 gen_op_movq_A0_reg(R_ESP);
2480 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2484 gen_op_movl_A0_reg(R_ESP);
2487 gen_op_addl_A0_seg(R_SS);
2489 gen_op_andl_A0_ffff();
2490 gen_op_addl_A0_seg(R_SS);
2492 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2496 static void gen_pop_update(DisasContext *s)
2498 #ifdef TARGET_X86_64
2499 if (CODE64(s) && s->dflag) {
2500 gen_stack_update(s, 8);
2504 gen_stack_update(s, 2 << s->dflag);
2508 static void gen_stack_A0(DisasContext *s)
2510 gen_op_movl_A0_reg(R_ESP);
2512 gen_op_andl_A0_ffff();
2513 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2515 gen_op_addl_A0_seg(R_SS);
2518 /* NOTE: wrap around in 16 bit not fully handled */
2519 static void gen_pusha(DisasContext *s)
2522 gen_op_movl_A0_reg(R_ESP);
2523 gen_op_addl_A0_im(-16 << s->dflag);
2525 gen_op_andl_A0_ffff();
2526 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2528 gen_op_addl_A0_seg(R_SS);
2529 for(i = 0;i < 8; i++) {
2530 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2531 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2532 gen_op_addl_A0_im(2 << s->dflag);
2534 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2537 /* NOTE: wrap around in 16 bit not fully handled */
2538 static void gen_popa(DisasContext *s)
2541 gen_op_movl_A0_reg(R_ESP);
2543 gen_op_andl_A0_ffff();
2544 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2545 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2547 gen_op_addl_A0_seg(R_SS);
2548 for(i = 0;i < 8; i++) {
2549 /* ESP is not reloaded */
2551 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2552 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2554 gen_op_addl_A0_im(2 << s->dflag);
2556 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
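/* ENTER: push the old frame pointer, let a helper copy 'level' previous
   frame pointers when nested frames are requested, point EBP at the new
   frame and finally lower ESP by esp_addend plus the space used by the
   copied pointers. */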
2559 static void gen_enter(DisasContext *s, int esp_addend, int level)
2564 #ifdef TARGET_X86_64
2566 ot = s->dflag ? OT_QUAD : OT_WORD;
2569 gen_op_movl_A0_reg(R_ESP);
2570 gen_op_addq_A0_im(-opsize);
2571 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2574 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2575 gen_op_st_T0_A0(ot + s->mem_index);
2577 /* XXX: must save state */
2578 tcg_gen_helper_0_3(helper_enter64_level,
2579 tcg_const_i32(level),
2580 tcg_const_i32((ot == OT_QUAD)),
2583 gen_op_mov_reg_T1(ot, R_EBP);
2584 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2585 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2589 ot = s->dflag + OT_WORD;
2590 opsize = 2 << s->dflag;
2592 gen_op_movl_A0_reg(R_ESP);
2593 gen_op_addl_A0_im(-opsize);
2595 gen_op_andl_A0_ffff();
2596 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2598 gen_op_addl_A0_seg(R_SS);
2600 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2601 gen_op_st_T0_A0(ot + s->mem_index);
2603 /* XXX: must save state */
2604 tcg_gen_helper_0_3(helper_enter_level,
2605 tcg_const_i32(level),
2606 tcg_const_i32(s->dflag),
2609 gen_op_mov_reg_T1(ot, R_EBP);
2610 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2611 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
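/* Exceptions, software interrupts and debug traps are raised through
   helpers; cc_op and EIP are synchronized first so that the exception
   frame and any restart of the faulting instruction see a consistent
   CPU state, and translation of the block then stops. */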
2615 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2617 if (s->cc_op != CC_OP_DYNAMIC)
2618 gen_op_set_cc_op(s->cc_op);
2619 gen_jmp_im(cur_eip);
2620 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2624 /* an interrupt is different from an exception because of the
2626 static void gen_interrupt(DisasContext *s, int intno,
2627 target_ulong cur_eip, target_ulong next_eip)
2629 if (s->cc_op != CC_OP_DYNAMIC)
2630 gen_op_set_cc_op(s->cc_op);
2631 gen_jmp_im(cur_eip);
2632 tcg_gen_helper_0_2(helper_raise_interrupt,
2633 tcg_const_i32(intno),
2634 tcg_const_i32(next_eip - cur_eip));
2638 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2640 if (s->cc_op != CC_OP_DYNAMIC)
2641 gen_op_set_cc_op(s->cc_op);
2642 gen_jmp_im(cur_eip);
2643 tcg_gen_helper_0_0(helper_debug);
2647 /* generate a generic end of block. Trace exception is also generated
2649 static void gen_eob(DisasContext *s)
2651 if (s->cc_op != CC_OP_DYNAMIC)
2652 gen_op_set_cc_op(s->cc_op);
2653 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2654 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2656 if (s->singlestep_enabled) {
2657 tcg_gen_helper_0_0(helper_debug);
2659 tcg_gen_helper_0_0(helper_single_step);
2666 /* generate a jump to eip. No segment change must happen before as a
2667 direct call to the next block may occur */
2668 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2671 if (s->cc_op != CC_OP_DYNAMIC) {
2672 gen_op_set_cc_op(s->cc_op);
2673 s->cc_op = CC_OP_DYNAMIC;
2675 gen_goto_tb(s, tb_num, eip);
2683 static void gen_jmp(DisasContext *s, target_ulong eip)
2685 gen_jmp_tb(s, eip, 0);
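/* gen_ldq_env_A0()/gen_stq_env_A0() transfer one 64 bit quantity between
   the address in cpu_A0 and a field of CPUX86State (an MMX register or
   half of an XMM register); the 'o' variants below transfer a full
   128 bit XMM register as two 64 bit accesses. */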
2688 static inline void gen_ldq_env_A0(int idx, int offset)
2690 int mem_index = (idx >> 2) - 1;
2691 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2692 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2695 static inline void gen_stq_env_A0(int idx, int offset)
2697 int mem_index = (idx >> 2) - 1;
2698 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2699 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2702 static inline void gen_ldo_env_A0(int idx, int offset)
2704 int mem_index = (idx >> 2) - 1;
2705 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2706 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2707 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2708 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2709 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2712 static inline void gen_sto_env_A0(int idx, int offset)
2714 int mem_index = (idx >> 2) - 1;
2715 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2716 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2717 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2718 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2719 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2722 static inline void gen_op_movo(int d_offset, int s_offset)
2724 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2725 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2726 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2727 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2730 static inline void gen_op_movq(int d_offset, int s_offset)
2732 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2733 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2736 static inline void gen_op_movl(int d_offset, int s_offset)
2738 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2739 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2742 static inline void gen_op_movq_env_0(int d_offset)
2744 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2745 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2748 #define SSE_SPECIAL ((void *)1)
2749 #define SSE_DUMMY ((void *)2)
2751 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2752 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2753 helper_ ## x ## ss, helper_ ## x ## sd, }
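/* sse_op_table1 is indexed by the second opcode byte of a 0x0f
   instruction; each row holds up to four helpers, one per mandatory
   prefix (none, 0x66, 0xf3, 0xf2).  SSE_SPECIAL marks opcodes that
   gen_sse() decodes by hand, SSE_DUMMY marks entries that only need to
   be accepted by the table lookup (femms, emms, 3DNow!). */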
2755 static void *sse_op_table1[256][4] = {
2756 /* 3DNow! extensions */
2757 [0x0e] = { SSE_DUMMY }, /* femms */
2758 [0x0f] = { SSE_DUMMY }, /* pf... */
2759 /* pure SSE operations */
2760 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2761 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2762 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2763 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2764 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2765 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2766 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2767 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2769 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2770 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2771 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2772 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2773 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2774 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2775 [0x2e] = { helper_ucomiss, helper_ucomisd },
2776 [0x2f] = { helper_comiss, helper_comisd },
2777 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2778 [0x51] = SSE_FOP(sqrt),
2779 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2780 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2781 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2782 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2783 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2784 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2785 [0x58] = SSE_FOP(add),
2786 [0x59] = SSE_FOP(mul),
2787 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2788 helper_cvtss2sd, helper_cvtsd2ss },
2789 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2790 [0x5c] = SSE_FOP(sub),
2791 [0x5d] = SSE_FOP(min),
2792 [0x5e] = SSE_FOP(div),
2793 [0x5f] = SSE_FOP(max),
2795 [0xc2] = SSE_FOP(cmpeq),
2796 [0xc6] = { helper_shufps, helper_shufpd },
2798 /* MMX ops and their SSE extensions */
2799 [0x60] = MMX_OP2(punpcklbw),
2800 [0x61] = MMX_OP2(punpcklwd),
2801 [0x62] = MMX_OP2(punpckldq),
2802 [0x63] = MMX_OP2(packsswb),
2803 [0x64] = MMX_OP2(pcmpgtb),
2804 [0x65] = MMX_OP2(pcmpgtw),
2805 [0x66] = MMX_OP2(pcmpgtl),
2806 [0x67] = MMX_OP2(packuswb),
2807 [0x68] = MMX_OP2(punpckhbw),
2808 [0x69] = MMX_OP2(punpckhwd),
2809 [0x6a] = MMX_OP2(punpckhdq),
2810 [0x6b] = MMX_OP2(packssdw),
2811 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2812 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2813 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2814 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2815 [0x70] = { helper_pshufw_mmx,
2818 helper_pshuflw_xmm },
2819 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2820 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2821 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2822 [0x74] = MMX_OP2(pcmpeqb),
2823 [0x75] = MMX_OP2(pcmpeqw),
2824 [0x76] = MMX_OP2(pcmpeql),
2825 [0x77] = { SSE_DUMMY }, /* emms */
2826 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2827 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2828 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2829 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2830 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2831 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2832 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2833 [0xd1] = MMX_OP2(psrlw),
2834 [0xd2] = MMX_OP2(psrld),
2835 [0xd3] = MMX_OP2(psrlq),
2836 [0xd4] = MMX_OP2(paddq),
2837 [0xd5] = MMX_OP2(pmullw),
2838 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
2839 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2840 [0xd8] = MMX_OP2(psubusb),
2841 [0xd9] = MMX_OP2(psubusw),
2842 [0xda] = MMX_OP2(pminub),
2843 [0xdb] = MMX_OP2(pand),
2844 [0xdc] = MMX_OP2(paddusb),
2845 [0xdd] = MMX_OP2(paddusw),
2846 [0xde] = MMX_OP2(pmaxub),
2847 [0xdf] = MMX_OP2(pandn),
2848 [0xe0] = MMX_OP2(pavgb),
2849 [0xe1] = MMX_OP2(psraw),
2850 [0xe2] = MMX_OP2(psrad),
2851 [0xe3] = MMX_OP2(pavgw),
2852 [0xe4] = MMX_OP2(pmulhuw),
2853 [0xe5] = MMX_OP2(pmulhw),
2854 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2855 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2856 [0xe8] = MMX_OP2(psubsb),
2857 [0xe9] = MMX_OP2(psubsw),
2858 [0xea] = MMX_OP2(pminsw),
2859 [0xeb] = MMX_OP2(por),
2860 [0xec] = MMX_OP2(paddsb),
2861 [0xed] = MMX_OP2(paddsw),
2862 [0xee] = MMX_OP2(pmaxsw),
2863 [0xef] = MMX_OP2(pxor),
2864 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2865 [0xf1] = MMX_OP2(psllw),
2866 [0xf2] = MMX_OP2(pslld),
2867 [0xf3] = MMX_OP2(psllq),
2868 [0xf4] = MMX_OP2(pmuludq),
2869 [0xf5] = MMX_OP2(pmaddwd),
2870 [0xf6] = MMX_OP2(psadbw),
2871 [0xf7] = MMX_OP2(maskmov),
2872 [0xf8] = MMX_OP2(psubb),
2873 [0xf9] = MMX_OP2(psubw),
2874 [0xfa] = MMX_OP2(psubl),
2875 [0xfb] = MMX_OP2(psubq),
2876 [0xfc] = MMX_OP2(paddb),
2877 [0xfd] = MMX_OP2(paddw),
2878 [0xfe] = MMX_OP2(paddl),
2881 static void *sse_op_table2[3 * 8][2] = {
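/* Used by the immediate shift groups 0x71/0x72/0x73: the row is picked from
   the opcode (word/dword/qword forms) together with the /r field of the ModRM
   byte, i.e. index ((b - 1) & 3) * 8 + reg as computed in gen_sse(); the
   column again selects the MMX or XMM flavour of the helper. */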
2882 [0 + 2] = MMX_OP2(psrlw),
2883 [0 + 4] = MMX_OP2(psraw),
2884 [0 + 6] = MMX_OP2(psllw),
2885 [8 + 2] = MMX_OP2(psrld),
2886 [8 + 4] = MMX_OP2(psrad),
2887 [8 + 6] = MMX_OP2(pslld),
2888 [16 + 2] = MMX_OP2(psrlq),
2889 [16 + 3] = { NULL, helper_psrldq_xmm },
2890 [16 + 6] = MMX_OP2(psllq),
2891 [16 + 7] = { NULL, helper_pslldq_xmm },
2894 static void *sse_op_table3[4 * 3] = {
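/* Scalar int<->float conversions in three rows of four: cvtsi2ss/cvtsi2sd and
   their 64-bit-source forms, then cvttss2si/cvttsd2si, then cvtss2si/cvtsd2si,
   each with 64-bit-result variants.  gen_sse() picks the entry within a row
   from ((b >> 8) - 2) and (s->dflag == 2) * 2; the X86_64_ONLY entries are
   NULL when the 64-bit helpers are not compiled in. */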
2897 X86_64_ONLY(helper_cvtsq2ss),
2898 X86_64_ONLY(helper_cvtsq2sd),
2902 X86_64_ONLY(helper_cvttss2sq),
2903 X86_64_ONLY(helper_cvttsd2sq),
2907 X86_64_ONLY(helper_cvtss2sq),
2908 X86_64_ONLY(helper_cvtsd2sq),
2911 static void *sse_op_table4[8][4] = {
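/* Backs the 0xc2 compare instructions: the first index is the imm8 predicate
   read after the operands (eq/lt/le/unord/neq/nlt/nle/ord) and the second is
   the usual prefix column, so cmpps/cmppd/cmpss/cmpsd share one dispatch. */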
2922 static void *sse_op_table5[256] = {
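/* 3DNow! dispatch: these instructions carry their real opcode in a trailing
   imm8 (read in the 0x0f case of gen_sse() below), so the table is indexed
   directly by that byte. */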
2923 [0x0c] = helper_pi2fw,
2924 [0x0d] = helper_pi2fd,
2925 [0x1c] = helper_pf2iw,
2926 [0x1d] = helper_pf2id,
2927 [0x8a] = helper_pfnacc,
2928 [0x8e] = helper_pfpnacc,
2929 [0x90] = helper_pfcmpge,
2930 [0x94] = helper_pfmin,
2931 [0x96] = helper_pfrcp,
2932 [0x97] = helper_pfrsqrt,
2933 [0x9a] = helper_pfsub,
2934 [0x9e] = helper_pfadd,
2935 [0xa0] = helper_pfcmpgt,
2936 [0xa4] = helper_pfmax,
2937 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2938 [0xa7] = helper_movq, /* pfrsqit1 */
2939 [0xaa] = helper_pfsubr,
2940 [0xae] = helper_pfacc,
2941 [0xb0] = helper_pfcmpeq,
2942 [0xb4] = helper_pfmul,
2943 [0xb6] = helper_movq, /* pfrcpit2 */
2944 [0xb7] = helper_pmulhrw_mmx,
2945 [0xbb] = helper_pswapd,
2946 [0xbf] = helper_pavgb_mmx /* pavgusb */
2949 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2951 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2952 int modrm, mod, rm, reg, reg_addr, offset_addr;
2956 if (s->prefix & PREFIX_DATA)
2958 else if (s->prefix & PREFIX_REPZ)
2960 else if (s->prefix & PREFIX_REPNZ)
2964 sse_op2 = sse_op_table1[b][b1];
2967 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2977 /* simple MMX/SSE operation */
2978 if (s->flags & HF_TS_MASK) {
2979 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2982 if (s->flags & HF_EM_MASK) {
2984 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2987 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2990 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2993 tcg_gen_helper_0_0(helper_emms);
2998 tcg_gen_helper_0_0(helper_emms);
3001 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3002 the static cpu state) */
3004 tcg_gen_helper_0_0(helper_enter_mmx);
3007 modrm = ldub_code(s->pc++);
3008 reg = ((modrm >> 3) & 7);
3011 mod = (modrm >> 6) & 3;
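/* ModRM decode, for illustration: a modrm byte of 0xd1 (11 010 001b) gives
   mod = 3 (register operand), reg = 2 and rm = 1.  When rm names a register
   it is widened with REX.B below; mod != 3 means a memory operand whose
   address is built by gen_lea_modrm(). */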
3012 if (sse_op2 == SSE_SPECIAL) {
3015 case 0x0e7: /* movntq */
3018 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3019 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3021 case 0x1e7: /* movntdq */
3022 case 0x02b: /* movntps */
3023 case 0x12b: /* movntpd */
3024 case 0x3f0: /* lddqu */
3027 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3028 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3030 case 0x6e: /* movd mm, ea */
3031 #ifdef TARGET_X86_64
3032 if (s->dflag == 2) {
3033 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3034 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3038 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3039 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3040 offsetof(CPUX86State,fpregs[reg].mmx));
3041 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3044 case 0x16e: /* movd xmm, ea */
3045 #ifdef TARGET_X86_64
3046 if (s->dflag == 2) {
3047 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3048 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3049 offsetof(CPUX86State,xmm_regs[reg]));
3050 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3054 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3055 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3056 offsetof(CPUX86State,xmm_regs[reg]));
3057 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3058 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3061 case 0x6f: /* movq mm, ea */
3063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3064 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3067 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3068 offsetof(CPUX86State,fpregs[rm].mmx));
3069 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3070 offsetof(CPUX86State,fpregs[reg].mmx));
3073 case 0x010: /* movups */
3074 case 0x110: /* movupd */
3075 case 0x028: /* movaps */
3076 case 0x128: /* movapd */
3077 case 0x16f: /* movdqa xmm, ea */
3078 case 0x26f: /* movdqu xmm, ea */
3080 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3081 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3083 rm = (modrm & 7) | REX_B(s);
3084 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3085 offsetof(CPUX86State,xmm_regs[rm]));
3088 case 0x210: /* movss xmm, ea */
3090 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3091 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3092 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3094 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3095 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3096 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3098 rm = (modrm & 7) | REX_B(s);
3099 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3100 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3103 case 0x310: /* movsd xmm, ea */
3105 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3106 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3108 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3109 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3111 rm = (modrm & 7) | REX_B(s);
3112 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3113 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3116 case 0x012: /* movlps */
3117 case 0x112: /* movlpd */
3119 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3120 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3123 rm = (modrm & 7) | REX_B(s);
3124 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3125 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3128 case 0x212: /* movsldup */
3130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3131 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3133 rm = (modrm & 7) | REX_B(s);
3134 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3135 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3136 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3137 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3139 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3140 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3141 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3142 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3144 case 0x312: /* movddup */
3146 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3147 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3149 rm = (modrm & 7) | REX_B(s);
3150 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3151 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3153 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3154 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3156 case 0x016: /* movhps */
3157 case 0x116: /* movhpd */
3159 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3160 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3163 rm = (modrm & 7) | REX_B(s);
3164 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3165 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3168 case 0x216: /* movshdup */
3170 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3171 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3173 rm = (modrm & 7) | REX_B(s);
3174 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3175 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3176 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3177 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3179 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3180 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3181 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3182 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3184 case 0x7e: /* movd ea, mm */
3185 #ifdef TARGET_X86_64
3186 if (s->dflag == 2) {
3187 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3188 offsetof(CPUX86State,fpregs[reg].mmx));
3189 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3193 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3194 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3195 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3198 case 0x17e: /* movd ea, xmm */
3199 #ifdef TARGET_X86_64
3200 if (s->dflag == 2) {
3201 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3202 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3203 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3207 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3208 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3209 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3212 case 0x27e: /* movq xmm, ea */
3214 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3215 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3217 rm = (modrm & 7) | REX_B(s);
3218 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3219 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3221 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3223 case 0x7f: /* movq ea, mm */
3225 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3226 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3229 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3230 offsetof(CPUX86State,fpregs[reg].mmx));
3233 case 0x011: /* movups */
3234 case 0x111: /* movupd */
3235 case 0x029: /* movaps */
3236 case 0x129: /* movapd */
3237 case 0x17f: /* movdqa ea, xmm */
3238 case 0x27f: /* movdqu ea, xmm */
3240 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3241 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3243 rm = (modrm & 7) | REX_B(s);
3244 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3245 offsetof(CPUX86State,xmm_regs[reg]));
3248 case 0x211: /* movss ea, xmm */
3250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3251 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3252 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3254 rm = (modrm & 7) | REX_B(s);
3255 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3256 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3259 case 0x311: /* movsd ea, xmm */
3261 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3262 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3264 rm = (modrm & 7) | REX_B(s);
3265 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3266 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3269 case 0x013: /* movlps */
3270 case 0x113: /* movlpd */
3272 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3273 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3278 case 0x017: /* movhps */
3279 case 0x117: /* movhpd */
3281 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3282 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3287 case 0x71: /* shift mm, im */
3290 case 0x171: /* shift xmm, im */
3293 val = ldub_code(s->pc++);
3295 gen_op_movl_T0_im(val);
3296 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3298 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3299 op1_offset = offsetof(CPUX86State,xmm_t0);
3301 gen_op_movl_T0_im(val);
3302 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3304 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3305 op1_offset = offsetof(CPUX86State,mmx_t0);
3307 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3311 rm = (modrm & 7) | REX_B(s);
3312 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3315 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3317 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3318 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3319 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3321 case 0x050: /* movmskps */
3322 rm = (modrm & 7) | REX_B(s);
3323 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3324 offsetof(CPUX86State,xmm_regs[rm]));
3325 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3326 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3327 gen_op_mov_reg_T0(OT_LONG, reg);
3329 case 0x150: /* movmskpd */
3330 rm = (modrm & 7) | REX_B(s);
3331 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3332 offsetof(CPUX86State,xmm_regs[rm]));
3333 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3334 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3335 gen_op_mov_reg_T0(OT_LONG, reg);
3337 case 0x02a: /* cvtpi2ps */
3338 case 0x12a: /* cvtpi2pd */
3339 tcg_gen_helper_0_0(helper_enter_mmx);
3341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342 op2_offset = offsetof(CPUX86State,mmx_t0);
3343 gen_ldq_env_A0(s->mem_index, op2_offset);
3346 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3348 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3349 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3350 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3353 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3357 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3361 case 0x22a: /* cvtsi2ss */
3362 case 0x32a: /* cvtsi2sd */
3363 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3364 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3365 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3366 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3367 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3368 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3369 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3371 case 0x02c: /* cvttps2pi */
3372 case 0x12c: /* cvttpd2pi */
3373 case 0x02d: /* cvtps2pi */
3374 case 0x12d: /* cvtpd2pi */
3375 tcg_gen_helper_0_0(helper_enter_mmx);
3377 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3378 op2_offset = offsetof(CPUX86State,xmm_t0);
3379 gen_ldo_env_A0(s->mem_index, op2_offset);
3381 rm = (modrm & 7) | REX_B(s);
3382 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3384 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3385 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3386 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3389 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3392 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3395 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3398 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3402 case 0x22c: /* cvttss2si */
3403 case 0x32c: /* cvttsd2si */
3404 case 0x22d: /* cvtss2si */
3405 case 0x32d: /* cvtsd2si */
3406 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3408 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3410 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3412 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3413 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3415 op2_offset = offsetof(CPUX86State,xmm_t0);
3417 rm = (modrm & 7) | REX_B(s);
3418 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3420 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3422 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3423 if (ot == OT_LONG) {
3424 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3425 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3427 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3429 gen_op_mov_reg_T0(ot, reg);
3431 case 0xc4: /* pinsrw */
3434 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3435 val = ldub_code(s->pc++);
3438 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3439 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3442 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3443 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3446 case 0xc5: /* pextrw */
3450 val = ldub_code(s->pc++);
3453 rm = (modrm & 7) | REX_B(s);
3454 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3455 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3459 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3460 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3462 reg = ((modrm >> 3) & 7) | rex_r;
3463 gen_op_mov_reg_T0(OT_LONG, reg);
3465 case 0x1d6: /* movq ea, xmm */
3467 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3468 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3470 rm = (modrm & 7) | REX_B(s);
3471 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3472 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3473 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3476 case 0x2d6: /* movq2dq */
3477 tcg_gen_helper_0_0(helper_enter_mmx);
3479 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3480 offsetof(CPUX86State,fpregs[rm].mmx));
3481 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3483 case 0x3d6: /* movdq2q */
3484 tcg_gen_helper_0_0(helper_enter_mmx);
3485 rm = (modrm & 7) | REX_B(s);
3486 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3487 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3489 case 0xd7: /* pmovmskb */
3494 rm = (modrm & 7) | REX_B(s);
3495 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3496 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3499 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3500 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3502 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3503 reg = ((modrm >> 3) & 7) | rex_r;
3504 gen_op_mov_reg_T0(OT_LONG, reg);
3510 /* generic MMX or SSE operation */
3512 case 0x70: /* pshufx insn */
3513 case 0xc6: /* pshufx insn */
3514 case 0xc2: /* compare insns */
3521 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3523 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3524 op2_offset = offsetof(CPUX86State,xmm_t0);
3525 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3527 /* specific case for SSE single instructions */
3530 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3531 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3534 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3537 gen_ldo_env_A0(s->mem_index, op2_offset);
3540 rm = (modrm & 7) | REX_B(s);
3541 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3544 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3546 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3547 op2_offset = offsetof(CPUX86State,mmx_t0);
3548 gen_ldq_env_A0(s->mem_index, op2_offset);
3551 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3555 case 0x0f: /* 3DNow! data insns */
3556 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3558 val = ldub_code(s->pc++);
3559 sse_op2 = sse_op_table5[val];
3562 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3563 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3564 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3566 case 0x70: /* pshufx insn */
3567 case 0xc6: /* pshufx insn */
3568 val = ldub_code(s->pc++);
3569 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3570 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3571 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3575 val = ldub_code(s->pc++);
3578 sse_op2 = sse_op_table4[val][b1];
3579 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3580 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3581 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3584 /* maskmov : we must prepare A0 */
3587 #ifdef TARGET_X86_64
3588 if (s->aflag == 2) {
3589 gen_op_movq_A0_reg(R_EDI);
3593 gen_op_movl_A0_reg(R_EDI);
3595 gen_op_andl_A0_ffff();
3597 gen_add_A0_ds_seg(s);
3599 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3600 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3601 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3604 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3605 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3606 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3609 if (b == 0x2e || b == 0x2f) {
3610 s->cc_op = CC_OP_EFLAGS;
3615 /* convert one instruction. s->is_jmp is set if the translation must
3616 be stopped. Return the next pc value */
3617 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3619 int b, prefixes, aflag, dflag;
3621 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3622 target_ulong next_eip, tval;
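/* Rough flow: consume any prefix bytes (updating prefixes/dflag/aflag and, in
   long mode, the REX fields), fetch the opcode into b -- a 0x0f escape adds
   0x100 so one-byte and two-byte opcodes share the same switch -- and then
   dispatch on b. */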
3625 if (unlikely(loglevel & CPU_LOG_TB_OP))
3626 tcg_gen_debug_insn_start(pc_start);
3634 #ifdef TARGET_X86_64
3639 s->rip_offset = 0; /* for relative ip address */
3641 b = ldub_code(s->pc);
3643 /* check prefixes */
3644 #ifdef TARGET_X86_64
3648 prefixes |= PREFIX_REPZ;
3651 prefixes |= PREFIX_REPNZ;
3654 prefixes |= PREFIX_LOCK;
3675 prefixes |= PREFIX_DATA;
3678 prefixes |= PREFIX_ADR;
3682 rex_w = (b >> 3) & 1;
3683 rex_r = (b & 0x4) << 1;
3684 s->rex_x = (b & 0x2) << 2;
3685 REX_B(s) = (b & 0x1) << 3;
3686 x86_64_hregs = 1; /* select uniform byte register addressing */
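/* REX decode, for illustration: prefix byte 0x4c (0100 1100b) gives REX.W = 1
   and REX.R = 1, so the next instruction uses 64-bit operands and its ModRM
   reg field is extended to r8-r15.  The fields are stored pre-shifted (value 8
   when set) so they can simply be OR-ed into the reg/rm/index numbers. */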
3690 /* 0x66 is ignored if rex.w is set */
3693 if (prefixes & PREFIX_DATA)
3696 if (!(prefixes & PREFIX_ADR))
3703 prefixes |= PREFIX_REPZ;
3706 prefixes |= PREFIX_REPNZ;
3709 prefixes |= PREFIX_LOCK;
3730 prefixes |= PREFIX_DATA;
3733 prefixes |= PREFIX_ADR;
3736 if (prefixes & PREFIX_DATA)
3738 if (prefixes & PREFIX_ADR)
3742 s->prefix = prefixes;
3746 /* lock generation */
3747 if (prefixes & PREFIX_LOCK)
3748 tcg_gen_helper_0_0(helper_lock);
3750 /* now check op code */
3754 /**************************/
3755 /* extended op code */
3756 b = ldub_code(s->pc++) | 0x100;
3759 /**************************/
3777 ot = dflag + OT_WORD;
3780 case 0: /* OP Ev, Gv */
3781 modrm = ldub_code(s->pc++);
3782 reg = ((modrm >> 3) & 7) | rex_r;
3783 mod = (modrm >> 6) & 3;
3784 rm = (modrm & 7) | REX_B(s);
3786 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3788 } else if (op == OP_XORL && rm == reg) {
3790 /* xor reg, reg optimisation */
3792 s->cc_op = CC_OP_LOGICB + ot;
3793 gen_op_mov_reg_T0(ot, reg);
3794 gen_op_update1_cc();
3799 gen_op_mov_TN_reg(ot, 1, reg);
3800 gen_op(s, op, ot, opreg);
3802 case 1: /* OP Gv, Ev */
3803 modrm = ldub_code(s->pc++);
3804 mod = (modrm >> 6) & 3;
3805 reg = ((modrm >> 3) & 7) | rex_r;
3806 rm = (modrm & 7) | REX_B(s);
3808 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3809 gen_op_ld_T1_A0(ot + s->mem_index);
3810 } else if (op == OP_XORL && rm == reg) {
3813 gen_op_mov_TN_reg(ot, 1, rm);
3815 gen_op(s, op, ot, reg);
3817 case 2: /* OP A, Iv */
3818 val = insn_get(s, ot);
3819 gen_op_movl_T1_im(val);
3820 gen_op(s, op, ot, OR_EAX);
3826 case 0x80: /* GRP1 */
3836 ot = dflag + OT_WORD;
3838 modrm = ldub_code(s->pc++);
3839 mod = (modrm >> 6) & 3;
3840 rm = (modrm & 7) | REX_B(s);
3841 op = (modrm >> 3) & 7;
3847 s->rip_offset = insn_const_size(ot);
3848 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3859 val = insn_get(s, ot);
3862 val = (int8_t)insn_get(s, OT_BYTE);
3865 gen_op_movl_T1_im(val);
3866 gen_op(s, op, ot, opreg);
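/* GRP1 example, for illustration: "83 /5 ib" (e.g. sub $8, %ecx) arrives here
   with op = 5 taken from the ModRM reg field and the sign-extended byte
   immediate loaded into T1 just above, and is then funnelled through the same
   gen_op() as the two-register forms. */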
3870 /**************************/
3871 /* inc, dec, and other misc arith */
3872 case 0x40 ... 0x47: /* inc Gv */
3873 ot = dflag ? OT_LONG : OT_WORD;
3874 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3876 case 0x48 ... 0x4f: /* dec Gv */
3877 ot = dflag ? OT_LONG : OT_WORD;
3878 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3880 case 0xf6: /* GRP3 */
3885 ot = dflag + OT_WORD;
3887 modrm = ldub_code(s->pc++);
3888 mod = (modrm >> 6) & 3;
3889 rm = (modrm & 7) | REX_B(s);
3890 op = (modrm >> 3) & 7;
3893 s->rip_offset = insn_const_size(ot);
3894 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3895 gen_op_ld_T0_A0(ot + s->mem_index);
3897 gen_op_mov_TN_reg(ot, 0, rm);
3902 val = insn_get(s, ot);
3903 gen_op_movl_T1_im(val);
3904 gen_op_testl_T0_T1_cc();
3905 s->cc_op = CC_OP_LOGICB + ot;
3908 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3910 gen_op_st_T0_A0(ot + s->mem_index);
3912 gen_op_mov_reg_T0(ot, rm);
3916 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3918 gen_op_st_T0_A0(ot + s->mem_index);
3920 gen_op_mov_reg_T0(ot, rm);
3922 gen_op_update_neg_cc();
3923 s->cc_op = CC_OP_SUBB + ot;
3928 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3929 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3930 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3931 /* XXX: use 32 bit mul which could be faster */
3932 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3933 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3934 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3935 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3936 s->cc_op = CC_OP_MULB;
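/* The flag state for mul is derived from the upper half of the product:
   cc_dst holds the result and cc_src its high byte, so CC_OP_MULB later makes
   CF = OF reflect whether that high part is non-zero, matching the
   architectural rule that CF/OF signal a result too wide for the destination. */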
3939 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3940 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3941 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3942 /* XXX: use 32 bit mul which could be faster */
3943 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3944 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3945 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3946 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3947 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3948 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3949 s->cc_op = CC_OP_MULW;
3953 #ifdef TARGET_X86_64
3954 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3955 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3956 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3957 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3958 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3959 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3960 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3961 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3962 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3966 t0 = tcg_temp_new(TCG_TYPE_I64);
3967 t1 = tcg_temp_new(TCG_TYPE_I64);
3968 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3969 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3970 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3971 tcg_gen_mul_i64(t0, t0, t1);
3972 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3973 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3974 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3975 tcg_gen_shri_i64(t0, t0, 32);
3976 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3977 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3978 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3981 s->cc_op = CC_OP_MULL;
3983 #ifdef TARGET_X86_64
3985 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3986 s->cc_op = CC_OP_MULQ;
3994 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3995 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3996 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3997 /* XXX: use 32 bit mul which could be faster */
3998 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3999 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4000 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4001 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4002 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4003 s->cc_op = CC_OP_MULB;
4006 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4007 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4008 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4009 /* XXX: use 32 bit mul which could be faster */
4010 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4011 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4012 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4013 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4014 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4015 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4016 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4017 s->cc_op = CC_OP_MULW;
4021 #ifdef TARGET_X86_64
4022 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4023 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4024 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4025 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4026 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4027 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4028 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4029 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4030 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4031 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4035 t0 = tcg_temp_new(TCG_TYPE_I64);
4036 t1 = tcg_temp_new(TCG_TYPE_I64);
4037 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4038 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4039 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4040 tcg_gen_mul_i64(t0, t0, t1);
4041 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4042 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4043 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4044 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4045 tcg_gen_shri_i64(t0, t0, 32);
4046 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4047 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4048 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4051 s->cc_op = CC_OP_MULL;
4053 #ifdef TARGET_X86_64
4055 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4056 s->cc_op = CC_OP_MULQ;
4064 gen_jmp_im(pc_start - s->cs_base);
4065 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4068 gen_jmp_im(pc_start - s->cs_base);
4069 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4073 gen_jmp_im(pc_start - s->cs_base);
4074 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4076 #ifdef TARGET_X86_64
4078 gen_jmp_im(pc_start - s->cs_base);
4079 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4087 gen_jmp_im(pc_start - s->cs_base);
4088 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4091 gen_jmp_im(pc_start - s->cs_base);
4092 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4096 gen_jmp_im(pc_start - s->cs_base);
4097 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4099 #ifdef TARGET_X86_64
4101 gen_jmp_im(pc_start - s->cs_base);
4102 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4112 case 0xfe: /* GRP4 */
4113 case 0xff: /* GRP5 */
4117 ot = dflag + OT_WORD;
4119 modrm = ldub_code(s->pc++);
4120 mod = (modrm >> 6) & 3;
4121 rm = (modrm & 7) | REX_B(s);
4122 op = (modrm >> 3) & 7;
4123 if (op >= 2 && b == 0xfe) {
4127 if (op == 2 || op == 4) {
4128 /* operand size for jumps is 64 bit */
4130 } else if (op == 3 || op == 5) {
4131 /* for far calls and jumps, the operand is 16 or 32 bit, even
4133 ot = dflag ? OT_LONG : OT_WORD;
4134 } else if (op == 6) {
4135 /* default push size is 64 bit */
4136 ot = dflag ? OT_QUAD : OT_WORD;
4140 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4141 if (op >= 2 && op != 3 && op != 5)
4142 gen_op_ld_T0_A0(ot + s->mem_index);
4144 gen_op_mov_TN_reg(ot, 0, rm);
4148 case 0: /* inc Ev */
4153 gen_inc(s, ot, opreg, 1);
4155 case 1: /* dec Ev */
4160 gen_inc(s, ot, opreg, -1);
4162 case 2: /* call Ev */
4163 /* XXX: optimize if memory (no 'and' is necessary) */
4165 gen_op_andl_T0_ffff();
4166 next_eip = s->pc - s->cs_base;
4167 gen_movtl_T1_im(next_eip);
4172 case 3: /* lcall Ev */
4173 gen_op_ld_T1_A0(ot + s->mem_index);
4174 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4175 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4177 if (s->pe && !s->vm86) {
4178 if (s->cc_op != CC_OP_DYNAMIC)
4179 gen_op_set_cc_op(s->cc_op);
4180 gen_jmp_im(pc_start - s->cs_base);
4181 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4182 tcg_gen_helper_0_4(helper_lcall_protected,
4183 cpu_tmp2_i32, cpu_T[1],
4184 tcg_const_i32(dflag),
4185 tcg_const_i32(s->pc - pc_start));
4187 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4188 tcg_gen_helper_0_4(helper_lcall_real,
4189 cpu_tmp2_i32, cpu_T[1],
4190 tcg_const_i32(dflag),
4191 tcg_const_i32(s->pc - s->cs_base));
4195 case 4: /* jmp Ev */
4197 gen_op_andl_T0_ffff();
4201 case 5: /* ljmp Ev */
4202 gen_op_ld_T1_A0(ot + s->mem_index);
4203 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4204 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4206 if (s->pe && !s->vm86) {
4207 if (s->cc_op != CC_OP_DYNAMIC)
4208 gen_op_set_cc_op(s->cc_op);
4209 gen_jmp_im(pc_start - s->cs_base);
4210 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4211 tcg_gen_helper_0_3(helper_ljmp_protected,
4214 tcg_const_i32(s->pc - pc_start));
4216 gen_op_movl_seg_T0_vm(R_CS);
4217 gen_op_movl_T0_T1();
4222 case 6: /* push Ev */
4230 case 0x84: /* test Ev, Gv */
4235 ot = dflag + OT_WORD;
4237 modrm = ldub_code(s->pc++);
4238 mod = (modrm >> 6) & 3;
4239 rm = (modrm & 7) | REX_B(s);
4240 reg = ((modrm >> 3) & 7) | rex_r;
4242 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4243 gen_op_mov_TN_reg(ot, 1, reg);
4244 gen_op_testl_T0_T1_cc();
4245 s->cc_op = CC_OP_LOGICB + ot;
4248 case 0xa8: /* test eAX, Iv */
4253 ot = dflag + OT_WORD;
4254 val = insn_get(s, ot);
4256 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4257 gen_op_movl_T1_im(val);
4258 gen_op_testl_T0_T1_cc();
4259 s->cc_op = CC_OP_LOGICB + ot;
4262 case 0x98: /* CWDE/CBW */
4263 #ifdef TARGET_X86_64
4265 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4266 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4267 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4271 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4272 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4273 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4275 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4276 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4277 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4280 case 0x99: /* CDQ/CWD */
4281 #ifdef TARGET_X86_64
4283 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4284 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4285 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4289 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4290 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4291 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4292 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4294 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4295 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4296 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4297 gen_op_mov_reg_T0(OT_WORD, R_EDX);
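/* cwd/cdq/cqo just replicate the accumulator's sign bit into the high-half
   register, hence the arithmetic shifts above: e.g. cwd sets
   DX = (int16_t)AX >> 15, i.e. 0xffff when AX is negative and 0 otherwise. */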
4300 case 0x1af: /* imul Gv, Ev */
4301 case 0x69: /* imul Gv, Ev, I */
4303 ot = dflag + OT_WORD;
4304 modrm = ldub_code(s->pc++);
4305 reg = ((modrm >> 3) & 7) | rex_r;
4307 s->rip_offset = insn_const_size(ot);
4310 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4312 val = insn_get(s, ot);
4313 gen_op_movl_T1_im(val);
4314 } else if (b == 0x6b) {
4315 val = (int8_t)insn_get(s, OT_BYTE);
4316 gen_op_movl_T1_im(val);
4318 gen_op_mov_TN_reg(ot, 1, reg);
4321 #ifdef TARGET_X86_64
4322 if (ot == OT_QUAD) {
4323 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4326 if (ot == OT_LONG) {
4327 #ifdef TARGET_X86_64
4328 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4329 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4330 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4331 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4332 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4333 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4337 t0 = tcg_temp_new(TCG_TYPE_I64);
4338 t1 = tcg_temp_new(TCG_TYPE_I64);
4339 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4340 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4341 tcg_gen_mul_i64(t0, t0, t1);
4342 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4343 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4344 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4345 tcg_gen_shri_i64(t0, t0, 32);
4346 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4347 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4351 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4352 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4353 /* XXX: use 32 bit mul which could be faster */
4354 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4355 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4356 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4357 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4359 gen_op_mov_reg_T0(ot, reg);
4360 s->cc_op = CC_OP_MULB + ot;
4363 case 0x1c1: /* xadd Ev, Gv */
4367 ot = dflag + OT_WORD;
4368 modrm = ldub_code(s->pc++);
4369 reg = ((modrm >> 3) & 7) | rex_r;
4370 mod = (modrm >> 6) & 3;
4372 rm = (modrm & 7) | REX_B(s);
4373 gen_op_mov_TN_reg(ot, 0, reg);
4374 gen_op_mov_TN_reg(ot, 1, rm);
4375 gen_op_addl_T0_T1();
4376 gen_op_mov_reg_T1(ot, reg);
4377 gen_op_mov_reg_T0(ot, rm);
4379 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4380 gen_op_mov_TN_reg(ot, 0, reg);
4381 gen_op_ld_T1_A0(ot + s->mem_index);
4382 gen_op_addl_T0_T1();
4383 gen_op_st_T0_A0(ot + s->mem_index);
4384 gen_op_mov_reg_T1(ot, reg);
4386 gen_op_update2_cc();
4387 s->cc_op = CC_OP_ADDB + ot;
4390 case 0x1b1: /* cmpxchg Ev, Gv */
4393 TCGv t0, t1, t2, a0;
4398 ot = dflag + OT_WORD;
4399 modrm = ldub_code(s->pc++);
4400 reg = ((modrm >> 3) & 7) | rex_r;
4401 mod = (modrm >> 6) & 3;
4402 t0 = tcg_temp_local_new(TCG_TYPE_TL);
4403 t1 = tcg_temp_local_new(TCG_TYPE_TL);
4404 t2 = tcg_temp_local_new(TCG_TYPE_TL);
4405 a0 = tcg_temp_local_new(TCG_TYPE_TL);
4406 gen_op_mov_v_reg(ot, t1, reg);
4408 rm = (modrm & 7) | REX_B(s);
4409 gen_op_mov_v_reg(ot, t0, rm);
4411 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4412 tcg_gen_mov_tl(a0, cpu_A0);
4413 gen_op_ld_v(ot + s->mem_index, t0, a0);
4414 rm = 0; /* avoid warning */
4416 label1 = gen_new_label();
4417 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4418 tcg_gen_sub_tl(t2, t2, t0);
4420 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4422 label2 = gen_new_label();
4423 gen_op_mov_reg_v(ot, R_EAX, t0);
4425 gen_set_label(label1);
4426 gen_op_mov_reg_v(ot, rm, t1);
4427 gen_set_label(label2);
4429 tcg_gen_mov_tl(t1, t0);
4430 gen_op_mov_reg_v(ot, R_EAX, t0);
4431 gen_set_label(label1);
4433 gen_op_st_v(ot + s->mem_index, t1, a0);
4435 tcg_gen_mov_tl(cpu_cc_src, t0);
4436 tcg_gen_mov_tl(cpu_cc_dst, t2);
4437 s->cc_op = CC_OP_SUBB + ot;
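/* The branches above implement cmpxchg without a helper: t2 = EAX - dest; if
   the comparison is equal the source value (t1) is written to the destination,
   otherwise the destination value is copied into EAX (and, for the memory
   form, written back unchanged); the flags are then those of the comparison
   via CC_OP_SUBB + ot. */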
4444 case 0x1c7: /* cmpxchg8b */
4445 modrm = ldub_code(s->pc++);
4446 mod = (modrm >> 6) & 3;
4447 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4449 #ifdef TARGET_X86_64
4451 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4453 gen_jmp_im(pc_start - s->cs_base);
4454 if (s->cc_op != CC_OP_DYNAMIC)
4455 gen_op_set_cc_op(s->cc_op);
4456 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4457 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4461 if (!(s->cpuid_features & CPUID_CX8))
4463 gen_jmp_im(pc_start - s->cs_base);
4464 if (s->cc_op != CC_OP_DYNAMIC)
4465 gen_op_set_cc_op(s->cc_op);
4466 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4467 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4469 s->cc_op = CC_OP_EFLAGS;
4472 /**************************/
4474 case 0x50 ... 0x57: /* push */
4475 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4478 case 0x58 ... 0x5f: /* pop */
4480 ot = dflag ? OT_QUAD : OT_WORD;
4482 ot = dflag + OT_WORD;
4485 /* NOTE: order is important for pop %sp */
4487 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4489 case 0x60: /* pusha */
4494 case 0x61: /* popa */
4499 case 0x68: /* push Iv */
4502 ot = dflag ? OT_QUAD : OT_WORD;
4504 ot = dflag + OT_WORD;
4507 val = insn_get(s, ot);
4509 val = (int8_t)insn_get(s, OT_BYTE);
4510 gen_op_movl_T0_im(val);
4513 case 0x8f: /* pop Ev */
4515 ot = dflag ? OT_QUAD : OT_WORD;
4517 ot = dflag + OT_WORD;
4519 modrm = ldub_code(s->pc++);
4520 mod = (modrm >> 6) & 3;
4523 /* NOTE: order is important for pop %sp */
4525 rm = (modrm & 7) | REX_B(s);
4526 gen_op_mov_reg_T0(ot, rm);
4528 /* NOTE: order is important too for MMU exceptions */
4529 s->popl_esp_hack = 1 << ot;
4530 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4531 s->popl_esp_hack = 0;
4535 case 0xc8: /* enter */
4538 val = lduw_code(s->pc);
4540 level = ldub_code(s->pc++);
4541 gen_enter(s, val, level);
4544 case 0xc9: /* leave */
4545 /* XXX: exception not precise (ESP is updated before potential exception) */
4547 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4548 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4549 } else if (s->ss32) {
4550 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4551 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4553 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4554 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4558 ot = dflag ? OT_QUAD : OT_WORD;
4560 ot = dflag + OT_WORD;
4562 gen_op_mov_reg_T0(ot, R_EBP);
4565 case 0x06: /* push es */
4566 case 0x0e: /* push cs */
4567 case 0x16: /* push ss */
4568 case 0x1e: /* push ds */
4571 gen_op_movl_T0_seg(b >> 3);
4574 case 0x1a0: /* push fs */
4575 case 0x1a8: /* push gs */
4576 gen_op_movl_T0_seg((b >> 3) & 7);
4579 case 0x07: /* pop es */
4580 case 0x17: /* pop ss */
4581 case 0x1f: /* pop ds */
4586 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4589 /* if reg == SS, inhibit interrupts/trace. */
4590 /* If several instructions disable interrupts, only the
4592 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4593 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4597 gen_jmp_im(s->pc - s->cs_base);
4601 case 0x1a1: /* pop fs */
4602 case 0x1a9: /* pop gs */
4604 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4607 gen_jmp_im(s->pc - s->cs_base);
4612 /**************************/
4615 case 0x89: /* mov Gv, Ev */
4619 ot = dflag + OT_WORD;
4620 modrm = ldub_code(s->pc++);
4621 reg = ((modrm >> 3) & 7) | rex_r;
4623 /* generate a generic store */
4624 gen_ldst_modrm(s, modrm, ot, reg, 1);
4627 case 0xc7: /* mov Ev, Iv */
4631 ot = dflag + OT_WORD;
4632 modrm = ldub_code(s->pc++);
4633 mod = (modrm >> 6) & 3;
4635 s->rip_offset = insn_const_size(ot);
4636 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4638 val = insn_get(s, ot);
4639 gen_op_movl_T0_im(val);
4641 gen_op_st_T0_A0(ot + s->mem_index);
4643 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4646 case 0x8b: /* mov Ev, Gv */
4650 ot = OT_WORD + dflag;
4651 modrm = ldub_code(s->pc++);
4652 reg = ((modrm >> 3) & 7) | rex_r;
4654 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4655 gen_op_mov_reg_T0(ot, reg);
4657 case 0x8e: /* mov seg, Gv */
4658 modrm = ldub_code(s->pc++);
4659 reg = (modrm >> 3) & 7;
4660 if (reg >= 6 || reg == R_CS)
4662 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4663 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4665 /* if reg == SS, inhibit interrupts/trace */
4666 /* If several instructions disable interrupts, only the
4668 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4669 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4673 gen_jmp_im(s->pc - s->cs_base);
4677 case 0x8c: /* mov Gv, seg */
4678 modrm = ldub_code(s->pc++);
4679 reg = (modrm >> 3) & 7;
4680 mod = (modrm >> 6) & 3;
4683 gen_op_movl_T0_seg(reg);
4685 ot = OT_WORD + dflag;
4688 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4691 case 0x1b6: /* movzbS Gv, Eb */
4692 case 0x1b7: /* movzwS Gv, Ew */
4693 case 0x1be: /* movsbS Gv, Eb */
4694 case 0x1bf: /* movswS Gv, Ew */
4697 /* d_ot is the size of the destination */
4698 d_ot = dflag + OT_WORD;
4699 /* ot is the size of the source */
4700 ot = (b & 1) + OT_BYTE;
4701 modrm = ldub_code(s->pc++);
4702 reg = ((modrm >> 3) & 7) | rex_r;
4703 mod = (modrm >> 6) & 3;
4704 rm = (modrm & 7) | REX_B(s);
4707 gen_op_mov_TN_reg(ot, 0, rm);
4708 switch(ot | (b & 8)) {
4710 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4713 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4716 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4720 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4723 gen_op_mov_reg_T0(d_ot, reg);
4725 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4727 gen_op_lds_T0_A0(ot + s->mem_index);
4729 gen_op_ldu_T0_A0(ot + s->mem_index);
4731 gen_op_mov_reg_T0(d_ot, reg);
4736 case 0x8d: /* lea */
4737 ot = dflag + OT_WORD;
4738 modrm = ldub_code(s->pc++);
4739 mod = (modrm >> 6) & 3;
4742 reg = ((modrm >> 3) & 7) | rex_r;
4743 /* we must ensure that no segment is added */
4747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4749 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4752 case 0xa0: /* mov EAX, Ov */
4754 case 0xa2: /* mov Ov, EAX */
4757 target_ulong offset_addr;
4762 ot = dflag + OT_WORD;
4763 #ifdef TARGET_X86_64
4764 if (s->aflag == 2) {
4765 offset_addr = ldq_code(s->pc);
4767 gen_op_movq_A0_im(offset_addr);
4772 offset_addr = insn_get(s, OT_LONG);
4774 offset_addr = insn_get(s, OT_WORD);
4776 gen_op_movl_A0_im(offset_addr);
4778 gen_add_A0_ds_seg(s);
4780 gen_op_ld_T0_A0(ot + s->mem_index);
4781 gen_op_mov_reg_T0(ot, R_EAX);
4783 gen_op_mov_TN_reg(ot, 0, R_EAX);
4784 gen_op_st_T0_A0(ot + s->mem_index);
4788 case 0xd7: /* xlat */
4789 #ifdef TARGET_X86_64
4790 if (s->aflag == 2) {
4791 gen_op_movq_A0_reg(R_EBX);
4792 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4793 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4794 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4798 gen_op_movl_A0_reg(R_EBX);
4799 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4800 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4801 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4803 gen_op_andl_A0_ffff();
4805 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4807 gen_add_A0_ds_seg(s);
4808 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4809 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
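/* xlat, for illustration: AL = byte at DS:[(E/R)BX + zero-extended AL], which
   is exactly the address built in cpu_A0 above (RBX/EBX plus AL masked to
   0xff, truncated to the current address size, plus the DS segment base). */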
4811 case 0xb0 ... 0xb7: /* mov R, Ib */
4812 val = insn_get(s, OT_BYTE);
4813 gen_op_movl_T0_im(val);
4814 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4816 case 0xb8 ... 0xbf: /* mov R, Iv */
4817 #ifdef TARGET_X86_64
4821 tmp = ldq_code(s->pc);
4823 reg = (b & 7) | REX_B(s);
4824 gen_movtl_T0_im(tmp);
4825 gen_op_mov_reg_T0(OT_QUAD, reg);
4829 ot = dflag ? OT_LONG : OT_WORD;
4830 val = insn_get(s, ot);
4831 reg = (b & 7) | REX_B(s);
4832 gen_op_movl_T0_im(val);
4833 gen_op_mov_reg_T0(ot, reg);
4837 case 0x91 ... 0x97: /* xchg R, EAX */
4838 ot = dflag + OT_WORD;
4839 reg = (b & 7) | REX_B(s);
4843 case 0x87: /* xchg Ev, Gv */
4847 ot = dflag + OT_WORD;
4848 modrm = ldub_code(s->pc++);
4849 reg = ((modrm >> 3) & 7) | rex_r;
4850 mod = (modrm >> 6) & 3;
4852 rm = (modrm & 7) | REX_B(s);
4854 gen_op_mov_TN_reg(ot, 0, reg);
4855 gen_op_mov_TN_reg(ot, 1, rm);
4856 gen_op_mov_reg_T0(ot, rm);
4857 gen_op_mov_reg_T1(ot, reg);
4859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4860 gen_op_mov_TN_reg(ot, 0, reg);
4861 /* for xchg, lock is implicit */
4862 if (!(prefixes & PREFIX_LOCK))
4863 tcg_gen_helper_0_0(helper_lock);
4864 gen_op_ld_T1_A0(ot + s->mem_index);
4865 gen_op_st_T0_A0(ot + s->mem_index);
4866 if (!(prefixes & PREFIX_LOCK))
4867 tcg_gen_helper_0_0(helper_unlock);
4868 gen_op_mov_reg_T1(ot, reg);
4871 case 0xc4: /* les Gv */
4876 case 0xc5: /* lds Gv */
4881 case 0x1b2: /* lss Gv */
4884 case 0x1b4: /* lfs Gv */
4887 case 0x1b5: /* lgs Gv */
4890 ot = dflag ? OT_LONG : OT_WORD;
4891 modrm = ldub_code(s->pc++);
4892 reg = ((modrm >> 3) & 7) | rex_r;
4893 mod = (modrm >> 6) & 3;
4896 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4897 gen_op_ld_T1_A0(ot + s->mem_index);
4898 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4899 /* load the segment first to handle exceptions properly */
4900 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4901 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4902 /* then put the data */
4903 gen_op_mov_reg_T1(ot, reg);
4905 gen_jmp_im(s->pc - s->cs_base);
4910 /************************/
4921 ot = dflag + OT_WORD;
4923 modrm = ldub_code(s->pc++);
4924 mod = (modrm >> 6) & 3;
4925 op = (modrm >> 3) & 7;
4931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4934 opreg = (modrm & 7) | REX_B(s);
4939 gen_shift(s, op, ot, opreg, OR_ECX);
4942 shift = ldub_code(s->pc++);
4944 gen_shifti(s, op, ot, opreg, shift);
4959 case 0x1a4: /* shld imm */
4963 case 0x1a5: /* shld cl */
4967 case 0x1ac: /* shrd imm */
4971 case 0x1ad: /* shrd cl */
4975 ot = dflag + OT_WORD;
4976 modrm = ldub_code(s->pc++);
4977 mod = (modrm >> 6) & 3;
4978 rm = (modrm & 7) | REX_B(s);
4979 reg = ((modrm >> 3) & 7) | rex_r;
4981 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4986 gen_op_mov_TN_reg(ot, 1, reg);
4989 val = ldub_code(s->pc++);
4990 tcg_gen_movi_tl(cpu_T3, val);
4992 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4994 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
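/* For the double-shift forms the count ends up in cpu_T3 either way: the
   immediate variants read one extra code byte, the %cl variants load ECX, and
   gen_shiftd_rm_T1_T3() performs the combined shift with cpu_T[1] supplying
   the bits shifted in from the second operand. */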
4997 /************************/
5000 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5001 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5002 /* XXX: what to do if illegal op ? */
5003 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5006 modrm = ldub_code(s->pc++);
5007 mod = (modrm >> 6) & 3;
5009 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5012 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5014 case 0x00 ... 0x07: /* fxxxs */
5015 case 0x10 ... 0x17: /* fixxxl */
5016 case 0x20 ... 0x27: /* fxxxl */
5017 case 0x30 ... 0x37: /* fixxx */
5024 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5025 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5026 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5029 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5030 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5031 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5034 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5035 (s->mem_index >> 2) - 1);
5036 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5040 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5041 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5042 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5046 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5048 /* fcomp needs pop */
5049 tcg_gen_helper_0_0(helper_fpop);
5053 case 0x08: /* flds */
5054 case 0x0a: /* fsts */
5055 case 0x0b: /* fstps */
5056 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5057 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5058 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5063 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5064 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5065 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5068 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5069 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5070 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5073 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5074 (s->mem_index >> 2) - 1);
5075 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5079 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5080 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5081 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5086 /* XXX: the corresponding CPUID bit must be tested ! */
5089 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5090 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5091 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5094 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5095 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5096 (s->mem_index >> 2) - 1);
5100 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5101 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5102 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5105 tcg_gen_helper_0_0(helper_fpop);
5110 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5111 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5112 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5115 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5116 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5117 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5120 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5121 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5122 (s->mem_index >> 2) - 1);
5126 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5127 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5128 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5132 tcg_gen_helper_0_0(helper_fpop);
5136 case 0x0c: /* fldenv mem */
5137 if (s->cc_op != CC_OP_DYNAMIC)
5138 gen_op_set_cc_op(s->cc_op);
5139 gen_jmp_im(pc_start - s->cs_base);
5140 tcg_gen_helper_0_2(helper_fldenv,
5141 cpu_A0, tcg_const_i32(s->dflag));
5143 case 0x0d: /* fldcw mem */
5144 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5145 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5146 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5148 case 0x0e: /* fnstenv mem */
5149 if (s->cc_op != CC_OP_DYNAMIC)
5150 gen_op_set_cc_op(s->cc_op);
5151 gen_jmp_im(pc_start - s->cs_base);
5152 tcg_gen_helper_0_2(helper_fstenv,
5153 cpu_A0, tcg_const_i32(s->dflag));
5155 case 0x0f: /* fnstcw mem */
5156 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5157 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5158 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5160 case 0x1d: /* fldt mem */
5161 if (s->cc_op != CC_OP_DYNAMIC)
5162 gen_op_set_cc_op(s->cc_op);
5163 gen_jmp_im(pc_start - s->cs_base);
5164 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5166 case 0x1f: /* fstpt mem */
5167 if (s->cc_op != CC_OP_DYNAMIC)
5168 gen_op_set_cc_op(s->cc_op);
5169 gen_jmp_im(pc_start - s->cs_base);
5170 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5171 tcg_gen_helper_0_0(helper_fpop);
5173 case 0x2c: /* frstor mem */
5174 if (s->cc_op != CC_OP_DYNAMIC)
5175 gen_op_set_cc_op(s->cc_op);
5176 gen_jmp_im(pc_start - s->cs_base);
5177 tcg_gen_helper_0_2(helper_frstor,
5178 cpu_A0, tcg_const_i32(s->dflag));
5180 case 0x2e: /* fnsave mem */
5181 if (s->cc_op != CC_OP_DYNAMIC)
5182 gen_op_set_cc_op(s->cc_op);
5183 gen_jmp_im(pc_start - s->cs_base);
5184 tcg_gen_helper_0_2(helper_fsave,
5185 cpu_A0, tcg_const_i32(s->dflag));
5187 case 0x2f: /* fnstsw mem */
5188 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5189 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5190 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5192 case 0x3c: /* fbld */
5193 if (s->cc_op != CC_OP_DYNAMIC)
5194 gen_op_set_cc_op(s->cc_op);
5195 gen_jmp_im(pc_start - s->cs_base);
5196 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5198 case 0x3e: /* fbstp */
5199 if (s->cc_op != CC_OP_DYNAMIC)
5200 gen_op_set_cc_op(s->cc_op);
5201 gen_jmp_im(pc_start - s->cs_base);
5202 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5203 tcg_gen_helper_0_0(helper_fpop);
5205 case 0x3d: /* fildll */
5206 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5207 (s->mem_index >> 2) - 1);
5208 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5210 case 0x3f: /* fistpll */
5211 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5212 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5213 (s->mem_index >> 2) - 1);
5214 tcg_gen_helper_0_0(helper_fpop);
5220 /* register float ops */
5224 case 0x08: /* fld sti */
5225 tcg_gen_helper_0_0(helper_fpush);
5226 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5228 case 0x09: /* fxchg sti */
5229 case 0x29: /* fxchg4 sti, undocumented op */
5230 case 0x39: /* fxchg7 sti, undocumented op */
5231 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5233 case 0x0a: /* grp d9/2 */
5236 /* check exceptions (FreeBSD FPU probe) */
5237 if (s->cc_op != CC_OP_DYNAMIC)
5238 gen_op_set_cc_op(s->cc_op);
5239 gen_jmp_im(pc_start - s->cs_base);
5240 tcg_gen_helper_0_0(helper_fwait);
5246 case 0x0c: /* grp d9/4 */
5249 tcg_gen_helper_0_0(helper_fchs_ST0);
5252 tcg_gen_helper_0_0(helper_fabs_ST0);
5255 tcg_gen_helper_0_0(helper_fldz_FT0);
5256 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5259 tcg_gen_helper_0_0(helper_fxam_ST0);
5265 case 0x0d: /* grp d9/5 */
5269 tcg_gen_helper_0_0(helper_fpush);
5270 tcg_gen_helper_0_0(helper_fld1_ST0);
5273 tcg_gen_helper_0_0(helper_fpush);
5274 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5277 tcg_gen_helper_0_0(helper_fpush);
5278 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5281 tcg_gen_helper_0_0(helper_fpush);
5282 tcg_gen_helper_0_0(helper_fldpi_ST0);
5285 tcg_gen_helper_0_0(helper_fpush);
5286 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5289 tcg_gen_helper_0_0(helper_fpush);
5290 tcg_gen_helper_0_0(helper_fldln2_ST0);
5293 tcg_gen_helper_0_0(helper_fpush);
5294 tcg_gen_helper_0_0(helper_fldz_ST0);
5301 case 0x0e: /* grp d9/6 */
5304 tcg_gen_helper_0_0(helper_f2xm1);
5307 tcg_gen_helper_0_0(helper_fyl2x);
5310 tcg_gen_helper_0_0(helper_fptan);
5312 case 3: /* fpatan */
5313 tcg_gen_helper_0_0(helper_fpatan);
5315 case 4: /* fxtract */
5316 tcg_gen_helper_0_0(helper_fxtract);
5318 case 5: /* fprem1 */
5319 tcg_gen_helper_0_0(helper_fprem1);
5321 case 6: /* fdecstp */
5322 tcg_gen_helper_0_0(helper_fdecstp);
5325 case 7: /* fincstp */
5326 tcg_gen_helper_0_0(helper_fincstp);
5330 case 0x0f: /* grp d9/7 */
5333 tcg_gen_helper_0_0(helper_fprem);
5335 case 1: /* fyl2xp1 */
5336 tcg_gen_helper_0_0(helper_fyl2xp1);
5339 tcg_gen_helper_0_0(helper_fsqrt);
5341 case 3: /* fsincos */
5342 tcg_gen_helper_0_0(helper_fsincos);
5344 case 5: /* fscale */
5345 tcg_gen_helper_0_0(helper_fscale);
5347 case 4: /* frndint */
5348 tcg_gen_helper_0_0(helper_frndint);
5351 tcg_gen_helper_0_0(helper_fsin);
5355 tcg_gen_helper_0_0(helper_fcos);
5359 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5360 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5361 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5367 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5369 tcg_gen_helper_0_0(helper_fpop);
5371 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5372 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5376 case 0x02: /* fcom */
5377 case 0x22: /* fcom2, undocumented op */
5378 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5379 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5381 case 0x03: /* fcomp */
5382 case 0x23: /* fcomp3, undocumented op */
5383 case 0x32: /* fcomp5, undocumented op */
5384 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5385 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5386 tcg_gen_helper_0_0(helper_fpop);
5388 case 0x15: /* da/5 */
5390 case 1: /* fucompp */
5391 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5392 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5393 tcg_gen_helper_0_0(helper_fpop);
5394 tcg_gen_helper_0_0(helper_fpop);
5402 case 0: /* feni (287 only, just do nop here) */
5404 case 1: /* fdisi (287 only, just do nop here) */
5407 tcg_gen_helper_0_0(helper_fclex);
5409 case 3: /* fninit */
5410 tcg_gen_helper_0_0(helper_fninit);
5412 case 4: /* fsetpm (287 only, just do nop here) */
5418 case 0x1d: /* fucomi */
5419 if (s->cc_op != CC_OP_DYNAMIC)
5420 gen_op_set_cc_op(s->cc_op);
5421 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5422 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5423 s->cc_op = CC_OP_EFLAGS;
5425 case 0x1e: /* fcomi */
5426 if (s->cc_op != CC_OP_DYNAMIC)
5427 gen_op_set_cc_op(s->cc_op);
5428 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5429 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5430 s->cc_op = CC_OP_EFLAGS;
5432 case 0x28: /* ffree sti */
5433 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5435 case 0x2a: /* fst sti */
5436 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5438 case 0x2b: /* fstp sti */
5439 case 0x0b: /* fstp1 sti, undocumented op */
5440 case 0x3a: /* fstp8 sti, undocumented op */
5441 case 0x3b: /* fstp9 sti, undocumented op */
5442 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5443 tcg_gen_helper_0_0(helper_fpop);
5445 case 0x2c: /* fucom st(i) */
5446 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5447 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5449 case 0x2d: /* fucomp st(i) */
5450 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5451 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5452 tcg_gen_helper_0_0(helper_fpop);
5454 case 0x33: /* de/3 */
5456 case 1: /* fcompp */
5457 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5458 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5459 tcg_gen_helper_0_0(helper_fpop);
5460 tcg_gen_helper_0_0(helper_fpop);
5466 case 0x38: /* ffreep sti, undocumented op */
5467 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5468 tcg_gen_helper_0_0(helper_fpop);
5470 case 0x3c: /* df/4 */
5473 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5474 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5475 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5481 case 0x3d: /* fucomip */
5482 if (s->cc_op != CC_OP_DYNAMIC)
5483 gen_op_set_cc_op(s->cc_op);
5484 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5485 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5486 tcg_gen_helper_0_0(helper_fpop);
5487 s->cc_op = CC_OP_EFLAGS;
5489 case 0x3e: /* fcomip */
5490 if (s->cc_op != CC_OP_DYNAMIC)
5491 gen_op_set_cc_op(s->cc_op);
5492 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5493 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5494 tcg_gen_helper_0_0(helper_fpop);
5495 s->cc_op = CC_OP_EFLAGS;
5497 case 0x10 ... 0x13: /* fcmovxx */
5501 static const uint8_t fcmov_cc[8] = {
5507 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
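/* branch around the register move when the condition fails:
   fcmovcc writes ST(0) only if the EFLAGS test succeeds */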
5508 l1 = gen_new_label();
5509 gen_jcc1(s, s->cc_op, op1, l1);
5510 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5519 /************************/
5522 case 0xa4: /* movsS */
5527 ot = dflag + OT_WORD;
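/* with a REP/REPNZ prefix the gen_repz_* helper emits the ECX-driven loop;
   otherwise a single string iteration is generated */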
5529 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5530 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5536 case 0xaa: /* stosS */
5541 ot = dflag + OT_WORD;
5543 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5544 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5549 case 0xac: /* lodsS */
5554 ot = dflag + OT_WORD;
5555 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5556 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5561 case 0xae: /* scasS */
5566 ot = dflag + OT_WORD;
5567 if (prefixes & PREFIX_REPNZ) {
5568 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5569 } else if (prefixes & PREFIX_REPZ) {
5570 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5573 s->cc_op = CC_OP_SUBB + ot;
5577 case 0xa6: /* cmpsS */
5582 ot = dflag + OT_WORD;
5583 if (prefixes & PREFIX_REPNZ) {
5584 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5585 } else if (prefixes & PREFIX_REPZ) {
5586 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5589 s->cc_op = CC_OP_SUBB + ot;
5592 case 0x6c: /* insS */
5597 ot = dflag ? OT_LONG : OT_WORD;
5598 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5599 gen_op_andl_T0_ffff();
5600 gen_check_io(s, ot, pc_start - s->cs_base,
5601 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5602 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5603 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5608 case 0x6e: /* outsS */
5613 ot = dflag ? OT_LONG : OT_WORD;
5614 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5615 gen_op_andl_T0_ffff();
5616 gen_check_io(s, ot, pc_start - s->cs_base,
5617 svm_is_rep(prefixes) | 4);
5618 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5619 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5625 /************************/
5633 ot = dflag ? OT_LONG : OT_WORD;
5634 val = ldub_code(s->pc++);
5635 gen_op_movl_T0_im(val);
5636 gen_check_io(s, ot, pc_start - s->cs_base,
5637 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
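/* truncate the port number to 32 bits and let helper_in_func[ot]
   perform the width-specific port read into T1 */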
5638 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5639 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5640 gen_op_mov_reg_T1(ot, R_EAX);
5647 ot = dflag ? OT_LONG : OT_WORD;
5648 val = ldub_code(s->pc++);
5649 gen_op_movl_T0_im(val);
5650 gen_check_io(s, ot, pc_start - s->cs_base,
5651 svm_is_rep(prefixes));
5652 gen_op_mov_TN_reg(ot, 1, R_EAX);
5654 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5655 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5656 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5657 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5664 ot = dflag ? OT_LONG : OT_WORD;
5665 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5666 gen_op_andl_T0_ffff();
5667 gen_check_io(s, ot, pc_start - s->cs_base,
5668 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5669 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5670 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5671 gen_op_mov_reg_T1(ot, R_EAX);
5678 ot = dflag ? OT_LONG : OT_WORD;
5679 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5680 gen_op_andl_T0_ffff();
5681 gen_check_io(s, ot, pc_start - s->cs_base,
5682 svm_is_rep(prefixes));
5683 gen_op_mov_TN_reg(ot, 1, R_EAX);
5685 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5686 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5687 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5688 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5691 /************************/
5693 case 0xc2: /* ret im */
5694 val = ldsw_code(s->pc);
5697 if (CODE64(s) && s->dflag)
5699 gen_stack_update(s, val + (2 << s->dflag));
5701 gen_op_andl_T0_ffff();
5705 case 0xc3: /* ret */
5709 gen_op_andl_T0_ffff();
5713 case 0xca: /* lret im */
5714 val = ldsw_code(s->pc);
5717 if (s->pe && !s->vm86) {
5718 if (s->cc_op != CC_OP_DYNAMIC)
5719 gen_op_set_cc_op(s->cc_op);
5720 gen_jmp_im(pc_start - s->cs_base);
5721 tcg_gen_helper_0_2(helper_lret_protected,
5722 tcg_const_i32(s->dflag),
5723 tcg_const_i32(val));
5727 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5729 gen_op_andl_T0_ffff();
5730 /* NOTE: keeping EIP updated is not a problem in case of
5734 gen_op_addl_A0_im(2 << s->dflag);
5735 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5736 gen_op_movl_seg_T0_vm(R_CS);
5737 /* add stack offset */
5738 gen_stack_update(s, val + (4 << s->dflag));
5742 case 0xcb: /* lret */
5745 case 0xcf: /* iret */
5746 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5750 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5751 s->cc_op = CC_OP_EFLAGS;
5752 } else if (s->vm86) {
5754 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5756 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5757 s->cc_op = CC_OP_EFLAGS;
5760 if (s->cc_op != CC_OP_DYNAMIC)
5761 gen_op_set_cc_op(s->cc_op);
5762 gen_jmp_im(pc_start - s->cs_base);
5763 tcg_gen_helper_0_2(helper_iret_protected,
5764 tcg_const_i32(s->dflag),
5765 tcg_const_i32(s->pc - s->cs_base));
5766 s->cc_op = CC_OP_EFLAGS;
5770 case 0xe8: /* call im */
5773 tval = (int32_t)insn_get(s, OT_LONG);
5775 tval = (int16_t)insn_get(s, OT_WORD);
5776 next_eip = s->pc - s->cs_base;
5780 gen_movtl_T0_im(next_eip);
5785 case 0x9a: /* lcall im */
5787 unsigned int selector, offset;
5791 ot = dflag ? OT_LONG : OT_WORD;
5792 offset = insn_get(s, ot);
5793 selector = insn_get(s, OT_WORD);
5795 gen_op_movl_T0_im(selector);
5796 gen_op_movl_T1_imu(offset);
5799 case 0xe9: /* jmp im */
5801 tval = (int32_t)insn_get(s, OT_LONG);
5803 tval = (int16_t)insn_get(s, OT_WORD);
5804 tval += s->pc - s->cs_base;
5809 case 0xea: /* ljmp im */
5811 unsigned int selector, offset;
5815 ot = dflag ? OT_LONG : OT_WORD;
5816 offset = insn_get(s, ot);
5817 selector = insn_get(s, OT_WORD);
5819 gen_op_movl_T0_im(selector);
5820 gen_op_movl_T1_imu(offset);
5823 case 0xeb: /* jmp Jb */
5824 tval = (int8_t)insn_get(s, OT_BYTE);
5825 tval += s->pc - s->cs_base;
5830 case 0x70 ... 0x7f: /* jcc Jb */
5831 tval = (int8_t)insn_get(s, OT_BYTE);
5833 case 0x180 ... 0x18f: /* jcc Jv */
5835 tval = (int32_t)insn_get(s, OT_LONG);
5837 tval = (int16_t)insn_get(s, OT_WORD);
5840 next_eip = s->pc - s->cs_base;
5844 gen_jcc(s, b, tval, next_eip);
5847 case 0x190 ... 0x19f: /* setcc Gv */
5848 modrm = ldub_code(s->pc++);
5850 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5852 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5857 ot = dflag + OT_WORD;
5858 modrm = ldub_code(s->pc++);
5859 reg = ((modrm >> 3) & 7) | rex_r;
5860 mod = (modrm >> 6) & 3;
5861 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5864 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
5866 rm = (modrm & 7) | REX_B(s);
5867 gen_op_mov_v_reg(ot, t0, rm);
5869 #ifdef TARGET_X86_64
5870 if (ot == OT_LONG) {
5871 /* XXX: specific Intel behaviour ? */
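/* a 32-bit cmov in 64-bit mode always clears the upper half of the
   destination; only the low 32 bits are written conditionally */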
5872 l1 = gen_new_label();
5873 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5874 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5876 tcg_gen_movi_tl(cpu_tmp0, 0);
5877 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5881 l1 = gen_new_label();
5882 gen_jcc1(s, s->cc_op, b ^ 1, l1);
5883 gen_op_mov_reg_v(ot, reg, t0);
5890 /************************/
5892 case 0x9c: /* pushf */
5893 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5895 if (s->vm86 && s->iopl != 3) {
5896 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5898 if (s->cc_op != CC_OP_DYNAMIC)
5899 gen_op_set_cc_op(s->cc_op);
5900 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5904 case 0x9d: /* popf */
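/* popf: which EFLAGS bits may change depends on privilege: CPL 0 may
   update IF and IOPL, CPL <= IOPL may update IF only, otherwise neither;
   the 16-bit form restricts the update to the low word */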
5905 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5907 if (s->vm86 && s->iopl != 3) {
5908 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5913 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5914 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5916 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5917 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5920 if (s->cpl <= s->iopl) {
5922 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5923 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5925 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5926 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5930 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5931 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5933 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5934 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5939 s->cc_op = CC_OP_EFLAGS;
5940 /* abort translation because TF flag may change */
5941 gen_jmp_im(s->pc - s->cs_base);
5945 case 0x9e: /* sahf */
5946 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5948 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5949 if (s->cc_op != CC_OP_DYNAMIC)
5950 gen_op_set_cc_op(s->cc_op);
5951 gen_compute_eflags(cpu_cc_src);
5952 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5953 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5954 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5955 s->cc_op = CC_OP_EFLAGS;
5957 case 0x9f: /* lahf */
5958 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5960 if (s->cc_op != CC_OP_DYNAMIC)
5961 gen_op_set_cc_op(s->cc_op);
5962 gen_compute_eflags(cpu_T[0]);
5963 /* Note: gen_compute_eflags() only gives the condition codes */
5964 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5965 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5967 case 0xf5: /* cmc */
5968 if (s->cc_op != CC_OP_DYNAMIC)
5969 gen_op_set_cc_op(s->cc_op);
5970 gen_compute_eflags(cpu_cc_src);
5971 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5972 s->cc_op = CC_OP_EFLAGS;
5974 case 0xf8: /* clc */
5975 if (s->cc_op != CC_OP_DYNAMIC)
5976 gen_op_set_cc_op(s->cc_op);
5977 gen_compute_eflags(cpu_cc_src);
5978 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5979 s->cc_op = CC_OP_EFLAGS;
5981 case 0xf9: /* stc */
5982 if (s->cc_op != CC_OP_DYNAMIC)
5983 gen_op_set_cc_op(s->cc_op);
5984 gen_compute_eflags(cpu_cc_src);
5985 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5986 s->cc_op = CC_OP_EFLAGS;
5988 case 0xfc: /* cld */
5989 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5990 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5992 case 0xfd: /* std */
5993 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5994 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5997 /************************/
5998 /* bit operations */
5999 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6000 ot = dflag + OT_WORD;
6001 modrm = ldub_code(s->pc++);
6002 op = (modrm >> 3) & 7;
6003 mod = (modrm >> 6) & 3;
6004 rm = (modrm & 7) | REX_B(s);
6007 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6008 gen_op_ld_T0_A0(ot + s->mem_index);
6010 gen_op_mov_TN_reg(ot, 0, rm);
6013 val = ldub_code(s->pc++);
6014 gen_op_movl_T1_im(val);
6019 case 0x1a3: /* bt Gv, Ev */
6022 case 0x1ab: /* bts */
6025 case 0x1b3: /* btr */
6028 case 0x1bb: /* btc */
6031 ot = dflag + OT_WORD;
6032 modrm = ldub_code(s->pc++);
6033 reg = ((modrm >> 3) & 7) | rex_r;
6034 mod = (modrm >> 6) & 3;
6035 rm = (modrm & 7) | REX_B(s);
6036 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6038 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6039 /* specific case: we need to add a displacement */
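/* for memory operands the bit index selects the word containing the bit:
   sign-extend it, divide by the operand width to get a word offset,
   scale to bytes and add it to A0 */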
6040 gen_exts(ot, cpu_T[1]);
6041 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6042 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6043 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6044 gen_op_ld_T0_A0(ot + s->mem_index);
6046 gen_op_mov_TN_reg(ot, 0, rm);
6049 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6052 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6053 tcg_gen_movi_tl(cpu_cc_dst, 0);
6056 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6057 tcg_gen_movi_tl(cpu_tmp0, 1);
6058 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6059 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6062 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6063 tcg_gen_movi_tl(cpu_tmp0, 1);
6064 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6065 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6066 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6070 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6071 tcg_gen_movi_tl(cpu_tmp0, 1);
6072 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6073 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6076 s->cc_op = CC_OP_SARB + ot;
6079 gen_op_st_T0_A0(ot + s->mem_index);
6081 gen_op_mov_reg_T0(ot, rm);
6082 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6083 tcg_gen_movi_tl(cpu_cc_dst, 0);
6086 case 0x1bc: /* bsf */
6087 case 0x1bd: /* bsr */
6092 ot = dflag + OT_WORD;
6093 modrm = ldub_code(s->pc++);
6094 reg = ((modrm >> 3) & 7) | rex_r;
6095 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6096 gen_extu(ot, cpu_T[0]);
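/* if the source is zero, ZF must be set and the destination left unchanged:
   cc_dst stays 0 and the branch below skips both the helper call and the
   cc_dst = 1 update */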
6097 label1 = gen_new_label();
6098 tcg_gen_movi_tl(cpu_cc_dst, 0);
6099 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6100 tcg_gen_mov_tl(t0, cpu_T[0]);
6101 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6103 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
6105 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
6107 gen_op_mov_reg_T0(ot, reg);
6108 tcg_gen_movi_tl(cpu_cc_dst, 1);
6109 gen_set_label(label1);
6110 tcg_gen_discard_tl(cpu_cc_src);
6111 s->cc_op = CC_OP_LOGICB + ot;
6115 /************************/
6117 case 0x27: /* daa */
6120 if (s->cc_op != CC_OP_DYNAMIC)
6121 gen_op_set_cc_op(s->cc_op);
6122 tcg_gen_helper_0_0(helper_daa);
6123 s->cc_op = CC_OP_EFLAGS;
6125 case 0x2f: /* das */
6128 if (s->cc_op != CC_OP_DYNAMIC)
6129 gen_op_set_cc_op(s->cc_op);
6130 tcg_gen_helper_0_0(helper_das);
6131 s->cc_op = CC_OP_EFLAGS;
6133 case 0x37: /* aaa */
6136 if (s->cc_op != CC_OP_DYNAMIC)
6137 gen_op_set_cc_op(s->cc_op);
6138 tcg_gen_helper_0_0(helper_aaa);
6139 s->cc_op = CC_OP_EFLAGS;
6141 case 0x3f: /* aas */
6144 if (s->cc_op != CC_OP_DYNAMIC)
6145 gen_op_set_cc_op(s->cc_op);
6146 tcg_gen_helper_0_0(helper_aas);
6147 s->cc_op = CC_OP_EFLAGS;
6149 case 0xd4: /* aam */
6152 val = ldub_code(s->pc++);
6154 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6156 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6157 s->cc_op = CC_OP_LOGICB;
6160 case 0xd5: /* aad */
6163 val = ldub_code(s->pc++);
6164 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6165 s->cc_op = CC_OP_LOGICB;
6167 /************************/
6169 case 0x90: /* nop */
6170 /* XXX: xchg + rex handling */
6171 /* XXX: correct lock test for all insn */
6172 if (prefixes & PREFIX_LOCK)
6174 if (prefixes & PREFIX_REPZ) {
6175 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6178 case 0x9b: /* fwait */
6179 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6180 (HF_MP_MASK | HF_TS_MASK)) {
6181 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6183 if (s->cc_op != CC_OP_DYNAMIC)
6184 gen_op_set_cc_op(s->cc_op);
6185 gen_jmp_im(pc_start - s->cs_base);
6186 tcg_gen_helper_0_0(helper_fwait);
6189 case 0xcc: /* int3 */
6190 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6192 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6194 case 0xcd: /* int N */
6195 val = ldub_code(s->pc++);
6196 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6198 if (s->vm86 && s->iopl != 3) {
6199 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6201 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6204 case 0xce: /* into */
6207 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6209 if (s->cc_op != CC_OP_DYNAMIC)
6210 gen_op_set_cc_op(s->cc_op);
6211 gen_jmp_im(pc_start - s->cs_base);
6212 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6214 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6215 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6218 gen_debug(s, pc_start - s->cs_base);
6221 tb_flush(cpu_single_env);
6222 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6225 case 0xfa: /* cli */
6227 if (s->cpl <= s->iopl) {
6228 tcg_gen_helper_0_0(helper_cli);
6230 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6234 tcg_gen_helper_0_0(helper_cli);
6236 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6240 case 0xfb: /* sti */
6242 if (s->cpl <= s->iopl) {
6244 tcg_gen_helper_0_0(helper_sti);
6245 /* interrupts are enabled only after the first insn following sti */
6246 /* If several instructions disable interrupts, only the
6248 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6249 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6250 /* give a chance to handle pending irqs */
6251 gen_jmp_im(s->pc - s->cs_base);
6254 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6260 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6264 case 0x62: /* bound */
6267 ot = dflag ? OT_LONG : OT_WORD;
6268 modrm = ldub_code(s->pc++);
6269 reg = (modrm >> 3) & 7;
6270 mod = (modrm >> 6) & 3;
6273 gen_op_mov_TN_reg(ot, 0, reg);
6274 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6275 gen_jmp_im(pc_start - s->cs_base);
6276 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6278 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6280 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6282 case 0x1c8 ... 0x1cf: /* bswap reg */
6283 reg = (b & 7) | REX_B(s);
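/* byte-swap the selected register: a 64-bit swap when REX.W is set,
   otherwise a 32-bit swap (done through a 32-bit temporary on x86_64 targets) */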
6284 #ifdef TARGET_X86_64
6286 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6287 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6288 gen_op_mov_reg_T0(OT_QUAD, reg);
6292 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6294 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6295 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6296 tcg_gen_bswap_i32(tmp0, tmp0);
6297 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6298 gen_op_mov_reg_T0(OT_LONG, reg);
6302 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6303 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6304 gen_op_mov_reg_T0(OT_LONG, reg);
6308 case 0xd6: /* salc */
6311 if (s->cc_op != CC_OP_DYNAMIC)
6312 gen_op_set_cc_op(s->cc_op);
6313 gen_compute_eflags_c(cpu_T[0]);
6314 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6315 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6317 case 0xe0: /* loopnz */
6318 case 0xe1: /* loopz */
6319 case 0xe2: /* loop */
6320 case 0xe3: /* jecxz */
6324 tval = (int8_t)insn_get(s, OT_BYTE);
6325 next_eip = s->pc - s->cs_base;
6330 l1 = gen_new_label();
6331 l2 = gen_new_label();
6332 l3 = gen_new_label();
6335 case 0: /* loopnz */
6337 if (s->cc_op != CC_OP_DYNAMIC)
6338 gen_op_set_cc_op(s->cc_op);
6339 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6340 gen_op_jz_ecx(s->aflag, l3);
6341 gen_compute_eflags(cpu_tmp0);
6342 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6344 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6346 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6350 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6351 gen_op_jnz_ecx(s->aflag, l1);
6355 gen_op_jz_ecx(s->aflag, l1);
6360 gen_jmp_im(next_eip);
6369 case 0x130: /* wrmsr */
6370 case 0x132: /* rdmsr */
6372 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6376 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6377 tcg_gen_helper_0_0(helper_rdmsr);
6379 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6380 tcg_gen_helper_0_0(helper_wrmsr);
6386 case 0x131: /* rdtsc */
6387 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6389 gen_jmp_im(pc_start - s->cs_base);
6390 tcg_gen_helper_0_0(helper_rdtsc);
6392 case 0x133: /* rdpmc */
6393 gen_jmp_im(pc_start - s->cs_base);
6394 tcg_gen_helper_0_0(helper_rdpmc);
6396 case 0x134: /* sysenter */
6400 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6402 if (s->cc_op != CC_OP_DYNAMIC) {
6403 gen_op_set_cc_op(s->cc_op);
6404 s->cc_op = CC_OP_DYNAMIC;
6406 gen_jmp_im(pc_start - s->cs_base);
6407 tcg_gen_helper_0_0(helper_sysenter);
6411 case 0x135: /* sysexit */
6415 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6417 if (s->cc_op != CC_OP_DYNAMIC) {
6418 gen_op_set_cc_op(s->cc_op);
6419 s->cc_op = CC_OP_DYNAMIC;
6421 gen_jmp_im(pc_start - s->cs_base);
6422 tcg_gen_helper_0_0(helper_sysexit);
6426 #ifdef TARGET_X86_64
6427 case 0x105: /* syscall */
6428 /* XXX: is it usable in real mode? */
6429 if (s->cc_op != CC_OP_DYNAMIC) {
6430 gen_op_set_cc_op(s->cc_op);
6431 s->cc_op = CC_OP_DYNAMIC;
6433 gen_jmp_im(pc_start - s->cs_base);
6434 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6437 case 0x107: /* sysret */
6439 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6441 if (s->cc_op != CC_OP_DYNAMIC) {
6442 gen_op_set_cc_op(s->cc_op);
6443 s->cc_op = CC_OP_DYNAMIC;
6445 gen_jmp_im(pc_start - s->cs_base);
6446 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6447 /* condition codes are modified only in long mode */
6449 s->cc_op = CC_OP_EFLAGS;
6454 case 0x1a2: /* cpuid */
6455 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6457 tcg_gen_helper_0_0(helper_cpuid);
6459 case 0xf4: /* hlt */
6461 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6463 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6465 if (s->cc_op != CC_OP_DYNAMIC)
6466 gen_op_set_cc_op(s->cc_op);
6467 gen_jmp_im(s->pc - s->cs_base);
6468 tcg_gen_helper_0_0(helper_hlt);
6473 modrm = ldub_code(s->pc++);
6474 mod = (modrm >> 6) & 3;
6475 op = (modrm >> 3) & 7;
6478 if (!s->pe || s->vm86)
6480 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6482 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6486 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6489 if (!s->pe || s->vm86)
6492 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6494 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6496 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6497 gen_jmp_im(pc_start - s->cs_base);
6498 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6499 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6503 if (!s->pe || s->vm86)
6505 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6507 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6511 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6514 if (!s->pe || s->vm86)
6517 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6519 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6521 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6522 gen_jmp_im(pc_start - s->cs_base);
6523 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6524 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6529 if (!s->pe || s->vm86)
6531 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6532 if (s->cc_op != CC_OP_DYNAMIC)
6533 gen_op_set_cc_op(s->cc_op);
6535 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6537 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6538 s->cc_op = CC_OP_EFLAGS;
6545 modrm = ldub_code(s->pc++);
6546 mod = (modrm >> 6) & 3;
6547 op = (modrm >> 3) & 7;
6553 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6555 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6556 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6557 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6558 gen_add_A0_im(s, 2);
6559 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6561 gen_op_andl_T0_im(0xffffff);
6562 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6567 case 0: /* monitor */
6568 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6571 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6573 gen_jmp_im(pc_start - s->cs_base);
6574 #ifdef TARGET_X86_64
6575 if (s->aflag == 2) {
6576 gen_op_movq_A0_reg(R_EAX);
6580 gen_op_movl_A0_reg(R_EAX);
6582 gen_op_andl_A0_ffff();
6584 gen_add_A0_ds_seg(s);
6585 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6588 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6591 if (s->cc_op != CC_OP_DYNAMIC) {
6592 gen_op_set_cc_op(s->cc_op);
6593 s->cc_op = CC_OP_DYNAMIC;
6595 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6597 gen_jmp_im(s->pc - s->cs_base);
6598 tcg_gen_helper_0_0(helper_mwait);
6605 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6607 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6608 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6609 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6610 gen_add_A0_im(s, 2);
6611 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6613 gen_op_andl_T0_im(0xffffff);
6614 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6622 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6624 if (s->cc_op != CC_OP_DYNAMIC)
6625 gen_op_set_cc_op(s->cc_op);
6626 gen_jmp_im(s->pc - s->cs_base);
6627 tcg_gen_helper_0_0(helper_vmrun);
6628 s->cc_op = CC_OP_EFLAGS;
6631 case 1: /* VMMCALL */
6632 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6634 /* FIXME: cause #UD if hflags & SVM */
6635 tcg_gen_helper_0_0(helper_vmmcall);
6637 case 2: /* VMLOAD */
6638 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6640 tcg_gen_helper_0_0(helper_vmload);
6642 case 3: /* VMSAVE */
6643 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6645 tcg_gen_helper_0_0(helper_vmsave);
6648 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6650 tcg_gen_helper_0_0(helper_stgi);
6653 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6655 tcg_gen_helper_0_0(helper_clgi);
6657 case 6: /* SKINIT */
6658 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6660 tcg_gen_helper_0_0(helper_skinit);
6662 case 7: /* INVLPGA */
6663 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6665 tcg_gen_helper_0_0(helper_invlpga);
6670 } else if (s->cpl != 0) {
6671 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6673 if (gen_svm_check_intercept(s, pc_start,
6674 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6676 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6677 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6678 gen_add_A0_im(s, 2);
6679 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6681 gen_op_andl_T0_im(0xffffff);
6683 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6684 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6686 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6687 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6692 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6694 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6695 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6699 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6701 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6703 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6704 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6705 gen_jmp_im(s->pc - s->cs_base);
6709 case 7: /* invlpg */
6711 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6714 #ifdef TARGET_X86_64
6715 if (CODE64(s) && rm == 0) {
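/* swapgs: exchange the current GS base with the kernelgsbase MSR value */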
6717 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6718 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6719 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6720 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6727 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6729 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6730 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6731 gen_jmp_im(s->pc - s->cs_base);
6740 case 0x108: /* invd */
6741 case 0x109: /* wbinvd */
6743 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6745 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6750 case 0x63: /* arpl or movslS (x86_64) */
6751 #ifdef TARGET_X86_64
6754 /* d_ot is the size of destination */
6755 d_ot = dflag + OT_WORD;
6757 modrm = ldub_code(s->pc++);
6758 reg = ((modrm >> 3) & 7) | rex_r;
6759 mod = (modrm >> 6) & 3;
6760 rm = (modrm & 7) | REX_B(s);
6763 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6765 if (d_ot == OT_QUAD)
6766 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6767 gen_op_mov_reg_T0(d_ot, reg);
6769 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6770 if (d_ot == OT_QUAD) {
6771 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6773 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6775 gen_op_mov_reg_T0(d_ot, reg);
6783 if (!s->pe || s->vm86)
6785 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6786 t1 = tcg_temp_local_new(TCG_TYPE_TL);
6787 t2 = tcg_temp_local_new(TCG_TYPE_TL);
6789 modrm = ldub_code(s->pc++);
6790 reg = (modrm >> 3) & 7;
6791 mod = (modrm >> 6) & 3;
6794 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6795 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6797 gen_op_mov_v_reg(ot, t0, rm);
6799 gen_op_mov_v_reg(ot, t1, reg);
6800 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
6801 tcg_gen_andi_tl(t1, t1, 3);
6802 tcg_gen_movi_tl(t2, 0);
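/* arpl: if the destination selector's RPL (low two bits) is below the
   source's, raise it to the source RPL and record ZF in t2;
   otherwise ZF stays clear */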
6803 label1 = gen_new_label();
6804 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
6805 tcg_gen_andi_tl(t0, t0, ~3);
6806 tcg_gen_or_tl(t0, t0, t1);
6807 tcg_gen_movi_tl(t2, CC_Z);
6808 gen_set_label(label1);
6810 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
6812 gen_op_mov_reg_v(ot, rm, t0);
6814 if (s->cc_op != CC_OP_DYNAMIC)
6815 gen_op_set_cc_op(s->cc_op);
6816 gen_compute_eflags(cpu_cc_src);
6817 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6818 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
6819 s->cc_op = CC_OP_EFLAGS;
6825 case 0x102: /* lar */
6826 case 0x103: /* lsl */
6830 if (!s->pe || s->vm86)
6832 ot = dflag ? OT_LONG : OT_WORD;
6833 modrm = ldub_code(s->pc++);
6834 reg = ((modrm >> 3) & 7) | rex_r;
6835 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
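/* helper_lar/helper_lsl signal success by setting ZF in cc_src;
   the destination register is written only when ZF ended up set */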
6836 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6837 if (s->cc_op != CC_OP_DYNAMIC)
6838 gen_op_set_cc_op(s->cc_op);
6840 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
6842 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
6843 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6844 label1 = gen_new_label();
6845 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6846 gen_op_mov_reg_v(ot, reg, t0);
6847 gen_set_label(label1);
6848 s->cc_op = CC_OP_EFLAGS;
6853 modrm = ldub_code(s->pc++);
6854 mod = (modrm >> 6) & 3;
6855 op = (modrm >> 3) & 7;
6857 case 0: /* prefetchnta */
6858 case 1: /* prefetcht0 */
6859 case 2: /* prefetcht1 */
6860 case 3: /* prefetcht2 */
6863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6864 /* nothing more to do */
6866 default: /* nop (multi byte) */
6867 gen_nop_modrm(s, modrm);
6871 case 0x119 ... 0x11f: /* nop (multi byte) */
6872 modrm = ldub_code(s->pc++);
6873 gen_nop_modrm(s, modrm);
6875 case 0x120: /* mov reg, crN */
6876 case 0x122: /* mov crN, reg */
6878 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6880 modrm = ldub_code(s->pc++);
6881 if ((modrm & 0xc0) != 0xc0)
6883 rm = (modrm & 7) | REX_B(s);
6884 reg = ((modrm >> 3) & 7) | rex_r;
6896 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6897 gen_op_mov_TN_reg(ot, 0, rm);
6898 tcg_gen_helper_0_2(helper_movl_crN_T0,
6899 tcg_const_i32(reg), cpu_T[0]);
6900 gen_jmp_im(s->pc - s->cs_base);
6903 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6904 #if !defined(CONFIG_USER_ONLY)
6906 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6909 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[reg]));
6910 gen_op_mov_reg_T0(ot, rm);
6918 case 0x121: /* mov reg, drN */
6919 case 0x123: /* mov drN, reg */
6921 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6923 modrm = ldub_code(s->pc++);
6924 if ((modrm & 0xc0) != 0xc0)
6926 rm = (modrm & 7) | REX_B(s);
6927 reg = ((modrm >> 3) & 7) | rex_r;
6932 /* XXX: do it dynamically with CR4.DE bit */
6933 if (reg == 4 || reg == 5 || reg >= 8)
6936 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6937 gen_op_mov_TN_reg(ot, 0, rm);
6938 tcg_gen_helper_0_2(helper_movl_drN_T0,
6939 tcg_const_i32(reg), cpu_T[0]);
6940 gen_jmp_im(s->pc - s->cs_base);
6943 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6944 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6945 gen_op_mov_reg_T0(ot, rm);
6949 case 0x106: /* clts */
6951 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6953 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6954 tcg_gen_helper_0_0(helper_clts);
6955 /* abort block because static cpu state changed */
6956 gen_jmp_im(s->pc - s->cs_base);
6960 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6961 case 0x1c3: /* MOVNTI reg, mem */
6962 if (!(s->cpuid_features & CPUID_SSE2))
6964 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6965 modrm = ldub_code(s->pc++);
6966 mod = (modrm >> 6) & 3;
6969 reg = ((modrm >> 3) & 7) | rex_r;
6970 /* generate a generic store */
6971 gen_ldst_modrm(s, modrm, ot, reg, 1);
6974 modrm = ldub_code(s->pc++);
6975 mod = (modrm >> 6) & 3;
6976 op = (modrm >> 3) & 7;
6978 case 0: /* fxsave */
6979 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6980 (s->flags & HF_EM_MASK))
6982 if (s->flags & HF_TS_MASK) {
6983 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6986 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6987 if (s->cc_op != CC_OP_DYNAMIC)
6988 gen_op_set_cc_op(s->cc_op);
6989 gen_jmp_im(pc_start - s->cs_base);
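/* the second helper argument selects the 64-bit fxsave image layout
   when REX.W is in effect (dflag == 2) */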
6990 tcg_gen_helper_0_2(helper_fxsave,
6991 cpu_A0, tcg_const_i32((s->dflag == 2)));
6993 case 1: /* fxrstor */
6994 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6995 (s->flags & HF_EM_MASK))
6997 if (s->flags & HF_TS_MASK) {
6998 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7001 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7002 if (s->cc_op != CC_OP_DYNAMIC)
7003 gen_op_set_cc_op(s->cc_op);
7004 gen_jmp_im(pc_start - s->cs_base);
7005 tcg_gen_helper_0_2(helper_fxrstor,
7006 cpu_A0, tcg_const_i32((s->dflag == 2)));
7008 case 2: /* ldmxcsr */
7009 case 3: /* stmxcsr */
7010 if (s->flags & HF_TS_MASK) {
7011 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7014 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7017 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7019 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7020 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7022 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7023 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7026 case 5: /* lfence */
7027 case 6: /* mfence */
7028 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7031 case 7: /* sfence / clflush */
7032 if ((modrm & 0xc7) == 0xc0) {
7034 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7035 if (!(s->cpuid_features & CPUID_SSE))
7039 if (!(s->cpuid_features & CPUID_CLFLUSH))
7041 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7048 case 0x10d: /* 3DNow! prefetch(w) */
7049 modrm = ldub_code(s->pc++);
7050 mod = (modrm >> 6) & 3;
7053 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7054 /* ignore for now */
7056 case 0x1aa: /* rsm */
7057 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
7059 if (!(s->flags & HF_SMM_MASK))
7061 if (s->cc_op != CC_OP_DYNAMIC) {
7062 gen_op_set_cc_op(s->cc_op);
7063 s->cc_op = CC_OP_DYNAMIC;
7065 gen_jmp_im(s->pc - s->cs_base);
7066 tcg_gen_helper_0_0(helper_rsm);
7069 case 0x10e ... 0x10f:
7070 /* 3DNow! instructions, ignore prefixes */
7071 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7072 case 0x110 ... 0x117:
7073 case 0x128 ... 0x12f:
7074 case 0x150 ... 0x177:
7075 case 0x17c ... 0x17f:
7077 case 0x1c4 ... 0x1c6:
7078 case 0x1d0 ... 0x1fe:
7079 gen_sse(s, b, pc_start, rex_r);
7084 /* lock generation */
7085 if (s->prefix & PREFIX_LOCK)
7086 tcg_gen_helper_0_0(helper_unlock);
7089 if (s->prefix & PREFIX_LOCK)
7090 tcg_gen_helper_0_0(helper_unlock);
7091 /* XXX: ensure that no lock was generated */
7092 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7096 void optimize_flags_init(void)
7098 #if TCG_TARGET_REG_BITS == 32
7099 assert(sizeof(CCTable) == (1 << 3));
7101 assert(sizeof(CCTable) == (1 << 4));
7103 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
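/* the lazily evaluated condition-code state (cc_op, cc_src, cc_dst, cc_tmp)
   lives in CPUState and is exposed to TCG as global memory operands */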
7104 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
7105 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
7106 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
7107 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
7108 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
7109 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
7110 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
7111 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
7113 /* register helpers */
7115 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
7119 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7120 basic block 'tb'. If search_pc is TRUE, also generate PC
7121 information for each intermediate instruction. */
7122 static inline int gen_intermediate_code_internal(CPUState *env,
7123 TranslationBlock *tb,
7126 DisasContext dc1, *dc = &dc1;
7127 target_ulong pc_ptr;
7128 uint16_t *gen_opc_end;
7131 target_ulong pc_start;
7132 target_ulong cs_base;
7134 /* generate intermediate code */
7136 cs_base = tb->cs_base;
7138 cflags = tb->cflags;
7140 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7141 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7142 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7143 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7145 dc->vm86 = (flags >> VM_SHIFT) & 1;
7146 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7147 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7148 dc->tf = (flags >> TF_SHIFT) & 1;
7149 dc->singlestep_enabled = env->singlestep_enabled;
7150 dc->cc_op = CC_OP_DYNAMIC;
7151 dc->cs_base = cs_base;
7153 dc->popl_esp_hack = 0;
7154 /* select memory access functions */
7156 if (flags & HF_SOFTMMU_MASK) {
7158 dc->mem_index = 2 * 4;
7160 dc->mem_index = 1 * 4;
7162 dc->cpuid_features = env->cpuid_features;
7163 dc->cpuid_ext_features = env->cpuid_ext_features;
7164 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7165 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7166 #ifdef TARGET_X86_64
7167 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7168 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7171 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7172 (flags & HF_INHIBIT_IRQ_MASK)
7173 #ifndef CONFIG_SOFTMMU
7174 || (flags & HF_SOFTMMU_MASK)
7178 /* check addseg logic */
7179 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7180 printf("ERROR addseg\n");
7183 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
7184 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
7185 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
7186 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
7188 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7189 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7190 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7191 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7192 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7193 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7194 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7195 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7196 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7198 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7200 dc->is_jmp = DISAS_NEXT;
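/* main translation loop: honour breakpoints, optionally record per-insn PC
   and cc_op (search_pc), translate one instruction at a time, and end the
   block on jumps, single-step, inhibited IRQs or when the TB grows too large */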
7205 if (env->nb_breakpoints > 0) {
7206 for(j = 0; j < env->nb_breakpoints; j++) {
7207 if (env->breakpoints[j] == pc_ptr) {
7208 gen_debug(dc, pc_ptr - dc->cs_base);
7214 j = gen_opc_ptr - gen_opc_buf;
7218 gen_opc_instr_start[lj++] = 0;
7220 gen_opc_pc[lj] = pc_ptr;
7221 gen_opc_cc_op[lj] = dc->cc_op;
7222 gen_opc_instr_start[lj] = 1;
7224 pc_ptr = disas_insn(dc, pc_ptr);
7225 /* stop translation if indicated */
7228 /* in single step mode, we generate only one instruction and
7229 then generate an exception */
7230 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7231 the flag and abort the translation to give the irqs a
7232 chance to happen */
7233 if (dc->tf || dc->singlestep_enabled ||
7234 (flags & HF_INHIBIT_IRQ_MASK) ||
7235 (cflags & CF_SINGLE_INSN)) {
7236 gen_jmp_im(pc_ptr - dc->cs_base);
7240 /* if too long translation, stop generation too */
7241 if (gen_opc_ptr >= gen_opc_end ||
7242 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7243 gen_jmp_im(pc_ptr - dc->cs_base);
7248 *gen_opc_ptr = INDEX_op_end;
7249 /* pad the remaining gen_opc_instr_start slots for the last instruction */
7251 j = gen_opc_ptr - gen_opc_buf;
7254 gen_opc_instr_start[lj++] = 0;
7258 if (loglevel & CPU_LOG_TB_CPU) {
7259 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7261 if (loglevel & CPU_LOG_TB_IN_ASM) {
7263 fprintf(logfile, "----------------\n");
7264 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7265 #ifdef TARGET_X86_64
7270 disas_flags = !dc->code32;
7271 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7272 fprintf(logfile, "\n");
7277 tb->size = pc_ptr - pc_start;
7281 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7283 return gen_intermediate_code_internal(env, tb, 0);
7286 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7288 return gen_intermediate_code_internal(env, tb, 1);
7291 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7292 unsigned long searched_pc, int pc_pos, void *puc)
7296 if (loglevel & CPU_LOG_TB_OP) {
7298 fprintf(logfile, "RESTORE:\n");
7299 for(i = 0;i <= pc_pos; i++) {
7300 if (gen_opc_instr_start[i]) {
7301 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7304 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7305 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7306 (uint32_t)tb->cs_base);
7309 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7310 cc_op = gen_opc_cc_op[pc_pos];
7311 if (cc_op != CC_OP_DYNAMIC)