/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
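/* Decoded instruction prefixes are accumulated as a bitmask of the flags
   above; several of them may be active for one instruction (see the
   s->prefix tests in gen_sse below). */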
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#endif

//#define MACRO_TEST   1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

static int x86_64_hregs;
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
    OP_SHL1, /* undocumented */

/* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_A0, /* temporary register used when doing address evaluation */
static inline void gen_op_movl_T0_0(void)
    tcg_gen_movi_tl(cpu_T[0], 0);

static inline void gen_op_movl_T0_im(int32_t val)
    tcg_gen_movi_tl(cpu_T[0], val);

static inline void gen_op_movl_T0_imu(uint32_t val)
    tcg_gen_movi_tl(cpu_T[0], val);

static inline void gen_op_movl_T1_im(int32_t val)
    tcg_gen_movi_tl(cpu_T[1], val);

static inline void gen_op_movl_T1_imu(uint32_t val)
    tcg_gen_movi_tl(cpu_T[1], val);

static inline void gen_op_movl_A0_im(uint32_t val)
    tcg_gen_movi_tl(cpu_A0, val);

static inline void gen_op_movq_A0_im(int64_t val)
    tcg_gen_movi_tl(cpu_A0, val);

static inline void gen_movtl_T0_im(target_ulong val)
    tcg_gen_movi_tl(cpu_T[0], val);

static inline void gen_movtl_T1_im(target_ulong val)
    tcg_gen_movi_tl(cpu_T[1], val);

static inline void gen_op_andl_T0_ffff(void)
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);

static inline void gen_op_andl_T0_im(uint32_t val)
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);

static inline void gen_op_movl_T0_T1(void)
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);

static inline void gen_op_andl_A0_ffff(void)
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);

#ifdef TARGET_X86_64
#define NB_OP_SIZES 4
#else /* !TARGET_X86_64 */
#define NB_OP_SIZES 3
#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
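/* REG_B/W/L_OFFSET locate the 8, 16 and 32 bit sub-registers inside the
   target_ulong slot of a general purpose register; on big endian hosts
   the low bytes sit at the end of the word, hence the sizeof()
   adjustments above. REG_H_OFFSET addresses the AH/CH/DH/BH style high
   byte registers and REG_LH_OFFSET the upper half of a 64 bit register. */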
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
    if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
        tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
    tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
    /* high part of register set to zero */
    tcg_gen_movi_tl(cpu_tmp0, 0);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
    tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);

static inline void gen_op_mov_reg_T0(int ot, int reg)
    gen_op_mov_reg_TN(ot, 0, reg);

static inline void gen_op_mov_reg_T1(int ot, int reg)
    gen_op_mov_reg_TN(ot, 1, reg);

static inline void gen_op_mov_reg_A0(int size, int reg)
    tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
    /* high part of register set to zero */
    tcg_gen_movi_tl(cpu_tmp0, 0);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
    tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
    if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
        tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
    tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_movl_A0_reg(int reg)
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);

static inline void gen_op_addl_A0_im(int32_t val)
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);

static inline void gen_op_addq_A0_im(int64_t val)
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);

static void gen_add_A0_im(DisasContext *s, int val)
    gen_op_addq_A0_im(val);
    gen_op_addl_A0_im(val);

static inline void gen_op_addl_T0_T1(void)
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

static inline void gen_op_jmp_T0(void)
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_add_reg_T0(int size, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_set_cc_op(int32_t val)
    tcg_gen_movi_i32(cpu_cc_op, val);

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);

static inline void gen_op_movl_A0_seg(int reg)
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);

static inline void gen_op_addl_A0_seg(int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);

static inline void gen_op_movq_A0_seg(int reg)
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));

static inline void gen_op_addq_A0_seg(int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);

static inline void gen_op_movq_A0_reg(int reg)
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
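/* Guest memory accessors: 'idx' packs the operand size in its low two
   bits and the memory index above them, so callers pass ot + s->mem_index
   and (idx >> 2) - 1 recovers the softmmu index (e.g. a mem_index of 8
   selects index 1). */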
static inline void gen_op_lds_T0_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);

static inline void gen_op_ldu_T0_A0(int idx)
    gen_op_ld_T0_A0(idx);

static inline void gen_op_ld_T1_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);

static inline void gen_op_st_T0_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
    tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);

static inline void gen_op_st_T1_A0(int idx)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
    tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);

static inline void gen_jmp_im(target_ulong pc)
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));

static inline void gen_string_movl_A0_ESI(DisasContext *s)
    override = s->override;
    gen_op_movq_A0_seg(override);
    gen_op_addq_A0_reg_sN(0, R_ESI);
    gen_op_movq_A0_reg(R_ESI);
    if (s->addseg && override < 0)
    gen_op_movl_A0_seg(override);
    gen_op_addl_A0_reg_sN(0, R_ESI);
    gen_op_movl_A0_reg(R_ESI);
    /* 16 bit address, always override */
    gen_op_movl_A0_reg(R_ESI);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(override);

static inline void gen_string_movl_A0_EDI(DisasContext *s)
    gen_op_movq_A0_reg(R_EDI);
    gen_op_movl_A0_seg(R_ES);
    gen_op_addl_A0_reg_sN(0, R_EDI);
    gen_op_movl_A0_reg(R_EDI);
    gen_op_movl_A0_reg(R_EDI);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(R_ES);

static inline void gen_op_movl_T0_Dshift(int ot)
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);

static void gen_extu(int ot, TCGv reg)
    tcg_gen_ext8u_tl(reg, reg);
    tcg_gen_ext16u_tl(reg, reg);
    tcg_gen_ext32u_tl(reg, reg);

static void gen_exts(int ot, TCGv reg)
    tcg_gen_ext8s_tl(reg, reg);
    tcg_gen_ext16s_tl(reg, reg);
    tcg_gen_ext32s_tl(reg, reg);

static inline void gen_op_jnz_ecx(int size, int label1)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);

static inline void gen_op_jz_ecx(int size, int label1)
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
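/* Conditional branches on CX/ECX/RCX ('size' selects the width), used as
   the termination tests of REP prefixed string instructions. */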
static void *helper_in_func[3] = {

static void *helper_out_func[3] = {

static void *gen_check_io_func[3] = {

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
    if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));

static inline void gen_movs(DisasContext *s, int ot)
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
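/* The string operations below share one pattern: address memory through
   ESI and/or EDI, perform the access, then advance the index registers by
   the direction flag shift loaded by gen_op_movl_T0_Dshift. */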
static inline void gen_update_cc_op(DisasContext *s)
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;

static void gen_op_update1_cc(void)
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);

static void gen_op_update2_cc(void)
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);

static inline void gen_op_cmpl_T0_T1_cc(void)
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);

static inline void gen_op_testl_T0_T1_cc(void)
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);

static void gen_op_update_neg_cc(void)
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
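/* Condition codes are evaluated lazily: cc_op records which operation
   last set the flags while cc_src/cc_dst hold its operands and result,
   so the actual EFLAGS bits are only materialized on demand by the
   gen_compute_eflags* helpers below. */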
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
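/* cc_op indexes cc_table, whose entries hold the compute_all and
   compute_c helper pointers; the shift amount matches sizeof(CCTable):
   8 bytes (two 32 bit pointers) on 32 bit hosts, 16 on 64 bit ones. */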
/* compute all eflags to 'reg' (the caller usually passes cc_src) */
static void gen_compute_eflags(TCGv reg)
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);

static inline void gen_setcc_slow_T0(int op)
    gen_compute_eflags(cpu_T[0]);
    tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags_c(cpu_T[0]);
    gen_compute_eflags(cpu_T[0]);
    tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags(cpu_tmp0);
    tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags(cpu_T[0]);
    tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags(cpu_T[0]);
    tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags(cpu_tmp0);
    tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
    tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
    gen_compute_eflags(cpu_tmp0);
    tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
    tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
    tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
    jcc_op = (b >> 1) & 7;
    /* we optimize the cmp/jcc case */
    if (jcc_op == JCC_O || jcc_op == JCC_P)
    /* some jumps are easy to compute */
    if (jcc_op != JCC_Z && jcc_op != JCC_S)
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
    int inv, jcc_op, size, cond;

    jcc_op = (b >> 1) & 7;
    /* we optimize the cmp/jcc case */
    size = cc_op - CC_OP_SUBB;
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
    tcg_gen_brcond_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0,
                      tcg_const_tl(0), l1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
    tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                      tcg_const_tl(0), l1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
    tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                      tcg_const_tl(0), l1);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
    tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                      tcg_const_tl(0), l1);
    tcg_gen_brcond_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                      tcg_const_tl(0), l1);
    cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
    cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
    tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
    tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
    tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
    tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
    tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
    tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
    tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
    cond = inv ? TCG_COND_GE : TCG_COND_LT;
    cond = inv ? TCG_COND_GT : TCG_COND_LE;
    tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
    tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
    tcg_gen_ext8s_tl(t0, cpu_cc_src);
    tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
    tcg_gen_ext16s_tl(t0, cpu_cc_src);
#ifdef TARGET_X86_64
    tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
    tcg_gen_ext32s_tl(t0, cpu_cc_src);
    tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
    /* some jumps are easy to compute */
    size = (cc_op - CC_OP_ADDB) & 3;
    size = (cc_op - CC_OP_ADDB) & 3;
    gen_setcc_slow_T0(jcc_op);
    tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                      cpu_T[0], tcg_const_tl(0), l1);
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_jmp_tb(s, next_eip, 1);

static inline void gen_stos(DisasContext *s, int ot)
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);

static inline void gen_lods(DisasContext *s, int ot)
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);

static inline void gen_scas(DisasContext *s, int ot)
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);

static inline void gen_cmps(DisasContext *s, int ot)
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);

static inline void gen_ins(DisasContext *s, int ot)
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);

static inline void gen_outs(DisasContext *s, int ot)
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                    \
static inline void gen_repz_ ## op(DisasContext *s, int ot,             \
                                   target_ulong cur_eip, target_ulong next_eip) \
    gen_update_cc_op(s);                                                \
    l2 = gen_jz_ecx_string(s, next_eip);                                \
    gen_ ## op(s, ot);                                                  \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                             \
    /* a loop would cause two single step exceptions if ECX = 1         \
       before rep string_insn */                                        \
    gen_op_jz_ecx(s->aflag, l2);                                        \
    gen_jmp(s, cur_eip);                                                \

#define GEN_REPZ2(op)                                                   \
static inline void gen_repz_ ## op(DisasContext *s, int ot,             \
                                   target_ulong cur_eip,                \
                                   target_ulong next_eip,               \
    gen_update_cc_op(s);                                                \
    l2 = gen_jz_ecx_string(s, next_eip);                                \
    gen_ ## op(s, ot);                                                  \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                             \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                  \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);          \
    gen_op_jz_ecx(s->aflag, l2);                                        \
    gen_jmp(s, cur_eip);                                                \

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
    gen_op_mov_TN_reg(ot, 0, d);
    gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    gen_compute_eflags_c(cpu_tmp4);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
    tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
    s1->cc_op = CC_OP_DYNAMIC;
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    gen_compute_eflags_c(cpu_tmp4);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
    tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
    s1->cc_op = CC_OP_DYNAMIC;
    gen_op_addl_T0_T1();
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update2_cc();
    s1->cc_op = CC_OP_ADDB + ot;
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update2_cc();
    s1->cc_op = CC_OP_SUBB + ot;
    tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update1_cc();
    s1->cc_op = CC_OP_LOGICB + ot;
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update1_cc();
    s1->cc_op = CC_OP_LOGICB + ot;
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update1_cc();
    s1->cc_op = CC_OP_LOGICB + ot;
    gen_op_cmpl_T0_T1_cc();
    s1->cc_op = CC_OP_SUBB + ot;

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
    gen_op_mov_TN_reg(ot, 0, d);
    gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
    s1->cc_op = CC_OP_INCB + ot;
    tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
    s1->cc_op = CC_OP_DECB + ot;
    gen_op_mov_reg_T0(ot, d);
    gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);

/* XXX: add faster immediate case */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
    gen_exts(ot, cpu_T[0]);
    tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
    tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_extu(ot, cpu_T[0]);
    tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
    tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
    tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, op1);
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
    tcg_gen_shli_tl(ret, arg1, arg2);
    tcg_gen_shri_tl(ret, arg1, -arg2);
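/* tcg_gen_lshift: a positive 'arg2' shifts left, a negative one shifts
   right by -arg2; the rotate flag computation below uses this to move
   the result bit into the CC_O position whatever the operand size. */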
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
    int label1, label2, data_bits;

    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
    tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
    tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
    tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
    tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, op1);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static void *helper_rotc[8] = {
    X86_64_ONLY(helper_rclq),
    X86_64_ONLY(helper_rcrq),

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, op1);
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
    int label1, label2, data_bits;

    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(ot, 0, op1);
    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
        tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
        /* only needed if count > 16, but a test would complicate the code */
        tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
        tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        /* XXX: not optimal */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
        tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
        tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
        tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
        tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
        tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
        tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
        tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        data_bits = 8 << ot;
        tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
        tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
        tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
        tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
        tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, op1);

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
    gen_op_mov_TN_reg(ot, 1, s);
    gen_rot_rm_T1(s1, ot, d, 0);
    gen_rot_rm_T1(s1, ot, d, 1);
    gen_shift_rm_T1(s1, ot, d, 0, 0);
    gen_shift_rm_T1(s1, ot, d, 1, 0);
    gen_shift_rm_T1(s1, ot, d, 1, 1);
    gen_rotc_rm_T1(s1, ot, d, 0);
    gen_rotc_rm_T1(s1, ot, d, 1);

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);

static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    mod = (modrm >> 6) & 3;
    code = ldub_code(s->pc++);
    scale = (code >> 6) & 3;
    index = ((code >> 3) & 7) | REX_X(s);
    if ((base & 7) == 5) {
        disp = (int32_t)ldl_code(s->pc);
        if (CODE64(s) && !havesib) {
            disp += s->pc + s->rip_offset;
    disp = (int8_t)ldub_code(s->pc++);
    disp = ldl_code(s->pc);

    /* for correct popl handling with esp */
    if (base == 4 && s->popl_esp_hack)
        disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(base);
        gen_op_addq_A0_im(disp);
    gen_op_movl_A0_reg(base);
    gen_op_addl_A0_im(disp);
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_im(disp);
    gen_op_movl_A0_im(disp);
    /* XXX: index == 4 is always invalid */
    if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_addq_A0_reg_sN(scale, index);
        gen_op_addl_A0_reg_sN(scale, index);
    if (base == R_EBP || base == R_ESP)
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_A0_seg(override);
    gen_op_addl_A0_seg(override);
    disp = lduw_code(s->pc);
    gen_op_movl_A0_im(disp);
    rm = 0; /* avoid SS override */
    disp = (int8_t)ldub_code(s->pc++);
    disp = lduw_code(s->pc);
    gen_op_movl_A0_reg(R_EBX);
    gen_op_addl_A0_reg_sN(0, R_ESI);
    gen_op_movl_A0_reg(R_EBX);
    gen_op_addl_A0_reg_sN(0, R_EDI);
    gen_op_movl_A0_reg(R_EBP);
    gen_op_addl_A0_reg_sN(0, R_ESI);
    gen_op_movl_A0_reg(R_EBP);
    gen_op_addl_A0_reg_sN(0, R_EDI);
    gen_op_movl_A0_reg(R_ESI);
    gen_op_movl_A0_reg(R_EDI);
    gen_op_movl_A0_reg(R_EBP);
    gen_op_movl_A0_reg(R_EBX);
    gen_op_addl_A0_im(disp);
    gen_op_andl_A0_ffff();
    if (rm == 2 || rm == 3 || rm == 6)
    gen_op_addl_A0_seg(override);

static void gen_nop_modrm(DisasContext *s, int modrm)
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    code = ldub_code(s->pc++);

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
    int override, must_add_seg;
    must_add_seg = s->addseg;
    if (s->override >= 0) {
        override = s->override;
#ifdef TARGET_X86_64
    gen_op_addq_A0_seg(override);
    gen_op_addl_A0_seg(override);
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    gen_op_mov_TN_reg(ot, 0, reg);
    gen_op_mov_reg_T0(ot, rm);
    gen_op_mov_TN_reg(ot, 0, rm);
    gen_op_mov_reg_T0(ot, reg);
    gen_lea_modrm(s, modrm, &opreg, &disp);
    gen_op_mov_TN_reg(ot, 0, reg);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, reg);

static inline uint32_t insn_get(DisasContext *s, int ot)
    ret = ldub_code(s->pc);
    ret = lduw_code(s->pc);
    ret = ldl_code(s->pc);

static inline int insn_const_size(unsigned int ot)

static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
    TranslationBlock *tb;

    pc = s->cs_base + eip;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_exit_tb((long)tb + tb_num);
        /* jump to another page: currently not optimized */

static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    l1 = gen_new_label();
    gen_jcc1(s, cc_op, b, l1);
    gen_goto_tb(s, 0, next_eip);
    gen_goto_tb(s, 1, val);
    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_jcc1(s, cc_op, b, l1);
    gen_jmp_im(next_eip);

static void gen_setcc(DisasContext *s, int b)
    int inv, jcc_op, l1;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        tcg_gen_movi_tl(cpu_T[0], 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(cpu_T[0], 1);
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worthwhile */
        jcc_op = (b >> 1) & 7;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_setcc_slow_T0(jcc_op);
        tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);

static inline void gen_op_movl_T0_seg(int seg_reg)
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State, segs[seg_reg].selector));

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State, segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State, segs[seg_reg].base));

/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)

static inline int svm_is_rep(int prefixes)
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
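/* The constant 8 is assumed to correspond to the REP bit (bit 3) of the
   SVM IOIO intercept information, in the same way (1 << (4 + ot)) in
   gen_check_io sets the operand size bits. */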
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
    if (!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
    /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                           tcg_const_i32(type), tcg_const_i64(param));
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
        if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_vmexit,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */

gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
    return gen_svm_check_intercept_param(s, pc_start, type, 0);

static inline void gen_stack_update(DisasContext *s, int addend)
#ifdef TARGET_X86_64
    gen_op_add_reg_im(2, R_ESP, addend);
    gen_op_add_reg_im(1, R_ESP, addend);
    gen_op_add_reg_im(0, R_ESP, addend);
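/* ESP is updated as a 64, 32 or 16 bit quantity (gen_op_add_reg_im size
   codes 2/1/0) depending on CODE64(s) and s->ss32. */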
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
    gen_op_addq_A0_im(-8);
    gen_op_st_T0_A0(OT_QUAD + s->mem_index);
    gen_op_addq_A0_im(-2);
    gen_op_st_T0_A0(OT_WORD + s->mem_index);
    gen_op_mov_reg_A0(2, R_ESP);
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-2);
    gen_op_addl_A0_im(-4);
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_addl_A0_seg(R_SS);
    gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_addl_A0_seg(R_SS);
    gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
    if (s->ss32 && !s->addseg)
        gen_op_mov_reg_A0(1, R_ESP);
    gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);

/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
    gen_op_addq_A0_im(-8);
    gen_op_st_T1_A0(OT_QUAD + s->mem_index);
    gen_op_addq_A0_im(-2);
    gen_op_st_T0_A0(OT_WORD + s->mem_index);
    gen_op_mov_reg_A0(2, R_ESP);
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-2);
    gen_op_addl_A0_im(-4);
    gen_op_addl_A0_seg(R_SS);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(R_SS);
    gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
    if (s->ss32 && !s->addseg)
        gen_op_mov_reg_A0(1, R_ESP);
    gen_stack_update(s, (-2) << s->dflag);

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
#ifdef TARGET_X86_64
    gen_op_movq_A0_reg(R_ESP);
    gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_seg(R_SS);
    gen_op_andl_A0_ffff();
    gen_op_addl_A0_seg(R_SS);
    gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);

static void gen_pop_update(DisasContext *s)
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    gen_stack_update(s, 2 << s->dflag);

static void gen_stack_A0(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
    gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_addl_A0_seg(R_SS);

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_addl_A0_seg(R_SS);
    for (i = 0; i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
    gen_op_movl_A0_reg(R_ESP);
    gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    gen_op_addl_A0_seg(R_SS);
    for (i = 0; i < 8; i++) {
        /* ESP is not reloaded */
        gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        gen_op_addl_A0_im(2 << s->dflag);
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);

static void gen_enter(DisasContext *s, int esp_addend, int level)
#ifdef TARGET_X86_64
    ot = s->dflag ? OT_QUAD : OT_WORD;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addq_A0_im(-opsize);
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
    gen_op_st_T0_A0(ot + s->mem_index);
    /* XXX: must save state */
    tcg_gen_helper_0_3(helper_enter64_level,
                       tcg_const_i32(level),
                       tcg_const_i32((ot == OT_QUAD)),
    gen_op_mov_reg_T1(ot, R_EBP);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
    gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    ot = s->dflag + OT_WORD;
    opsize = 2 << s->dflag;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-opsize);
    gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    gen_op_addl_A0_seg(R_SS);
    gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
    gen_op_st_T0_A0(ot + s->mem_index);
    /* XXX: must save state */
    tcg_gen_helper_0_3(helper_enter_level,
                       tcg_const_i32(level),
                       tcg_const_i32(s->dflag),
    gen_op_mov_reg_T1(ot, R_EBP);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);

static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));

static void gen_debug(DisasContext *s, target_ulong cur_eip)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
        tcg_gen_helper_0_0(helper_single_step);

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    gen_goto_tb(s, tb_num, eip);

static void gen_jmp(DisasContext *s, target_ulong eip)
    gen_jmp_tb(s, eip, 0);

static inline void gen_ldq_env_A0(int idx, int offset)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);

static inline void gen_stq_env_A0(int idx, int offset)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);

static inline void gen_ldo_env_A0(int idx, int offset)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));

static inline void gen_sto_env_A0(int idx, int offset)
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
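/* 64 bit (ldq/stq) and 128 bit (ldo/sto) transfers between guest memory
   at A0 and a field of CPUState, used for the MMX and SSE register files. */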
static inline void gen_op_movo(int d_offset, int s_offset)
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);

static inline void gen_op_movq(int d_offset, int s_offset)
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);

static inline void gen_op_movl(int d_offset, int s_offset)
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);

static inline void gen_op_movq_env_0(int d_offset)
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);

#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
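/* sse_op_table1 is indexed by the second opcode byte and by b1, the
   mandatory prefix in effect: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2
   (see the PREFIX_* tests in gen_sse below). */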
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },
    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2754 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2755 [0xe8] = MMX_OP2(psubsb),
2756 [0xe9] = MMX_OP2(psubsw),
2757 [0xea] = MMX_OP2(pminsw),
2758 [0xeb] = MMX_OP2(por),
2759 [0xec] = MMX_OP2(paddsb),
2760 [0xed] = MMX_OP2(paddsw),
2761 [0xee] = MMX_OP2(pmaxsw),
2762 [0xef] = MMX_OP2(pxor),
2763 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2764 [0xf1] = MMX_OP2(psllw),
2765 [0xf2] = MMX_OP2(pslld),
2766 [0xf3] = MMX_OP2(psllq),
2767 [0xf4] = MMX_OP2(pmuludq),
2768 [0xf5] = MMX_OP2(pmaddwd),
2769 [0xf6] = MMX_OP2(psadbw),
2770 [0xf7] = MMX_OP2(maskmov),
2771 [0xf8] = MMX_OP2(psubb),
2772 [0xf9] = MMX_OP2(psubw),
2773 [0xfa] = MMX_OP2(psubl),
2774 [0xfb] = MMX_OP2(psubq),
2775 [0xfc] = MMX_OP2(paddb),
2776 [0xfd] = MMX_OP2(paddw),
2777 [0xfe] = MMX_OP2(paddl),
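/* Layout note: each sse_op_table1 row has four columns selected by the
   mandatory prefix: [0] = no prefix (MMX / packed single), [1] = 0x66
   (packed double), [2] = 0xF3 (scalar single), [3] = 0xF2 (scalar
   double).  SSE_SPECIAL entries are decoded by hand in gen_sse(). */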
2780 static void *sse_op_table2[3 * 8][2] = {
2781 [0 + 2] = MMX_OP2(psrlw),
2782 [0 + 4] = MMX_OP2(psraw),
2783 [0 + 6] = MMX_OP2(psllw),
2784 [8 + 2] = MMX_OP2(psrld),
2785 [8 + 4] = MMX_OP2(psrad),
2786 [8 + 6] = MMX_OP2(pslld),
2787 [16 + 2] = MMX_OP2(psrlq),
2788 [16 + 3] = { NULL, helper_psrldq_xmm },
2789 [16 + 6] = MMX_OP2(psllq),
2790 [16 + 7] = { NULL, helper_pslldq_xmm },
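/* Indexing sketch: rows 0..7 serve the 0x71 (word) group, 8..15 the
   0x72 (dword) group and 16..23 the 0x73 (qword) group; within a group
   the ModRM reg field picks /2 = srl, /4 = sra, /6 = sll, plus the
   XMM-only /3 = psrldq and /7 = pslldq in the qword group.  The second
   index is 0 for the MMX form and 1 for the 0x66-prefixed XMM form. */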
2793 static void *sse_op_table3[4 * 3] = {
2796 X86_64_ONLY(helper_cvtsq2ss),
2797 X86_64_ONLY(helper_cvtsq2sd),
2801 X86_64_ONLY(helper_cvttss2sq),
2802 X86_64_ONLY(helper_cvttsd2sq),
2806 X86_64_ONLY(helper_cvtss2sq),
2807 X86_64_ONLY(helper_cvtsd2sq),
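/* sse_op_table3 holds three groups of four converters (cvtsi2ss/sd,
   cvttss/sd2si, cvtss/sd2si), each with a 32-bit and an x86_64-only
   64-bit variant; the lookups below combine (s->dflag == 2) * 2, the
   prefix bits (b >> 8) - 2 and a group offset of 0, 4 or 8. */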
2810 static void *sse_op_table4[8][4] = {
2821 static void *sse_op_table5[256] = {
2822 [0x0c] = helper_pi2fw,
2823 [0x0d] = helper_pi2fd,
2824 [0x1c] = helper_pf2iw,
2825 [0x1d] = helper_pf2id,
2826 [0x8a] = helper_pfnacc,
2827 [0x8e] = helper_pfpnacc,
2828 [0x90] = helper_pfcmpge,
2829 [0x94] = helper_pfmin,
2830 [0x96] = helper_pfrcp,
2831 [0x97] = helper_pfrsqrt,
2832 [0x9a] = helper_pfsub,
2833 [0x9e] = helper_pfadd,
2834 [0xa0] = helper_pfcmpgt,
2835 [0xa4] = helper_pfmax,
2836 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2837 [0xa7] = helper_movq, /* pfrsqit1 */
2838 [0xaa] = helper_pfsubr,
2839 [0xae] = helper_pfacc,
2840 [0xb0] = helper_pfcmpeq,
2841 [0xb4] = helper_pfmul,
2842 [0xb6] = helper_movq, /* pfrcpit2 */
2843 [0xb7] = helper_pmulhrw_mmx,
2844 [0xbb] = helper_pswapd,
2845 [0xbf] = helper_pavgb_mmx /* pavgusb */
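/* 3DNow! note: these insns carry their real opcode as an imm8 suffix
   after the operands, so sse_op_table5 is indexed directly by that
   trailing byte (see the 0x0f case in gen_sse() below). */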
2848 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2850 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2851 int modrm, mod, rm, reg, reg_addr, offset_addr;
2855 if (s->prefix & PREFIX_DATA)
2857 else if (s->prefix & PREFIX_REPZ)
2859 else if (s->prefix & PREFIX_REPNZ)
2863 sse_op2 = sse_op_table1[b][b1];
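/* Worked example: for "66 0F 58 /r" (addpd), b = 0x58 and the 0x66
   prefix yields b1 = 1, so the SSE_FOP(add) row resolves to
   helper_addpd; 0xF3 and 0xF2 select columns 2 and 3 instead. */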
2866 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2876 /* simple MMX/SSE operation */
2877 if (s->flags & HF_TS_MASK) {
2878 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2881 if (s->flags & HF_EM_MASK) {
2883 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2886 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2889 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2892 tcg_gen_helper_0_0(helper_emms);
2897 tcg_gen_helper_0_0(helper_emms);
2900 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2901 the static cpu state) */
2903 tcg_gen_helper_0_0(helper_enter_mmx);
2906 modrm = ldub_code(s->pc++);
2907 reg = ((modrm >> 3) & 7);
2910 mod = (modrm >> 6) & 3;
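/* ModRM sketch: for modrm = 0xd8, mod = 3 (register operand),
   reg = 3 and rm = 0; rex_r and REX_B(s) extend reg and rm to
   8..15 when a REX prefix is present in long mode. */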
2911 if (sse_op2 == SSE_SPECIAL) {
2914 case 0x0e7: /* movntq */
2917 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2918 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2920 case 0x1e7: /* movntdq */
2921 case 0x02b: /* movntps */
2922 case 0x12b: /* movntpd */
2923 case 0x3f0: /* lddqu */
2926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2927 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2929 case 0x6e: /* movd mm, ea */
2930 #ifdef TARGET_X86_64
2931 if (s->dflag == 2) {
2932 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2933 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2937 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2938 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2939 offsetof(CPUX86State,fpregs[reg].mmx));
2940 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2943 case 0x16e: /* movd xmm, ea */
2944 #ifdef TARGET_X86_64
2945 if (s->dflag == 2) {
2946 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2947 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2948 offsetof(CPUX86State,xmm_regs[reg]));
2949 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2953 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2954 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2955 offsetof(CPUX86State,xmm_regs[reg]));
2956 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2957 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2960 case 0x6f: /* movq mm, ea */
2962 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2963 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2966 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2967 offsetof(CPUX86State,fpregs[rm].mmx));
2968 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2969 offsetof(CPUX86State,fpregs[reg].mmx));
2972 case 0x010: /* movups */
2973 case 0x110: /* movupd */
2974 case 0x028: /* movaps */
2975 case 0x128: /* movapd */
2976 case 0x16f: /* movdqa xmm, ea */
2977 case 0x26f: /* movdqu xmm, ea */
2979 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2980 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2982 rm = (modrm & 7) | REX_B(s);
2983 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2984 offsetof(CPUX86State,xmm_regs[rm]));
2987 case 0x210: /* movss xmm, ea */
2989 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2990 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2991 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2993 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2994 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2995 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2997 rm = (modrm & 7) | REX_B(s);
2998 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2999 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3002 case 0x310: /* movsd xmm, ea */
3004 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3005 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3007 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3008 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3010 rm = (modrm & 7) | REX_B(s);
3011 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3012 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3015 case 0x012: /* movlps */
3016 case 0x112: /* movlpd */
3018 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3019 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3022 rm = (modrm & 7) | REX_B(s);
3023 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3024 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3027 case 0x212: /* movsldup */
3029 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3030 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3032 rm = (modrm & 7) | REX_B(s);
3033 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3034 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3035 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3036 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3038 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3039 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3040 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3041 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3043 case 0x312: /* movddup */
3045 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3046 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3048 rm = (modrm & 7) | REX_B(s);
3049 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3050 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3052 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3053 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3055 case 0x016: /* movhps */
3056 case 0x116: /* movhpd */
3058 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3059 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3062 rm = (modrm & 7) | REX_B(s);
3063 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3064 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3067 case 0x216: /* movshdup */
3069 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3070 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3072 rm = (modrm & 7) | REX_B(s);
3073 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3074 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3075 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3076 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3078 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3079 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3080 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3081 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3083 case 0x7e: /* movd ea, mm */
3084 #ifdef TARGET_X86_64
3085 if (s->dflag == 2) {
3086 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3087 offsetof(CPUX86State,fpregs[reg].mmx));
3088 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3092 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3093 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3094 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3097 case 0x17e: /* movd ea, xmm */
3098 #ifdef TARGET_X86_64
3099 if (s->dflag == 2) {
3100 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3101 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3102 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3106 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3107 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3108 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3111 case 0x27e: /* movq xmm, ea */
3113 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3114 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3116 rm = (modrm & 7) | REX_B(s);
3117 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3118 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3120 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3122 case 0x7f: /* movq ea, mm */
3124 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3128 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3129 offsetof(CPUX86State,fpregs[reg].mmx));
3132 case 0x011: /* movups */
3133 case 0x111: /* movupd */
3134 case 0x029: /* movaps */
3135 case 0x129: /* movapd */
3136 case 0x17f: /* movdqa ea, xmm */
3137 case 0x27f: /* movdqu ea, xmm */
3139 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3140 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3142 rm = (modrm & 7) | REX_B(s);
3143 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3144 offsetof(CPUX86State,xmm_regs[reg]));
3147 case 0x211: /* movss ea, xmm */
3149 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3150 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3151 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3153 rm = (modrm & 7) | REX_B(s);
3154 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3155 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3158 case 0x311: /* movsd ea, xmm */
3160 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3161 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3163 rm = (modrm & 7) | REX_B(s);
3164 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3165 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3168 case 0x013: /* movlps */
3169 case 0x113: /* movlpd */
3171 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3172 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3177 case 0x017: /* movhps */
3178 case 0x117: /* movhpd */
3180 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3181 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3186 case 0x71: /* shift mm, im */
3189 case 0x171: /* shift xmm, im */
3192 val = ldub_code(s->pc++);
3194 gen_op_movl_T0_im(val);
3195 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3197 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3198 op1_offset = offsetof(CPUX86State,xmm_t0);
3200 gen_op_movl_T0_im(val);
3201 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3203 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3204 op1_offset = offsetof(CPUX86State,mmx_t0);
3206 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
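/* Example: pslldq xmm, imm8 ("66 0F 73 /7 ib") has b = 0x73, so
   ((b - 1) & 3) * 8 = 16, the reg field 7 selects entry [16 + 7]
   and b1 = 1 resolves it to helper_pslldq_xmm. */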
3210 rm = (modrm & 7) | REX_B(s);
3211 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3214 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3216 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3217 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3218 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3220 case 0x050: /* movmskps */
3221 rm = (modrm & 7) | REX_B(s);
3222 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3223 offsetof(CPUX86State,xmm_regs[rm]));
3224 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3225 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3226 gen_op_mov_reg_T0(OT_LONG, reg);
3228 case 0x150: /* movmskpd */
3229 rm = (modrm & 7) | REX_B(s);
3230 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3231 offsetof(CPUX86State,xmm_regs[rm]));
3232 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3233 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3234 gen_op_mov_reg_T0(OT_LONG, reg);
3236 case 0x02a: /* cvtpi2ps */
3237 case 0x12a: /* cvtpi2pd */
3238 tcg_gen_helper_0_0(helper_enter_mmx);
3240 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3241 op2_offset = offsetof(CPUX86State,mmx_t0);
3242 gen_ldq_env_A0(s->mem_index, op2_offset);
3245 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3247 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3248 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3249 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3252 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3256 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3260 case 0x22a: /* cvtsi2ss */
3261 case 0x32a: /* cvtsi2sd */
3262 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3263 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3264 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3265 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3266 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3267 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3268 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3270 case 0x02c: /* cvttps2pi */
3271 case 0x12c: /* cvttpd2pi */
3272 case 0x02d: /* cvtps2pi */
3273 case 0x12d: /* cvtpd2pi */
3274 tcg_gen_helper_0_0(helper_enter_mmx);
3276 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3277 op2_offset = offsetof(CPUX86State,xmm_t0);
3278 gen_ldo_env_A0(s->mem_index, op2_offset);
3280 rm = (modrm & 7) | REX_B(s);
3281 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3283 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3284 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3285 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3291 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3294 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3297 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3301 case 0x22c: /* cvttss2si */
3302 case 0x32c: /* cvttsd2si */
3303 case 0x22d: /* cvtss2si */
3304 case 0x32d: /* cvtsd2si */
3305 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3307 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3309 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3311 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3312 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3314 op2_offset = offsetof(CPUX86State,xmm_t0);
3316 rm = (modrm & 7) | REX_B(s);
3317 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3319 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3321 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3322 if (ot == OT_LONG) {
3323 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3324 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3326 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3328 gen_op_mov_reg_T0(ot, reg);
3330 case 0xc4: /* pinsrw */
3333 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3334 val = ldub_code(s->pc++);
3337 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3338 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3341 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3342 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3345 case 0xc5: /* pextrw */
3349 val = ldub_code(s->pc++);
3352 rm = (modrm & 7) | REX_B(s);
3353 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3354 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3358 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3359 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3361 reg = ((modrm >> 3) & 7) | rex_r;
3362 gen_op_mov_reg_T0(OT_LONG, reg);
3364 case 0x1d6: /* movq ea, xmm */
3366 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3367 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3369 rm = (modrm & 7) | REX_B(s);
3370 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3371 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3372 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3375 case 0x2d6: /* movq2dq */
3376 tcg_gen_helper_0_0(helper_enter_mmx);
3378 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3379 offsetof(CPUX86State,fpregs[rm].mmx));
3380 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3382 case 0x3d6: /* movdq2q */
3383 tcg_gen_helper_0_0(helper_enter_mmx);
3384 rm = (modrm & 7) | REX_B(s);
3385 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3386 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3388 case 0xd7: /* pmovmskb */
3393 rm = (modrm & 7) | REX_B(s);
3394 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3395 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3398 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3399 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3401 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3402 reg = ((modrm >> 3) & 7) | rex_r;
3403 gen_op_mov_reg_T0(OT_LONG, reg);
3409 /* generic MMX or SSE operation */
3411 case 0x70: /* pshufx insn */
3412 case 0xc6: /* pshufx insn */
3413 case 0xc2: /* compare insns */
3420 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3422 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3423 op2_offset = offsetof(CPUX86State,xmm_t0);
3424 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3426 /* special case for scalar SSE instructions: only the used part of the memory operand is loaded */
3429 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3430 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3433 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3436 gen_ldo_env_A0(s->mem_index, op2_offset);
3439 rm = (modrm & 7) | REX_B(s);
3440 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3443 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3445 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3446 op2_offset = offsetof(CPUX86State,mmx_t0);
3447 gen_ldq_env_A0(s->mem_index, op2_offset);
3450 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3454 case 0x0f: /* 3DNow! data insns */
3455 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3457 val = ldub_code(s->pc++);
3458 sse_op2 = sse_op_table5[val];
3461 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3462 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3463 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3465 case 0x70: /* pshufx insn */
3466 case 0xc6: /* pshufx insn */
3467 val = ldub_code(s->pc++);
3468 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3469 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3470 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3474 val = ldub_code(s->pc++);
3477 sse_op2 = sse_op_table4[val][b1];
3478 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3479 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3480 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3483 /* maskmov: we must prepare A0 */
3486 #ifdef TARGET_X86_64
3487 if (s->aflag == 2) {
3488 gen_op_movq_A0_reg(R_EDI);
3492 gen_op_movl_A0_reg(R_EDI);
3494 gen_op_andl_A0_ffff();
3496 gen_add_A0_ds_seg(s);
3498 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3499 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3500 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3503 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3504 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3505 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3508 if (b == 0x2e || b == 0x2f) {
3509 s->cc_op = CC_OP_EFLAGS;
3514 /* convert one instruction. s->is_jmp is set if the translation must
3515 be stopped. Return the next pc value */
3516 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3518 int b, prefixes, aflag, dflag;
3520 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3521 target_ulong next_eip, tval;
3531 #ifdef TARGET_X86_64
3536 s->rip_offset = 0; /* for relative ip address */
3538 b = ldub_code(s->pc);
3540 /* check prefixes */
3541 #ifdef TARGET_X86_64
3545 prefixes |= PREFIX_REPZ;
3548 prefixes |= PREFIX_REPNZ;
3551 prefixes |= PREFIX_LOCK;
3572 prefixes |= PREFIX_DATA;
3575 prefixes |= PREFIX_ADR;
3579 rex_w = (b >> 3) & 1;
3580 rex_r = (b & 0x4) << 1;
3581 s->rex_x = (b & 0x2) << 2;
3582 REX_B(s) = (b & 0x1) << 3;
3583 x86_64_hregs = 1; /* select uniform byte register addressing */
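/* REX sketch: a 0x4d prefix (REX.WRB) gives rex_w = 1 (64-bit operand
   size), rex_r = 8 and REX_B(s) = 8 to extend the ModRM reg and rm
   fields, while rex_x would extend the SIB index the same way. */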
3587 /* 0x66 is ignored if rex.w is set */
3590 if (prefixes & PREFIX_DATA)
3593 if (!(prefixes & PREFIX_ADR))
3600 prefixes |= PREFIX_REPZ;
3603 prefixes |= PREFIX_REPNZ;
3606 prefixes |= PREFIX_LOCK;
3627 prefixes |= PREFIX_DATA;
3630 prefixes |= PREFIX_ADR;
3633 if (prefixes & PREFIX_DATA)
3635 if (prefixes & PREFIX_ADR)
3639 s->prefix = prefixes;
3643 /* lock generation */
3644 if (prefixes & PREFIX_LOCK)
3645 tcg_gen_helper_0_0(helper_lock);
3647 /* now check op code */
3651 /**************************/
3652 /* extended op code */
3653 b = ldub_code(s->pc++) | 0x100;
3656 /**************************/
3674 ot = dflag + OT_WORD;
3677 case 0: /* OP Ev, Gv */
3678 modrm = ldub_code(s->pc++);
3679 reg = ((modrm >> 3) & 7) | rex_r;
3680 mod = (modrm >> 6) & 3;
3681 rm = (modrm & 7) | REX_B(s);
3683 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3685 } else if (op == OP_XORL && rm == reg) {
3687 /* xor reg, reg optimisation */
3689 s->cc_op = CC_OP_LOGICB + ot;
3690 gen_op_mov_reg_T0(ot, reg);
3691 gen_op_update1_cc();
3696 gen_op_mov_TN_reg(ot, 1, reg);
3697 gen_op(s, op, ot, opreg);
3699 case 1: /* OP Gv, Ev */
3700 modrm = ldub_code(s->pc++);
3701 mod = (modrm >> 6) & 3;
3702 reg = ((modrm >> 3) & 7) | rex_r;
3703 rm = (modrm & 7) | REX_B(s);
3705 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3706 gen_op_ld_T1_A0(ot + s->mem_index);
3707 } else if (op == OP_XORL && rm == reg) {
3710 gen_op_mov_TN_reg(ot, 1, rm);
3712 gen_op(s, op, ot, reg);
3714 case 2: /* OP A, Iv */
3715 val = insn_get(s, ot);
3716 gen_op_movl_T1_im(val);
3717 gen_op(s, op, ot, OR_EAX);
3723 case 0x80: /* GRP1 */
3733 ot = dflag + OT_WORD;
3735 modrm = ldub_code(s->pc++);
3736 mod = (modrm >> 6) & 3;
3737 rm = (modrm & 7) | REX_B(s);
3738 op = (modrm >> 3) & 7;
3744 s->rip_offset = insn_const_size(ot);
3745 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3756 val = insn_get(s, ot);
3759 val = (int8_t)insn_get(s, OT_BYTE);
3762 gen_op_movl_T1_im(val);
3763 gen_op(s, op, ot, opreg);
3767 /**************************/
3768 /* inc, dec, and other misc arith */
3769 case 0x40 ... 0x47: /* inc Gv */
3770 ot = dflag ? OT_LONG : OT_WORD;
3771 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3773 case 0x48 ... 0x4f: /* dec Gv */
3774 ot = dflag ? OT_LONG : OT_WORD;
3775 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3777 case 0xf6: /* GRP3 */
3782 ot = dflag + OT_WORD;
3784 modrm = ldub_code(s->pc++);
3785 mod = (modrm >> 6) & 3;
3786 rm = (modrm & 7) | REX_B(s);
3787 op = (modrm >> 3) & 7;
3790 s->rip_offset = insn_const_size(ot);
3791 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3792 gen_op_ld_T0_A0(ot + s->mem_index);
3794 gen_op_mov_TN_reg(ot, 0, rm);
3799 val = insn_get(s, ot);
3800 gen_op_movl_T1_im(val);
3801 gen_op_testl_T0_T1_cc();
3802 s->cc_op = CC_OP_LOGICB + ot;
3805 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3807 gen_op_st_T0_A0(ot + s->mem_index);
3809 gen_op_mov_reg_T0(ot, rm);
3813 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3815 gen_op_st_T0_A0(ot + s->mem_index);
3817 gen_op_mov_reg_T0(ot, rm);
3819 gen_op_update_neg_cc();
3820 s->cc_op = CC_OP_SUBB + ot;
3825 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3826 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3827 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3828 /* XXX: use 32 bit mul which could be faster */
3829 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3830 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3831 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3832 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3833 s->cc_op = CC_OP_MULB;
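/* Lazy flag note: cc_dst holds the full 16-bit product and cc_src its
   high byte, so the CC_OP_MULB evaluator can report CF = OF =
   (high byte != 0), matching the architectural mul behaviour. */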
3836 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3837 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3838 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3839 /* XXX: use 32 bit mul which could be faster */
3840 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3841 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3842 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3843 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3844 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3845 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3846 s->cc_op = CC_OP_MULW;
3850 #ifdef TARGET_X86_64
3851 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3852 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3853 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3854 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3855 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3856 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3857 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3858 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3859 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3863 t0 = tcg_temp_new(TCG_TYPE_I64);
3864 t1 = tcg_temp_new(TCG_TYPE_I64);
3865 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3866 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3867 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3868 tcg_gen_mul_i64(t0, t0, t1);
3869 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3870 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3871 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3872 tcg_gen_shri_i64(t0, t0, 32);
3873 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3874 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3875 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3878 s->cc_op = CC_OP_MULL;
3880 #ifdef TARGET_X86_64
3882 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3883 s->cc_op = CC_OP_MULQ;
3891 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3892 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3893 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3894 /* XXX: use 32 bit mul which could be faster */
3895 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3896 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3897 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3898 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3899 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3900 s->cc_op = CC_OP_MULB;
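/* Overflow test sketch: cpu_tmp0 = sext8(result), so cc_src =
   result - sext8(result) is non-zero exactly when the signed product
   does not fit in 8 bits, i.e. when imul must set CF and OF. */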
3903 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3904 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3905 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3906 /* XXX: use 32 bit mul which could be faster */
3907 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3908 gen_op_mov_reg_T0(OT_WORD, R_EAX);
3909 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3910 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3911 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3912 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3913 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3914 s->cc_op = CC_OP_MULW;
3918 #ifdef TARGET_X86_64
3919 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3920 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3921 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3922 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3923 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3924 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3925 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3926 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3927 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3928 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3932 t0 = tcg_temp_new(TCG_TYPE_I64);
3933 t1 = tcg_temp_new(TCG_TYPE_I64);
3934 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3935 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3936 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3937 tcg_gen_mul_i64(t0, t0, t1);
3938 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3939 gen_op_mov_reg_T0(OT_LONG, R_EAX);
3940 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3941 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3942 tcg_gen_shri_i64(t0, t0, 32);
3943 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3944 gen_op_mov_reg_T0(OT_LONG, R_EDX);
3945 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3948 s->cc_op = CC_OP_MULL;
3950 #ifdef TARGET_X86_64
3952 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3953 s->cc_op = CC_OP_MULQ;
3961 gen_jmp_im(pc_start - s->cs_base);
3962 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3965 gen_jmp_im(pc_start - s->cs_base);
3966 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3970 gen_jmp_im(pc_start - s->cs_base);
3971 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3973 #ifdef TARGET_X86_64
3975 gen_jmp_im(pc_start - s->cs_base);
3976 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3984 gen_jmp_im(pc_start - s->cs_base);
3985 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3988 gen_jmp_im(pc_start - s->cs_base);
3989 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3993 gen_jmp_im(pc_start - s->cs_base);
3994 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3996 #ifdef TARGET_X86_64
3998 gen_jmp_im(pc_start - s->cs_base);
3999 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4009 case 0xfe: /* GRP4 */
4010 case 0xff: /* GRP5 */
4014 ot = dflag + OT_WORD;
4016 modrm = ldub_code(s->pc++);
4017 mod = (modrm >> 6) & 3;
4018 rm = (modrm & 7) | REX_B(s);
4019 op = (modrm >> 3) & 7;
4020 if (op >= 2 && b == 0xfe) {
4024 if (op == 2 || op == 4) {
4025 /* operand size for jumps is 64 bit */
4027 } else if (op == 3 || op == 5) {
4028 /* for lcall/ljmp, the operand is 16 or 32 bit, even in long mode */
4030 ot = dflag ? OT_LONG : OT_WORD;
4031 } else if (op == 6) {
4032 /* default push size is 64 bit */
4033 ot = dflag ? OT_QUAD : OT_WORD;
4037 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4038 if (op >= 2 && op != 3 && op != 5)
4039 gen_op_ld_T0_A0(ot + s->mem_index);
4041 gen_op_mov_TN_reg(ot, 0, rm);
4045 case 0: /* inc Ev */
4050 gen_inc(s, ot, opreg, 1);
4052 case 1: /* dec Ev */
4057 gen_inc(s, ot, opreg, -1);
4059 case 2: /* call Ev */
4060 /* XXX: optimize if memory (no 'and' is necessary) */
4062 gen_op_andl_T0_ffff();
4063 next_eip = s->pc - s->cs_base;
4064 gen_movtl_T1_im(next_eip);
4069 case 3: /* lcall Ev */
4070 gen_op_ld_T1_A0(ot + s->mem_index);
4071 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4072 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4074 if (s->pe && !s->vm86) {
4075 if (s->cc_op != CC_OP_DYNAMIC)
4076 gen_op_set_cc_op(s->cc_op);
4077 gen_jmp_im(pc_start - s->cs_base);
4078 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4079 tcg_gen_helper_0_4(helper_lcall_protected,
4080 cpu_tmp2_i32, cpu_T[1],
4081 tcg_const_i32(dflag),
4082 tcg_const_i32(s->pc - pc_start));
4084 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4085 tcg_gen_helper_0_4(helper_lcall_real,
4086 cpu_tmp2_i32, cpu_T[1],
4087 tcg_const_i32(dflag),
4088 tcg_const_i32(s->pc - s->cs_base));
4092 case 4: /* jmp Ev */
4094 gen_op_andl_T0_ffff();
4098 case 5: /* ljmp Ev */
4099 gen_op_ld_T1_A0(ot + s->mem_index);
4100 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4101 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4103 if (s->pe && !s->vm86) {
4104 if (s->cc_op != CC_OP_DYNAMIC)
4105 gen_op_set_cc_op(s->cc_op);
4106 gen_jmp_im(pc_start - s->cs_base);
4107 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4108 tcg_gen_helper_0_3(helper_ljmp_protected,
4111 tcg_const_i32(s->pc - pc_start));
4113 gen_op_movl_seg_T0_vm(R_CS);
4114 gen_op_movl_T0_T1();
4119 case 6: /* push Ev */
4127 case 0x84: /* test Ev, Gv */
4132 ot = dflag + OT_WORD;
4134 modrm = ldub_code(s->pc++);
4135 mod = (modrm >> 6) & 3;
4136 rm = (modrm & 7) | REX_B(s);
4137 reg = ((modrm >> 3) & 7) | rex_r;
4139 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4140 gen_op_mov_TN_reg(ot, 1, reg);
4141 gen_op_testl_T0_T1_cc();
4142 s->cc_op = CC_OP_LOGICB + ot;
4145 case 0xa8: /* test eAX, Iv */
4150 ot = dflag + OT_WORD;
4151 val = insn_get(s, ot);
4153 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4154 gen_op_movl_T1_im(val);
4155 gen_op_testl_T0_T1_cc();
4156 s->cc_op = CC_OP_LOGICB + ot;
4159 case 0x98: /* CWDE/CBW */
4160 #ifdef TARGET_X86_64
4162 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4163 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4164 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4168 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4169 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4170 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4172 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4173 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4174 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4177 case 0x99: /* CDQ/CWD */
4178 #ifdef TARGET_X86_64
4180 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4181 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4182 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4186 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4187 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4188 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4189 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4191 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4192 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4193 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4194 gen_op_mov_reg_T0(OT_WORD, R_EDX);
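/* Sign-spill sketch: an arithmetic right shift by 15/31/63 leaves
   either all zeroes or all ones, which is exactly the CWD/CDQ/CQO
   value expected in DX/EDX/RDX. */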
4197 case 0x1af: /* imul Gv, Ev */
4198 case 0x69: /* imul Gv, Ev, I */
4200 ot = dflag + OT_WORD;
4201 modrm = ldub_code(s->pc++);
4202 reg = ((modrm >> 3) & 7) | rex_r;
4204 s->rip_offset = insn_const_size(ot);
4207 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4209 val = insn_get(s, ot);
4210 gen_op_movl_T1_im(val);
4211 } else if (b == 0x6b) {
4212 val = (int8_t)insn_get(s, OT_BYTE);
4213 gen_op_movl_T1_im(val);
4215 gen_op_mov_TN_reg(ot, 1, reg);
4218 #ifdef TARGET_X86_64
4219 if (ot == OT_QUAD) {
4220 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4223 if (ot == OT_LONG) {
4224 #ifdef TARGET_X86_64
4225 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4226 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4227 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4228 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4229 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4230 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4234 t0 = tcg_temp_new(TCG_TYPE_I64);
4235 t1 = tcg_temp_new(TCG_TYPE_I64);
4236 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4237 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4238 tcg_gen_mul_i64(t0, t0, t1);
4239 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4240 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4241 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4242 tcg_gen_shri_i64(t0, t0, 32);
4243 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4244 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4248 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4249 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4250 /* XXX: use 32 bit mul which could be faster */
4251 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4252 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4253 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4254 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4256 gen_op_mov_reg_T0(ot, reg);
4257 s->cc_op = CC_OP_MULB + ot;
4260 case 0x1c1: /* xadd Ev, Gv */
4264 ot = dflag + OT_WORD;
4265 modrm = ldub_code(s->pc++);
4266 reg = ((modrm >> 3) & 7) | rex_r;
4267 mod = (modrm >> 6) & 3;
4269 rm = (modrm & 7) | REX_B(s);
4270 gen_op_mov_TN_reg(ot, 0, reg);
4271 gen_op_mov_TN_reg(ot, 1, rm);
4272 gen_op_addl_T0_T1();
4273 gen_op_mov_reg_T1(ot, reg);
4274 gen_op_mov_reg_T0(ot, rm);
4276 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4277 gen_op_mov_TN_reg(ot, 0, reg);
4278 gen_op_ld_T1_A0(ot + s->mem_index);
4279 gen_op_addl_T0_T1();
4280 gen_op_st_T0_A0(ot + s->mem_index);
4281 gen_op_mov_reg_T1(ot, reg);
4283 gen_op_update2_cc();
4284 s->cc_op = CC_OP_ADDB + ot;
4287 case 0x1b1: /* cmpxchg Ev, Gv */
4294 ot = dflag + OT_WORD;
4295 modrm = ldub_code(s->pc++);
4296 reg = ((modrm >> 3) & 7) | rex_r;
4297 mod = (modrm >> 6) & 3;
4298 gen_op_mov_TN_reg(ot, 1, reg);
4300 rm = (modrm & 7) | REX_B(s);
4301 gen_op_mov_TN_reg(ot, 0, rm);
4303 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4304 gen_op_ld_T0_A0(ot + s->mem_index);
4305 rm = 0; /* avoid warning */
4307 label1 = gen_new_label();
4308 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4309 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4310 gen_extu(ot, cpu_T3);
4311 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4313 label2 = gen_new_label();
4314 gen_op_mov_reg_T0(ot, R_EAX);
4316 gen_set_label(label1);
4317 gen_op_mov_reg_T1(ot, rm);
4318 gen_set_label(label2);
4320 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4321 gen_op_mov_reg_T0(ot, R_EAX);
4322 gen_set_label(label1);
4324 gen_op_st_T1_A0(ot + s->mem_index);
4326 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4327 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4328 s->cc_op = CC_OP_SUBB + ot;
4331 case 0x1c7: /* cmpxchg8b */
4332 modrm = ldub_code(s->pc++);
4333 mod = (modrm >> 6) & 3;
4334 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4336 #ifdef TARGET_X86_64
4338 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4340 gen_jmp_im(pc_start - s->cs_base);
4341 if (s->cc_op != CC_OP_DYNAMIC)
4342 gen_op_set_cc_op(s->cc_op);
4343 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4344 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4348 if (!(s->cpuid_features & CPUID_CX8))
4350 gen_jmp_im(pc_start - s->cs_base);
4351 if (s->cc_op != CC_OP_DYNAMIC)
4352 gen_op_set_cc_op(s->cc_op);
4353 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4354 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4356 s->cc_op = CC_OP_EFLAGS;
4359 /**************************/
4361 case 0x50 ... 0x57: /* push */
4362 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4365 case 0x58 ... 0x5f: /* pop */
4367 ot = dflag ? OT_QUAD : OT_WORD;
4369 ot = dflag + OT_WORD;
4372 /* NOTE: order is important for pop %sp */
4374 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4376 case 0x60: /* pusha */
4381 case 0x61: /* popa */
4386 case 0x68: /* push Iv */
4389 ot = dflag ? OT_QUAD : OT_WORD;
4391 ot = dflag + OT_WORD;
4394 val = insn_get(s, ot);
4396 val = (int8_t)insn_get(s, OT_BYTE);
4397 gen_op_movl_T0_im(val);
4400 case 0x8f: /* pop Ev */
4402 ot = dflag ? OT_QUAD : OT_WORD;
4404 ot = dflag + OT_WORD;
4406 modrm = ldub_code(s->pc++);
4407 mod = (modrm >> 6) & 3;
4410 /* NOTE: order is important for pop %sp */
4412 rm = (modrm & 7) | REX_B(s);
4413 gen_op_mov_reg_T0(ot, rm);
4415 /* NOTE: order is important too for MMU exceptions */
4416 s->popl_esp_hack = 1 << ot;
4417 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4418 s->popl_esp_hack = 0;
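/* The hack covers the "pop into ESP-based memory" rule: the effective
   address must be computed with the already popped stack pointer, so
   gen_lea_modrm() in effect folds popl_esp_hack (1 << ot, the operand
   size in bytes) into the displacement when the base is ESP. */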
4422 case 0xc8: /* enter */
4425 val = lduw_code(s->pc);
4427 level = ldub_code(s->pc++);
4428 gen_enter(s, val, level);
4431 case 0xc9: /* leave */
4432 /* XXX: exception not precise (ESP is updated before potential exception) */
4434 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4435 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4436 } else if (s->ss32) {
4437 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4438 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4440 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4441 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4445 ot = dflag ? OT_QUAD : OT_WORD;
4447 ot = dflag + OT_WORD;
4449 gen_op_mov_reg_T0(ot, R_EBP);
4452 case 0x06: /* push es */
4453 case 0x0e: /* push cs */
4454 case 0x16: /* push ss */
4455 case 0x1e: /* push ds */
4458 gen_op_movl_T0_seg(b >> 3);
4461 case 0x1a0: /* push fs */
4462 case 0x1a8: /* push gs */
4463 gen_op_movl_T0_seg((b >> 3) & 7);
4466 case 0x07: /* pop es */
4467 case 0x17: /* pop ss */
4468 case 0x1f: /* pop ds */
4473 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4476 /* if reg == SS, inhibit interrupts/trace. */
4477 /* If several instructions disable interrupts, only the first one takes effect */
4479 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4480 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4484 gen_jmp_im(s->pc - s->cs_base);
4488 case 0x1a1: /* pop fs */
4489 case 0x1a9: /* pop gs */
4491 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4494 gen_jmp_im(s->pc - s->cs_base);
4499 /**************************/
4502 case 0x89: /* mov Gv, Ev */
4506 ot = dflag + OT_WORD;
4507 modrm = ldub_code(s->pc++);
4508 reg = ((modrm >> 3) & 7) | rex_r;
4510 /* generate a generic store */
4511 gen_ldst_modrm(s, modrm, ot, reg, 1);
4514 case 0xc7: /* mov Ev, Iv */
4518 ot = dflag + OT_WORD;
4519 modrm = ldub_code(s->pc++);
4520 mod = (modrm >> 6) & 3;
4522 s->rip_offset = insn_const_size(ot);
4523 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4525 val = insn_get(s, ot);
4526 gen_op_movl_T0_im(val);
4528 gen_op_st_T0_A0(ot + s->mem_index);
4530 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4533 case 0x8b: /* mov Ev, Gv */
4537 ot = OT_WORD + dflag;
4538 modrm = ldub_code(s->pc++);
4539 reg = ((modrm >> 3) & 7) | rex_r;
4541 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4542 gen_op_mov_reg_T0(ot, reg);
4544 case 0x8e: /* mov seg, Gv */
4545 modrm = ldub_code(s->pc++);
4546 reg = (modrm >> 3) & 7;
4547 if (reg >= 6 || reg == R_CS)
4549 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4550 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4552 /* if reg == SS, inhibit interrupts/trace */
4553 /* If several instructions disable interrupts, only the first one takes effect */
4555 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4556 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4560 gen_jmp_im(s->pc - s->cs_base);
4564 case 0x8c: /* mov Gv, seg */
4565 modrm = ldub_code(s->pc++);
4566 reg = (modrm >> 3) & 7;
4567 mod = (modrm >> 6) & 3;
4570 gen_op_movl_T0_seg(reg);
4572 ot = OT_WORD + dflag;
4575 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4578 case 0x1b6: /* movzbS Gv, Eb */
4579 case 0x1b7: /* movzwS Gv, Eb */
4580 case 0x1be: /* movsbS Gv, Eb */
4581 case 0x1bf: /* movswS Gv, Eb */
4584 /* d_ot is the size of the destination */
4585 d_ot = dflag + OT_WORD;
4586 /* ot is the size of the source */
4587 ot = (b & 1) + OT_BYTE;
4588 modrm = ldub_code(s->pc++);
4589 reg = ((modrm >> 3) & 7) | rex_r;
4590 mod = (modrm >> 6) & 3;
4591 rm = (modrm & 7) | REX_B(s);
4594 gen_op_mov_TN_reg(ot, 0, rm);
4595 switch(ot | (b & 8)) {
4597 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4600 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4603 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4607 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4610 gen_op_mov_reg_T0(d_ot, reg);
4612 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4614 gen_op_lds_T0_A0(ot + s->mem_index);
4616 gen_op_ldu_T0_A0(ot + s->mem_index);
4618 gen_op_mov_reg_T0(d_ot, reg);
4623 case 0x8d: /* lea */
4624 ot = dflag + OT_WORD;
4625 modrm = ldub_code(s->pc++);
4626 mod = (modrm >> 6) & 3;
4629 reg = ((modrm >> 3) & 7) | rex_r;
4630 /* we must ensure that no segment is added */
4634 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4636 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4639 case 0xa0: /* mov EAX, Ov */
4641 case 0xa2: /* mov Ov, EAX */
4644 target_ulong offset_addr;
4649 ot = dflag + OT_WORD;
4650 #ifdef TARGET_X86_64
4651 if (s->aflag == 2) {
4652 offset_addr = ldq_code(s->pc);
4654 gen_op_movq_A0_im(offset_addr);
4659 offset_addr = insn_get(s, OT_LONG);
4661 offset_addr = insn_get(s, OT_WORD);
4663 gen_op_movl_A0_im(offset_addr);
4665 gen_add_A0_ds_seg(s);
4667 gen_op_ld_T0_A0(ot + s->mem_index);
4668 gen_op_mov_reg_T0(ot, R_EAX);
4670 gen_op_mov_TN_reg(ot, 0, R_EAX);
4671 gen_op_st_T0_A0(ot + s->mem_index);
4675 case 0xd7: /* xlat */
4676 #ifdef TARGET_X86_64
4677 if (s->aflag == 2) {
4678 gen_op_movq_A0_reg(R_EBX);
4679 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4680 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4681 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4685 gen_op_movl_A0_reg(R_EBX);
4686 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4687 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4688 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4690 gen_op_andl_A0_ffff();
4692 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4694 gen_add_A0_ds_seg(s);
4695 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4696 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
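/* xlat in effect performs AL = [seg_base + (E/R)BX + zero_ext(AL)],
   hence the 0xff mask applied to AL before the add. */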
4698 case 0xb0 ... 0xb7: /* mov R, Ib */
4699 val = insn_get(s, OT_BYTE);
4700 gen_op_movl_T0_im(val);
4701 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4703 case 0xb8 ... 0xbf: /* mov R, Iv */
4704 #ifdef TARGET_X86_64
4708 tmp = ldq_code(s->pc);
4710 reg = (b & 7) | REX_B(s);
4711 gen_movtl_T0_im(tmp);
4712 gen_op_mov_reg_T0(OT_QUAD, reg);
4716 ot = dflag ? OT_LONG : OT_WORD;
4717 val = insn_get(s, ot);
4718 reg = (b & 7) | REX_B(s);
4719 gen_op_movl_T0_im(val);
4720 gen_op_mov_reg_T0(ot, reg);
4724 case 0x91 ... 0x97: /* xchg R, EAX */
4725 ot = dflag + OT_WORD;
4726 reg = (b & 7) | REX_B(s);
4730 case 0x87: /* xchg Ev, Gv */
4734 ot = dflag + OT_WORD;
4735 modrm = ldub_code(s->pc++);
4736 reg = ((modrm >> 3) & 7) | rex_r;
4737 mod = (modrm >> 6) & 3;
4739 rm = (modrm & 7) | REX_B(s);
4741 gen_op_mov_TN_reg(ot, 0, reg);
4742 gen_op_mov_TN_reg(ot, 1, rm);
4743 gen_op_mov_reg_T0(ot, rm);
4744 gen_op_mov_reg_T1(ot, reg);
4746 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4747 gen_op_mov_TN_reg(ot, 0, reg);
4748 /* for xchg, lock is implicit */
4749 if (!(prefixes & PREFIX_LOCK))
4750 tcg_gen_helper_0_0(helper_lock);
4751 gen_op_ld_T1_A0(ot + s->mem_index);
4752 gen_op_st_T0_A0(ot + s->mem_index);
4753 if (!(prefixes & PREFIX_LOCK))
4754 tcg_gen_helper_0_0(helper_unlock);
4755 gen_op_mov_reg_T1(ot, reg);
4758 case 0xc4: /* les Gv */
4763 case 0xc5: /* lds Gv */
4768 case 0x1b2: /* lss Gv */
4771 case 0x1b4: /* lfs Gv */
4774 case 0x1b5: /* lgs Gv */
4777 ot = dflag ? OT_LONG : OT_WORD;
4778 modrm = ldub_code(s->pc++);
4779 reg = ((modrm >> 3) & 7) | rex_r;
4780 mod = (modrm >> 6) & 3;
4783 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4784 gen_op_ld_T1_A0(ot + s->mem_index);
4785 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4786 /* load the segment first to handle exceptions properly */
4787 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4788 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4789 /* then put the data */
4790 gen_op_mov_reg_T1(ot, reg);
4792 gen_jmp_im(s->pc - s->cs_base);
4797 /************************/
4808 ot = dflag + OT_WORD;
4810 modrm = ldub_code(s->pc++);
4811 mod = (modrm >> 6) & 3;
4812 op = (modrm >> 3) & 7;
4818 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4821 opreg = (modrm & 7) | REX_B(s);
4826 gen_shift(s, op, ot, opreg, OR_ECX);
4829 shift = ldub_code(s->pc++);
4831 gen_shifti(s, op, ot, opreg, shift);
4846 case 0x1a4: /* shld imm */
4850 case 0x1a5: /* shld cl */
4854 case 0x1ac: /* shrd imm */
4858 case 0x1ad: /* shrd cl */
4862 ot = dflag + OT_WORD;
4863 modrm = ldub_code(s->pc++);
4864 mod = (modrm >> 6) & 3;
4865 rm = (modrm & 7) | REX_B(s);
4866 reg = ((modrm >> 3) & 7) | rex_r;
4868 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4873 gen_op_mov_TN_reg(ot, 1, reg);
4876 val = ldub_code(s->pc++);
4877 tcg_gen_movi_tl(cpu_T3, val);
4879 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4881 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4884 /************************/
4887 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4888 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4889 /* XXX: what to do if illegal op ? */
4890 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4893 modrm = ldub_code(s->pc++);
4894 mod = (modrm >> 6) & 3;
4896 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4899 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4901 case 0x00 ... 0x07: /* fxxxs */
4902 case 0x10 ... 0x17: /* fixxxl */
4903 case 0x20 ... 0x27: /* fxxxl */
4904 case 0x30 ... 0x37: /* fixxx */
4911 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4912 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4913 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4916 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4917 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4918 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4921 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4922 (s->mem_index >> 2) - 1);
4923 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4927 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4928 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4929 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4933 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4935 /* fcomp needs pop */
4936 tcg_gen_helper_0_0(helper_fpop);
4940 case 0x08: /* flds */
4941 case 0x0a: /* fsts */
4942 case 0x0b: /* fstps */
4943 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4944 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4945 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4950 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4951 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4952 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4955 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4956 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4957 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4960 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4961 (s->mem_index >> 2) - 1);
4962 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4966 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4967 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4968 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4973 /* XXX: the corresponding CPUID bit must be tested ! */
4976 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4977 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4978 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4981 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4982 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4983 (s->mem_index >> 2) - 1);
4987 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4988 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4989 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4992 tcg_gen_helper_0_0(helper_fpop);
4997 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4998 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4999 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5002 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5003 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5004 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5007 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5008 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5009 (s->mem_index >> 2) - 1);
5013 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5014 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5015 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5019 tcg_gen_helper_0_0(helper_fpop);
5023 case 0x0c: /* fldenv mem */
5024 if (s->cc_op != CC_OP_DYNAMIC)
5025 gen_op_set_cc_op(s->cc_op);
5026 gen_jmp_im(pc_start - s->cs_base);
5027 tcg_gen_helper_0_2(helper_fldenv,
5028 cpu_A0, tcg_const_i32(s->dflag));
5030 case 0x0d: /* fldcw mem */
5031 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5032 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5033 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5035 case 0x0e: /* fnstenv mem */
5036 if (s->cc_op != CC_OP_DYNAMIC)
5037 gen_op_set_cc_op(s->cc_op);
5038 gen_jmp_im(pc_start - s->cs_base);
5039 tcg_gen_helper_0_2(helper_fstenv,
5040 cpu_A0, tcg_const_i32(s->dflag));
5042 case 0x0f: /* fnstcw mem */
5043 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5044 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5045 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5047 case 0x1d: /* fldt mem */
5048 if (s->cc_op != CC_OP_DYNAMIC)
5049 gen_op_set_cc_op(s->cc_op);
5050 gen_jmp_im(pc_start - s->cs_base);
5051 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5053 case 0x1f: /* fstpt mem */
5054 if (s->cc_op != CC_OP_DYNAMIC)
5055 gen_op_set_cc_op(s->cc_op);
5056 gen_jmp_im(pc_start - s->cs_base);
5057 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5058 tcg_gen_helper_0_0(helper_fpop);
5060 case 0x2c: /* frstor mem */
5061 if (s->cc_op != CC_OP_DYNAMIC)
5062 gen_op_set_cc_op(s->cc_op);
5063 gen_jmp_im(pc_start - s->cs_base);
5064 tcg_gen_helper_0_2(helper_frstor,
5065 cpu_A0, tcg_const_i32(s->dflag));
5067 case 0x2e: /* fnsave mem */
5068 if (s->cc_op != CC_OP_DYNAMIC)
5069 gen_op_set_cc_op(s->cc_op);
5070 gen_jmp_im(pc_start - s->cs_base);
5071 tcg_gen_helper_0_2(helper_fsave,
5072 cpu_A0, tcg_const_i32(s->dflag));
5074 case 0x2f: /* fnstsw mem */
5075 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5076 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5077 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5079 case 0x3c: /* fbld */
5080 if (s->cc_op != CC_OP_DYNAMIC)
5081 gen_op_set_cc_op(s->cc_op);
5082 gen_jmp_im(pc_start - s->cs_base);
5083 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5085 case 0x3e: /* fbstp */
5086 if (s->cc_op != CC_OP_DYNAMIC)
5087 gen_op_set_cc_op(s->cc_op);
5088 gen_jmp_im(pc_start - s->cs_base);
5089 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5090 tcg_gen_helper_0_0(helper_fpop);
5092 case 0x3d: /* fildll */
5093 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5094 (s->mem_index >> 2) - 1);
5095 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5097 case 0x3f: /* fistpll */
5098 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5099 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5100 (s->mem_index >> 2) - 1);
5101 tcg_gen_helper_0_0(helper_fpop);
5107 /* register float ops */
5111 case 0x08: /* fld sti */
5112 tcg_gen_helper_0_0(helper_fpush);
5113 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5115 case 0x09: /* fxchg sti */
5116 case 0x29: /* fxchg4 sti, undocumented op */
5117 case 0x39: /* fxchg7 sti, undocumented op */
5118 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5120 case 0x0a: /* grp d9/2 */
5123 /* check exceptions (FreeBSD FPU probe) */
5124 if (s->cc_op != CC_OP_DYNAMIC)
5125 gen_op_set_cc_op(s->cc_op);
5126 gen_jmp_im(pc_start - s->cs_base);
5127 tcg_gen_helper_0_0(helper_fwait);
5133 case 0x0c: /* grp d9/4 */
5136 tcg_gen_helper_0_0(helper_fchs_ST0);
5139 tcg_gen_helper_0_0(helper_fabs_ST0);
5142 tcg_gen_helper_0_0(helper_fldz_FT0);
5143 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5146 tcg_gen_helper_0_0(helper_fxam_ST0);
5152 case 0x0d: /* grp d9/5 */
5156 tcg_gen_helper_0_0(helper_fpush);
5157 tcg_gen_helper_0_0(helper_fld1_ST0);
5160 tcg_gen_helper_0_0(helper_fpush);
5161 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5164 tcg_gen_helper_0_0(helper_fpush);
5165 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5168 tcg_gen_helper_0_0(helper_fpush);
5169 tcg_gen_helper_0_0(helper_fldpi_ST0);
5172 tcg_gen_helper_0_0(helper_fpush);
5173 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5176 tcg_gen_helper_0_0(helper_fpush);
5177 tcg_gen_helper_0_0(helper_fldln2_ST0);
5180 tcg_gen_helper_0_0(helper_fpush);
5181 tcg_gen_helper_0_0(helper_fldz_ST0);
5188 case 0x0e: /* grp d9/6 */
5191 tcg_gen_helper_0_0(helper_f2xm1);
5194 tcg_gen_helper_0_0(helper_fyl2x);
5197 tcg_gen_helper_0_0(helper_fptan);
5199 case 3: /* fpatan */
5200 tcg_gen_helper_0_0(helper_fpatan);
5202 case 4: /* fxtract */
5203 tcg_gen_helper_0_0(helper_fxtract);
5205 case 5: /* fprem1 */
5206 tcg_gen_helper_0_0(helper_fprem1);
5208 case 6: /* fdecstp */
5209 tcg_gen_helper_0_0(helper_fdecstp);
5212 case 7: /* fincstp */
5213 tcg_gen_helper_0_0(helper_fincstp);
5217 case 0x0f: /* grp d9/7 */
5220 tcg_gen_helper_0_0(helper_fprem);
5222 case 1: /* fyl2xp1 */
5223 tcg_gen_helper_0_0(helper_fyl2xp1);
5226 tcg_gen_helper_0_0(helper_fsqrt);
5228 case 3: /* fsincos */
5229 tcg_gen_helper_0_0(helper_fsincos);
5231 case 5: /* fscale */
5232 tcg_gen_helper_0_0(helper_fscale);
5234 case 4: /* frndint */
5235 tcg_gen_helper_0_0(helper_frndint);
5238 tcg_gen_helper_0_0(helper_fsin);
5242 tcg_gen_helper_0_0(helper_fcos);
5246 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5247 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5248 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5254 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5256 tcg_gen_helper_0_0(helper_fpop);
5258 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5259 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5263 case 0x02: /* fcom */
5264 case 0x22: /* fcom2, undocumented op */
5265 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5266 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5268 case 0x03: /* fcomp */
5269 case 0x23: /* fcomp3, undocumented op */
5270 case 0x32: /* fcomp5, undocumented op */
5271 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5272 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5273 tcg_gen_helper_0_0(helper_fpop);
5275 case 0x15: /* da/5 */
5277 case 1: /* fucompp */
5278 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5279 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5280 tcg_gen_helper_0_0(helper_fpop);
5281 tcg_gen_helper_0_0(helper_fpop);
5289 case 0: /* feni (287 only, just do nop here) */
5291 case 1: /* fdisi (287 only, just do nop here) */
5294 tcg_gen_helper_0_0(helper_fclex);
5296 case 3: /* fninit */
5297 tcg_gen_helper_0_0(helper_fninit);
5299 case 4: /* fsetpm (287 only, just do nop here) */
5305 case 0x1d: /* fucomi */
5306 if (s->cc_op != CC_OP_DYNAMIC)
5307 gen_op_set_cc_op(s->cc_op);
5308 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5309 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5310 s->cc_op = CC_OP_EFLAGS;
        case 0x1e: /* fcomi */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
            s->cc_op = CC_OP_EFLAGS;
        case 0x28: /* ffree sti */
            tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
        case 0x2a: /* fst sti */
            tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
        case 0x2b: /* fstp sti */
        case 0x0b: /* fstp1 sti, undocumented op */
        case 0x3a: /* fstp8 sti, undocumented op */
        case 0x3b: /* fstp9 sti, undocumented op */
            tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fpop);
        case 0x2c: /* fucom st(i) */
            tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
        case 0x2d: /* fucomp st(i) */
            tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
            tcg_gen_helper_0_0(helper_fpop);
        case 0x33: /* de/3 */
            case 1: /* fcompp */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                tcg_gen_helper_0_0(helper_fpop);
        case 0x38: /* ffreep sti, undocumented op */
            tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fpop);
        case 0x3c: /* df/4 */
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
        case 0x3d: /* fucomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
            tcg_gen_helper_0_0(helper_fpop);
            s->cc_op = CC_OP_EFLAGS;
        case 0x3e: /* fcomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
            tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
            tcg_gen_helper_0_0(helper_fpop);
            s->cc_op = CC_OP_EFLAGS;
        case 0x10 ... 0x13: /* fcmovxx */
                static const uint8_t fcmov_cc[8] = {
                op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                l1 = gen_new_label();
                tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
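            /* There is no TCG conditional move here: the (elided) setcc step
               leaves the tested condition in T0, and the brcond above simply
               skips the ST0 write when it is false. The fcmov_cc table
               (initializer elided) maps the four opcode variants to jcc
               condition codes, with bit 3 of the opcode selecting the
               negated form. */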
    /************************/
    case 0xa4: /* movsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xaa: /* stosS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xac: /* lodsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xae: /* scasS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
            s->cc_op = CC_OP_SUBB + ot;
    case 0xa6: /* cmpsS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
            s->cc_op = CC_OP_SUBB + ot;
    case 0x6c: /* insS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0x6e: /* outsS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
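    /* For all string instructions above, a REP/REPNZ prefix routes through a
       gen_repz_* generator that emits a small loop: test ECX, do one
       element, advance ESI/EDI by the direction flag, decrement ECX and
       branch back (scas/cmps additionally test ZF for REPZ/REPNZ). The
       unprefixed single-element paths are elided here. */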
    /************************/
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
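    /* The four (label-elided) blocks above are IN/OUT with an immediate port
       and with the port in DX. The pattern is uniform: the port number ends
       up in T0 (masked to 16 bits), gen_check_io() validates IOPL/TSS bitmap
       permissions and SVM I/O intercepts, and the access itself goes through
       helper_in_func[ot]/helper_out_func[ot], tables indexed by operand
       size. */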
    /************************/
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        if (CODE64(s) && s->dflag)
        gen_stack_update(s, val + (2 << s->dflag));
            gen_op_andl_T0_ffff();
    case 0xc3: /* ret */
            gen_op_andl_T0_ffff();
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(R_CS);
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
    case 0xcb: /* lret */
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
    case 0xe8: /* call im */
            tval = (int32_t)insn_get(s, OT_LONG);
            tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_movtl_T0_im(next_eip);
    case 0x9a: /* lcall im */
        unsigned int selector, offset;
        ot = dflag ? OT_LONG : OT_WORD;
        offset = insn_get(s, ot);
        selector = insn_get(s, OT_WORD);
        gen_op_movl_T0_im(selector);
        gen_op_movl_T1_imu(offset);
    case 0xe9: /* jmp im */
            tval = (int32_t)insn_get(s, OT_LONG);
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
    case 0xea: /* ljmp im */
        unsigned int selector, offset;
        ot = dflag ? OT_LONG : OT_WORD;
        offset = insn_get(s, ot);
        selector = insn_get(s, OT_WORD);
        gen_op_movl_T0_im(selector);
        gen_op_movl_T1_imu(offset);
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
    case 0x180 ... 0x18f: /* jcc Jv */
            tval = (int32_t)insn_get(s, OT_LONG);
            tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_jcc(s, b, tval, next_eip);
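    /* For the jcc cases, tval holds the sign-extended displacement (already
       turned into an absolute eip for the unconditional jumps above) and
       next_eip the fall-through address; gen_jcc() emits the conditional
       branch and, when s->jmp_opt allows it, chains both outcomes directly
       to their target translation blocks. */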
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(ot + s->mem_index);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 1, rm);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
#ifdef TARGET_X86_64
        if (ot == OT_LONG) {
            /* XXX: specific Intel behaviour? */
            l1 = gen_new_label();
            gen_jcc1(s, s->cc_op, b ^ 1, l1);
            tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
            tcg_gen_movi_tl(cpu_tmp0, 0);
            tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
            l1 = gen_new_label();
            gen_jcc1(s, s->cc_op, b ^ 1, l1);
            gen_op_mov_reg_T1(ot, reg);
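    /* cmov is emitted as a branch over the move: gen_jcc1() tests the
       inverted condition (b ^ 1) and jumps past the writeback when the move
       must not happen. In the TARGET_X86_64 OT_LONG path the low 32 bits are
       stored and the high half explicitly zeroed; the XXX note above records
       the open question of what real hardware does to the upper half when
       the condition is false. */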
    /************************/
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
                if (s->cpl <= s->iopl) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
        s->cc_op = CC_OP_EFLAGS;
        /* abort translation because TF flag may change */
        gen_jmp_im(s->pc - s->cs_base);
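    /* The helper_write_eflags variants above differ only in the
       writable-bit mask (plus a & 0xffff truncation when dflag == 0):
       CPL 0 may update IOPL and IF, code with CPL <= IOPL may still update
       IF, and less privileged code may change neither. */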
    case 0x9e: /* sahf */
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
        s->cc_op = CC_OP_EFLAGS;
    case 0x9f: /* lahf */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        /* Note: gen_compute_eflags() only gives the condition codes */
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
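    /* env->df holds the direction flag as +1/-1 rather than as a flag bit,
       so the string loops can advance ESI/EDI by df << ot without testing
       the direction on every iteration. */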
    /************************/
    /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
            gen_op_mov_TN_reg(ot, 0, rm);
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
    case 0x1a3: /* bt Gv, Ev */
    case 0x1ab: /* bts */
    case 0x1b3: /* btr */
    case 0x1bb: /* btc */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
            gen_op_mov_TN_reg(ot, 0, rm);
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        s->cc_op = CC_OP_SARB + ot;
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
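    /* For the Ev,Gv bit ops the bit index in T1 may reach outside the
       addressed operand, so for memory operands the effective address is
       displaced first. Roughly, with ot encoding the operand width:
           A0 += (T1 >> (3 + ot)) << ot;    // step to the word holding the bit
           T1 &= (1 << (3 + ot)) - 1;       // bit position inside that word
       which is exactly the sari/shli/add and andi sequence above; CF is then
       recovered from the pre-modification value shifted right by T1. */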
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_extu(ot, cpu_T[0]);
        label1 = gen_new_label();
        tcg_gen_movi_tl(cpu_cc_dst, 0);
        tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
            tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
            tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(ot, reg);
        tcg_gen_movi_tl(cpu_cc_dst, 1);
        gen_set_label(label1);
        tcg_gen_discard_tl(cpu_cc_src);
        s->cc_op = CC_OP_LOGICB + ot;
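    /* bsf/bsr leave the destination unmodified and set ZF when the source is
       zero: cc_dst is preset to 0 (ZF = 1 under CC_OP_LOGIC*) and the branch
       skips both the helper and the register writeback in that case; only a
       non-zero source reaches the cc_dst = 1 store that clears ZF. */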
    /************************/
    case 0x27: /* daa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_daa);
        s->cc_op = CC_OP_EFLAGS;
    case 0x2f: /* das */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_das);
        s->cc_op = CC_OP_EFLAGS;
    case 0x37: /* aaa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aaa);
        s->cc_op = CC_OP_EFLAGS;
    case 0x3f: /* aas */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aas);
        s->cc_op = CC_OP_EFLAGS;
    case 0xd4: /* aam */
        val = ldub_code(s->pc++);
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
    case 0xd5: /* aad */
        val = ldub_code(s->pc++);
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
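    /* aam/aad take their base from the immediate byte (0x0A in the
       documented encodings, but any value is accepted); aam with a zero
       immediate divides by zero and thus raises #DE, which is the
       EXCP00_DIVZ path above. */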
    /************************/
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xce: /* into */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
        gen_debug(s, pc_start - s->cs_base);
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
    case 0xfa: /* cli */
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_cli);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0xfb: /* sti */
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_sti);
                /* interrupts are enabled only after the first insn following sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0x62: /* bound */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        gen_op_mov_TN_reg(OT_LONG, 0, reg);
        tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_LONG, reg);
    case 0xd6: /* salc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags_c(cpu_T[0]);
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
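    /* salc materializes CF into AL without a branch:
       gen_compute_eflags_c() yields 0 or 1, and negating that gives 0x00 or
       all-ones, of which the low byte (0x00/0xFF) is stored. */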
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        tval = (int8_t)insn_get(s, OT_BYTE);
        next_eip = s->pc - s->cs_base;
        l1 = gen_new_label();
        l2 = gen_new_label();
        l3 = gen_new_label();
        case 0: /* loopnz */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_add_reg_im(s->aflag, R_ECX, -1);
            gen_op_jz_ecx(s->aflag, l3);
            gen_compute_eflags(cpu_tmp0);
            tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                tcg_gen_brcond_tl(TCG_COND_EQ,
                                  cpu_tmp0, tcg_const_tl(0), l1);
                tcg_gen_brcond_tl(TCG_COND_NE,
                                  cpu_tmp0, tcg_const_tl(0), l1);
            gen_op_add_reg_im(s->aflag, R_ECX, -1);
            gen_op_jnz_ecx(s->aflag, l1);
            gen_op_jz_ecx(s->aflag, l1);
        gen_jmp_im(next_eip);
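    /* The loop family is built on labels: the (elided) sub-cases for
       loopz/loop/jecxz all decrement and/or test ECX (loopnz/loopz also
       test ZF) and branch to l1 when the loop is taken; the
       gen_jmp_im(next_eip) above is the not-taken exit stub, with l2 and l3
       separating the taken and not-taken paths before the common end of
       block. */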
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
                tcg_gen_helper_0_0(helper_rdmsr);
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
                tcg_gen_helper_0_0(helper_wrmsr);
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdtsc);
    case 0x133: /* rdpmc */
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
    case 0x134: /* sysenter */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
    case 0x135: /* sysexit */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysexit);
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
    case 0x107: /* sysret */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
                s->cc_op = CC_OP_EFLAGS;
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
        tcg_gen_helper_0_0(helper_cpuid);
    case 0xf4: /* hlt */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            tcg_gen_helper_0_0(helper_hlt);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
            if (!s->pe || s->vm86)
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, ldt.selector));
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            if (!s->pe || s->vm86)
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            if (!s->pe || s->vm86)
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, tr.selector));
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            if (!s->pe || s->vm86)
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            if (!s->pe || s->vm86)
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
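            /* sgdt stores the 16-bit limit followed by the base; the
               (elided) condition masks the base to 24 bits for the 16-bit
               operand-size form, and adding CODE64(s) to OT_LONG widens the
               store to 64 bits in long mode. The sidt block below follows
               the same layout. */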
            case 0: /* monitor */
                if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
                gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                if (s->aflag == 2) {
                    gen_op_movq_A0_reg(R_EAX);
                    gen_op_movl_A0_reg(R_EAX);
                        gen_op_andl_A0_ffff();
                gen_add_A0_ds_seg(s);
                tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
                gen_jmp_im(s->pc - s->cs_base);
                tcg_gen_helper_0_0(helper_mwait);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(s->pc - s->cs_base);
                tcg_gen_helper_0_0(helper_vmrun);
                s->cc_op = CC_OP_EFLAGS;
            case 1: /* VMMCALL */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                /* FIXME: cause #UD if hflags & SVM */
                tcg_gen_helper_0_0(helper_vmmcall);
            case 2: /* VMLOAD */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
                tcg_gen_helper_0_0(helper_vmload);
            case 3: /* VMSAVE */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                tcg_gen_helper_0_0(helper_vmsave);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                tcg_gen_helper_0_0(helper_stgi);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
                tcg_gen_helper_0_0(helper_clgi);
            case 6: /* SKINIT */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
                tcg_gen_helper_0_0(helper_skinit);
            case 7: /* INVLPGA */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
                tcg_gen_helper_0_0(helper_invlpga);
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start,
                                        op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                gen_op_andl_T0_im(0xffffff);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
                tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State, gdt.limit));
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State, idt.limit));
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
        case 7: /* invlpg */
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
#ifdef TARGET_X86_64
                if (CODE64(s) && rm == 0) {
                    tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, segs[R_GS].base));
                    tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State, kernelgsbase));
                    tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State, segs[R_GS].base));
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, kernelgsbase));
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
            /* d_ot is the size of the destination */
            d_ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_mov_reg_T0(d_ot, reg);
            if (!s->pe || s->vm86)
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_TN_reg(ot, 0, rm);
            gen_op_mov_TN_reg(ot, 1, reg);
            tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
            tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
            tcg_gen_movi_tl(cpu_T3, 0);
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_T3, CC_Z);
            gen_set_label(label1);
                gen_op_st_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, rm);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
            s->cc_op = CC_OP_EFLAGS;
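    /* arpl compares the RPL fields (low two bits) of the destination and
       source selectors: when dest RPL < src RPL the destination RPL is
       raised to match and ZF is set, otherwise ZF is cleared. cpu_T3 carries
       the CC_Z bit across the branch so the EFLAGS merge above needs no
       second branch. */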
    case 0x102: /* lar */
    case 0x103: /* lsl */
            if (!s->pe || s->vm86)
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
                tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
            gen_op_mov_reg_T0(ot, reg);
            gen_set_label(label1);
            s->cc_op = CC_OP_EFLAGS;
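    /* lar/lsl write the destination only when the helper accepted the
       selector: the helpers report success by setting CC_Z in cc_src, and
       the brcond above skips the register writeback when it is clear. */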
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_crN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, cr[reg]));
                gen_op_mov_reg_T0(ot, rm);
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
    case 0x106: /* clts */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
        if (!(s->flags & HF_SMM_MASK))
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
            tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);

void optimize_flags_init(void)
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
    assert(sizeof(CCTable) == (1 << 4));
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are fewer fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
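
/* Depending on the host, T0/T1/A0 either live in fixed host registers
   (TCG_AREG1..3) or, when the guest word is wider than the host word, in
   fixed CPUState slots; cc_op/cc_src/cc_dst always live in CPUState so the
   lazy condition-code state survives across helper calls. */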
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    cs_base = tb->cs_base;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    if (flags & HF_SOFTMMU_MASK) {
            dc->mem_index = 2 * 4;
            dc->mem_index = 1 * 4;
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;

        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
            j = gen_opc_ptr - gen_opc_buf;
                    gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
        /* if the translation grows too long, stop it as well */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
    *gen_opc_ptr = INDEX_op_end;
    /* fill in the last values */
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
    tb->size = pc_ptr - pc_start;
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    if (loglevel & CPU_LOG_TB_OP) {
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
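
/* gen_pc_load runs after a fault inside a translation block: the
   gen_opc_pc/gen_opc_cc_op side tables, filled in during the search_pc
   translation pass above, map the faulting position back to the guest eip
   and to the lazy cc_op state in effect at that instruction. */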