4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
48 #define BUGGY_64(x) NULL
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
58 //#define MACRO_TEST 1
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0;
62 /* local register indexes (only used inside old micro ops) */
66 static int x86_64_hregs;
69 typedef struct DisasContext {
70 /* current insn context */
71 int override; /* -1 if no override */
74 target_ulong pc; /* pc = eip + cs_base */
75 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
76 static state change (stop translation) */
77 /* current block context */
78 target_ulong cs_base; /* base of CS segment */
79 int pe; /* protected mode */
80 int code32; /* 32 bit code segment */
82 int lma; /* long mode active */
83 int code64; /* 64 bit code segment */
86 int ss32; /* 32 bit stack segment */
87 int cc_op; /* current CC operation */
88 int addseg; /* non zero if either DS/ES/SS have a non zero base */
89 int f_st; /* currently unused */
90 int vm86; /* vm86 mode */
93 int tf; /* TF cpu flag */
94 int singlestep_enabled; /* "hardware" single step enabled */
95 int jmp_opt; /* use direct block chaining for direct jumps */
96 int mem_index; /* select memory access functions */
97 uint64_t flags; /* all execution flags */
98 struct TranslationBlock *tb;
99 int popl_esp_hack; /* for correct popl with esp base handling */
100 int rip_offset; /* only used in x86_64, but left for simplicity */
102 int cpuid_ext_features;
105 static void gen_eob(DisasContext *s);
106 static void gen_jmp(DisasContext *s, target_ulong eip);
107 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109 /* i386 arith/logic operations */
129 OP_SHL1, /* undocumented */
142 /* I386 int registers */
143 OR_EAX, /* MUST be even numbered */
152 OR_TMP0 = 16, /* temporary operand register */
154 OR_A0, /* temporary register used when doing address evaluation */
157 static inline void gen_op_movl_T0_0(void)
159 tcg_gen_movi_tl(cpu_T[0], 0);
162 static inline void gen_op_movl_T0_im(int32_t val)
164 tcg_gen_movi_tl(cpu_T[0], val);
167 static inline void gen_op_movl_T0_imu(uint32_t val)
169 tcg_gen_movi_tl(cpu_T[0], val);
172 static inline void gen_op_movl_T1_im(int32_t val)
174 tcg_gen_movi_tl(cpu_T[1], val);
177 static inline void gen_op_movl_T1_imu(uint32_t val)
179 tcg_gen_movi_tl(cpu_T[1], val);
182 static inline void gen_op_movl_A0_im(uint32_t val)
184 tcg_gen_movi_tl(cpu_A0, val);
188 static inline void gen_op_movq_A0_im(int64_t val)
190 tcg_gen_movi_tl(cpu_A0, val);
194 static inline void gen_movtl_T0_im(target_ulong val)
196 tcg_gen_movi_tl(cpu_T[0], val);
199 static inline void gen_movtl_T1_im(target_ulong val)
201 tcg_gen_movi_tl(cpu_T[1], val);
204 static inline void gen_op_andl_T0_ffff(void)
206 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
209 static inline void gen_op_andl_T0_im(uint32_t val)
211 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
214 static inline void gen_op_movl_T0_T1(void)
216 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
219 static inline void gen_op_andl_A0_ffff(void)
221 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
226 #define NB_OP_SIZES 4
228 #define DEF_REGS(prefix, suffix) \
229 prefix ## EAX ## suffix,\
230 prefix ## ECX ## suffix,\
231 prefix ## EDX ## suffix,\
232 prefix ## EBX ## suffix,\
233 prefix ## ESP ## suffix,\
234 prefix ## EBP ## suffix,\
235 prefix ## ESI ## suffix,\
236 prefix ## EDI ## suffix,\
237 prefix ## R8 ## suffix,\
238 prefix ## R9 ## suffix,\
239 prefix ## R10 ## suffix,\
240 prefix ## R11 ## suffix,\
241 prefix ## R12 ## suffix,\
242 prefix ## R13 ## suffix,\
243 prefix ## R14 ## suffix,\
244 prefix ## R15 ## suffix,
246 #else /* !TARGET_X86_64 */
248 #define NB_OP_SIZES 3
250 #define DEF_REGS(prefix, suffix) \
251 prefix ## EAX ## suffix,\
252 prefix ## ECX ## suffix,\
253 prefix ## EDX ## suffix,\
254 prefix ## EBX ## suffix,\
255 prefix ## ESP ## suffix,\
256 prefix ## EBP ## suffix,\
257 prefix ## ESI ## suffix,\
258 prefix ## EDI ## suffix,
260 #endif /* !TARGET_X86_64 */
262 #if defined(WORDS_BIGENDIAN)
263 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
264 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
265 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
266 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
267 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
269 #define REG_B_OFFSET 0
270 #define REG_H_OFFSET 1
271 #define REG_W_OFFSET 0
272 #define REG_L_OFFSET 0
273 #define REG_LH_OFFSET 4
276 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
280 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
281 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
283 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
287 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
291 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
292 /* high part of register set to zero */
293 tcg_gen_movi_tl(cpu_tmp0, 0);
294 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
298 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
303 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
309 static inline void gen_op_mov_reg_T0(int ot, int reg)
311 gen_op_mov_reg_TN(ot, 0, reg);
314 static inline void gen_op_mov_reg_T1(int ot, int reg)
316 gen_op_mov_reg_TN(ot, 1, reg);
319 static inline void gen_op_mov_reg_A0(int size, int reg)
323 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
327 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
328 /* high part of register set to zero */
329 tcg_gen_movi_tl(cpu_tmp0, 0);
330 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
334 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
339 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
345 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
349 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
352 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
357 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
362 static inline void gen_op_movl_A0_reg(int reg)
364 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
367 static inline void gen_op_addl_A0_im(int32_t val)
369 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
371 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
376 static inline void gen_op_addq_A0_im(int64_t val)
378 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
382 static void gen_add_A0_im(DisasContext *s, int val)
386 gen_op_addq_A0_im(val);
389 gen_op_addl_A0_im(val);
392 static inline void gen_op_addl_T0_T1(void)
394 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
397 static inline void gen_op_jmp_T0(void)
399 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
402 static inline void gen_op_addw_ESP_im(int32_t val)
404 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
405 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
406 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
409 static inline void gen_op_addl_ESP_im(int32_t val)
411 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
412 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
414 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
416 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
420 static inline void gen_op_addq_ESP_im(int32_t val)
422 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
423 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
424 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
428 static inline void gen_op_set_cc_op(int32_t val)
430 tcg_gen_movi_tl(cpu_tmp0, val);
431 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
434 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
436 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
438 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
439 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
441 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
445 static inline void gen_op_movl_A0_seg(int reg)
447 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
450 static inline void gen_op_addl_A0_seg(int reg)
452 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
453 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
455 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
460 static inline void gen_op_movq_A0_seg(int reg)
462 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
465 static inline void gen_op_addq_A0_seg(int reg)
467 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
468 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
471 static inline void gen_op_movq_A0_reg(int reg)
473 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
476 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
478 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
480 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
481 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
485 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
487 DEF_REGS(gen_op_cmovw_, _T1_T0)
490 DEF_REGS(gen_op_cmovl_, _T1_T0)
494 DEF_REGS(gen_op_cmovq_, _T1_T0)
499 #define DEF_ARITHC(SUFFIX)\
501 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
502 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
505 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
506 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
509 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
510 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
513 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
514 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
517 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
521 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
523 #ifndef CONFIG_USER_ONLY
529 static const int cc_op_arithb[8] = {
540 #define DEF_CMPXCHG(SUFFIX)\
541 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
542 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
543 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
544 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
546 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
550 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
552 #ifndef CONFIG_USER_ONLY
558 #define DEF_SHIFT(SUFFIX)\
560 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
561 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
562 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
563 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
564 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
565 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
566 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
567 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
570 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
571 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
572 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
573 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
574 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
575 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
576 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
577 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
580 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
581 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
584 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
587 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
590 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
591 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
592 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
593 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
594 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
595 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
596 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
597 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
600 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
604 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
606 #ifndef CONFIG_USER_ONLY
612 #define DEF_SHIFTD(SUFFIX, op)\
618 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
619 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
622 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
623 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
626 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
627 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
630 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
634 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
638 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
640 #ifndef CONFIG_USER_ONLY
641 DEF_SHIFTD(_kernel, im)
642 DEF_SHIFTD(_user, im)
646 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
647 DEF_SHIFTD(_raw, ECX)
648 #ifndef CONFIG_USER_ONLY
649 DEF_SHIFTD(_kernel, ECX)
650 DEF_SHIFTD(_user, ECX)
654 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
657 gen_op_btsw_T0_T1_cc,
658 gen_op_btrw_T0_T1_cc,
659 gen_op_btcw_T0_T1_cc,
663 gen_op_btsl_T0_T1_cc,
664 gen_op_btrl_T0_T1_cc,
665 gen_op_btcl_T0_T1_cc,
670 gen_op_btsq_T0_T1_cc,
671 gen_op_btrq_T0_T1_cc,
672 gen_op_btcq_T0_T1_cc,
677 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
678 gen_op_add_bitw_A0_T1,
679 gen_op_add_bitl_A0_T1,
680 X86_64_ONLY(gen_op_add_bitq_A0_T1),
683 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
700 static inline void gen_op_lds_T0_A0(int idx)
702 int mem_index = (idx >> 2) - 1;
705 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
708 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
712 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
717 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
718 static inline void gen_op_ld_T0_A0(int idx)
720 int mem_index = (idx >> 2) - 1;
723 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
726 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
729 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
733 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
738 static inline void gen_op_ldu_T0_A0(int idx)
740 gen_op_ld_T0_A0(idx);
743 static inline void gen_op_ld_T1_A0(int idx)
745 int mem_index = (idx >> 2) - 1;
748 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
751 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
754 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
758 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
763 static inline void gen_op_st_T0_A0(int idx)
765 int mem_index = (idx >> 2) - 1;
768 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
771 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
774 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
778 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
783 static inline void gen_op_st_T1_A0(int idx)
785 int mem_index = (idx >> 2) - 1;
788 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
791 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
794 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
798 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
803 static inline void gen_jmp_im(target_ulong pc)
805 tcg_gen_movi_tl(cpu_tmp0, pc);
806 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
809 static inline void gen_string_movl_A0_ESI(DisasContext *s)
813 override = s->override;
817 gen_op_movq_A0_seg(override);
818 gen_op_addq_A0_reg_sN(0, R_ESI);
820 gen_op_movq_A0_reg(R_ESI);
826 if (s->addseg && override < 0)
829 gen_op_movl_A0_seg(override);
830 gen_op_addl_A0_reg_sN(0, R_ESI);
832 gen_op_movl_A0_reg(R_ESI);
835 /* 16 address, always override */
838 gen_op_movl_A0_reg(R_ESI);
839 gen_op_andl_A0_ffff();
840 gen_op_addl_A0_seg(override);
844 static inline void gen_string_movl_A0_EDI(DisasContext *s)
848 gen_op_movq_A0_reg(R_EDI);
853 gen_op_movl_A0_seg(R_ES);
854 gen_op_addl_A0_reg_sN(0, R_EDI);
856 gen_op_movl_A0_reg(R_EDI);
859 gen_op_movl_A0_reg(R_EDI);
860 gen_op_andl_A0_ffff();
861 gen_op_addl_A0_seg(R_ES);
865 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
866 gen_op_movl_T0_Dshiftb,
867 gen_op_movl_T0_Dshiftw,
868 gen_op_movl_T0_Dshiftl,
869 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
872 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
875 X86_64_ONLY(gen_op_jnz_ecxq),
878 static GenOpFunc1 *gen_op_jz_ecx[3] = {
881 X86_64_ONLY(gen_op_jz_ecxq),
884 static GenOpFunc *gen_op_dec_ECX[3] = {
887 X86_64_ONLY(gen_op_decq_ECX),
890 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
895 X86_64_ONLY(gen_op_jnz_subq),
901 X86_64_ONLY(gen_op_jz_subq),
905 static GenOpFunc *gen_op_in_DX_T0[3] = {
911 static GenOpFunc *gen_op_out_DX_T0[3] = {
917 static GenOpFunc *gen_op_in[3] = {
923 static GenOpFunc *gen_op_out[3] = {
929 static GenOpFunc *gen_check_io_T0[3] = {
935 static GenOpFunc *gen_check_io_DX[3] = {
941 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
944 if (s->cc_op != CC_OP_DYNAMIC)
945 gen_op_set_cc_op(s->cc_op);
948 gen_check_io_DX[ot]();
950 gen_check_io_T0[ot]();
954 static inline void gen_movs(DisasContext *s, int ot)
956 gen_string_movl_A0_ESI(s);
957 gen_op_ld_T0_A0(ot + s->mem_index);
958 gen_string_movl_A0_EDI(s);
959 gen_op_st_T0_A0(ot + s->mem_index);
960 gen_op_movl_T0_Dshift[ot]();
963 gen_op_addq_ESI_T0();
964 gen_op_addq_EDI_T0();
968 gen_op_addl_ESI_T0();
969 gen_op_addl_EDI_T0();
971 gen_op_addw_ESI_T0();
972 gen_op_addw_EDI_T0();
976 static inline void gen_update_cc_op(DisasContext *s)
978 if (s->cc_op != CC_OP_DYNAMIC) {
979 gen_op_set_cc_op(s->cc_op);
980 s->cc_op = CC_OP_DYNAMIC;
984 /* XXX: does not work with gdbstub "ice" single step - not a
986 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
990 l1 = gen_new_label();
991 l2 = gen_new_label();
992 gen_op_jnz_ecx[s->aflag](l1);
994 gen_jmp_tb(s, next_eip, 1);
999 static inline void gen_stos(DisasContext *s, int ot)
1001 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1002 gen_string_movl_A0_EDI(s);
1003 gen_op_st_T0_A0(ot + s->mem_index);
1004 gen_op_movl_T0_Dshift[ot]();
1005 #ifdef TARGET_X86_64
1006 if (s->aflag == 2) {
1007 gen_op_addq_EDI_T0();
1011 gen_op_addl_EDI_T0();
1013 gen_op_addw_EDI_T0();
1017 static inline void gen_lods(DisasContext *s, int ot)
1019 gen_string_movl_A0_ESI(s);
1020 gen_op_ld_T0_A0(ot + s->mem_index);
1021 gen_op_mov_reg_T0(ot, R_EAX);
1022 gen_op_movl_T0_Dshift[ot]();
1023 #ifdef TARGET_X86_64
1024 if (s->aflag == 2) {
1025 gen_op_addq_ESI_T0();
1029 gen_op_addl_ESI_T0();
1031 gen_op_addw_ESI_T0();
1035 static inline void gen_scas(DisasContext *s, int ot)
1037 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1038 gen_string_movl_A0_EDI(s);
1039 gen_op_ld_T1_A0(ot + s->mem_index);
1040 gen_op_cmpl_T0_T1_cc();
1041 gen_op_movl_T0_Dshift[ot]();
1042 #ifdef TARGET_X86_64
1043 if (s->aflag == 2) {
1044 gen_op_addq_EDI_T0();
1048 gen_op_addl_EDI_T0();
1050 gen_op_addw_EDI_T0();
1054 static inline void gen_cmps(DisasContext *s, int ot)
1056 gen_string_movl_A0_ESI(s);
1057 gen_op_ld_T0_A0(ot + s->mem_index);
1058 gen_string_movl_A0_EDI(s);
1059 gen_op_ld_T1_A0(ot + s->mem_index);
1060 gen_op_cmpl_T0_T1_cc();
1061 gen_op_movl_T0_Dshift[ot]();
1062 #ifdef TARGET_X86_64
1063 if (s->aflag == 2) {
1064 gen_op_addq_ESI_T0();
1065 gen_op_addq_EDI_T0();
1069 gen_op_addl_ESI_T0();
1070 gen_op_addl_EDI_T0();
1072 gen_op_addw_ESI_T0();
1073 gen_op_addw_EDI_T0();
1077 static inline void gen_ins(DisasContext *s, int ot)
1079 gen_string_movl_A0_EDI(s);
1081 gen_op_st_T0_A0(ot + s->mem_index);
1082 gen_op_in_DX_T0[ot]();
1083 gen_op_st_T0_A0(ot + s->mem_index);
1084 gen_op_movl_T0_Dshift[ot]();
1085 #ifdef TARGET_X86_64
1086 if (s->aflag == 2) {
1087 gen_op_addq_EDI_T0();
1091 gen_op_addl_EDI_T0();
1093 gen_op_addw_EDI_T0();
1097 static inline void gen_outs(DisasContext *s, int ot)
1099 gen_string_movl_A0_ESI(s);
1100 gen_op_ld_T0_A0(ot + s->mem_index);
1101 gen_op_out_DX_T0[ot]();
1102 gen_op_movl_T0_Dshift[ot]();
1103 #ifdef TARGET_X86_64
1104 if (s->aflag == 2) {
1105 gen_op_addq_ESI_T0();
1109 gen_op_addl_ESI_T0();
1111 gen_op_addw_ESI_T0();
1115 /* same method as Valgrind : we generate jumps to current or next
1117 #define GEN_REPZ(op) \
1118 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1119 target_ulong cur_eip, target_ulong next_eip) \
1122 gen_update_cc_op(s); \
1123 l2 = gen_jz_ecx_string(s, next_eip); \
1124 gen_ ## op(s, ot); \
1125 gen_op_dec_ECX[s->aflag](); \
1126 /* a loop would cause two single step exceptions if ECX = 1 \
1127 before rep string_insn */ \
1129 gen_op_jz_ecx[s->aflag](l2); \
1130 gen_jmp(s, cur_eip); \
1133 #define GEN_REPZ2(op) \
1134 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1135 target_ulong cur_eip, \
1136 target_ulong next_eip, \
1140 gen_update_cc_op(s); \
1141 l2 = gen_jz_ecx_string(s, next_eip); \
1142 gen_ ## op(s, ot); \
1143 gen_op_dec_ECX[s->aflag](); \
1144 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1145 gen_op_string_jnz_sub[nz][ot](l2);\
1147 gen_op_jz_ecx[s->aflag](l2); \
1148 gen_jmp(s, cur_eip); \
1170 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1201 #ifdef TARGET_X86_64
1204 BUGGY_64(gen_op_jb_subq),
1206 BUGGY_64(gen_op_jbe_subq),
1209 BUGGY_64(gen_op_jl_subq),
1210 BUGGY_64(gen_op_jle_subq),
1214 static GenOpFunc1 *gen_op_loop[3][4] = {
1225 #ifdef TARGET_X86_64
1234 static GenOpFunc *gen_setcc_slow[8] = {
1245 static GenOpFunc *gen_setcc_sub[4][8] = {
1248 gen_op_setb_T0_subb,
1249 gen_op_setz_T0_subb,
1250 gen_op_setbe_T0_subb,
1251 gen_op_sets_T0_subb,
1253 gen_op_setl_T0_subb,
1254 gen_op_setle_T0_subb,
1258 gen_op_setb_T0_subw,
1259 gen_op_setz_T0_subw,
1260 gen_op_setbe_T0_subw,
1261 gen_op_sets_T0_subw,
1263 gen_op_setl_T0_subw,
1264 gen_op_setle_T0_subw,
1268 gen_op_setb_T0_subl,
1269 gen_op_setz_T0_subl,
1270 gen_op_setbe_T0_subl,
1271 gen_op_sets_T0_subl,
1273 gen_op_setl_T0_subl,
1274 gen_op_setle_T0_subl,
1276 #ifdef TARGET_X86_64
1279 gen_op_setb_T0_subq,
1280 gen_op_setz_T0_subq,
1281 gen_op_setbe_T0_subq,
1282 gen_op_sets_T0_subq,
1284 gen_op_setl_T0_subq,
1285 gen_op_setle_T0_subq,
1290 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1291 gen_op_fadd_ST0_FT0,
1292 gen_op_fmul_ST0_FT0,
1293 gen_op_fcom_ST0_FT0,
1294 gen_op_fcom_ST0_FT0,
1295 gen_op_fsub_ST0_FT0,
1296 gen_op_fsubr_ST0_FT0,
1297 gen_op_fdiv_ST0_FT0,
1298 gen_op_fdivr_ST0_FT0,
1301 /* NOTE the exception in "r" op ordering */
1302 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1303 gen_op_fadd_STN_ST0,
1304 gen_op_fmul_STN_ST0,
1307 gen_op_fsubr_STN_ST0,
1308 gen_op_fsub_STN_ST0,
1309 gen_op_fdivr_STN_ST0,
1310 gen_op_fdiv_STN_ST0,
1313 /* if d == OR_TMP0, it means memory operand (address in A0) */
1314 static void gen_op(DisasContext *s1, int op, int ot, int d)
1316 GenOpFunc *gen_update_cc;
1319 gen_op_mov_TN_reg(ot, 0, d);
1321 gen_op_ld_T0_A0(ot + s1->mem_index);
1326 if (s1->cc_op != CC_OP_DYNAMIC)
1327 gen_op_set_cc_op(s1->cc_op);
1329 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1330 gen_op_mov_reg_T0(ot, d);
1332 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334 s1->cc_op = CC_OP_DYNAMIC;
1337 gen_op_addl_T0_T1();
1338 s1->cc_op = CC_OP_ADDB + ot;
1339 gen_update_cc = gen_op_update2_cc;
1342 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1343 s1->cc_op = CC_OP_SUBB + ot;
1344 gen_update_cc = gen_op_update2_cc;
1348 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1349 s1->cc_op = CC_OP_LOGICB + ot;
1350 gen_update_cc = gen_op_update1_cc;
1353 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1354 s1->cc_op = CC_OP_LOGICB + ot;
1355 gen_update_cc = gen_op_update1_cc;
1358 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1359 s1->cc_op = CC_OP_LOGICB + ot;
1360 gen_update_cc = gen_op_update1_cc;
1363 gen_op_cmpl_T0_T1_cc();
1364 s1->cc_op = CC_OP_SUBB + ot;
1365 gen_update_cc = NULL;
1368 if (op != OP_CMPL) {
1370 gen_op_mov_reg_T0(ot, d);
1372 gen_op_st_T0_A0(ot + s1->mem_index);
1374 /* the flags update must happen after the memory write (precise
1375 exception support) */
1381 /* if d == OR_TMP0, it means memory operand (address in A0) */
1382 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1385 gen_op_mov_TN_reg(ot, 0, d);
1387 gen_op_ld_T0_A0(ot + s1->mem_index);
1388 if (s1->cc_op != CC_OP_DYNAMIC)
1389 gen_op_set_cc_op(s1->cc_op);
1392 s1->cc_op = CC_OP_INCB + ot;
1395 s1->cc_op = CC_OP_DECB + ot;
1398 gen_op_mov_reg_T0(ot, d);
1400 gen_op_st_T0_A0(ot + s1->mem_index);
1401 gen_op_update_inc_cc();
1404 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1407 gen_op_mov_TN_reg(ot, 0, d);
1409 gen_op_ld_T0_A0(ot + s1->mem_index);
1411 gen_op_mov_TN_reg(ot, 1, s);
1412 /* for zero counts, flags are not updated, so must do it dynamically */
1413 if (s1->cc_op != CC_OP_DYNAMIC)
1414 gen_op_set_cc_op(s1->cc_op);
1417 gen_op_shift_T0_T1_cc[ot][op]();
1419 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1421 gen_op_mov_reg_T0(ot, d);
1422 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1425 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1427 /* currently not optimized */
1428 gen_op_movl_T1_im(c);
1429 gen_shift(s1, op, ot, d, OR_TMP1);
1432 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1440 int mod, rm, code, override, must_add_seg;
1442 override = s->override;
1443 must_add_seg = s->addseg;
1446 mod = (modrm >> 6) & 3;
1458 code = ldub_code(s->pc++);
1459 scale = (code >> 6) & 3;
1460 index = ((code >> 3) & 7) | REX_X(s);
1467 if ((base & 7) == 5) {
1469 disp = (int32_t)ldl_code(s->pc);
1471 if (CODE64(s) && !havesib) {
1472 disp += s->pc + s->rip_offset;
1479 disp = (int8_t)ldub_code(s->pc++);
1483 disp = ldl_code(s->pc);
1489 /* for correct popl handling with esp */
1490 if (base == 4 && s->popl_esp_hack)
1491 disp += s->popl_esp_hack;
1492 #ifdef TARGET_X86_64
1493 if (s->aflag == 2) {
1494 gen_op_movq_A0_reg(base);
1496 gen_op_addq_A0_im(disp);
1501 gen_op_movl_A0_reg(base);
1503 gen_op_addl_A0_im(disp);
1506 #ifdef TARGET_X86_64
1507 if (s->aflag == 2) {
1508 gen_op_movq_A0_im(disp);
1512 gen_op_movl_A0_im(disp);
1515 /* XXX: index == 4 is always invalid */
1516 if (havesib && (index != 4 || scale != 0)) {
1517 #ifdef TARGET_X86_64
1518 if (s->aflag == 2) {
1519 gen_op_addq_A0_reg_sN(scale, index);
1523 gen_op_addl_A0_reg_sN(scale, index);
1528 if (base == R_EBP || base == R_ESP)
1533 #ifdef TARGET_X86_64
1534 if (s->aflag == 2) {
1535 gen_op_addq_A0_seg(override);
1539 gen_op_addl_A0_seg(override);
1546 disp = lduw_code(s->pc);
1548 gen_op_movl_A0_im(disp);
1549 rm = 0; /* avoid SS override */
1556 disp = (int8_t)ldub_code(s->pc++);
1560 disp = lduw_code(s->pc);
1566 gen_op_movl_A0_reg(R_EBX);
1567 gen_op_addl_A0_reg_sN(0, R_ESI);
1570 gen_op_movl_A0_reg(R_EBX);
1571 gen_op_addl_A0_reg_sN(0, R_EDI);
1574 gen_op_movl_A0_reg(R_EBP);
1575 gen_op_addl_A0_reg_sN(0, R_ESI);
1578 gen_op_movl_A0_reg(R_EBP);
1579 gen_op_addl_A0_reg_sN(0, R_EDI);
1582 gen_op_movl_A0_reg(R_ESI);
1585 gen_op_movl_A0_reg(R_EDI);
1588 gen_op_movl_A0_reg(R_EBP);
1592 gen_op_movl_A0_reg(R_EBX);
1596 gen_op_addl_A0_im(disp);
1597 gen_op_andl_A0_ffff();
1601 if (rm == 2 || rm == 3 || rm == 6)
1606 gen_op_addl_A0_seg(override);
1616 static void gen_nop_modrm(DisasContext *s, int modrm)
1618 int mod, rm, base, code;
1620 mod = (modrm >> 6) & 3;
1630 code = ldub_code(s->pc++);
1666 /* used for LEA and MOV AX, mem */
1667 static void gen_add_A0_ds_seg(DisasContext *s)
1669 int override, must_add_seg;
1670 must_add_seg = s->addseg;
1672 if (s->override >= 0) {
1673 override = s->override;
1679 #ifdef TARGET_X86_64
1681 gen_op_addq_A0_seg(override);
1685 gen_op_addl_A0_seg(override);
1690 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1692 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1694 int mod, rm, opreg, disp;
1696 mod = (modrm >> 6) & 3;
1697 rm = (modrm & 7) | REX_B(s);
1701 gen_op_mov_TN_reg(ot, 0, reg);
1702 gen_op_mov_reg_T0(ot, rm);
1704 gen_op_mov_TN_reg(ot, 0, rm);
1706 gen_op_mov_reg_T0(ot, reg);
1709 gen_lea_modrm(s, modrm, &opreg, &disp);
1712 gen_op_mov_TN_reg(ot, 0, reg);
1713 gen_op_st_T0_A0(ot + s->mem_index);
1715 gen_op_ld_T0_A0(ot + s->mem_index);
1717 gen_op_mov_reg_T0(ot, reg);
1722 static inline uint32_t insn_get(DisasContext *s, int ot)
1728 ret = ldub_code(s->pc);
1732 ret = lduw_code(s->pc);
1737 ret = ldl_code(s->pc);
1744 static inline int insn_const_size(unsigned int ot)
1752 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1754 TranslationBlock *tb;
1757 pc = s->cs_base + eip;
1759 /* NOTE: we handle the case where the TB spans two pages here */
1760 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1761 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1762 /* jump to same page: we can use a direct jump */
1763 tcg_gen_goto_tb(tb_num);
1765 tcg_gen_exit_tb((long)tb + tb_num);
1767 /* jump to another page: currently not optimized */
1773 static inline void gen_jcc(DisasContext *s, int b,
1774 target_ulong val, target_ulong next_eip)
1776 TranslationBlock *tb;
1783 jcc_op = (b >> 1) & 7;
1787 /* we optimize the cmp/jcc case */
1792 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1795 /* some jumps are easy to compute */
1837 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1840 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1852 if (s->cc_op != CC_OP_DYNAMIC) {
1853 gen_op_set_cc_op(s->cc_op);
1854 s->cc_op = CC_OP_DYNAMIC;
1858 gen_setcc_slow[jcc_op]();
1859 func = gen_op_jnz_T0_label;
1869 l1 = gen_new_label();
1872 gen_goto_tb(s, 0, next_eip);
1875 gen_goto_tb(s, 1, val);
1880 if (s->cc_op != CC_OP_DYNAMIC) {
1881 gen_op_set_cc_op(s->cc_op);
1882 s->cc_op = CC_OP_DYNAMIC;
1884 gen_setcc_slow[jcc_op]();
1890 l1 = gen_new_label();
1891 l2 = gen_new_label();
1892 gen_op_jnz_T0_label(l1);
1893 gen_jmp_im(next_eip);
1894 gen_op_jmp_label(l2);
1902 static void gen_setcc(DisasContext *s, int b)
1908 jcc_op = (b >> 1) & 7;
1910 /* we optimize the cmp/jcc case */
1915 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1920 /* some jumps are easy to compute */
1947 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1950 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1958 if (s->cc_op != CC_OP_DYNAMIC)
1959 gen_op_set_cc_op(s->cc_op);
1960 func = gen_setcc_slow[jcc_op];
1969 /* move T0 to seg_reg and compute if the CPU state may change. Never
1970 call this function with seg_reg == R_CS */
1971 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1973 if (s->pe && !s->vm86) {
1974 /* XXX: optimize by finding processor state dynamically */
1975 if (s->cc_op != CC_OP_DYNAMIC)
1976 gen_op_set_cc_op(s->cc_op);
1977 gen_jmp_im(cur_eip);
1978 gen_op_movl_seg_T0(seg_reg);
1979 /* abort translation because the addseg value may change or
1980 because ss32 may change. For R_SS, translation must always
1981 stop as a special handling must be done to disable hardware
1982 interrupts for the next instruction */
1983 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1986 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1987 if (seg_reg == R_SS)
1992 #define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
/* Emit an SVM IOIO intercept check before an I/O instruction when the
   IOIO_PROT intercept is enabled. T1 receives the next-insn address so
   the VMEXIT can record it; `type` packs the 64-bit exit info, split
   into two 32-bit immediates for the helper. Not built for user mode. */
1995 gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1997 #if !defined(CONFIG_USER_ONLY)
1998 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
1999 if (s->cc_op != CC_OP_DYNAMIC)
2000 gen_op_set_cc_op(s->cc_op);
2001 SVM_movq_T1_im(s->pc - s->cs_base);
2002 gen_jmp_im(pc_start - s->cs_base);
2004 gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
/* the helper may or may not exit the guest, so cc state is unknown after it */
2005 s->cc_op = CC_OP_DYNAMIC;
2006 /* FIXME: maybe we could move the io intercept vector to the TB as well
2007 so we know if this is an EOB or not ... let's assume it's not
2014 static inline int svm_is_rep(int prefixes)
2016 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
/* Emit an SVM intercept check for exit code `type` with exit-info
   `param`. Bails out immediately when no SVM intercepts are active.
   Three classes are handled: CR/DR accesses (helper decides at run
   time), MSR accesses, and plain intercepts that always VMEXIT.
   NOTE(review): interior lines (switch head, braces, returns) are
   missing from this excerpt. */
2020 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2021 uint64_t type, uint64_t param)
2023 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2024 /* no SVM activated */
2027 /* CRx and DRx reads/writes */
2028 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2029 if (s->cc_op != CC_OP_DYNAMIC) {
2030 gen_op_set_cc_op(s->cc_op);
2031 s->cc_op = CC_OP_DYNAMIC;
2033 gen_jmp_im(pc_start - s->cs_base);
2034 SVM_movq_T1_im(param);
2036 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2037 /* this is a special case as we do not know if the interception occurs
2038 so we assume there was none */
2041 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2042 if (s->cc_op != CC_OP_DYNAMIC) {
2043 gen_op_set_cc_op(s->cc_op);
2044 s->cc_op = CC_OP_DYNAMIC;
2046 gen_jmp_im(pc_start - s->cs_base);
2047 SVM_movq_T1_im(param);
2049 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2050 /* this is a special case as we do not know if the interception occurs
2051 so we assume there was none */
2056 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2057 if (s->cc_op != CC_OP_DYNAMIC) {
2058 gen_op_set_cc_op(s->cc_op);
/* NOTE(review): this branch sets CC_OP_EFLAGS where the two above use
   CC_OP_DYNAMIC — possibly intentional (unconditional vmexit), but
   worth confirming against the other intercept paths */
2059 s->cc_op = CC_OP_EFLAGS;
2061 gen_jmp_im(pc_start - s->cs_base);
2062 SVM_movq_T1_im(param);
2064 gen_op_svm_vmexit(type >> 32, type);
2065 /* we can optimize this one so TBs don't get longer
2066 than up to vmexit */
/* Convenience wrapper: intercept check with a zero exit-info parameter. */
2075 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2077 return gen_svm_check_intercept_param(s, pc_start, type, 0);
/* Add `addend` to the stack pointer, using the width implied by the
   current mode: 64-bit RSP, 32-bit ESP, or 16-bit SP.
   NOTE(review): the CODE64/ss32 selection lines are missing here. */
2080 static inline void gen_stack_update(DisasContext *s, int addend)
2082 #ifdef TARGET_X86_64
2084 gen_op_addq_ESP_im(addend);
2088 gen_op_addl_ESP_im(addend);
2090 gen_op_addw_ESP_im(addend);
2094 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0 on the stack. The store width and ESP handling depend on
   dflag (operand size), ss32 (32-bit stack) and addseg (SS base != 0).
   ESP is only written back after the store so a store fault leaves it
   unchanged. NOTE(review): branch/brace lines are missing here. */
2095 static void gen_push_T0(DisasContext *s)
2097 #ifdef TARGET_X86_64
2099 gen_op_movq_A0_reg(R_ESP);
2101 gen_op_addq_A0_im(-8);
2102 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
/* 16-bit operand size in long mode: only 2 bytes are pushed */
2104 gen_op_addq_A0_im(-2);
2105 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2107 gen_op_mov_reg_A0(2, R_ESP);
2111 gen_op_movl_A0_reg(R_ESP);
2113 gen_op_addl_A0_im(-2);
2115 gen_op_addl_A0_im(-4);
/* keep the new ESP in T1; A0 then gets the SS base added for the store */
2118 gen_op_movl_T1_A0();
2119 gen_op_addl_A0_seg(R_SS);
2122 gen_op_andl_A0_ffff();
2123 gen_op_movl_T1_A0();
2124 gen_op_addl_A0_seg(R_SS);
2126 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2127 if (s->ss32 && !s->addseg)
2128 gen_op_mov_reg_A0(1, R_ESP);
2130 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2134 /* generate a push. It depends on ss32, addseg and dflag */
2135 /* slower version for T1, only used for call Ev */
/* Push T1 on the stack (slower variant used for call Ev, where T0 is
   already occupied by the call target).
   NOTE(review): branch/brace lines are missing from this excerpt. */
2136 static void gen_push_T1(DisasContext *s)
2138 #ifdef TARGET_X86_64
2140 gen_op_movq_A0_reg(R_ESP);
2142 gen_op_addq_A0_im(-8);
2143 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145 gen_op_addq_A0_im(-2);
/* NOTE(review): suspected bug — this stores T0, but this function's
   contract is to push T1; looks copy-pasted from gen_push_T0.
   Should probably be gen_op_st_T1_A0 — confirm against upstream. */
2146 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2148 gen_op_mov_reg_A0(2, R_ESP);
2152 gen_op_movl_A0_reg(R_ESP);
2154 gen_op_addl_A0_im(-2);
2156 gen_op_addl_A0_im(-4);
2159 gen_op_addl_A0_seg(R_SS);
2162 gen_op_andl_A0_ffff();
2163 gen_op_addl_A0_seg(R_SS);
2165 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167 if (s->ss32 && !s->addseg)
2168 gen_op_mov_reg_A0(1, R_ESP);
/* ESP updated after the store so a fault leaves it untouched */
2170 gen_stack_update(s, (-2) << s->dflag);
2174 /* two step pop is necessary for precise exceptions */
/* Load the stack top into T0 without updating ESP; the separate
   gen_pop_update() step makes the pop precise w.r.t. exceptions
   (a faulting load must not have modified ESP). */
2175 static void gen_pop_T0(DisasContext *s)
2177 #ifdef TARGET_X86_64
2179 gen_op_movq_A0_reg(R_ESP);
2180 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2184 gen_op_movl_A0_reg(R_ESP);
2187 gen_op_addl_A0_seg(R_SS);
2189 gen_op_andl_A0_ffff();
2190 gen_op_addl_A0_seg(R_SS);
2192 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
/* Second half of a pop: advance the stack pointer by the popped size. */
2196 static void gen_pop_update(DisasContext *s)
2198 #ifdef TARGET_X86_64
2199 if (CODE64(s) && s->dflag) {
2200 gen_stack_update(s, 8);
2204 gen_stack_update(s, 2 << s->dflag);
/* Compute the current stack address into A0 (masked to 16 bits for a
   16-bit stack) and keep the raw offset in T1; SS base added as needed. */
2208 static void gen_stack_A0(DisasContext *s)
2210 gen_op_movl_A0_reg(R_ESP);
2212 gen_op_andl_A0_ffff();
2213 gen_op_movl_T1_A0();
2215 gen_op_addl_A0_seg(R_SS);
2218 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store the eight GPRs (EAX..EDI, i.e. reg 7-i going
   EDI-first at the lowest address) below the current stack top, then
   write the pre-decremented pointer (saved in T1) back to ESP. */
2219 static void gen_pusha(DisasContext *s)
2222 gen_op_movl_A0_reg(R_ESP);
2223 gen_op_addl_A0_im(-16 << s->dflag);
2225 gen_op_andl_A0_ffff();
2226 gen_op_movl_T1_A0();
2228 gen_op_addl_A0_seg(R_SS);
2229 for(i = 0;i < 8; i++) {
2230 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2231 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2232 gen_op_addl_A0_im(2 << s->dflag);
2234 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2237 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload the eight GPRs from the stack (ESP slot itself is
   skipped per the ISA), then set ESP to old ESP + frame size via T1. */
2238 static void gen_popa(DisasContext *s)
2241 gen_op_movl_A0_reg(R_ESP);
2243 gen_op_andl_A0_ffff();
2244 gen_op_movl_T1_A0();
2245 gen_op_addl_T1_im(16 << s->dflag);
2247 gen_op_addl_A0_seg(R_SS);
2248 for(i = 0;i < 8; i++) {
2249 /* ESP is not reloaded */
2251 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2252 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2254 gen_op_addl_A0_im(2 << s->dflag);
2256 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* ENTER: push EBP, optionally copy `level` display entries (done by the
   enter_level helpers), set EBP to the new frame base and reserve
   `esp_addend` extra bytes of locals. 64-bit and legacy paths differ in
   operand size and SS-base handling.
   NOTE(review): branch/brace lines are missing from this excerpt. */
2259 static void gen_enter(DisasContext *s, int esp_addend, int level)
2264 #ifdef TARGET_X86_64
2266 ot = s->dflag ? OT_QUAD : OT_WORD;
2269 gen_op_movl_A0_reg(R_ESP);
2270 gen_op_addq_A0_im(-opsize);
2271 gen_op_movl_T1_A0();
/* push the old frame pointer */
2274 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2275 gen_op_st_T0_A0(ot + s->mem_index);
2277 gen_op_enter64_level(level, (ot == OT_QUAD));
2279 gen_op_mov_reg_T1(ot, R_EBP);
2280 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2281 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2285 ot = s->dflag + OT_WORD;
2286 opsize = 2 << s->dflag;
2288 gen_op_movl_A0_reg(R_ESP);
2289 gen_op_addl_A0_im(-opsize);
2291 gen_op_andl_A0_ffff();
2292 gen_op_movl_T1_A0();
2294 gen_op_addl_A0_seg(R_SS);
2296 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2297 gen_op_st_T0_A0(ot + s->mem_index);
2299 gen_op_enter_level(level, s->dflag);
2301 gen_op_mov_reg_T1(ot, R_EBP);
2302 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2303 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
/* Raise exception `trapno` at guest address cur_eip: flush lazy cc
   state, sync EIP, then call the raise helper (which never returns
   to the TB). */
2307 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2309 if (s->cc_op != CC_OP_DYNAMIC)
2310 gen_op_set_cc_op(s->cc_op);
2311 gen_jmp_im(cur_eip);
2312 gen_op_raise_exception(trapno);
2316 /* an interrupt is different from an exception because of the
privilege checks (the comment continuation appears truncated in this excerpt) */
/* Raise software interrupt `intno` (INT n); the helper also receives
   the instruction length (next_eip - cur_eip) so it can push the
   correct return address. */
2318 static void gen_interrupt(DisasContext *s, int intno,
2319 target_ulong cur_eip, target_ulong next_eip)
2321 if (s->cc_op != CC_OP_DYNAMIC)
2322 gen_op_set_cc_op(s->cc_op);
2323 gen_jmp_im(cur_eip);
2324 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
/* Emit a debug trap at cur_eip (used for breakpoints): flush cc state
   and EIP first so the debugger sees a consistent CPU state.
   NOTE(review): the trailing helper call/braces are not in this excerpt. */
2328 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2330 if (s->cc_op != CC_OP_DYNAMIC)
2331 gen_op_set_cc_op(s->cc_op);
2332 gen_jmp_im(cur_eip);
2337 /* generate a generic end of block. Trace exception is also generated
/* Generic end-of-block: flush lazy cc state, clear the one-insn IRQ
   inhibit (from mov ss / sti) if set, and emit the single-step trap
   when single-stepping. NOTE(review): the final exit-TB lines are not
   in this excerpt. */
2339 static void gen_eob(DisasContext *s)
2341 if (s->cc_op != CC_OP_DYNAMIC)
2342 gen_op_set_cc_op(s->cc_op);
2343 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2344 gen_op_reset_inhibit_irq();
2346 if (s->singlestep_enabled) {
2349 gen_op_single_step();
2356 /* generate a jump to eip. No segment change must happen before as a
2357 direct call to the next block may occur */
/* Jump to guest address `eip`, chaining to the next TB via slot
   `tb_num` when possible; cc state must be flushed first because the
   target TB makes no assumption about it. */
2358 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2361 if (s->cc_op != CC_OP_DYNAMIC) {
2362 gen_op_set_cc_op(s->cc_op);
2363 s->cc_op = CC_OP_DYNAMIC;
2365 gen_goto_tb(s, tb_num, eip);
/* Direct jump helper: always uses TB-chaining slot 0. */
2373 static void gen_jmp(DisasContext *s, target_ulong eip)
2375 gen_jmp_tb(s, eip, 0);
/* 64-bit env<-[A0] loads, indexed by s->mem_index >> 2
   (raw / kernel / user address spaces). */
2378 static GenOpFunc1 *gen_ldq_env_A0[3] = {
2379 gen_op_ldq_raw_env_A0,
2380 #ifndef CONFIG_USER_ONLY
2381 gen_op_ldq_kernel_env_A0,
2382 gen_op_ldq_user_env_A0,
/* 64-bit [A0]<-env stores, same raw/kernel/user indexing as the loads. */
2386 static GenOpFunc1 *gen_stq_env_A0[3] = {
2387 gen_op_stq_raw_env_A0,
2388 #ifndef CONFIG_USER_ONLY
2389 gen_op_stq_kernel_env_A0,
2390 gen_op_stq_user_env_A0,
/* 128-bit (octa/XMM) env<-[A0] loads, raw/kernel/user indexed. */
2394 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2395 gen_op_ldo_raw_env_A0,
2396 #ifndef CONFIG_USER_ONLY
2397 gen_op_ldo_kernel_env_A0,
2398 gen_op_ldo_user_env_A0,
/* 128-bit (octa/XMM) [A0]<-env stores, raw/kernel/user indexed. */
2402 static GenOpFunc1 *gen_sto_env_A0[3] = {
2403 gen_op_sto_raw_env_A0,
2404 #ifndef CONFIG_USER_ONLY
2405 gen_op_sto_kernel_env_A0,
2406 gen_op_sto_user_env_A0,
2410 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2411 #define SSE_DUMMY ((GenOpFunc2 *)2)
2413 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2414 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2415 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
/* Main 0x0f-prefixed SSE/MMX dispatch table, indexed [opcode][b1] where
   b1 = 0 none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 prefix. SSE_SPECIAL entries
   are decoded by hand in gen_sse(); SSE_DUMMY marks insns with no
   operands to move (femms/emms/3DNow!). */
2417 static GenOpFunc2 *sse_op_table1[256][4] = {
2418 /* 3DNow! extensions */
2419 [0x0e] = { SSE_DUMMY }, /* femms */
2420 [0x0f] = { SSE_DUMMY }, /* pf... */
2421 /* pure SSE operations */
2422 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2423 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2424 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2425 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2426 [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2427 [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2428 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2429 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2431 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2432 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2433 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2434 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2435 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2436 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2437 [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2438 [0x2f] = { gen_op_comiss, gen_op_comisd },
2439 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2440 [0x51] = SSE_FOP(sqrt),
2441 [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2442 [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2443 [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2444 [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2445 [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2446 [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2447 [0x58] = SSE_FOP(add),
2448 [0x59] = SSE_FOP(mul),
2449 [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
2450 gen_op_cvtss2sd, gen_op_cvtsd2ss },
2451 [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2452 [0x5c] = SSE_FOP(sub),
2453 [0x5d] = SSE_FOP(min),
2454 [0x5e] = SSE_FOP(div),
2455 [0x5f] = SSE_FOP(max),
2457 [0xc2] = SSE_FOP(cmpeq),
2458 [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2460 /* MMX ops and their SSE extensions */
2461 [0x60] = MMX_OP2(punpcklbw),
2462 [0x61] = MMX_OP2(punpcklwd),
2463 [0x62] = MMX_OP2(punpckldq),
2464 [0x63] = MMX_OP2(packsswb),
2465 [0x64] = MMX_OP2(pcmpgtb),
2466 [0x65] = MMX_OP2(pcmpgtw),
2467 [0x66] = MMX_OP2(pcmpgtl),
2468 [0x67] = MMX_OP2(packuswb),
2469 [0x68] = MMX_OP2(punpckhbw),
2470 [0x69] = MMX_OP2(punpckhwd),
2471 [0x6a] = MMX_OP2(punpckhdq),
2472 [0x6b] = MMX_OP2(packssdw),
2473 [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2474 [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2475 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2476 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2477 [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
2478 (GenOpFunc2 *)gen_op_pshufd_xmm,
2479 (GenOpFunc2 *)gen_op_pshufhw_xmm,
2480 (GenOpFunc2 *)gen_op_pshuflw_xmm },
2481 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2482 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2483 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2484 [0x74] = MMX_OP2(pcmpeqb),
2485 [0x75] = MMX_OP2(pcmpeqw),
2486 [0x76] = MMX_OP2(pcmpeql),
2487 [0x77] = { SSE_DUMMY }, /* emms */
2488 [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2489 [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2490 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2491 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2492 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2493 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2494 [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2495 [0xd1] = MMX_OP2(psrlw),
2496 [0xd2] = MMX_OP2(psrld),
2497 [0xd3] = MMX_OP2(psrlq),
2498 [0xd4] = MMX_OP2(paddq),
2499 [0xd5] = MMX_OP2(pmullw),
2500 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2501 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2502 [0xd8] = MMX_OP2(psubusb),
2503 [0xd9] = MMX_OP2(psubusw),
2504 [0xda] = MMX_OP2(pminub),
2505 [0xdb] = MMX_OP2(pand),
2506 [0xdc] = MMX_OP2(paddusb),
2507 [0xdd] = MMX_OP2(paddusw),
2508 [0xde] = MMX_OP2(pmaxub),
2509 [0xdf] = MMX_OP2(pandn),
2510 [0xe0] = MMX_OP2(pavgb),
2511 [0xe1] = MMX_OP2(psraw),
2512 [0xe2] = MMX_OP2(psrad),
2513 [0xe3] = MMX_OP2(pavgw),
2514 [0xe4] = MMX_OP2(pmulhuw),
2515 [0xe5] = MMX_OP2(pmulhw),
2516 [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2517 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
2518 [0xe8] = MMX_OP2(psubsb),
2519 [0xe9] = MMX_OP2(psubsw),
2520 [0xea] = MMX_OP2(pminsw),
2521 [0xeb] = MMX_OP2(por),
2522 [0xec] = MMX_OP2(paddsb),
2523 [0xed] = MMX_OP2(paddsw),
2524 [0xee] = MMX_OP2(pmaxsw),
2525 [0xef] = MMX_OP2(pxor),
2526 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2527 [0xf1] = MMX_OP2(psllw),
2528 [0xf2] = MMX_OP2(pslld),
2529 [0xf3] = MMX_OP2(psllq),
2530 [0xf4] = MMX_OP2(pmuludq),
2531 [0xf5] = MMX_OP2(pmaddwd),
2532 [0xf6] = MMX_OP2(psadbw),
2533 [0xf7] = MMX_OP2(maskmov),
2534 [0xf8] = MMX_OP2(psubb),
2535 [0xf9] = MMX_OP2(psubw),
2536 [0xfa] = MMX_OP2(psubl),
2537 [0xfb] = MMX_OP2(psubq),
2538 [0xfc] = MMX_OP2(paddb),
2539 [0xfd] = MMX_OP2(paddw),
2540 [0xfe] = MMX_OP2(paddl),
/* Immediate shift group (opcodes 0x71/0x72/0x73): indexed by
   row*8 + modrm.reg, rows 0/8/16 = word/dword/qword; column selects
   mmx vs xmm variant. psrldq/pslldq exist only for xmm. */
2543 static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2544 [0 + 2] = MMX_OP2(psrlw),
2545 [0 + 4] = MMX_OP2(psraw),
2546 [0 + 6] = MMX_OP2(psllw),
2547 [8 + 2] = MMX_OP2(psrld),
2548 [8 + 4] = MMX_OP2(psrad),
2549 [8 + 6] = MMX_OP2(pslld),
2550 [16 + 2] = MMX_OP2(psrlq),
2551 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2552 [16 + 6] = MMX_OP2(psllq),
2553 [16 + 7] = { NULL, gen_op_pslldq_xmm },
/* int<->scalar-float conversions (cvtsi2ss/sd, cvttss/sd2si,
   cvtss/sd2si); 64-bit source/destination variants only exist on
   x86_64 builds (X86_64_ONLY yields NULL otherwise).
   NOTE(review): the 32-bit entries are missing from this excerpt. */
2556 static GenOpFunc1 *sse_op_table3[4 * 3] = {
2559 X86_64_ONLY(gen_op_cvtsq2ss),
2560 X86_64_ONLY(gen_op_cvtsq2sd),
2564 X86_64_ONLY(gen_op_cvttss2sq),
2565 X86_64_ONLY(gen_op_cvttsd2sq),
2569 X86_64_ONLY(gen_op_cvtss2sq),
2570 X86_64_ONLY(gen_op_cvtsd2sq),
2573 static GenOpFunc2 *sse_op_table4[8][4] = {
/* 3DNow! dispatch table indexed by the trailing imm8 opcode byte of
   0x0f 0x0f insns; NULL entries are invalid encodings. */
2584 static GenOpFunc2 *sse_op_table5[256] = {
2585 [0x0c] = gen_op_pi2fw,
2586 [0x0d] = gen_op_pi2fd,
2587 [0x1c] = gen_op_pf2iw,
2588 [0x1d] = gen_op_pf2id,
2589 [0x8a] = gen_op_pfnacc,
2590 [0x8e] = gen_op_pfpnacc,
2591 [0x90] = gen_op_pfcmpge,
2592 [0x94] = gen_op_pfmin,
2593 [0x96] = gen_op_pfrcp,
2594 [0x97] = gen_op_pfrsqrt,
2595 [0x9a] = gen_op_pfsub,
2596 [0x9e] = gen_op_pfadd,
2597 [0xa0] = gen_op_pfcmpgt,
2598 [0xa4] = gen_op_pfmax,
2599 [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
2600 [0xa7] = gen_op_movq, /* pfrsqit1 */
2601 [0xaa] = gen_op_pfsubr,
2602 [0xae] = gen_op_pfacc,
2603 [0xb0] = gen_op_pfcmpeq,
2604 [0xb4] = gen_op_pfmul,
2605 [0xb6] = gen_op_movq, /* pfrcpit2 */
2606 [0xb7] = gen_op_pmulhrw_mmx,
2607 [0xbb] = gen_op_pswapd,
2608 [0xbf] = gen_op_pavgb_mmx /* pavgusb */
/* Decode and translate one SSE/MMX/3DNow! instruction (opcode `b`,
   with 0x100 set for 0x0f-extended opcodes handled by the caller).
   b1 encodes the mandatory prefix (0 none, 1 = 0x66, 2 = 0xf3,
   3 = 0xf2). Table-driven via sse_op_table1..5; SSE_SPECIAL entries
   are decoded by hand in the big switch below.
   NOTE(review): many interior lines (braces, else branches, goto
   illegal_op paths) are missing from this excerpt. Also, every
   "®_addr" below is mojibake for "&reg_addr" (HTML '&reg;' entity)
   introduced by the extraction — restore before compiling. */
2611 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2613 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2614 int modrm, mod, rm, reg, reg_addr, offset_addr;
2615 GenOpFunc2 *sse_op2;
2616 GenOpFunc3 *sse_op3;
/* map the mandatory prefix to table column b1 */
2619 if (s->prefix & PREFIX_DATA)
2621 else if (s->prefix & PREFIX_REPZ)
2623 else if (s->prefix & PREFIX_REPNZ)
2627 sse_op2 = sse_op_table1[b][b1];
2630 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2640 /* simple MMX/SSE operation */
2641 if (s->flags & HF_TS_MASK) {
2642 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2645 if (s->flags & HF_EM_MASK) {
2647 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2650 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2652 if (b == 0x77 || b == 0x0e) {
2657 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2658 the static cpu state) */
2663 modrm = ldub_code(s->pc++);
2664 reg = ((modrm >> 3) & 7);
2667 mod = (modrm >> 6) & 3;
/* hand-decoded cases (moves, converts, shifts by immediate, ...) */
2668 if (sse_op2 == SSE_SPECIAL) {
2671 case 0x0e7: /* movntq */
2674 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2675 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2677 case 0x1e7: /* movntdq */
2678 case 0x02b: /* movntps */
2679 case 0x12b: /* movntpd */
2680 case 0x3f0: /* lddqu */
2683 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2684 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2686 case 0x6e: /* movd mm, ea */
2687 #ifdef TARGET_X86_64
2688 if (s->dflag == 2) {
2689 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2690 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2694 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2695 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2698 case 0x16e: /* movd xmm, ea */
2699 #ifdef TARGET_X86_64
2700 if (s->dflag == 2) {
2701 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2702 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2706 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2707 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2710 case 0x6f: /* movq mm, ea */
2712 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2713 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2716 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2717 offsetof(CPUX86State,fpregs[rm].mmx));
2720 case 0x010: /* movups */
2721 case 0x110: /* movupd */
2722 case 0x028: /* movaps */
2723 case 0x128: /* movapd */
2724 case 0x16f: /* movdqa xmm, ea */
2725 case 0x26f: /* movdqu xmm, ea */
2727 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2728 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2730 rm = (modrm & 7) | REX_B(s);
2731 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2732 offsetof(CPUX86State,xmm_regs[rm]));
2735 case 0x210: /* movss xmm, ea */
2737 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2738 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2739 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
/* memory form zeroes the upper three dwords of the destination */
2741 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2742 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2743 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2745 rm = (modrm & 7) | REX_B(s);
2746 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2747 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2750 case 0x310: /* movsd xmm, ea */
2752 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2753 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2755 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2756 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2758 rm = (modrm & 7) | REX_B(s);
2759 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2760 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2763 case 0x012: /* movlps */
2764 case 0x112: /* movlpd */
2766 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2767 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
/* register-register form is actually movhlps: low <- high of source */
2770 rm = (modrm & 7) | REX_B(s);
2771 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2772 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2775 case 0x212: /* movsldup */
2777 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2778 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2780 rm = (modrm & 7) | REX_B(s);
2781 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2782 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2783 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2784 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2786 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2787 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2788 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2789 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2791 case 0x312: /* movddup */
2793 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2794 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2796 rm = (modrm & 7) | REX_B(s);
2797 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2798 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2800 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2801 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2803 case 0x016: /* movhps */
2804 case 0x116: /* movhpd */
2806 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2807 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
/* register-register form is actually movlhps: high <- low of source */
2810 rm = (modrm & 7) | REX_B(s);
2811 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2812 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2815 case 0x216: /* movshdup */
2817 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2818 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2820 rm = (modrm & 7) | REX_B(s);
2821 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2822 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2823 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2824 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2826 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2827 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2828 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2829 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2831 case 0x7e: /* movd ea, mm */
2832 #ifdef TARGET_X86_64
2833 if (s->dflag == 2) {
2834 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2835 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2839 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2840 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2843 case 0x17e: /* movd ea, xmm */
2844 #ifdef TARGET_X86_64
2845 if (s->dflag == 2) {
2846 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2847 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2851 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2852 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2855 case 0x27e: /* movq xmm, ea */
2857 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2858 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2860 rm = (modrm & 7) | REX_B(s);
2861 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2862 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
/* movq always zeroes the high qword of the destination xmm reg */
2864 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2866 case 0x7f: /* movq ea, mm */
2868 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2869 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2872 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2873 offsetof(CPUX86State,fpregs[reg].mmx));
2876 case 0x011: /* movups */
2877 case 0x111: /* movupd */
2878 case 0x029: /* movaps */
2879 case 0x129: /* movapd */
2880 case 0x17f: /* movdqa ea, xmm */
2881 case 0x27f: /* movdqu ea, xmm */
2883 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2884 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2886 rm = (modrm & 7) | REX_B(s);
2887 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2888 offsetof(CPUX86State,xmm_regs[reg]));
2891 case 0x211: /* movss ea, xmm */
2893 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2894 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2895 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2897 rm = (modrm & 7) | REX_B(s);
2898 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2899 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2902 case 0x311: /* movsd ea, xmm */
2904 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2905 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2907 rm = (modrm & 7) | REX_B(s);
2908 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2909 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2912 case 0x013: /* movlps */
2913 case 0x113: /* movlpd */
2915 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2916 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2921 case 0x017: /* movhps */
2922 case 0x117: /* movhpd */
2924 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2925 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2930 case 0x71: /* shift mm, im */
2933 case 0x171: /* shift xmm, im */
/* shift-by-immediate: materialize the imm8 into mmx_t0/xmm_t0 and
   dispatch through sse_op_table2 */
2936 val = ldub_code(s->pc++);
2938 gen_op_movl_T0_im(val);
2939 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2941 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2942 op1_offset = offsetof(CPUX86State,xmm_t0);
2944 gen_op_movl_T0_im(val);
2945 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2947 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2948 op1_offset = offsetof(CPUX86State,mmx_t0);
2950 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2954 rm = (modrm & 7) | REX_B(s);
2955 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2958 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2960 sse_op2(op2_offset, op1_offset);
2962 case 0x050: /* movmskps */
2963 rm = (modrm & 7) | REX_B(s);
2964 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2965 gen_op_mov_reg_T0(OT_LONG, reg);
2967 case 0x150: /* movmskpd */
2968 rm = (modrm & 7) | REX_B(s);
2969 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2970 gen_op_mov_reg_T0(OT_LONG, reg);
2972 case 0x02a: /* cvtpi2ps */
2973 case 0x12a: /* cvtpi2pd */
2976 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
2977 op2_offset = offsetof(CPUX86State,mmx_t0);
2978 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2981 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2983 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2986 gen_op_cvtpi2ps(op1_offset, op2_offset);
2990 gen_op_cvtpi2pd(op1_offset, op2_offset);
2994 case 0x22a: /* cvtsi2ss */
2995 case 0x32a: /* cvtsi2sd */
2996 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2997 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2998 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2999 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3001 case 0x02c: /* cvttps2pi */
3002 case 0x12c: /* cvttpd2pi */
3003 case 0x02d: /* cvtps2pi */
3004 case 0x12d: /* cvtpd2pi */
3007 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3008 op2_offset = offsetof(CPUX86State,xmm_t0);
3009 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3011 rm = (modrm & 7) | REX_B(s);
3012 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3014 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3017 gen_op_cvttps2pi(op1_offset, op2_offset);
3020 gen_op_cvttpd2pi(op1_offset, op2_offset);
3023 gen_op_cvtps2pi(op1_offset, op2_offset);
3026 gen_op_cvtpd2pi(op1_offset, op2_offset);
3030 case 0x22c: /* cvttss2si */
3031 case 0x32c: /* cvttsd2si */
3032 case 0x22d: /* cvtss2si */
3033 case 0x32d: /* cvtsd2si */
3034 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3036 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3038 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3040 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3041 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3043 op2_offset = offsetof(CPUX86State,xmm_t0);
3045 rm = (modrm & 7) | REX_B(s);
3046 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3048 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3049 (b & 1) * 4](op2_offset);
3050 gen_op_mov_reg_T0(ot, reg);
3052 case 0xc4: /* pinsrw */
3055 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3056 val = ldub_code(s->pc++);
3059 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3062 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3065 case 0xc5: /* pextrw */
3069 val = ldub_code(s->pc++);
3072 rm = (modrm & 7) | REX_B(s);
3073 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3077 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3079 reg = ((modrm >> 3) & 7) | rex_r;
3080 gen_op_mov_reg_T0(OT_LONG, reg);
3082 case 0x1d6: /* movq ea, xmm */
3084 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3085 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3087 rm = (modrm & 7) | REX_B(s);
3088 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3089 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3090 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3093 case 0x2d6: /* movq2dq */
3096 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3097 offsetof(CPUX86State,fpregs[rm].mmx));
3098 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3100 case 0x3d6: /* movdq2q */
3102 rm = (modrm & 7) | REX_B(s);
3103 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3104 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3106 case 0xd7: /* pmovmskb */
3111 rm = (modrm & 7) | REX_B(s);
3112 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3115 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3117 reg = ((modrm >> 3) & 7) | rex_r;
3118 gen_op_mov_reg_T0(OT_LONG, reg);
3124 /* generic MMX or SSE operation */
3127 /* maskmov : we must prepare A0 */
3130 #ifdef TARGET_X86_64
3131 if (s->aflag == 2) {
3132 gen_op_movq_A0_reg(R_EDI);
3136 gen_op_movl_A0_reg(R_EDI);
3138 gen_op_andl_A0_ffff();
3140 gen_add_A0_ds_seg(s);
3142 case 0x70: /* pshufx insn */
3143 case 0xc6: /* pshufx insn */
3144 case 0xc2: /* compare insns */
3151 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3153 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3154 op2_offset = offsetof(CPUX86State,xmm_t0);
3155 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3157 /* specific case for SSE single instructions */
3160 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3161 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3164 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3167 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3170 rm = (modrm & 7) | REX_B(s);
3171 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3174 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3176 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3177 op2_offset = offsetof(CPUX86State,mmx_t0);
3178 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3181 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3185 case 0x0f: /* 3DNow! data insns */
3186 val = ldub_code(s->pc++);
3187 sse_op2 = sse_op_table5[val];
3190 sse_op2(op1_offset, op2_offset);
3192 case 0x70: /* pshufx insn */
3193 case 0xc6: /* pshufx insn */
3194 val = ldub_code(s->pc++);
3195 sse_op3 = (GenOpFunc3 *)sse_op2;
3196 sse_op3(op1_offset, op2_offset, val);
3200 val = ldub_code(s->pc++);
3203 sse_op2 = sse_op_table4[val][b1];
3204 sse_op2(op1_offset, op2_offset);
3207 sse_op2(op1_offset, op2_offset);
/* comiss/comisd set EFLAGS directly */
3210 if (b == 0x2e || b == 0x2f) {
3211 s->cc_op = CC_OP_EFLAGS;
3217 /* convert one instruction. s->is_jmp is set if the translation must
3218 be stopped. Return the next pc value */
3219 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3221 int b, prefixes, aflag, dflag;
3223 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3224 target_ulong next_eip, tval;
3234 #ifdef TARGET_X86_64
3239 s->rip_offset = 0; /* for relative ip address */
3241 b = ldub_code(s->pc);
3243 /* check prefixes */
3244 #ifdef TARGET_X86_64
3248 prefixes |= PREFIX_REPZ;
3251 prefixes |= PREFIX_REPNZ;
3254 prefixes |= PREFIX_LOCK;
3275 prefixes |= PREFIX_DATA;
3278 prefixes |= PREFIX_ADR;
3282 rex_w = (b >> 3) & 1;
3283 rex_r = (b & 0x4) << 1;
3284 s->rex_x = (b & 0x2) << 2;
3285 REX_B(s) = (b & 0x1) << 3;
3286 x86_64_hregs = 1; /* select uniform byte register addressing */
3290 /* 0x66 is ignored if rex.w is set */
3293 if (prefixes & PREFIX_DATA)
3296 if (!(prefixes & PREFIX_ADR))
3303 prefixes |= PREFIX_REPZ;
3306 prefixes |= PREFIX_REPNZ;
3309 prefixes |= PREFIX_LOCK;
3330 prefixes |= PREFIX_DATA;
3333 prefixes |= PREFIX_ADR;
3336 if (prefixes & PREFIX_DATA)
3338 if (prefixes & PREFIX_ADR)
3342 s->prefix = prefixes;
3346 /* lock generation */
3347 if (prefixes & PREFIX_LOCK)
3350 /* now check op code */
3354 /**************************/
3355 /* extended op code */
3356 b = ldub_code(s->pc++) | 0x100;
3359 /**************************/
3377 ot = dflag + OT_WORD;
3380 case 0: /* OP Ev, Gv */
3381 modrm = ldub_code(s->pc++);
3382 reg = ((modrm >> 3) & 7) | rex_r;
3383 mod = (modrm >> 6) & 3;
3384 rm = (modrm & 7) | REX_B(s);
3386 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3388 } else if (op == OP_XORL && rm == reg) {
3390 /* xor reg, reg optimisation */
3392 s->cc_op = CC_OP_LOGICB + ot;
3393 gen_op_mov_reg_T0(ot, reg);
3394 gen_op_update1_cc();
3399 gen_op_mov_TN_reg(ot, 1, reg);
3400 gen_op(s, op, ot, opreg);
3402 case 1: /* OP Gv, Ev */
3403 modrm = ldub_code(s->pc++);
3404 mod = (modrm >> 6) & 3;
3405 reg = ((modrm >> 3) & 7) | rex_r;
3406 rm = (modrm & 7) | REX_B(s);
3408 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3409 gen_op_ld_T1_A0(ot + s->mem_index);
3410 } else if (op == OP_XORL && rm == reg) {
3413 gen_op_mov_TN_reg(ot, 1, rm);
3415 gen_op(s, op, ot, reg);
3417 case 2: /* OP A, Iv */
3418 val = insn_get(s, ot);
3419 gen_op_movl_T1_im(val);
3420 gen_op(s, op, ot, OR_EAX);
3426 case 0x80: /* GRP1 */
3436 ot = dflag + OT_WORD;
3438 modrm = ldub_code(s->pc++);
3439 mod = (modrm >> 6) & 3;
3440 rm = (modrm & 7) | REX_B(s);
3441 op = (modrm >> 3) & 7;
3447 s->rip_offset = insn_const_size(ot);
3448 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3459 val = insn_get(s, ot);
3462 val = (int8_t)insn_get(s, OT_BYTE);
3465 gen_op_movl_T1_im(val);
3466 gen_op(s, op, ot, opreg);
3470 /**************************/
3471 /* inc, dec, and other misc arith */
3472 case 0x40 ... 0x47: /* inc Gv */
3473 ot = dflag ? OT_LONG : OT_WORD;
3474 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3476 case 0x48 ... 0x4f: /* dec Gv */
3477 ot = dflag ? OT_LONG : OT_WORD;
3478 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3480 case 0xf6: /* GRP3 */
3485 ot = dflag + OT_WORD;
3487 modrm = ldub_code(s->pc++);
3488 mod = (modrm >> 6) & 3;
3489 rm = (modrm & 7) | REX_B(s);
3490 op = (modrm >> 3) & 7;
3493 s->rip_offset = insn_const_size(ot);
3494 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3495 gen_op_ld_T0_A0(ot + s->mem_index);
3497 gen_op_mov_TN_reg(ot, 0, rm);
3502 val = insn_get(s, ot);
3503 gen_op_movl_T1_im(val);
3504 gen_op_testl_T0_T1_cc();
3505 s->cc_op = CC_OP_LOGICB + ot;
3510 gen_op_st_T0_A0(ot + s->mem_index);
3512 gen_op_mov_reg_T0(ot, rm);
3518 gen_op_st_T0_A0(ot + s->mem_index);
3520 gen_op_mov_reg_T0(ot, rm);
3522 gen_op_update_neg_cc();
3523 s->cc_op = CC_OP_SUBB + ot;
3528 gen_op_mulb_AL_T0();
3529 s->cc_op = CC_OP_MULB;
3532 gen_op_mulw_AX_T0();
3533 s->cc_op = CC_OP_MULW;
3537 gen_op_mull_EAX_T0();
3538 s->cc_op = CC_OP_MULL;
3540 #ifdef TARGET_X86_64
3542 gen_op_mulq_EAX_T0();
3543 s->cc_op = CC_OP_MULQ;
3551 gen_op_imulb_AL_T0();
3552 s->cc_op = CC_OP_MULB;
3555 gen_op_imulw_AX_T0();
3556 s->cc_op = CC_OP_MULW;
3560 gen_op_imull_EAX_T0();
3561 s->cc_op = CC_OP_MULL;
3563 #ifdef TARGET_X86_64
3565 gen_op_imulq_EAX_T0();
3566 s->cc_op = CC_OP_MULQ;
3574 gen_jmp_im(pc_start - s->cs_base);
3575 gen_op_divb_AL_T0();
3578 gen_jmp_im(pc_start - s->cs_base);
3579 gen_op_divw_AX_T0();
3583 gen_jmp_im(pc_start - s->cs_base);
3585 /* XXX: this is just a test */
3586 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3588 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3591 #ifdef TARGET_X86_64
3593 gen_jmp_im(pc_start - s->cs_base);
3594 gen_op_divq_EAX_T0();
3602 gen_jmp_im(pc_start - s->cs_base);
3603 gen_op_idivb_AL_T0();
3606 gen_jmp_im(pc_start - s->cs_base);
3607 gen_op_idivw_AX_T0();
3611 gen_jmp_im(pc_start - s->cs_base);
3612 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3614 #ifdef TARGET_X86_64
3616 gen_jmp_im(pc_start - s->cs_base);
3617 gen_op_idivq_EAX_T0();
3627 case 0xfe: /* GRP4 */
3628 case 0xff: /* GRP5 */
3632 ot = dflag + OT_WORD;
3634 modrm = ldub_code(s->pc++);
3635 mod = (modrm >> 6) & 3;
3636 rm = (modrm & 7) | REX_B(s);
3637 op = (modrm >> 3) & 7;
3638 if (op >= 2 && b == 0xfe) {
3642 if (op == 2 || op == 4) {
3643 /* operand size for jumps is 64 bit */
3645 } else if (op == 3 || op == 5) {
3646 /* for far calls (lcall), the operand is 16 or 32 bit, even
3648 ot = dflag ? OT_LONG : OT_WORD;
3649 } else if (op == 6) {
3650 /* default push size is 64 bit */
3651 ot = dflag ? OT_QUAD : OT_WORD;
3655 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3656 if (op >= 2 && op != 3 && op != 5)
3657 gen_op_ld_T0_A0(ot + s->mem_index);
3659 gen_op_mov_TN_reg(ot, 0, rm);
3663 case 0: /* inc Ev */
3668 gen_inc(s, ot, opreg, 1);
3670 case 1: /* dec Ev */
3675 gen_inc(s, ot, opreg, -1);
3677 case 2: /* call Ev */
3678 /* XXX: optimize if memory (no 'and' is necessary) */
3680 gen_op_andl_T0_ffff();
3681 next_eip = s->pc - s->cs_base;
3682 gen_movtl_T1_im(next_eip);
3687 case 3: /* lcall Ev */
3688 gen_op_ld_T1_A0(ot + s->mem_index);
3689 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3690 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3692 if (s->pe && !s->vm86) {
3693 if (s->cc_op != CC_OP_DYNAMIC)
3694 gen_op_set_cc_op(s->cc_op);
3695 gen_jmp_im(pc_start - s->cs_base);
3696 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3698 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3702 case 4: /* jmp Ev */
3704 gen_op_andl_T0_ffff();
3708 case 5: /* ljmp Ev */
3709 gen_op_ld_T1_A0(ot + s->mem_index);
3710 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3711 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3713 if (s->pe && !s->vm86) {
3714 if (s->cc_op != CC_OP_DYNAMIC)
3715 gen_op_set_cc_op(s->cc_op);
3716 gen_jmp_im(pc_start - s->cs_base);
3717 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3719 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3720 gen_op_movl_T0_T1();
3725 case 6: /* push Ev */
3733 case 0x84: /* test Ev, Gv */
3738 ot = dflag + OT_WORD;
3740 modrm = ldub_code(s->pc++);
3741 mod = (modrm >> 6) & 3;
3742 rm = (modrm & 7) | REX_B(s);
3743 reg = ((modrm >> 3) & 7) | rex_r;
3745 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3746 gen_op_mov_TN_reg(ot, 1, reg);
3747 gen_op_testl_T0_T1_cc();
3748 s->cc_op = CC_OP_LOGICB + ot;
3751 case 0xa8: /* test eAX, Iv */
3756 ot = dflag + OT_WORD;
3757 val = insn_get(s, ot);
3759 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3760 gen_op_movl_T1_im(val);
3761 gen_op_testl_T0_T1_cc();
3762 s->cc_op = CC_OP_LOGICB + ot;
3765 case 0x98: /* CWDE/CBW */
3766 #ifdef TARGET_X86_64
3768 gen_op_movslq_RAX_EAX();
3772 gen_op_movswl_EAX_AX();
3774 gen_op_movsbw_AX_AL();
3776 case 0x99: /* CDQ/CWD */
3777 #ifdef TARGET_X86_64
3779 gen_op_movsqo_RDX_RAX();
3783 gen_op_movslq_EDX_EAX();
3785 gen_op_movswl_DX_AX();
3787 case 0x1af: /* imul Gv, Ev */
3788 case 0x69: /* imul Gv, Ev, I */
3790 ot = dflag + OT_WORD;
3791 modrm = ldub_code(s->pc++);
3792 reg = ((modrm >> 3) & 7) | rex_r;
3794 s->rip_offset = insn_const_size(ot);
3797 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3799 val = insn_get(s, ot);
3800 gen_op_movl_T1_im(val);
3801 } else if (b == 0x6b) {
3802 val = (int8_t)insn_get(s, OT_BYTE);
3803 gen_op_movl_T1_im(val);
3805 gen_op_mov_TN_reg(ot, 1, reg);
3808 #ifdef TARGET_X86_64
3809 if (ot == OT_QUAD) {
3810 gen_op_imulq_T0_T1();
3813 if (ot == OT_LONG) {
3814 gen_op_imull_T0_T1();
3816 gen_op_imulw_T0_T1();
3818 gen_op_mov_reg_T0(ot, reg);
3819 s->cc_op = CC_OP_MULB + ot;
3822 case 0x1c1: /* xadd Ev, Gv */
3826 ot = dflag + OT_WORD;
3827 modrm = ldub_code(s->pc++);
3828 reg = ((modrm >> 3) & 7) | rex_r;
3829 mod = (modrm >> 6) & 3;
3831 rm = (modrm & 7) | REX_B(s);
3832 gen_op_mov_TN_reg(ot, 0, reg);
3833 gen_op_mov_TN_reg(ot, 1, rm);
3834 gen_op_addl_T0_T1();
3835 gen_op_mov_reg_T1(ot, reg);
3836 gen_op_mov_reg_T0(ot, rm);
3838 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3839 gen_op_mov_TN_reg(ot, 0, reg);
3840 gen_op_ld_T1_A0(ot + s->mem_index);
3841 gen_op_addl_T0_T1();
3842 gen_op_st_T0_A0(ot + s->mem_index);
3843 gen_op_mov_reg_T1(ot, reg);
3845 gen_op_update2_cc();
3846 s->cc_op = CC_OP_ADDB + ot;
3849 case 0x1b1: /* cmpxchg Ev, Gv */
3853 ot = dflag + OT_WORD;
3854 modrm = ldub_code(s->pc++);
3855 reg = ((modrm >> 3) & 7) | rex_r;
3856 mod = (modrm >> 6) & 3;
3857 gen_op_mov_TN_reg(ot, 1, reg);
3859 rm = (modrm & 7) | REX_B(s);
3860 gen_op_mov_TN_reg(ot, 0, rm);
3861 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3862 gen_op_mov_reg_T0(ot, rm);
3864 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3865 gen_op_ld_T0_A0(ot + s->mem_index);
3866 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3868 s->cc_op = CC_OP_SUBB + ot;
3870 case 0x1c7: /* cmpxchg8b */
3871 modrm = ldub_code(s->pc++);
3872 mod = (modrm >> 6) & 3;
3873 if ((mod == 3) || ((modrm & 0x38) != 0x8))
3875 gen_jmp_im(pc_start - s->cs_base);
3876 if (s->cc_op != CC_OP_DYNAMIC)
3877 gen_op_set_cc_op(s->cc_op);
3878 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3880 s->cc_op = CC_OP_EFLAGS;
3883 /**************************/
3885 case 0x50 ... 0x57: /* push */
3886 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3889 case 0x58 ... 0x5f: /* pop */
3891 ot = dflag ? OT_QUAD : OT_WORD;
3893 ot = dflag + OT_WORD;
3896 /* NOTE: order is important for pop %sp */
3898 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3900 case 0x60: /* pusha */
3905 case 0x61: /* popa */
3910 case 0x68: /* push Iv */
3913 ot = dflag ? OT_QUAD : OT_WORD;
3915 ot = dflag + OT_WORD;
3918 val = insn_get(s, ot);
3920 val = (int8_t)insn_get(s, OT_BYTE);
3921 gen_op_movl_T0_im(val);
3924 case 0x8f: /* pop Ev */
3926 ot = dflag ? OT_QUAD : OT_WORD;
3928 ot = dflag + OT_WORD;
3930 modrm = ldub_code(s->pc++);
3931 mod = (modrm >> 6) & 3;
3934 /* NOTE: order is important for pop %sp */
3936 rm = (modrm & 7) | REX_B(s);
3937 gen_op_mov_reg_T0(ot, rm);
3939 /* NOTE: order is important too for MMU exceptions */
3940 s->popl_esp_hack = 1 << ot;
3941 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3942 s->popl_esp_hack = 0;
3946 case 0xc8: /* enter */
3949 val = lduw_code(s->pc);
3951 level = ldub_code(s->pc++);
3952 gen_enter(s, val, level);
3955 case 0xc9: /* leave */
3956 /* XXX: exception not precise (ESP is updated before potential exception) */
3958 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
3959 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3960 } else if (s->ss32) {
3961 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3962 gen_op_mov_reg_T0(OT_LONG, R_ESP);
3964 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
3965 gen_op_mov_reg_T0(OT_WORD, R_ESP);
3969 ot = dflag ? OT_QUAD : OT_WORD;
3971 ot = dflag + OT_WORD;
3973 gen_op_mov_reg_T0(ot, R_EBP);
3976 case 0x06: /* push es */
3977 case 0x0e: /* push cs */
3978 case 0x16: /* push ss */
3979 case 0x1e: /* push ds */
3982 gen_op_movl_T0_seg(b >> 3);
3985 case 0x1a0: /* push fs */
3986 case 0x1a8: /* push gs */
3987 gen_op_movl_T0_seg((b >> 3) & 7);
3990 case 0x07: /* pop es */
3991 case 0x17: /* pop ss */
3992 case 0x1f: /* pop ds */
3997 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4000 /* if reg == SS, inhibit interrupts/trace. */
4001 /* If several instructions disable interrupts, only the
4003 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4004 gen_op_set_inhibit_irq();
4008 gen_jmp_im(s->pc - s->cs_base);
4012 case 0x1a1: /* pop fs */
4013 case 0x1a9: /* pop gs */
4015 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4018 gen_jmp_im(s->pc - s->cs_base);
4023 /**************************/
4026 case 0x89: /* mov Gv, Ev */
4030 ot = dflag + OT_WORD;
4031 modrm = ldub_code(s->pc++);
4032 reg = ((modrm >> 3) & 7) | rex_r;
4034 /* generate a generic store */
4035 gen_ldst_modrm(s, modrm, ot, reg, 1);
4038 case 0xc7: /* mov Ev, Iv */
4042 ot = dflag + OT_WORD;
4043 modrm = ldub_code(s->pc++);
4044 mod = (modrm >> 6) & 3;
4046 s->rip_offset = insn_const_size(ot);
4047 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4049 val = insn_get(s, ot);
4050 gen_op_movl_T0_im(val);
4052 gen_op_st_T0_A0(ot + s->mem_index);
4054 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4057 case 0x8b: /* mov Ev, Gv */
4061 ot = OT_WORD + dflag;
4062 modrm = ldub_code(s->pc++);
4063 reg = ((modrm >> 3) & 7) | rex_r;
4065 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4066 gen_op_mov_reg_T0(ot, reg);
4068 case 0x8e: /* mov seg, Gv */
4069 modrm = ldub_code(s->pc++);
4070 reg = (modrm >> 3) & 7;
4071 if (reg >= 6 || reg == R_CS)
4073 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4074 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4076 /* if reg == SS, inhibit interrupts/trace */
4077 /* If several instructions disable interrupts, only the
4079 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4080 gen_op_set_inhibit_irq();
4084 gen_jmp_im(s->pc - s->cs_base);
4088 case 0x8c: /* mov Gv, seg */
4089 modrm = ldub_code(s->pc++);
4090 reg = (modrm >> 3) & 7;
4091 mod = (modrm >> 6) & 3;
4094 gen_op_movl_T0_seg(reg);
4096 ot = OT_WORD + dflag;
4099 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4102 case 0x1b6: /* movzbS Gv, Eb */
4103 case 0x1b7: /* movzwS Gv, Eb */
4104 case 0x1be: /* movsbS Gv, Eb */
4105 case 0x1bf: /* movswS Gv, Eb */
4108 /* d_ot is the size of destination */
4109 d_ot = dflag + OT_WORD;
4110 /* ot is the size of source */
4111 ot = (b & 1) + OT_BYTE;
4112 modrm = ldub_code(s->pc++);
4113 reg = ((modrm >> 3) & 7) | rex_r;
4114 mod = (modrm >> 6) & 3;
4115 rm = (modrm & 7) | REX_B(s);
4118 gen_op_mov_TN_reg(ot, 0, rm);
4119 switch(ot | (b & 8)) {
4121 gen_op_movzbl_T0_T0();
4124 gen_op_movsbl_T0_T0();
4127 gen_op_movzwl_T0_T0();
4131 gen_op_movswl_T0_T0();
4134 gen_op_mov_reg_T0(d_ot, reg);
4136 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4138 gen_op_lds_T0_A0(ot + s->mem_index);
4140 gen_op_ldu_T0_A0(ot + s->mem_index);
4142 gen_op_mov_reg_T0(d_ot, reg);
4147 case 0x8d: /* lea */
4148 ot = dflag + OT_WORD;
4149 modrm = ldub_code(s->pc++);
4150 mod = (modrm >> 6) & 3;
4153 reg = ((modrm >> 3) & 7) | rex_r;
4154 /* we must ensure that no segment is added */
4158 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4160 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4163 case 0xa0: /* mov EAX, Ov */
4165 case 0xa2: /* mov Ov, EAX */
4168 target_ulong offset_addr;
4173 ot = dflag + OT_WORD;
4174 #ifdef TARGET_X86_64
4175 if (s->aflag == 2) {
4176 offset_addr = ldq_code(s->pc);
4178 gen_op_movq_A0_im(offset_addr);
4183 offset_addr = insn_get(s, OT_LONG);
4185 offset_addr = insn_get(s, OT_WORD);
4187 gen_op_movl_A0_im(offset_addr);
4189 gen_add_A0_ds_seg(s);
4191 gen_op_ld_T0_A0(ot + s->mem_index);
4192 gen_op_mov_reg_T0(ot, R_EAX);
4194 gen_op_mov_TN_reg(ot, 0, R_EAX);
4195 gen_op_st_T0_A0(ot + s->mem_index);
4199 case 0xd7: /* xlat */
4200 #ifdef TARGET_X86_64
4201 if (s->aflag == 2) {
4202 gen_op_movq_A0_reg(R_EBX);
4203 gen_op_addq_A0_AL();
4207 gen_op_movl_A0_reg(R_EBX);
4208 gen_op_addl_A0_AL();
4210 gen_op_andl_A0_ffff();
4212 gen_add_A0_ds_seg(s);
4213 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4214 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4216 case 0xb0 ... 0xb7: /* mov R, Ib */
4217 val = insn_get(s, OT_BYTE);
4218 gen_op_movl_T0_im(val);
4219 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4221 case 0xb8 ... 0xbf: /* mov R, Iv */
4222 #ifdef TARGET_X86_64
4226 tmp = ldq_code(s->pc);
4228 reg = (b & 7) | REX_B(s);
4229 gen_movtl_T0_im(tmp);
4230 gen_op_mov_reg_T0(OT_QUAD, reg);
4234 ot = dflag ? OT_LONG : OT_WORD;
4235 val = insn_get(s, ot);
4236 reg = (b & 7) | REX_B(s);
4237 gen_op_movl_T0_im(val);
4238 gen_op_mov_reg_T0(ot, reg);
4242 case 0x91 ... 0x97: /* xchg R, EAX */
4243 ot = dflag + OT_WORD;
4244 reg = (b & 7) | REX_B(s);
4248 case 0x87: /* xchg Ev, Gv */
4252 ot = dflag + OT_WORD;
4253 modrm = ldub_code(s->pc++);
4254 reg = ((modrm >> 3) & 7) | rex_r;
4255 mod = (modrm >> 6) & 3;
4257 rm = (modrm & 7) | REX_B(s);
4259 gen_op_mov_TN_reg(ot, 0, reg);
4260 gen_op_mov_TN_reg(ot, 1, rm);
4261 gen_op_mov_reg_T0(ot, rm);
4262 gen_op_mov_reg_T1(ot, reg);
4264 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4265 gen_op_mov_TN_reg(ot, 0, reg);
4266 /* for xchg, lock is implicit */
4267 if (!(prefixes & PREFIX_LOCK))
4269 gen_op_ld_T1_A0(ot + s->mem_index);
4270 gen_op_st_T0_A0(ot + s->mem_index);
4271 if (!(prefixes & PREFIX_LOCK))
4273 gen_op_mov_reg_T1(ot, reg);
4276 case 0xc4: /* les Gv */
4281 case 0xc5: /* lds Gv */
4286 case 0x1b2: /* lss Gv */
4289 case 0x1b4: /* lfs Gv */
4292 case 0x1b5: /* lgs Gv */
4295 ot = dflag ? OT_LONG : OT_WORD;
4296 modrm = ldub_code(s->pc++);
4297 reg = ((modrm >> 3) & 7) | rex_r;
4298 mod = (modrm >> 6) & 3;
4301 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4302 gen_op_ld_T1_A0(ot + s->mem_index);
4303 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4304 /* load the segment first to handle exceptions properly */
4305 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4306 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4307 /* then put the data */
4308 gen_op_mov_reg_T1(ot, reg);
4310 gen_jmp_im(s->pc - s->cs_base);
4315 /************************/
4326 ot = dflag + OT_WORD;
4328 modrm = ldub_code(s->pc++);
4329 mod = (modrm >> 6) & 3;
4330 op = (modrm >> 3) & 7;
4336 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4339 opreg = (modrm & 7) | REX_B(s);
4344 gen_shift(s, op, ot, opreg, OR_ECX);
4347 shift = ldub_code(s->pc++);
4349 gen_shifti(s, op, ot, opreg, shift);
4364 case 0x1a4: /* shld imm */
4368 case 0x1a5: /* shld cl */
4372 case 0x1ac: /* shrd imm */
4376 case 0x1ad: /* shrd cl */
4380 ot = dflag + OT_WORD;
4381 modrm = ldub_code(s->pc++);
4382 mod = (modrm >> 6) & 3;
4383 rm = (modrm & 7) | REX_B(s);
4384 reg = ((modrm >> 3) & 7) | rex_r;
4387 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4388 gen_op_ld_T0_A0(ot + s->mem_index);
4390 gen_op_mov_TN_reg(ot, 0, rm);
4392 gen_op_mov_TN_reg(ot, 1, reg);
4395 val = ldub_code(s->pc++);
4402 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4404 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4405 if (op == 0 && ot != OT_WORD)
4406 s->cc_op = CC_OP_SHLB + ot;
4408 s->cc_op = CC_OP_SARB + ot;
4411 if (s->cc_op != CC_OP_DYNAMIC)
4412 gen_op_set_cc_op(s->cc_op);
4414 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4416 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4417 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4420 gen_op_mov_reg_T0(ot, rm);
4424 /************************/
4427 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4428 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4429 /* XXX: what to do if illegal op ? */
4430 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4433 modrm = ldub_code(s->pc++);
4434 mod = (modrm >> 6) & 3;
4436 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4439 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4441 case 0x00 ... 0x07: /* fxxxs */
4442 case 0x10 ... 0x17: /* fixxxl */
4443 case 0x20 ... 0x27: /* fxxxl */
4444 case 0x30 ... 0x37: /* fixxx */
4451 gen_op_flds_FT0_A0();
4454 gen_op_fildl_FT0_A0();
4457 gen_op_fldl_FT0_A0();
4461 gen_op_fild_FT0_A0();
4465 gen_op_fp_arith_ST0_FT0[op1]();
4467 /* fcomp needs pop */
4472 case 0x08: /* flds */
4473 case 0x0a: /* fsts */
4474 case 0x0b: /* fstps */
4475 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4476 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4477 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4482 gen_op_flds_ST0_A0();
4485 gen_op_fildl_ST0_A0();
4488 gen_op_fldl_ST0_A0();
4492 gen_op_fild_ST0_A0();
4499 gen_op_fisttl_ST0_A0();
4502 gen_op_fisttll_ST0_A0();
4506 gen_op_fistt_ST0_A0();
4513 gen_op_fsts_ST0_A0();
4516 gen_op_fistl_ST0_A0();
4519 gen_op_fstl_ST0_A0();
4523 gen_op_fist_ST0_A0();
4531 case 0x0c: /* fldenv mem */
4532 gen_op_fldenv_A0(s->dflag);
4534 case 0x0d: /* fldcw mem */
4537 case 0x0e: /* fnstenv mem */
4538 gen_op_fnstenv_A0(s->dflag);
4540 case 0x0f: /* fnstcw mem */
4543 case 0x1d: /* fldt mem */
4544 gen_op_fldt_ST0_A0();
4546 case 0x1f: /* fstpt mem */
4547 gen_op_fstt_ST0_A0();
4550 case 0x2c: /* frstor mem */
4551 gen_op_frstor_A0(s->dflag);
4553 case 0x2e: /* fnsave mem */
4554 gen_op_fnsave_A0(s->dflag);
4556 case 0x2f: /* fnstsw mem */
4559 case 0x3c: /* fbld */
4560 gen_op_fbld_ST0_A0();
4562 case 0x3e: /* fbstp */
4563 gen_op_fbst_ST0_A0();
4566 case 0x3d: /* fildll */
4567 gen_op_fildll_ST0_A0();
4569 case 0x3f: /* fistpll */
4570 gen_op_fistll_ST0_A0();
4577 /* register float ops */
4581 case 0x08: /* fld sti */
4583 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4585 case 0x09: /* fxchg sti */
4586 case 0x29: /* fxchg4 sti, undocumented op */
4587 case 0x39: /* fxchg7 sti, undocumented op */
4588 gen_op_fxchg_ST0_STN(opreg);
4590 case 0x0a: /* grp d9/2 */
4593 /* check exceptions (FreeBSD FPU probe) */
4594 if (s->cc_op != CC_OP_DYNAMIC)
4595 gen_op_set_cc_op(s->cc_op);
4596 gen_jmp_im(pc_start - s->cs_base);
4603 case 0x0c: /* grp d9/4 */
4613 gen_op_fcom_ST0_FT0();
4622 case 0x0d: /* grp d9/5 */
4631 gen_op_fldl2t_ST0();
4635 gen_op_fldl2e_ST0();
4643 gen_op_fldlg2_ST0();
4647 gen_op_fldln2_ST0();
4658 case 0x0e: /* grp d9/6 */
4669 case 3: /* fpatan */
4672 case 4: /* fxtract */
4675 case 5: /* fprem1 */
4678 case 6: /* fdecstp */
4682 case 7: /* fincstp */
4687 case 0x0f: /* grp d9/7 */
4692 case 1: /* fyl2xp1 */
4698 case 3: /* fsincos */
4701 case 5: /* fscale */
4704 case 4: /* frndint */
4716 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4717 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4718 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4724 gen_op_fp_arith_STN_ST0[op1](opreg);
4728 gen_op_fmov_FT0_STN(opreg);
4729 gen_op_fp_arith_ST0_FT0[op1]();
4733 case 0x02: /* fcom */
4734 case 0x22: /* fcom2, undocumented op */
4735 gen_op_fmov_FT0_STN(opreg);
4736 gen_op_fcom_ST0_FT0();
4738 case 0x03: /* fcomp */
4739 case 0x23: /* fcomp3, undocumented op */
4740 case 0x32: /* fcomp5, undocumented op */
4741 gen_op_fmov_FT0_STN(opreg);
4742 gen_op_fcom_ST0_FT0();
4745 case 0x15: /* da/5 */
4747 case 1: /* fucompp */
4748 gen_op_fmov_FT0_STN(1);
4749 gen_op_fucom_ST0_FT0();
4759 case 0: /* feni (287 only, just do nop here) */
4761 case 1: /* fdisi (287 only, just do nop here) */
4766 case 3: /* fninit */
4769 case 4: /* fsetpm (287 only, just do nop here) */
4775 case 0x1d: /* fucomi */
4776 if (s->cc_op != CC_OP_DYNAMIC)
4777 gen_op_set_cc_op(s->cc_op);
4778 gen_op_fmov_FT0_STN(opreg);
4779 gen_op_fucomi_ST0_FT0();
4780 s->cc_op = CC_OP_EFLAGS;
4782 case 0x1e: /* fcomi */
4783 if (s->cc_op != CC_OP_DYNAMIC)
4784 gen_op_set_cc_op(s->cc_op);
4785 gen_op_fmov_FT0_STN(opreg);
4786 gen_op_fcomi_ST0_FT0();
4787 s->cc_op = CC_OP_EFLAGS;
4789 case 0x28: /* ffree sti */
4790 gen_op_ffree_STN(opreg);
4792 case 0x2a: /* fst sti */
4793 gen_op_fmov_STN_ST0(opreg);
4795 case 0x2b: /* fstp sti */
4796 case 0x0b: /* fstp1 sti, undocumented op */
4797 case 0x3a: /* fstp8 sti, undocumented op */
4798 case 0x3b: /* fstp9 sti, undocumented op */
4799 gen_op_fmov_STN_ST0(opreg);
4802 case 0x2c: /* fucom st(i) */
4803 gen_op_fmov_FT0_STN(opreg);
4804 gen_op_fucom_ST0_FT0();
4806 case 0x2d: /* fucomp st(i) */
4807 gen_op_fmov_FT0_STN(opreg);
4808 gen_op_fucom_ST0_FT0();
4811 case 0x33: /* de/3 */
4813 case 1: /* fcompp */
4814 gen_op_fmov_FT0_STN(1);
4815 gen_op_fcom_ST0_FT0();
4823 case 0x38: /* ffreep sti, undocumented op */
4824 gen_op_ffree_STN(opreg);
4827 case 0x3c: /* df/4 */
4830 gen_op_fnstsw_EAX();
4836 case 0x3d: /* fucomip */
4837 if (s->cc_op != CC_OP_DYNAMIC)
4838 gen_op_set_cc_op(s->cc_op);
4839 gen_op_fmov_FT0_STN(opreg);
4840 gen_op_fucomi_ST0_FT0();
4842 s->cc_op = CC_OP_EFLAGS;
4844 case 0x3e: /* fcomip */
4845 if (s->cc_op != CC_OP_DYNAMIC)
4846 gen_op_set_cc_op(s->cc_op);
4847 gen_op_fmov_FT0_STN(opreg);
4848 gen_op_fcomi_ST0_FT0();
4850 s->cc_op = CC_OP_EFLAGS;
4852 case 0x10 ... 0x13: /* fcmovxx */
4856 const static uint8_t fcmov_cc[8] = {
4862 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4864 gen_op_fcmov_ST0_STN_T0(opreg);
4872 /************************/
4875 case 0xa4: /* movsS */
4880 ot = dflag + OT_WORD;
4882 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4883 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4889 case 0xaa: /* stosS */
4894 ot = dflag + OT_WORD;
4896 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4897 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4902 case 0xac: /* lodsS */
4907 ot = dflag + OT_WORD;
4908 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4909 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4914 case 0xae: /* scasS */
4919 ot = dflag + OT_WORD;
4920 if (prefixes & PREFIX_REPNZ) {
4921 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4922 } else if (prefixes & PREFIX_REPZ) {
4923 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4926 s->cc_op = CC_OP_SUBB + ot;
4930 case 0xa6: /* cmpsS */
4935 ot = dflag + OT_WORD;
4936 if (prefixes & PREFIX_REPNZ) {
4937 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4938 } else if (prefixes & PREFIX_REPZ) {
4939 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4942 s->cc_op = CC_OP_SUBB + ot;
4945 case 0x6c: /* insS */
4950 ot = dflag ? OT_LONG : OT_WORD;
4951 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4952 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4953 gen_op_andl_T0_ffff();
4954 if (gen_svm_check_io(s, pc_start,
4955 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4956 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4958 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4959 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4964 case 0x6e: /* outsS */
4969 ot = dflag ? OT_LONG : OT_WORD;
4970 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4971 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4972 gen_op_andl_T0_ffff();
4973 if (gen_svm_check_io(s, pc_start,
4974 (1 << (4+ot)) | svm_is_rep(prefixes) |
4975 4 | (1 << (7+s->aflag))))
4977 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4978 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4984 /************************/
4992 ot = dflag ? OT_LONG : OT_WORD;
4993 val = ldub_code(s->pc++);
4994 gen_op_movl_T0_im(val);
4995 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4996 if (gen_svm_check_io(s, pc_start,
4997 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5001 gen_op_mov_reg_T1(ot, R_EAX);
5008 ot = dflag ? OT_LONG : OT_WORD;
5009 val = ldub_code(s->pc++);
5010 gen_op_movl_T0_im(val);
5011 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5012 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5015 gen_op_mov_TN_reg(ot, 1, R_EAX);
5023 ot = dflag ? OT_LONG : OT_WORD;
5024 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5025 gen_op_andl_T0_ffff();
5026 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5027 if (gen_svm_check_io(s, pc_start,
5028 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5032 gen_op_mov_reg_T1(ot, R_EAX);
5039 ot = dflag ? OT_LONG : OT_WORD;
5040 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5041 gen_op_andl_T0_ffff();
5042 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5043 if (gen_svm_check_io(s, pc_start,
5044 svm_is_rep(prefixes) | (1 << (4+ot))))
5046 gen_op_mov_TN_reg(ot, 1, R_EAX);
5050 /************************/
5052 case 0xc2: /* ret im */
5053 val = ldsw_code(s->pc);
5056 if (CODE64(s) && s->dflag)
5058 gen_stack_update(s, val + (2 << s->dflag));
5060 gen_op_andl_T0_ffff();
5064 case 0xc3: /* ret */
5068 gen_op_andl_T0_ffff();
5072 case 0xca: /* lret im */
5073 val = ldsw_code(s->pc);
5076 if (s->pe && !s->vm86) {
5077 if (s->cc_op != CC_OP_DYNAMIC)
5078 gen_op_set_cc_op(s->cc_op);
5079 gen_jmp_im(pc_start - s->cs_base);
5080 gen_op_lret_protected(s->dflag, val);
5084 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5086 gen_op_andl_T0_ffff();
5087 /* NOTE: keeping EIP updated is not a problem in case of
5091 gen_op_addl_A0_im(2 << s->dflag);
5092 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5093 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5094 /* add stack offset */
5095 gen_stack_update(s, val + (4 << s->dflag));
5099 case 0xcb: /* lret */
5102 case 0xcf: /* iret */
5103 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5107 gen_op_iret_real(s->dflag);
5108 s->cc_op = CC_OP_EFLAGS;
5109 } else if (s->vm86) {
5111 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5113 gen_op_iret_real(s->dflag);
5114 s->cc_op = CC_OP_EFLAGS;
5117 if (s->cc_op != CC_OP_DYNAMIC)
5118 gen_op_set_cc_op(s->cc_op);
5119 gen_jmp_im(pc_start - s->cs_base);
5120 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5121 s->cc_op = CC_OP_EFLAGS;
5125 case 0xe8: /* call im */
5128 tval = (int32_t)insn_get(s, OT_LONG);
5130 tval = (int16_t)insn_get(s, OT_WORD);
5131 next_eip = s->pc - s->cs_base;
5135 gen_movtl_T0_im(next_eip);
5140 case 0x9a: /* lcall im */
5142 unsigned int selector, offset;
5146 ot = dflag ? OT_LONG : OT_WORD;
5147 offset = insn_get(s, ot);
5148 selector = insn_get(s, OT_WORD);
5150 gen_op_movl_T0_im(selector);
5151 gen_op_movl_T1_imu(offset);
5154 case 0xe9: /* jmp im */
5156 tval = (int32_t)insn_get(s, OT_LONG);
5158 tval = (int16_t)insn_get(s, OT_WORD);
5159 tval += s->pc - s->cs_base;
5164 case 0xea: /* ljmp im */
5166 unsigned int selector, offset;
5170 ot = dflag ? OT_LONG : OT_WORD;
5171 offset = insn_get(s, ot);
5172 selector = insn_get(s, OT_WORD);
5174 gen_op_movl_T0_im(selector);
5175 gen_op_movl_T1_imu(offset);
5178 case 0xeb: /* jmp Jb */
5179 tval = (int8_t)insn_get(s, OT_BYTE);
5180 tval += s->pc - s->cs_base;
5185 case 0x70 ... 0x7f: /* jcc Jb */
5186 tval = (int8_t)insn_get(s, OT_BYTE);
5188 case 0x180 ... 0x18f: /* jcc Jv */
5190 tval = (int32_t)insn_get(s, OT_LONG);
5192 tval = (int16_t)insn_get(s, OT_WORD);
5195 next_eip = s->pc - s->cs_base;
5199 gen_jcc(s, b, tval, next_eip);
5202 case 0x190 ... 0x19f: /* setcc Gv */
5203 modrm = ldub_code(s->pc++);
5205 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5207 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5208 ot = dflag + OT_WORD;
5209 modrm = ldub_code(s->pc++);
5210 reg = ((modrm >> 3) & 7) | rex_r;
5211 mod = (modrm >> 6) & 3;
5214 gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
5215 gen_op_ld_T1_A0(ot + s->mem_index);
5217 rm = (modrm & 7) | REX_B(s);
5218 gen_op_mov_TN_reg(ot, 1, rm);
5220 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5223 /************************/
5225 case 0x9c: /* pushf */
5226 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5228 if (s->vm86 && s->iopl != 3) {
5229 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5231 if (s->cc_op != CC_OP_DYNAMIC)
5232 gen_op_set_cc_op(s->cc_op);
5233 gen_op_movl_T0_eflags();
5237 case 0x9d: /* popf */
5238 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5240 if (s->vm86 && s->iopl != 3) {
5241 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5246 gen_op_movl_eflags_T0_cpl0();
5248 gen_op_movw_eflags_T0_cpl0();
5251 if (s->cpl <= s->iopl) {
5253 gen_op_movl_eflags_T0_io();
5255 gen_op_movw_eflags_T0_io();
5259 gen_op_movl_eflags_T0();
5261 gen_op_movw_eflags_T0();
5266 s->cc_op = CC_OP_EFLAGS;
5267 /* abort translation because TF flag may change */
5268 gen_jmp_im(s->pc - s->cs_base);
5272 case 0x9e: /* sahf */
5275 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5276 if (s->cc_op != CC_OP_DYNAMIC)
5277 gen_op_set_cc_op(s->cc_op);
5278 gen_op_movb_eflags_T0();
5279 s->cc_op = CC_OP_EFLAGS;
5281 case 0x9f: /* lahf */
5284 if (s->cc_op != CC_OP_DYNAMIC)
5285 gen_op_set_cc_op(s->cc_op);
5286 gen_op_movl_T0_eflags();
5287 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5289 case 0xf5: /* cmc */
5290 if (s->cc_op != CC_OP_DYNAMIC)
5291 gen_op_set_cc_op(s->cc_op);
5293 s->cc_op = CC_OP_EFLAGS;
5295 case 0xf8: /* clc */
5296 if (s->cc_op != CC_OP_DYNAMIC)
5297 gen_op_set_cc_op(s->cc_op);
5299 s->cc_op = CC_OP_EFLAGS;
5301 case 0xf9: /* stc */
5302 if (s->cc_op != CC_OP_DYNAMIC)
5303 gen_op_set_cc_op(s->cc_op);
5305 s->cc_op = CC_OP_EFLAGS;
5307 case 0xfc: /* cld */
5310 case 0xfd: /* std */
5314 /************************/
5315 /* bit operations */
5316 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5317 ot = dflag + OT_WORD;
5318 modrm = ldub_code(s->pc++);
5319 op = (modrm >> 3) & 7;
5320 mod = (modrm >> 6) & 3;
5321 rm = (modrm & 7) | REX_B(s);
5324 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5325 gen_op_ld_T0_A0(ot + s->mem_index);
5327 gen_op_mov_TN_reg(ot, 0, rm);
5330 val = ldub_code(s->pc++);
5331 gen_op_movl_T1_im(val);
5335 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5336 s->cc_op = CC_OP_SARB + ot;
5339 gen_op_st_T0_A0(ot + s->mem_index);
5341 gen_op_mov_reg_T0(ot, rm);
5342 gen_op_update_bt_cc();
5345 case 0x1a3: /* bt Gv, Ev */
5348 case 0x1ab: /* bts */
5351 case 0x1b3: /* btr */
5354 case 0x1bb: /* btc */
5357 ot = dflag + OT_WORD;
5358 modrm = ldub_code(s->pc++);
5359 reg = ((modrm >> 3) & 7) | rex_r;
5360 mod = (modrm >> 6) & 3;
5361 rm = (modrm & 7) | REX_B(s);
5362 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5364 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5365 /* specific case: we need to add a displacement */
5366 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5367 gen_op_ld_T0_A0(ot + s->mem_index);
5369 gen_op_mov_TN_reg(ot, 0, rm);
5371 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5372 s->cc_op = CC_OP_SARB + ot;
5375 gen_op_st_T0_A0(ot + s->mem_index);
5377 gen_op_mov_reg_T0(ot, rm);
5378 gen_op_update_bt_cc();
5381 case 0x1bc: /* bsf */
5382 case 0x1bd: /* bsr */
5383 ot = dflag + OT_WORD;
5384 modrm = ldub_code(s->pc++);
5385 reg = ((modrm >> 3) & 7) | rex_r;
5386 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5387 /* NOTE: in order to handle the 0 case, we must load the
5388 result. It could be optimized with a generated jump */
5389 gen_op_mov_TN_reg(ot, 1, reg);
5390 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5391 gen_op_mov_reg_T1(ot, reg);
5392 s->cc_op = CC_OP_LOGICB + ot;
5394 /************************/
5396 case 0x27: /* daa */
5399 if (s->cc_op != CC_OP_DYNAMIC)
5400 gen_op_set_cc_op(s->cc_op);
5402 s->cc_op = CC_OP_EFLAGS;
5404 case 0x2f: /* das */
5407 if (s->cc_op != CC_OP_DYNAMIC)
5408 gen_op_set_cc_op(s->cc_op);
5410 s->cc_op = CC_OP_EFLAGS;
5412 case 0x37: /* aaa */
5415 if (s->cc_op != CC_OP_DYNAMIC)
5416 gen_op_set_cc_op(s->cc_op);
5418 s->cc_op = CC_OP_EFLAGS;
5420 case 0x3f: /* aas */
5423 if (s->cc_op != CC_OP_DYNAMIC)
5424 gen_op_set_cc_op(s->cc_op);
5426 s->cc_op = CC_OP_EFLAGS;
5428 case 0xd4: /* aam */
5431 val = ldub_code(s->pc++);
5433 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5436 s->cc_op = CC_OP_LOGICB;
5439 case 0xd5: /* aad */
5442 val = ldub_code(s->pc++);
5444 s->cc_op = CC_OP_LOGICB;
5446 /************************/
5448 case 0x90: /* nop */
5449 /* XXX: xchg + rex handling */
5450 /* XXX: correct lock test for all insn */
5451 if (prefixes & PREFIX_LOCK)
5453 if (prefixes & PREFIX_REPZ) {
5454 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5457 case 0x9b: /* fwait */
5458 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5459 (HF_MP_MASK | HF_TS_MASK)) {
5460 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5462 if (s->cc_op != CC_OP_DYNAMIC)
5463 gen_op_set_cc_op(s->cc_op);
5464 gen_jmp_im(pc_start - s->cs_base);
5468 case 0xcc: /* int3 */
5469 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5471 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5473 case 0xcd: /* int N */
5474 val = ldub_code(s->pc++);
5475 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5477 if (s->vm86 && s->iopl != 3) {
5478 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5480 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5483 case 0xce: /* into */
5486 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5488 if (s->cc_op != CC_OP_DYNAMIC)
5489 gen_op_set_cc_op(s->cc_op);
5490 gen_jmp_im(pc_start - s->cs_base);
5491 gen_op_into(s->pc - pc_start);
5493 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5494 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5497 gen_debug(s, pc_start - s->cs_base);
5500 tb_flush(cpu_single_env);
5501 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5504 case 0xfa: /* cli */
5506 if (s->cpl <= s->iopl) {
5509 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5515 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5519 case 0xfb: /* sti */
5521 if (s->cpl <= s->iopl) {
5524 /* interruptions are enabled only the first insn after sti */
5525 /* If several instructions disable interrupts, only the
5527 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5528 gen_op_set_inhibit_irq();
5529 /* give a chance to handle pending irqs */
5530 gen_jmp_im(s->pc - s->cs_base);
5533 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5539 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5543 case 0x62: /* bound */
5546 ot = dflag ? OT_LONG : OT_WORD;
5547 modrm = ldub_code(s->pc++);
5548 reg = (modrm >> 3) & 7;
5549 mod = (modrm >> 6) & 3;
5552 gen_op_mov_TN_reg(ot, 0, reg);
5553 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5554 gen_jmp_im(pc_start - s->cs_base);
5560 case 0x1c8 ... 0x1cf: /* bswap reg */
5561 reg = (b & 7) | REX_B(s);
5562 #ifdef TARGET_X86_64
5564 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5565 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5566 gen_op_mov_reg_T0(OT_QUAD, reg);
5570 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5572 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5573 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5574 tcg_gen_bswap_i32(tmp0, tmp0);
5575 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5576 gen_op_mov_reg_T0(OT_LONG, reg);
5580 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5581 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5582 gen_op_mov_reg_T0(OT_LONG, reg);
5586 case 0xd6: /* salc */
5589 if (s->cc_op != CC_OP_DYNAMIC)
5590 gen_op_set_cc_op(s->cc_op);
5593 case 0xe0: /* loopnz */
5594 case 0xe1: /* loopz */
5595 if (s->cc_op != CC_OP_DYNAMIC)
5596 gen_op_set_cc_op(s->cc_op);
5598 case 0xe2: /* loop */
5599 case 0xe3: /* jecxz */
5603 tval = (int8_t)insn_get(s, OT_BYTE);
5604 next_eip = s->pc - s->cs_base;
5609 l1 = gen_new_label();
5610 l2 = gen_new_label();
5613 gen_op_jz_ecx[s->aflag](l1);
5615 gen_op_dec_ECX[s->aflag]();
5618 gen_op_loop[s->aflag][b](l1);
5621 gen_jmp_im(next_eip);
5622 gen_op_jmp_label(l2);
5629 case 0x130: /* wrmsr */
5630 case 0x132: /* rdmsr */
5632 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5636 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5639 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5646 case 0x131: /* rdtsc */
5647 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5649 gen_jmp_im(pc_start - s->cs_base);
5652 case 0x133: /* rdpmc */
5653 gen_jmp_im(pc_start - s->cs_base);
5656 case 0x134: /* sysenter */
5660 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5662 if (s->cc_op != CC_OP_DYNAMIC) {
5663 gen_op_set_cc_op(s->cc_op);
5664 s->cc_op = CC_OP_DYNAMIC;
5666 gen_jmp_im(pc_start - s->cs_base);
5671 case 0x135: /* sysexit */
5675 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5677 if (s->cc_op != CC_OP_DYNAMIC) {
5678 gen_op_set_cc_op(s->cc_op);
5679 s->cc_op = CC_OP_DYNAMIC;
5681 gen_jmp_im(pc_start - s->cs_base);
5686 #ifdef TARGET_X86_64
5687 case 0x105: /* syscall */
5688 /* XXX: is it usable in real mode ? */
5689 if (s->cc_op != CC_OP_DYNAMIC) {
5690 gen_op_set_cc_op(s->cc_op);
5691 s->cc_op = CC_OP_DYNAMIC;
5693 gen_jmp_im(pc_start - s->cs_base);
5694 gen_op_syscall(s->pc - pc_start);
5697 case 0x107: /* sysret */
5699 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5701 if (s->cc_op != CC_OP_DYNAMIC) {
5702 gen_op_set_cc_op(s->cc_op);
5703 s->cc_op = CC_OP_DYNAMIC;
5705 gen_jmp_im(pc_start - s->cs_base);
5706 gen_op_sysret(s->dflag);
5707 /* condition codes are modified only in long mode */
5709 s->cc_op = CC_OP_EFLAGS;
5714 case 0x1a2: /* cpuid */
5715 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5719 case 0xf4: /* hlt */
5721 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5723 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5725 if (s->cc_op != CC_OP_DYNAMIC)
5726 gen_op_set_cc_op(s->cc_op);
5727 gen_jmp_im(s->pc - s->cs_base);
5733 modrm = ldub_code(s->pc++);
5734 mod = (modrm >> 6) & 3;
5735 op = (modrm >> 3) & 7;
5738 if (!s->pe || s->vm86)
5740 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5742 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5746 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5749 if (!s->pe || s->vm86)
5752 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5754 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5756 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5757 gen_jmp_im(pc_start - s->cs_base);
5762 if (!s->pe || s->vm86)
5764 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5766 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5770 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5773 if (!s->pe || s->vm86)
5776 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5778 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5780 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5781 gen_jmp_im(pc_start - s->cs_base);
5787 if (!s->pe || s->vm86)
5789 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5790 if (s->cc_op != CC_OP_DYNAMIC)
5791 gen_op_set_cc_op(s->cc_op);
5796 s->cc_op = CC_OP_EFLAGS;
5803 modrm = ldub_code(s->pc++);
5804 mod = (modrm >> 6) & 3;
5805 op = (modrm >> 3) & 7;
5811 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5813 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5814 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5815 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5816 gen_add_A0_im(s, 2);
5817 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5819 gen_op_andl_T0_im(0xffffff);
5820 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5825 case 0: /* monitor */
5826 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5829 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5831 gen_jmp_im(pc_start - s->cs_base);
5832 #ifdef TARGET_X86_64
5833 if (s->aflag == 2) {
5834 gen_op_movq_A0_reg(R_EBX);
5835 gen_op_addq_A0_AL();
5839 gen_op_movl_A0_reg(R_EBX);
5840 gen_op_addl_A0_AL();
5842 gen_op_andl_A0_ffff();
5844 gen_add_A0_ds_seg(s);
5848 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5851 if (s->cc_op != CC_OP_DYNAMIC) {
5852 gen_op_set_cc_op(s->cc_op);
5853 s->cc_op = CC_OP_DYNAMIC;
5855 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5857 gen_jmp_im(s->pc - s->cs_base);
5865 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5867 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5868 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5869 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5870 gen_add_A0_im(s, 2);
5871 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5873 gen_op_andl_T0_im(0xffffff);
5874 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5882 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5884 if (s->cc_op != CC_OP_DYNAMIC)
5885 gen_op_set_cc_op(s->cc_op);
5886 gen_jmp_im(s->pc - s->cs_base);
5888 s->cc_op = CC_OP_EFLAGS;
5891 case 1: /* VMMCALL */
5892 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5894 /* FIXME: cause #UD if hflags & SVM */
5897 case 2: /* VMLOAD */
5898 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5902 case 3: /* VMSAVE */
5903 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5908 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5913 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5917 case 6: /* SKINIT */
5918 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5922 case 7: /* INVLPGA */
5923 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5930 } else if (s->cpl != 0) {
5931 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5933 if (gen_svm_check_intercept(s, pc_start,
5934 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5936 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5937 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5938 gen_add_A0_im(s, 2);
5939 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5941 gen_op_andl_T0_im(0xffffff);
5943 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5944 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5946 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5947 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5952 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5954 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5955 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5959 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5961 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5963 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5965 gen_jmp_im(s->pc - s->cs_base);
5969 case 7: /* invlpg */
5971 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5974 #ifdef TARGET_X86_64
5975 if (CODE64(s) && rm == 0) {
5977 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5978 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5979 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5980 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5987 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5989 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5991 gen_jmp_im(s->pc - s->cs_base);
6000 case 0x108: /* invd */
6001 case 0x109: /* wbinvd */
6003 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6005 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6010 case 0x63: /* arpl or movslS (x86_64) */
6011 #ifdef TARGET_X86_64
6014 /* d_ot is the size of destination */
6015 d_ot = dflag + OT_WORD;
6017 modrm = ldub_code(s->pc++);
6018 reg = ((modrm >> 3) & 7) | rex_r;
6019 mod = (modrm >> 6) & 3;
6020 rm = (modrm & 7) | REX_B(s);
6023 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6025 if (d_ot == OT_QUAD)
6026 gen_op_movslq_T0_T0();
6027 gen_op_mov_reg_T0(d_ot, reg);
6029 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6030 if (d_ot == OT_QUAD) {
6031 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6033 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6035 gen_op_mov_reg_T0(d_ot, reg);
6040 if (!s->pe || s->vm86)
6042 ot = dflag ? OT_LONG : OT_WORD;
6043 modrm = ldub_code(s->pc++);
6044 reg = (modrm >> 3) & 7;
6045 mod = (modrm >> 6) & 3;
6048 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6049 gen_op_ld_T0_A0(ot + s->mem_index);
6051 gen_op_mov_TN_reg(ot, 0, rm);
6053 if (s->cc_op != CC_OP_DYNAMIC)
6054 gen_op_set_cc_op(s->cc_op);
6056 s->cc_op = CC_OP_EFLAGS;
6058 gen_op_st_T0_A0(ot + s->mem_index);
6060 gen_op_mov_reg_T0(ot, rm);
6062 gen_op_arpl_update();
6065 case 0x102: /* lar */
6066 case 0x103: /* lsl */
6067 if (!s->pe || s->vm86)
6069 ot = dflag ? OT_LONG : OT_WORD;
6070 modrm = ldub_code(s->pc++);
6071 reg = ((modrm >> 3) & 7) | rex_r;
6072 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6073 gen_op_mov_TN_reg(ot, 1, reg);
6074 if (s->cc_op != CC_OP_DYNAMIC)
6075 gen_op_set_cc_op(s->cc_op);
6080 s->cc_op = CC_OP_EFLAGS;
6081 gen_op_mov_reg_T1(ot, reg);
6084 modrm = ldub_code(s->pc++);
6085 mod = (modrm >> 6) & 3;
6086 op = (modrm >> 3) & 7;
6088 case 0: /* prefetchnta */
6089 case 1: /* prefetchnt0 */
6090 case 2: /* prefetchnt0 */
6091 case 3: /* prefetchnt0 */
6094 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6095 /* nothing more to do */
6097 default: /* nop (multi byte) */
6098 gen_nop_modrm(s, modrm);
6102 case 0x119 ... 0x11f: /* nop (multi byte) */
6103 modrm = ldub_code(s->pc++);
6104 gen_nop_modrm(s, modrm);
6106 case 0x120: /* mov reg, crN */
6107 case 0x122: /* mov crN, reg */
6109 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6111 modrm = ldub_code(s->pc++);
6112 if ((modrm & 0xc0) != 0xc0)
6114 rm = (modrm & 7) | REX_B(s);
6115 reg = ((modrm >> 3) & 7) | rex_r;
6127 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6128 gen_op_mov_TN_reg(ot, 0, rm);
6129 gen_op_movl_crN_T0(reg);
6130 gen_jmp_im(s->pc - s->cs_base);
6133 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6134 #if !defined(CONFIG_USER_ONLY)
6136 gen_op_movtl_T0_cr8();
6139 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6140 gen_op_mov_reg_T0(ot, rm);
6148 case 0x121: /* mov reg, drN */
6149 case 0x123: /* mov drN, reg */
6151 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6153 modrm = ldub_code(s->pc++);
6154 if ((modrm & 0xc0) != 0xc0)
6156 rm = (modrm & 7) | REX_B(s);
6157 reg = ((modrm >> 3) & 7) | rex_r;
6162 /* XXX: do it dynamically with CR4.DE bit */
6163 if (reg == 4 || reg == 5 || reg >= 8)
6166 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6167 gen_op_mov_TN_reg(ot, 0, rm);
6168 gen_op_movl_drN_T0(reg);
6169 gen_jmp_im(s->pc - s->cs_base);
6172 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6173 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6174 gen_op_mov_reg_T0(ot, rm);
6178 case 0x106: /* clts */
6180 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6182 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6184 /* abort block because static cpu state changed */
6185 gen_jmp_im(s->pc - s->cs_base);
6189 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6190 case 0x1c3: /* MOVNTI reg, mem */
6191 if (!(s->cpuid_features & CPUID_SSE2))
6193 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6194 modrm = ldub_code(s->pc++);
6195 mod = (modrm >> 6) & 3;
6198 reg = ((modrm >> 3) & 7) | rex_r;
6199 /* generate a generic store */
6200 gen_ldst_modrm(s, modrm, ot, reg, 1);
6203 modrm = ldub_code(s->pc++);
6204 mod = (modrm >> 6) & 3;
6205 op = (modrm >> 3) & 7;
6207 case 0: /* fxsave */
6208 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6209 (s->flags & HF_EM_MASK))
6211 if (s->flags & HF_TS_MASK) {
6212 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6215 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6216 gen_op_fxsave_A0((s->dflag == 2));
6218 case 1: /* fxrstor */
6219 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6220 (s->flags & HF_EM_MASK))
6222 if (s->flags & HF_TS_MASK) {
6223 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6226 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6227 gen_op_fxrstor_A0((s->dflag == 2));
6229 case 2: /* ldmxcsr */
6230 case 3: /* stmxcsr */
6231 if (s->flags & HF_TS_MASK) {
6232 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6235 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6238 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6240 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6241 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6243 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6244 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6247 case 5: /* lfence */
6248 case 6: /* mfence */
6249 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6252 case 7: /* sfence / clflush */
6253 if ((modrm & 0xc7) == 0xc0) {
6255 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6256 if (!(s->cpuid_features & CPUID_SSE))
6260 if (!(s->cpuid_features & CPUID_CLFLUSH))
6262 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6269 case 0x10d: /* 3DNow! prefetch(w) */
6270 modrm = ldub_code(s->pc++);
6271 mod = (modrm >> 6) & 3;
6274 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6275 /* ignore for now */
6277 case 0x1aa: /* rsm */
6278 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6280 if (!(s->flags & HF_SMM_MASK))
6282 if (s->cc_op != CC_OP_DYNAMIC) {
6283 gen_op_set_cc_op(s->cc_op);
6284 s->cc_op = CC_OP_DYNAMIC;
6286 gen_jmp_im(s->pc - s->cs_base);
6290 case 0x10e ... 0x10f:
6291 /* 3DNow! instructions, ignore prefixes */
6292 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6293 case 0x110 ... 0x117:
6294 case 0x128 ... 0x12f:
6295 case 0x150 ... 0x177:
6296 case 0x17c ... 0x17f:
6298 case 0x1c4 ... 0x1c6:
6299 case 0x1d0 ... 0x1fe:
6300 gen_sse(s, b, pc_start, rex_r);
6305 /* lock generation */
6306 if (s->prefix & PREFIX_LOCK)
6310 if (s->prefix & PREFIX_LOCK)
6312 /* XXX: ensure that no lock was generated */
6313 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6317 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6318 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6320 /* flags read by an operation */
6321 static uint16_t opc_read_flags[NB_OPS] = {
6322 [INDEX_op_aas] = CC_A,
6323 [INDEX_op_aaa] = CC_A,
6324 [INDEX_op_das] = CC_A | CC_C,
6325 [INDEX_op_daa] = CC_A | CC_C,
6327 /* subtle: due to the incl/decl implementation, C is used */
6328 [INDEX_op_update_inc_cc] = CC_C,
6330 [INDEX_op_into] = CC_O,
6332 [INDEX_op_jb_subb] = CC_C,
6333 [INDEX_op_jb_subw] = CC_C,
6334 [INDEX_op_jb_subl] = CC_C,
6336 [INDEX_op_jz_subb] = CC_Z,
6337 [INDEX_op_jz_subw] = CC_Z,
6338 [INDEX_op_jz_subl] = CC_Z,
6340 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6341 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6342 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6344 [INDEX_op_js_subb] = CC_S,
6345 [INDEX_op_js_subw] = CC_S,
6346 [INDEX_op_js_subl] = CC_S,
6348 [INDEX_op_jl_subb] = CC_O | CC_S,
6349 [INDEX_op_jl_subw] = CC_O | CC_S,
6350 [INDEX_op_jl_subl] = CC_O | CC_S,
6352 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6353 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6354 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6356 [INDEX_op_loopnzw] = CC_Z,
6357 [INDEX_op_loopnzl] = CC_Z,
6358 [INDEX_op_loopzw] = CC_Z,
6359 [INDEX_op_loopzl] = CC_Z,
6361 [INDEX_op_seto_T0_cc] = CC_O,
6362 [INDEX_op_setb_T0_cc] = CC_C,
6363 [INDEX_op_setz_T0_cc] = CC_Z,
6364 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6365 [INDEX_op_sets_T0_cc] = CC_S,
6366 [INDEX_op_setp_T0_cc] = CC_P,
6367 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6368 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6370 [INDEX_op_setb_T0_subb] = CC_C,
6371 [INDEX_op_setb_T0_subw] = CC_C,
6372 [INDEX_op_setb_T0_subl] = CC_C,
6374 [INDEX_op_setz_T0_subb] = CC_Z,
6375 [INDEX_op_setz_T0_subw] = CC_Z,
6376 [INDEX_op_setz_T0_subl] = CC_Z,
6378 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6379 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6380 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6382 [INDEX_op_sets_T0_subb] = CC_S,
6383 [INDEX_op_sets_T0_subw] = CC_S,
6384 [INDEX_op_sets_T0_subl] = CC_S,
6386 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6387 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6388 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6390 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6391 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6392 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6394 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6395 [INDEX_op_cmc] = CC_C,
6396 [INDEX_op_salc] = CC_C,
6398 /* needed for correct flag optimisation before string ops */
6399 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6400 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6401 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6402 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6404 #ifdef TARGET_X86_64
6405 [INDEX_op_jb_subq] = CC_C,
6406 [INDEX_op_jz_subq] = CC_Z,
6407 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6408 [INDEX_op_js_subq] = CC_S,
6409 [INDEX_op_jl_subq] = CC_O | CC_S,
6410 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6412 [INDEX_op_loopnzq] = CC_Z,
6413 [INDEX_op_loopzq] = CC_Z,
6415 [INDEX_op_setb_T0_subq] = CC_C,
6416 [INDEX_op_setz_T0_subq] = CC_Z,
6417 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6418 [INDEX_op_sets_T0_subq] = CC_S,
6419 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6420 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6422 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6423 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6426 #define DEF_READF(SUFFIX)\
6427 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6428 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6429 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6430 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6431 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6432 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6433 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6434 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6436 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6437 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6438 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6439 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6440 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6441 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6442 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6443 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6447 #ifndef CONFIG_USER_ONLY
6453 /* flags written by an operation */
6454 static uint16_t opc_write_flags[NB_OPS] = {
6455 [INDEX_op_update2_cc] = CC_OSZAPC,
6456 [INDEX_op_update1_cc] = CC_OSZAPC,
6457 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6458 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6459 /* subtle: due to the incl/decl implementation, C is used */
6460 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6461 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6463 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6464 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6465 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6466 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6467 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6468 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6469 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6470 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6471 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6472 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6473 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6476 [INDEX_op_ucomiss] = CC_OSZAPC,
6477 [INDEX_op_ucomisd] = CC_OSZAPC,
6478 [INDEX_op_comiss] = CC_OSZAPC,
6479 [INDEX_op_comisd] = CC_OSZAPC,
6482 [INDEX_op_aam] = CC_OSZAPC,
6483 [INDEX_op_aad] = CC_OSZAPC,
6484 [INDEX_op_aas] = CC_OSZAPC,
6485 [INDEX_op_aaa] = CC_OSZAPC,
6486 [INDEX_op_das] = CC_OSZAPC,
6487 [INDEX_op_daa] = CC_OSZAPC,
6489 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6490 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6491 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6492 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6493 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6494 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6495 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6496 [INDEX_op_clc] = CC_C,
6497 [INDEX_op_stc] = CC_C,
6498 [INDEX_op_cmc] = CC_C,
6500 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6501 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6502 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6503 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6504 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6505 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6506 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6507 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6508 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6509 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6510 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6511 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6513 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6514 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6515 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6516 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6517 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6518 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6520 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6521 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6522 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6523 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6525 [INDEX_op_cmpxchg8b] = CC_Z,
6526 [INDEX_op_lar] = CC_Z,
6527 [INDEX_op_lsl] = CC_Z,
6528 [INDEX_op_verr] = CC_Z,
6529 [INDEX_op_verw] = CC_Z,
6530 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6531 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6533 #define DEF_WRITEF(SUFFIX)\
6534 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6535 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6536 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6537 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6538 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6539 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6540 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6541 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6543 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6544 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6545 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6546 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6547 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6548 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6549 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6550 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6552 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6553 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6554 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6555 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6556 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6557 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6558 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6559 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6561 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6562 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6563 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6564 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6566 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6567 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6568 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6569 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6571 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6572 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6573 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6574 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6576 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6577 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6578 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6579 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6580 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6581 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6583 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6584 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6585 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6586 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6587 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6588 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6590 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6591 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6592 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6593 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6598 #ifndef CONFIG_USER_ONLY
6604 /* simpler form of an operation if no flags need to be generated */
/* Maps each condition-code-setting micro-op index to a cheaper variant that
   produces the same data result without updating the flags.  Entries left
   at 0 are given a default in optimize_flags_init() -- presumably the
   identity mapping; TODO confirm, the assignment line is not visible in
   this excerpt.  NOTE(review): this is a sampled listing -- some entries
   and the closing "};" of the initializer fall outside the visible lines. */
6605 static uint16_t opc_simpler[NB_OPS] = {
/* pure flag-update ops become nops when the flags are dead */
6606 [INDEX_op_update2_cc] = INDEX_op_nop,
6607 [INDEX_op_update1_cc] = INDEX_op_nop,
6608 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6610 /* broken: CC_OP logic must be rewritten */
6611 [INDEX_op_update_inc_cc] = INDEX_op_nop,
/* shifts: the _cc variants are replaced by their flagless counterparts;
   the q (64-bit) entries exist only when TARGET_X86_64 is defined */
6614 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6615 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6616 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6617 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6619 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6620 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6621 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6622 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6624 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6625 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6626 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6627 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
/* Rotate entries are generated per memory-access suffix via this helper
   macro (no comments may be inserted inside it: each line must end with
   the backslash continuation). */
6629 #define DEF_SIMPLER(SUFFIX)\
6630 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6631 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6632 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6633 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6635 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6636 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6637 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6638 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
/* softmmu builds get extra per-privilege-level variants */
6642 #ifndef CONFIG_USER_ONLY
6643 DEF_SIMPLER(_kernel)
/* Expansion callback registered with the TCG context (see
   optimize_flags_init): expands a deferred "macro" op into real helper
   calls.  Presumably dispatches on macro_id -- TODO confirm, the switch
   and the other cases are not visible in this excerpt; only the
   divl-helper expansion line survives the sampling. */
6648 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6653 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
/* One-time translator initialisation: fill the opc_simpler defaults,
   register the TCG macro expander, and create the global TCG values for
   env/T0/T1/A0.  NOTE(review): sampled listing -- the braces and the body
   of the if() at 6664 (presumably "opc_simpler[i] = i;", i.e. identity
   mapping; verify against the full source) are not visible here. */
6659 void optimize_flags_init(void)
6662 /* put default values in arrays */
6663 for(i = 0; i < NB_OPS; i++) {
6664 if (opc_simpler[i] == 0)
6668 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
/* env always lives in the fixed host register AREG0 */
6670 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
/* When a target-word value cannot fit a host register, back T0/T1/A0
   with CPUState memory slots instead of fixed host registers. */
6671 #if TARGET_LONG_BITS > HOST_LONG_BITS
6672 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6673 TCG_AREG0, offsetof(CPUState, t0), "T0")ev;
6674 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6675 TCG_AREG0, offsetof(CPUState, t1), "T1");
6676 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6677 TCG_AREG0, offsetof(CPUState, t2), "A0");
/* otherwise pin them to the fixed host registers AREG1..AREG3 */
6679 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6680 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6681 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6683 /* the helpers are only registered to print debug info */
6684 TCG_HELPER(helper_divl_EAX_T0);
6685 TCG_HELPER(helper_idivl_EAX_T0);
6688 /* CPU flags computation optimization: we move backward thru the
6689 generated code to see which flags are needed. The operation is
6690 modified if suitable */
/* Classic backward liveness pass over the micro-op buffer: an op whose
   written flags are all dead is replaced by its opc_simpler[] equivalent.
   NOTE(review): sampled listing -- the declaration of opc_ptr, the
   "op = *--opc_ptr;" fetch at the top of the loop, and the closing braces
   are not visible here. */
6691 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6694 int live_flags, write_flags, op;
/* start one past the last op and walk backward */
6696 opc_ptr = opc_buf + opc_buf_len;
6697 /* live_flags contains the flags needed by the next instructions
6698 in the code. At the end of the block, we consider that all the
6700 live_flags = CC_OSZAPC;
6701 while (opc_ptr > opc_buf) {
6703 /* if none of the flags written by the instruction is used,
6704 then we can try to find a simpler instruction */
6705 write_flags = opc_write_flags[op];
6706 if ((live_flags & write_flags) == 0) {
6707 *opc_ptr = opc_simpler[op];
6709 /* compute the live flags before the instruction */
6710 live_flags &= ~write_flags;
6711 live_flags |= opc_read_flags[op];
6715 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6716 basic block 'tb'. If search_pc is TRUE, also generate PC
6717 information for each intermediate instruction. */
/* NOTE(review): sampled listing -- many body lines (local declarations,
   braces, loop heads, the #else/#endif partners of the visible #if/#ifdef
   lines) fall outside the visible region.  Comments below describe only
   what the visible code shows. */
6718 static inline int gen_intermediate_code_internal(CPUState *env,
6719 TranslationBlock *tb,
6722 DisasContext dc1, *dc = &dc1;
6723 target_ulong pc_ptr;
6724 uint16_t *gen_opc_end;
6727 target_ulong pc_start;
6728 target_ulong cs_base;
6730 /* generate intermediate code */
6732 cs_base = tb->cs_base;
6734 cflags = tb->cflags;
/* unpack the CPU mode bits cached in tb->flags into the disas context */
6736 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6737 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6738 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6739 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6741 dc->vm86 = (flags >> VM_SHIFT) & 1;
6742 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6743 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6744 dc->tf = (flags >> TF_SHIFT) & 1;
6745 dc->singlestep_enabled = env->singlestep_enabled;
6746 dc->cc_op = CC_OP_DYNAMIC;
6747 dc->cs_base = cs_base;
6749 dc->popl_esp_hack = 0;
6750 /* select memory access functions */
/* mem_index values are presumably scaled MMU indexes for the softmmu
   load/store helpers -- TODO confirm against the helper tables */
6752 if (flags & HF_SOFTMMU_MASK) {
6754 dc->mem_index = 2 * 4;
6756 dc->mem_index = 1 * 4;
6758 dc->cpuid_features = env->cpuid_features;
6759 dc->cpuid_ext_features = env->cpuid_ext_features;
6760 #ifdef TARGET_X86_64
6761 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6762 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
/* direct block chaining is only safe when no per-insn event must fire */
6765 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6766 (flags & HF_INHIBIT_IRQ_MASK)
6767 #ifndef CONFIG_SOFTMMU
6768 || (flags & HF_SOFTMMU_MASK)
6772 /* check addseg logic */
6773 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6774 printf("ERROR addseg\n");
6777 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6779 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6781 dc->is_jmp = DISAS_NEXT;
/* main translation loop: one x86 instruction per iteration */
6786 if (env->nb_breakpoints > 0) {
6787 for(j = 0; j < env->nb_breakpoints; j++) {
6788 if (env->breakpoints[j] == pc_ptr) {
6789 gen_debug(dc, pc_ptr - dc->cs_base);
/* search_pc bookkeeping: record pc/cc_op for each generated op so the
   exact guest PC can be recovered after a fault */
6795 j = gen_opc_ptr - gen_opc_buf;
6799 gen_opc_instr_start[lj++] = 0;
6801 gen_opc_pc[lj] = pc_ptr;
6802 gen_opc_cc_op[lj] = dc->cc_op;
6803 gen_opc_instr_start[lj] = 1;
6805 pc_ptr = disas_insn(dc, pc_ptr);
6806 /* stop translation if indicated */
6809 /* if single step mode, we generate only one instruction and
6810 generate an exception */
6811 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6812 the flag and abort the translation to give the irqs a
6813 chance to happen */
6814 if (dc->tf || dc->singlestep_enabled ||
6815 (flags & HF_INHIBIT_IRQ_MASK) ||
6816 (cflags & CF_SINGLE_INSN)) {
6817 gen_jmp_im(pc_ptr - dc->cs_base);
6821 /* if too long translation, stop generation too */
6822 if (gen_opc_ptr >= gen_opc_end ||
6823 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6824 gen_jmp_im(pc_ptr - dc->cs_base);
6829 *gen_opc_ptr = INDEX_op_end;
6830 /* we don't forget to fill the last values */
6832 j = gen_opc_ptr - gen_opc_buf;
6835 gen_opc_instr_start[lj++] = 0;
/* optional debug dumps of CPU state, guest asm, and micro-ops */
6839 if (loglevel & CPU_LOG_TB_CPU) {
6840 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6842 if (loglevel & CPU_LOG_TB_IN_ASM) {
6844 fprintf(logfile, "----------------\n");
6845 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6846 #ifdef TARGET_X86_64
6851 disas_flags = !dc->code32;
6852 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6853 fprintf(logfile, "\n");
6854 if (loglevel & CPU_LOG_TB_OP_OPT) {
6855 fprintf(logfile, "OP before opt:\n");
6856 tcg_dump_ops(&tcg_ctx, logfile);
6857 fprintf(logfile, "\n");
6862 /* optimize flag computations */
6863 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6866 tb->size = pc_ptr - pc_start;
/* Public entry point: translate one TB without per-op PC tracking
   (search_pc = 0). */
6870 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6872 return gen_intermediate_code_internal(env, tb, 0);
/* Public entry point used for fault recovery: retranslate with per-op
   PC tracking enabled (search_pc = 1). */
6875 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6877 return gen_intermediate_code_internal(env, tb, 1);