4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
/* Sentinel values stored in DisasContext.pc / DisasContext.npc when the
   program counter is not a compile-time constant. */
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
/* TCG globals shared by all generated code: the CPU env pointer, the three
   T[] temporaries inherited from the old micro-op model, and a pointer to
   the current register window (regwptr). */
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr;
50 /* local register indexes (only used inside old micro ops) */
/* Per-translation-block disassembly state. pc/npc are either real guest
   addresses or the DYNAMIC_PC/JUMP_PC sentinels defined above.
   NOTE(review): several fields between these lines are elided in this view. */
53 typedef struct DisasContext {
54 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
55 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
56 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
60 struct TranslationBlock *tb;
/* Static description of one SPARC CPU model: its name, IU version word and
   the writable-bit masks of the MMU control registers.
   NOTE(review): additional fields are elided in this view. */
63 typedef struct sparc_def_t sparc_def_t;
66 const unsigned char *name;
67 target_ulong iu_version;
71 uint32_t mmu_ctpr_mask;
72 uint32_t mmu_cxr_mask;
73 uint32_t mmu_sfsr_mask;
74 uint32_t mmu_trcr_mask;
/* Look up a CPU model description by its name string. */
77 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
/* Extract bits FROM..TO of a 32-bit instruction word.  GET_FIELD counts
   bit 0 as the most significant bit (big-endian bit numbering);
   GET_FIELD_SP uses the architecture-manual numbering (bit 0 == 2^0).
   The ...s variants additionally sign-extend the extracted field. */
82 // This function uses non-native bit order
83 #define GET_FIELD(X, FROM, TO) \
84 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
86 // This function uses the order in the manuals, i.e. bit 0 is 2^0
87 #define GET_FIELD_SP(X, FROM, TO) \
88 GET_FIELD(X, 31 - (TO), 31 - (FROM))
90 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
91 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map a double/quad FP register number to its storage index.
   NOTE(review): the two DFPREG/QFPREG pairs below are the SPARC64 and
   non-SPARC64 variants of an #ifdef whose directives are elided here. */
94 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
95 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
97 #define DFPREG(r) (r & 0x1e)
98 #define QFPREG(r) (r & 0x1c)
/* Sign-extend the low `len` bits of x.
   NOTE(review): a statement adjusting `len` appears to be elided between
   these lines — as shown, the shifts would not implement sign extension;
   confirm against the full source.  Relies on arithmetic right shift of
   a signed int (implementation-defined in C). */
101 static int sign_extend(int x, int len)
104 return (x << len) >> len;
/* True when the instruction's i bit (bit 13) selects an immediate operand. */
107 #define IS_IMM (insn & (1<<13))
109 static void disas_sparc_insn(DisasContext * dc);
/* GEN32(func, NAME) builds a table of per-FP-register micro-op functions
   (NAME ## 0 .. NAME ## 31/63) plus an inline dispatcher func(n) that calls
   the entry for register n.  On SPARC64 the table has 64 slots; the odd
   entries above 32 are NULL because double/quad registers above %f32 are
   even-numbered only. */
111 #ifdef TARGET_SPARC64
112 #define GEN32(func, NAME) \
113 static GenOpFunc * const NAME ## _table [64] = { \
114 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
115 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
116 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
117 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
118 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
119 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
120 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
121 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
122 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
123 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
124 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
125 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
127 static inline void func(int n) \
129 NAME ## _table[n](); \
/* Non-SPARC64 variant: only 32 single-precision registers. */
132 #define GEN32(func, NAME) \
133 static GenOpFunc *const NAME ## _table [32] = { \
134 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
135 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
136 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
137 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
138 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
139 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
140 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
141 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
143 static inline void func(int n) \
145 NAME ## _table[n](); \
149 /* floating point registers moves */
/* Dispatchers moving FP registers to/from the FT0/FT1 (single-precision)
   helper temporaries. */
150 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
151 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
152 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
153 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
/* Same for DT0/DT1 (double precision). */
155 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
156 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
157 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
158 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
/* QT0/QT1 (quad precision) moves exist only in the user-mode build. */
160 #if defined(CONFIG_USER_ONLY)
161 GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
162 GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
163 GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
164 GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
/* Privilege checks and memory-access op selection.  In user-only emulation
   there is no supervisor/hypervisor state and all accesses use the _raw
   ops; in system emulation the privilege level is encoded in dc->mem_idx
   (0 = user, 1 = kernel, 2 = hypervisor on SPARC64) and each access width
   has a per-privilege op table indexed by mem_idx. */
168 #ifdef CONFIG_USER_ONLY
169 #define supervisor(dc) 0
170 #ifdef TARGET_SPARC64
171 #define hypervisor(dc) 0
173 #define gen_op_ldst(name) gen_op_##name##_raw()
175 #define supervisor(dc) (dc->mem_idx >= 1)
176 #ifdef TARGET_SPARC64
177 #define hypervisor(dc) (dc->mem_idx == 2)
178 #define OP_LD_TABLE(width) \
179 static GenOpFunc * const gen_op_##width[] = { \
180 &gen_op_##width##_user, \
181 &gen_op_##width##_kernel, \
182 &gen_op_##width##_hypv, \
/* Non-SPARC64 system build: no hypervisor entry. */
185 #define OP_LD_TABLE(width) \
186 static GenOpFunc * const gen_op_##width[] = { \
187 &gen_op_##width##_user, \
188 &gen_op_##width##_kernel, \
191 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
194 #ifndef CONFIG_USER_ONLY
197 #endif /* __i386__ */
/* Truncate a 64-bit address to 32 bits for 32-bit ABI processes;
   a no-op otherwise (the two variants belong to an elided #ifdef/#else). */
205 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
207 #define ABI32_MASK(addr)
/* Load the sign-extended immediate `val` into T1. */
210 static inline void gen_movl_simm_T1(int32_t val)
212 tcg_gen_movi_tl(cpu_T[1], val);
/* Copy guest register `reg` into TCG value `tn`.  %g0 always reads as 0,
   the other globals live in env->gregs[], and windowed registers (>= 8)
   are reached through the current window pointer.
   NOTE(review): the if/else structure selecting these three cases is
   elided in this view. */
215 static inline void gen_movl_reg_TN(int reg, TCGv tn)
218 tcg_gen_movi_tl(tn, 0);
220 tcg_gen_ld_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
222 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
223 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Convenience wrappers: load register `reg` into T0/T1/T2. */
227 static inline void gen_movl_reg_T0(int reg)
229 gen_movl_reg_TN(reg, cpu_T[0]);
232 static inline void gen_movl_reg_T1(int reg)
234 gen_movl_reg_TN(reg, cpu_T[1]);
238 static inline void gen_movl_reg_T2(int reg)
240 gen_movl_reg_TN(reg, cpu_T[2]);
243 #endif /* __i386__ */
/* Store TCG value `tn` into guest register `reg`: globals go to
   env->gregs[], windowed registers (>= 8) through the window pointer.
   NOTE(review): the branch structure (and the %g0 discard case) is
   elided in this view — confirm against the full source. */
244 static inline void gen_movl_TN_reg(int reg, TCGv tn)
249 tcg_gen_st_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
251 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
252 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Convenience wrappers: store T0/T1 into register `reg`. */
256 static inline void gen_movl_T0_reg(int reg)
258 gen_movl_TN_reg(reg, cpu_T[0]);
261 static inline void gen_movl_T1_reg(int reg)
263 gen_movl_TN_reg(reg, cpu_T[1]);
/* Move a 32-bit (movl) or target-width (movtl) field of CPUSPARCState
   to/from T0, addressed by its byte offset into the env structure. */
266 static inline void gen_op_movl_T0_env(size_t offset)
268 tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
271 static inline void gen_op_movl_env_T0(size_t offset)
273 tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
276 static inline void gen_op_movtl_T0_env(size_t offset)
278 tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
281 static inline void gen_op_movtl_env_T0(size_t offset)
283 tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
/* Two-operand ALU helpers on the T registers: T0 = T0 op T1. */
286 static inline void gen_op_add_T1_T0(void)
288 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
291 static inline void gen_op_or_T1_T0(void)
293 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
296 static inline void gen_op_xor_T1_T0(void)
298 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Store the constant `pc` / `npc` into env->pc / env->npc via a temp. */
301 static inline void gen_jmp_im(target_ulong pc)
303 tcg_gen_movi_tl(cpu_tmp0, pc);
304 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
307 static inline void gen_movl_npc_im(target_ulong npc)
309 tcg_gen_movi_tl(cpu_tmp0, npc);
310 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
/* Emit a jump to (pc, npc).  When both targets lie on the same guest page
   as the current TB we can chain TBs directly (tcg_gen_goto_tb/exit_tb
   with the TB slot number); otherwise fall back to storing npc and leaving
   the TB normally.  NOTE(review): parts of both paths are elided here. */
313 static inline void gen_goto_tb(DisasContext *s, int tb_num,
314 target_ulong pc, target_ulong npc)
316 TranslationBlock *tb;
319 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
320 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
321 /* jump to same page: we can use a direct jump */
322 tcg_gen_goto_tb(tb_num);
324 gen_movl_npc_im(npc);
325 tcg_gen_exit_tb((long)tb + tb_num);
327 /* jump to another page: currently not optimized */
329 gen_movl_npc_im(npc);
/* Conditional branch where both outcomes continue straight-line:
   T2 == 0 takes the pc2 path (label l1), else the pc1 path. */
334 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
339 l1 = gen_new_label();
341 gen_op_jz_T2_label(l1);
343 gen_goto_tb(dc, 0, pc1, pc1 + 4);
346 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Conditional branch with the annul bit set: the taken path executes the
   delay slot (pc2) then jumps to pc1; the untaken path skips it. */
349 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
354 l1 = gen_new_label();
356 gen_op_jz_T2_label(l1);
358 gen_goto_tb(dc, 0, pc2, pc1);
361 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Unconditional jump to a known (pc, npc) pair. */
364 static inline void gen_branch(DisasContext *dc, target_ulong pc,
367 gen_goto_tb(dc, 0, pc, npc);
/* Materialise a JUMP_PC npc: store npc2 if T2 == 0, otherwise npc1. */
370 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2)
374 l1 = gen_new_label();
375 l2 = gen_new_label();
376 gen_op_jz_T2_label(l1);
378 gen_movl_npc_im(npc1);
379 gen_op_jmp_label(l2);
382 gen_movl_npc_im(npc2);
386 /* call this function before using T2 as it may have been set for a jump */
/* Resolve a pending JUMP_PC npc into env->npc so T2 may be reused. */
387 static inline void flush_T2(DisasContext * dc)
389 if (dc->npc == JUMP_PC) {
390 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
391 dc->npc = DYNAMIC_PC;
/* Ensure env->npc holds the architectural next-PC: resolve JUMP_PC via the
   generic branch, or store a known constant npc directly. */
395 static inline void save_npc(DisasContext * dc)
397 if (dc->npc == JUMP_PC) {
398 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
399 dc->npc = DYNAMIC_PC;
400 } else if (dc->npc != DYNAMIC_PC) {
401 gen_movl_npc_im(dc->npc);
/* Flush all translation-time PC state to env (body elided in this view). */
405 static inline void save_state(DisasContext * dc)
/* pc = npc, resolving a dynamic or two-valued npc first by copying
   env->npc into env->pc. */
411 static inline void gen_mov_pc_npc(DisasContext * dc)
413 if (dc->npc == JUMP_PC) {
414 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
415 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
416 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
418 } else if (dc->npc == DYNAMIC_PC) {
419 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
420 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
/* Advance to the next instruction: pc = npc; npc += 4. */
427 static inline void gen_op_next_insn(void)
429 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
430 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
431 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
432 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
/* Tables of condition-evaluation micro-ops, indexed by [cc set][cond field].
   gen_cond covers the integer condition codes (icc/xcc on SPARC64);
   gen_fcond covers the four floating-point condition-code sets.
   NOTE(review): most table entries are elided in this view — only the
   fcc1/fcc2/fcc3 rows are visible below. */
435 static GenOpFunc * const gen_cond[2][16] = {
455 #ifdef TARGET_SPARC64
476 static GenOpFunc * const gen_fcond[4][16] = {
495 #ifdef TARGET_SPARC64
/* %fcc1 row */
498 gen_op_eval_fbne_fcc1,
499 gen_op_eval_fblg_fcc1,
500 gen_op_eval_fbul_fcc1,
501 gen_op_eval_fbl_fcc1,
502 gen_op_eval_fbug_fcc1,
503 gen_op_eval_fbg_fcc1,
504 gen_op_eval_fbu_fcc1,
506 gen_op_eval_fbe_fcc1,
507 gen_op_eval_fbue_fcc1,
508 gen_op_eval_fbge_fcc1,
509 gen_op_eval_fbuge_fcc1,
510 gen_op_eval_fble_fcc1,
511 gen_op_eval_fbule_fcc1,
512 gen_op_eval_fbo_fcc1,
/* %fcc2 row */
516 gen_op_eval_fbne_fcc2,
517 gen_op_eval_fblg_fcc2,
518 gen_op_eval_fbul_fcc2,
519 gen_op_eval_fbl_fcc2,
520 gen_op_eval_fbug_fcc2,
521 gen_op_eval_fbg_fcc2,
522 gen_op_eval_fbu_fcc2,
524 gen_op_eval_fbe_fcc2,
525 gen_op_eval_fbue_fcc2,
526 gen_op_eval_fbge_fcc2,
527 gen_op_eval_fbuge_fcc2,
528 gen_op_eval_fble_fcc2,
529 gen_op_eval_fbule_fcc2,
530 gen_op_eval_fbo_fcc2,
/* %fcc3 row */
534 gen_op_eval_fbne_fcc3,
535 gen_op_eval_fblg_fcc3,
536 gen_op_eval_fbul_fcc3,
537 gen_op_eval_fbl_fcc3,
538 gen_op_eval_fbug_fcc3,
539 gen_op_eval_fbg_fcc3,
540 gen_op_eval_fbu_fcc3,
542 gen_op_eval_fbe_fcc3,
543 gen_op_eval_fbue_fcc3,
544 gen_op_eval_fbge_fcc3,
545 gen_op_eval_fbuge_fcc3,
546 gen_op_eval_fble_fcc3,
547 gen_op_eval_fbule_fcc3,
548 gen_op_eval_fbo_fcc3,
/* SPARC64 register-contents conditions (BPr/MOVr/FMOVr); body and the
   entries of gen_tcg_cond_reg are elided in this view. */
555 #ifdef TARGET_SPARC64
556 static void gen_cond_reg(int cond)
582 static const int gen_tcg_cond_reg[8] = {
594 /* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc / SPARC64 BPcc).
   cond 0x0 is "branch never" (annul optionally skips the delay slot),
   cond 0x8 is "branch always"; other conditions evaluate gen_cond[cc][cond]
   into T2 and either emit an annulled branch now or record the two
   possible next PCs as a JUMP_PC pair.  NOTE(review): several statements
   of each case are elided in this view. */
595 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
597 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
598 target_ulong target = dc->pc + offset;
601 /* unconditional not taken */
603 dc->pc = dc->npc + 4;
604 dc->npc = dc->pc + 4;
607 dc->npc = dc->pc + 4;
609 } else if (cond == 0x8) {
610 /* unconditional taken */
613 dc->npc = dc->pc + 4;
620 gen_cond[cc][cond]();
622 gen_branch_a(dc, target, dc->npc);
626 dc->jump_pc[0] = target;
627 dc->jump_pc[1] = dc->npc + 4;
633 /* XXX: potentially incorrect if dynamic npc */
/* Same structure as do_branch but for floating-point branches (FBfcc),
   using the gen_fcond table for condition evaluation. */
634 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
636 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
637 target_ulong target = dc->pc + offset;
640 /* unconditional not taken */
642 dc->pc = dc->npc + 4;
643 dc->npc = dc->pc + 4;
646 dc->npc = dc->pc + 4;
648 } else if (cond == 0x8) {
649 /* unconditional taken */
652 dc->npc = dc->pc + 4;
659 gen_fcond[cc][cond]();
661 gen_branch_a(dc, target, dc->npc);
665 dc->jump_pc[0] = target;
666 dc->jump_pc[1] = dc->npc + 4;
672 #ifdef TARGET_SPARC64
673 /* XXX: potentially incorrect if dynamic npc */
/* SPARC64 branch-on-register-contents (BPr): the condition has already
   been evaluated into T2 by the caller's gen_cond_reg; either emit an
   annulled branch or record a JUMP_PC pair, like do_branch.
   NOTE(review): the condition-evaluation and non-annul statements are
   elided in this view. */
674 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
676 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
677 target_ulong target = dc->pc + offset;
682 gen_branch_a(dc, target, dc->npc);
686 dc->jump_pc[0] = target;
687 dc->jump_pc[1] = dc->npc + 4;
/* Per-fcc-set FP compare helper tables (entries elided in this view) and
   the gen_op_fcmp* wrappers that dispatch on the fcc number.  Quad
   precision exists only in the user-mode build.  NOTE(review): the two
   wrapper groups below are the SPARC64 (table-indexed) and non-SPARC64
   (single-fcc helper) halves of an elided #ifdef/#else. */
692 static GenOpFunc * const gen_fcmps[4] = {
699 static GenOpFunc * const gen_fcmpd[4] = {
706 #if defined(CONFIG_USER_ONLY)
707 static GenOpFunc * const gen_fcmpq[4] = {
715 static GenOpFunc * const gen_fcmpes[4] = {
722 static GenOpFunc * const gen_fcmped[4] = {
729 #if defined(CONFIG_USER_ONLY)
730 static GenOpFunc * const gen_fcmpeq[4] = {
/* SPARC64: compare into the selected %fccN. */
738 static inline void gen_op_fcmps(int fccno)
740 tcg_gen_helper_0_0(gen_fcmps[fccno]);
743 static inline void gen_op_fcmpd(int fccno)
745 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
748 #if defined(CONFIG_USER_ONLY)
749 static inline void gen_op_fcmpq(int fccno)
751 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
755 static inline void gen_op_fcmpes(int fccno)
757 tcg_gen_helper_0_0(gen_fcmpes[fccno]);
760 static inline void gen_op_fcmped(int fccno)
762 tcg_gen_helper_0_0(gen_fcmped[fccno]);
765 #if defined(CONFIG_USER_ONLY)
766 static inline void gen_op_fcmpeq(int fccno)
768 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
/* Non-SPARC64: only one fcc, so fccno is ignored. */
774 static inline void gen_op_fcmps(int fccno)
776 tcg_gen_helper_0_0(helper_fcmps);
779 static inline void gen_op_fcmpd(int fccno)
781 tcg_gen_helper_0_0(helper_fcmpd);
784 #if defined(CONFIG_USER_ONLY)
785 static inline void gen_op_fcmpq(int fccno)
787 tcg_gen_helper_0_0(helper_fcmpq);
791 static inline void gen_op_fcmpes(int fccno)
793 tcg_gen_helper_0_0(helper_fcmpes);
796 static inline void gen_op_fcmped(int fccno)
798 tcg_gen_helper_0_0(helper_fcmped);
801 #if defined(CONFIG_USER_ONLY)
802 static inline void gen_op_fcmpeq(int fccno)
804 tcg_gen_helper_0_0(helper_fcmpeq);
/* If the FPU is disabled (system emulation only), raise an fp_disabled
   trap and return nonzero so the caller aborts translating the insn.
   NOTE(review): the save_state/return statements are elided in this view. */
810 static int gen_trap_ifnofpu(DisasContext * dc)
812 #if !defined(CONFIG_USER_ONLY)
813 if (!dc->fpu_enabled) {
815 gen_op_exception(TT_NFPU_INSN);
/* Clear the FTT and current-exception fields of env->fsr in place. */
823 static inline void gen_op_clear_ieee_excp_and_FTT(void)
825 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
826 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
827 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
/* Reset the softfloat accrued-exception state before an FP op. */
830 static inline void gen_clear_float_exceptions(void)
832 tcg_gen_helper_0_0(helper_clear_float_exceptions);
836 #ifdef TARGET_SPARC64
/* SPARC64 alternate-space load: call helper_ld_asi(addr, asi, size, sign)
   with the result placed in T1.  The two address/asi sequences below are
   the register-offset+%asi and immediate-asi halves of an elided
   IS_IMM if/else — confirm against the full source. */
837 static inline void gen_ld_asi(int insn, int size, int sign)
842 r_size = tcg_temp_new(TCG_TYPE_I32);
843 r_sign = tcg_temp_new(TCG_TYPE_I32);
844 tcg_gen_movi_i32(r_size, size);
845 tcg_gen_movi_i32(r_sign, sign);
847 offset = GET_FIELD(insn, 25, 31);
848 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
849 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
851 asi = GET_FIELD(insn, 19, 26);
852 tcg_gen_movi_i32(cpu_T[1], asi);
854 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
/* SPARC64 alternate-space store: helper_st_asi(addr, value, asi, size),
   with the same elided immediate/%asi selection as gen_ld_asi. */
858 static inline void gen_st_asi(int insn, int size)
863 r_asi = tcg_temp_new(TCG_TYPE_I32);
864 r_size = tcg_temp_new(TCG_TYPE_I32);
865 tcg_gen_movi_i32(r_size, size);
867 offset = GET_FIELD(insn, 25, 31);
868 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
869 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
871 asi = GET_FIELD(insn, 19, 26);
872 tcg_gen_movi_i32(r_asi, asi);
874 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
/* Alternate-space FP load: helper_ldf_asi(addr, asi, size, rd) writes the
   destination FP register directly (rd passed by number).  Same elided
   immediate/%asi selection as gen_ld_asi. */
877 static inline void gen_ldf_asi(int insn, int size, int rd)
880 TCGv r_asi, r_size, r_rd;
882 r_asi = tcg_temp_new(TCG_TYPE_I32);
883 r_size = tcg_temp_new(TCG_TYPE_I32);
884 r_rd = tcg_temp_new(TCG_TYPE_I32);
885 tcg_gen_movi_i32(r_size, size);
886 tcg_gen_movi_i32(r_rd, rd);
888 offset = GET_FIELD(insn, 25, 31);
889 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
890 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
892 asi = GET_FIELD(insn, 19, 26);
893 tcg_gen_movi_i32(r_asi, asi);
895 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
/* Alternate-space FP store: mirror of gen_ldf_asi using helper_stf_asi. */
898 static inline void gen_stf_asi(int insn, int size, int rd)
901 TCGv r_asi, r_size, r_rd;
903 r_asi = tcg_temp_new(TCG_TYPE_I32);
904 r_size = tcg_temp_new(TCG_TYPE_I32);
905 r_rd = tcg_temp_new(TCG_TYPE_I32);
906 tcg_gen_movi_i32(r_size, size);
907 tcg_gen_movi_i32(r_rd, rd);
909 offset = GET_FIELD(insn, 25, 31);
910 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
911 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
913 asi = GET_FIELD(insn, 19, 26);
914 tcg_gen_movi_i32(r_asi, asi);
916 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
/* Alternate-space SWAP: load the old 32-bit word into a temp, store T1,
   then move the old value into T1.  NOTE(review): the argument order of
   the helper_st_asi call at line 939 looks unusual (r_size/r_sign in the
   asi/size slots) — verify against the full source and helper signature. */
919 static inline void gen_swap_asi(int insn)
922 TCGv r_size, r_sign, r_temp;
924 r_size = tcg_temp_new(TCG_TYPE_I32);
925 r_sign = tcg_temp_new(TCG_TYPE_I32);
926 r_temp = tcg_temp_new(TCG_TYPE_I32);
927 tcg_gen_movi_i32(r_size, 4);
928 tcg_gen_movi_i32(r_sign, 0);
930 offset = GET_FIELD(insn, 25, 31);
931 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
932 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
934 asi = GET_FIELD(insn, 19, 26);
935 tcg_gen_movi_i32(cpu_T[1], asi);
937 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
939 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
940 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* Alternate-space LDDA: load a 64-bit doubleword and split it into the
   T0 (high word) / T1 (low word) register pair. */
943 static inline void gen_ldda_asi(int insn)
946 TCGv r_size, r_sign, r_dword;
948 r_size = tcg_temp_new(TCG_TYPE_I32);
949 r_sign = tcg_temp_new(TCG_TYPE_I32);
950 r_dword = tcg_temp_new(TCG_TYPE_I64);
951 tcg_gen_movi_i32(r_size, 8);
952 tcg_gen_movi_i32(r_sign, 0);
954 offset = GET_FIELD(insn, 25, 31);
955 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
956 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
958 asi = GET_FIELD(insn, 19, 26);
959 tcg_gen_movi_i32(cpu_T[1], asi);
961 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
963 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
964 tcg_gen_shri_i64(r_dword, r_dword, 32);
965 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* SPARC64 CASA: 32-bit compare-and-swap in an alternate space.
   helper_cas_asi(addr, compare_value(rd), swap_value(T1), asi) returns the
   previous memory word into T1. */
968 static inline void gen_cas_asi(int insn, int rd)
973 r_val1 = tcg_temp_new(TCG_TYPE_I32);
974 r_asi = tcg_temp_new(TCG_TYPE_I32);
975 gen_movl_reg_TN(rd, r_val1);
977 offset = GET_FIELD(insn, 25, 31);
978 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
979 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
981 asi = GET_FIELD(insn, 19, 26);
982 tcg_gen_movi_i32(r_asi, asi);
984 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
/* CASXA: 64-bit variant of gen_cas_asi using an I64 compare temp. */
988 static inline void gen_casx_asi(int insn, int rd)
993 r_val1 = tcg_temp_new(TCG_TYPE_I64);
994 r_asi = tcg_temp_new(TCG_TYPE_I32);
995 gen_movl_reg_TN(rd, r_val1);
997 offset = GET_FIELD(insn, 25, 31);
998 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
999 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1001 asi = GET_FIELD(insn, 19, 26);
1002 tcg_gen_movi_i32(r_asi, asi);
1004 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1008 #elif !defined(CONFIG_USER_ONLY)
/* 32-bit SPARC system-mode ASI accessors.  The ASI is always an immediate
   field of the instruction here; 64-bit results come back as I64 and are
   truncated/split into the 32-bit T registers. */
1010 static inline void gen_ld_asi(int insn, int size, int sign)
1013 TCGv r_size, r_sign, r_dword;
1015 r_size = tcg_temp_new(TCG_TYPE_I32);
1016 r_sign = tcg_temp_new(TCG_TYPE_I32);
1017 r_dword = tcg_temp_new(TCG_TYPE_I64);
1018 tcg_gen_movi_i32(r_size, size);
1019 tcg_gen_movi_i32(r_sign, sign);
1020 asi = GET_FIELD(insn, 19, 26);
1021 tcg_gen_movi_i32(cpu_T[1], asi);
1022 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1024 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* Store T1 (zero-extended to 64 bits) through helper_st_asi. */
1027 static inline void gen_st_asi(int insn, int size)
1030 TCGv r_dword, r_asi, r_size;
1032 r_dword = tcg_temp_new(TCG_TYPE_I64);
1033 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1034 r_asi = tcg_temp_new(TCG_TYPE_I32);
1035 r_size = tcg_temp_new(TCG_TYPE_I32);
1036 asi = GET_FIELD(insn, 19, 26);
1037 tcg_gen_movi_i32(r_asi, asi);
1038 tcg_gen_movi_i32(r_size, size);
1039 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
/* SWAPA: load old word, store T1, return old word in T1.
   NOTE(review): as in the SPARC64 variant, the helper_st_asi argument
   order at line 1056 warrants verification against the helper signature. */
1042 static inline void gen_swap_asi(int insn)
1045 TCGv r_size, r_sign, r_temp;
1047 r_size = tcg_temp_new(TCG_TYPE_I32);
1048 r_sign = tcg_temp_new(TCG_TYPE_I32);
1049 r_temp = tcg_temp_new(TCG_TYPE_I32);
1050 tcg_gen_movi_i32(r_size, 4);
1051 tcg_gen_movi_i32(r_sign, 0);
1052 asi = GET_FIELD(insn, 19, 26);
1053 tcg_gen_movi_i32(cpu_T[1], asi);
1054 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1056 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1057 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* LDDA: 64-bit alternate-space load split into T0 (high) / T1 (low). */
1060 static inline void gen_ldda_asi(int insn)
1063 TCGv r_size, r_sign, r_dword;
1065 r_size = tcg_temp_new(TCG_TYPE_I32);
1066 r_sign = tcg_temp_new(TCG_TYPE_I32);
1067 r_dword = tcg_temp_new(TCG_TYPE_I64);
1068 tcg_gen_movi_i32(r_size, 8);
1069 tcg_gen_movi_i32(r_sign, 0);
1070 asi = GET_FIELD(insn, 19, 26);
1071 tcg_gen_movi_i32(cpu_T[1], asi);
1072 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1074 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1075 tcg_gen_shri_i64(r_dword, r_dword, 32);
1076 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1080 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: atomic load-store-unsigned-byte in an alternate space —
   load the old byte (via gen_ld_asi), then store 0xff to the same
   address. */
1081 static inline void gen_ldstub_asi(int insn)
1084 TCGv r_dword, r_asi, r_size;
1086 gen_ld_asi(insn, 1, 0);
1088 r_dword = tcg_temp_new(TCG_TYPE_I64);
1089 r_asi = tcg_temp_new(TCG_TYPE_I32);
1090 r_size = tcg_temp_new(TCG_TYPE_I32);
1091 asi = GET_FIELD(insn, 19, 26);
1092 tcg_gen_movi_i32(r_dword, 0xff);
1093 tcg_gen_movi_i32(r_asi, asi);
1094 tcg_gen_movi_i32(r_size, 1);
1095 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
/* Extract bit 20 of env->psr (the icc carry flag, per the function name)
   into `reg` as 0 or 1. */
1099 static inline void gen_mov_reg_C(TCGv reg)
1101 tcg_gen_ld_i32(reg, cpu_env, offsetof(CPUSPARCState, psr));
1102 tcg_gen_shri_i32(reg, reg, 20);
1103 tcg_gen_andi_i32(reg, reg, 0x1);
1106 /* before an instruction, dc->pc must be static */
1107 static void disas_sparc_insn(DisasContext * dc)
1109 unsigned int insn, opc, rs1, rs2, rd;
1111 insn = ldl_code(dc->pc);
1112 opc = GET_FIELD(insn, 0, 1);
1114 rd = GET_FIELD(insn, 2, 6);
1116 case 0: /* branches/sethi */
1118 unsigned int xop = GET_FIELD(insn, 7, 9);
1121 #ifdef TARGET_SPARC64
1122 case 0x1: /* V9 BPcc */
1126 target = GET_FIELD_SP(insn, 0, 18);
1127 target = sign_extend(target, 18);
1129 cc = GET_FIELD_SP(insn, 20, 21);
1131 do_branch(dc, target, insn, 0);
1133 do_branch(dc, target, insn, 1);
1138 case 0x3: /* V9 BPr */
1140 target = GET_FIELD_SP(insn, 0, 13) |
1141 (GET_FIELD_SP(insn, 20, 21) << 14);
1142 target = sign_extend(target, 16);
1144 rs1 = GET_FIELD(insn, 13, 17);
1145 gen_movl_reg_T0(rs1);
1146 do_branch_reg(dc, target, insn);
1149 case 0x5: /* V9 FBPcc */
1151 int cc = GET_FIELD_SP(insn, 20, 21);
1152 if (gen_trap_ifnofpu(dc))
1154 target = GET_FIELD_SP(insn, 0, 18);
1155 target = sign_extend(target, 19);
1157 do_fbranch(dc, target, insn, cc);
1161 case 0x7: /* CBN+x */
1166 case 0x2: /* BN+x */
1168 target = GET_FIELD(insn, 10, 31);
1169 target = sign_extend(target, 22);
1171 do_branch(dc, target, insn, 0);
1174 case 0x6: /* FBN+x */
1176 if (gen_trap_ifnofpu(dc))
1178 target = GET_FIELD(insn, 10, 31);
1179 target = sign_extend(target, 22);
1181 do_fbranch(dc, target, insn, 0);
1184 case 0x4: /* SETHI */
1189 uint32_t value = GET_FIELD(insn, 10, 31);
1190 tcg_gen_movi_tl(cpu_T[0], value << 10);
1191 gen_movl_T0_reg(rd);
1196 case 0x0: /* UNIMPL */
1205 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1207 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1208 gen_movl_T0_reg(15);
1214 case 2: /* FPU & Logical Operations */
1216 unsigned int xop = GET_FIELD(insn, 7, 12);
1217 if (xop == 0x3a) { /* generate trap */
1220 rs1 = GET_FIELD(insn, 13, 17);
1221 gen_movl_reg_T0(rs1);
1223 rs2 = GET_FIELD(insn, 25, 31);
1224 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
1226 rs2 = GET_FIELD(insn, 27, 31);
1230 gen_movl_reg_T1(rs2);
1236 cond = GET_FIELD(insn, 3, 6);
1239 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
1240 } else if (cond != 0) {
1241 #ifdef TARGET_SPARC64
1243 int cc = GET_FIELD_SP(insn, 11, 12);
1247 gen_cond[0][cond]();
1249 gen_cond[1][cond]();
1255 gen_cond[0][cond]();
1257 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], cpu_T[2]);
1263 } else if (xop == 0x28) {
1264 rs1 = GET_FIELD(insn, 13, 17);
1267 #ifndef TARGET_SPARC64
1268 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1269 manual, rdy on the microSPARC
1271 case 0x0f: /* stbar in the SPARCv8 manual,
1272 rdy on the microSPARC II */
1273 case 0x10 ... 0x1f: /* implementation-dependent in the
1274 SPARCv8 manual, rdy on the
1277 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
1278 gen_movl_T0_reg(rd);
1280 #ifdef TARGET_SPARC64
1281 case 0x2: /* V9 rdccr */
1283 gen_movl_T0_reg(rd);
1285 case 0x3: /* V9 rdasi */
1286 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
1287 gen_movl_T0_reg(rd);
1289 case 0x4: /* V9 rdtick */
1293 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1294 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1295 offsetof(CPUState, tick));
1296 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1298 gen_movl_T0_reg(rd);
1301 case 0x5: /* V9 rdpc */
1302 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1303 gen_movl_T0_reg(rd);
1305 case 0x6: /* V9 rdfprs */
1306 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
1307 gen_movl_T0_reg(rd);
1309 case 0xf: /* V9 membar */
1310 break; /* no effect */
1311 case 0x13: /* Graphics Status */
1312 if (gen_trap_ifnofpu(dc))
1314 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
1315 gen_movl_T0_reg(rd);
1317 case 0x17: /* Tick compare */
1318 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
1319 gen_movl_T0_reg(rd);
1321 case 0x18: /* System tick */
1325 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1326 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1327 offsetof(CPUState, stick));
1328 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1330 gen_movl_T0_reg(rd);
1333 case 0x19: /* System tick compare */
1334 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
1335 gen_movl_T0_reg(rd);
1337 case 0x10: /* Performance Control */
1338 case 0x11: /* Performance Instrumentation Counter */
1339 case 0x12: /* Dispatch Control */
1340 case 0x14: /* Softint set, WO */
1341 case 0x15: /* Softint clear, WO */
1342 case 0x16: /* Softint write */
1347 #if !defined(CONFIG_USER_ONLY)
1348 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1349 #ifndef TARGET_SPARC64
1350 if (!supervisor(dc))
1352 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
1354 if (!hypervisor(dc))
1356 rs1 = GET_FIELD(insn, 13, 17);
1359 // gen_op_rdhpstate();
1362 // gen_op_rdhtstate();
1365 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
1368 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
1371 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
1373 case 31: // hstick_cmpr
1374 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
1380 gen_movl_T0_reg(rd);
1382 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1383 if (!supervisor(dc))
1385 #ifdef TARGET_SPARC64
1386 rs1 = GET_FIELD(insn, 13, 17);
1392 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1393 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1394 offsetof(CPUState, tsptr));
1395 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1396 offsetof(trap_state, tpc));
1403 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1404 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1405 offsetof(CPUState, tsptr));
1406 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1407 offsetof(trap_state, tnpc));
1414 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1415 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1416 offsetof(CPUState, tsptr));
1417 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1418 offsetof(trap_state, tstate));
1425 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1426 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1427 offsetof(CPUState, tsptr));
1428 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
1429 offsetof(trap_state, tt));
1436 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1437 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1438 offsetof(CPUState, tick));
1439 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1441 gen_movl_T0_reg(rd);
1445 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1448 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
1451 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
1454 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
1460 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
1462 case 11: // canrestore
1463 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
1465 case 12: // cleanwin
1466 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
1468 case 13: // otherwin
1469 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
1472 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
1474 case 16: // UA2005 gl
1475 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
1477 case 26: // UA2005 strand status
1478 if (!hypervisor(dc))
1480 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
1483 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
1490 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
1492 gen_movl_T0_reg(rd);
1494 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
1495 #ifdef TARGET_SPARC64
1498 if (!supervisor(dc))
1500 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1501 gen_movl_T0_reg(rd);
1505 } else if (xop == 0x34) { /* FPU Operations */
1506 if (gen_trap_ifnofpu(dc))
1508 gen_op_clear_ieee_excp_and_FTT();
1509 rs1 = GET_FIELD(insn, 13, 17);
1510 rs2 = GET_FIELD(insn, 27, 31);
1511 xop = GET_FIELD(insn, 18, 26);
1513 case 0x1: /* fmovs */
1514 gen_op_load_fpr_FT0(rs2);
1515 gen_op_store_FT0_fpr(rd);
1517 case 0x5: /* fnegs */
1518 gen_op_load_fpr_FT1(rs2);
1520 gen_op_store_FT0_fpr(rd);
1522 case 0x9: /* fabss */
1523 gen_op_load_fpr_FT1(rs2);
1524 tcg_gen_helper_0_0(helper_fabss);
1525 gen_op_store_FT0_fpr(rd);
1527 case 0x29: /* fsqrts */
1528 gen_op_load_fpr_FT1(rs2);
1529 gen_clear_float_exceptions();
1530 tcg_gen_helper_0_0(helper_fsqrts);
1531 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1532 gen_op_store_FT0_fpr(rd);
1534 case 0x2a: /* fsqrtd */
1535 gen_op_load_fpr_DT1(DFPREG(rs2));
1536 gen_clear_float_exceptions();
1537 tcg_gen_helper_0_0(helper_fsqrtd);
1538 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1539 gen_op_store_DT0_fpr(DFPREG(rd));
1541 case 0x2b: /* fsqrtq */
1542 #if defined(CONFIG_USER_ONLY)
1543 gen_op_load_fpr_QT1(QFPREG(rs2));
1544 gen_clear_float_exceptions();
1545 tcg_gen_helper_0_0(helper_fsqrtq);
1546 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1547 gen_op_store_QT0_fpr(QFPREG(rd));
1553 gen_op_load_fpr_FT0(rs1);
1554 gen_op_load_fpr_FT1(rs2);
1555 gen_clear_float_exceptions();
1557 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1558 gen_op_store_FT0_fpr(rd);
1561 gen_op_load_fpr_DT0(DFPREG(rs1));
1562 gen_op_load_fpr_DT1(DFPREG(rs2));
1563 gen_clear_float_exceptions();
1565 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1566 gen_op_store_DT0_fpr(DFPREG(rd));
1568 case 0x43: /* faddq */
1569 #if defined(CONFIG_USER_ONLY)
1570 gen_op_load_fpr_QT0(QFPREG(rs1));
1571 gen_op_load_fpr_QT1(QFPREG(rs2));
1572 gen_clear_float_exceptions();
1574 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1575 gen_op_store_QT0_fpr(QFPREG(rd));
1581 gen_op_load_fpr_FT0(rs1);
1582 gen_op_load_fpr_FT1(rs2);
1583 gen_clear_float_exceptions();
1585 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1586 gen_op_store_FT0_fpr(rd);
1589 gen_op_load_fpr_DT0(DFPREG(rs1));
1590 gen_op_load_fpr_DT1(DFPREG(rs2));
1591 gen_clear_float_exceptions();
1593 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1594 gen_op_store_DT0_fpr(DFPREG(rd));
1596 case 0x47: /* fsubq */
1597 #if defined(CONFIG_USER_ONLY)
1598 gen_op_load_fpr_QT0(QFPREG(rs1));
1599 gen_op_load_fpr_QT1(QFPREG(rs2));
1600 gen_clear_float_exceptions();
1602 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1603 gen_op_store_QT0_fpr(QFPREG(rd));
1609 gen_op_load_fpr_FT0(rs1);
1610 gen_op_load_fpr_FT1(rs2);
1611 gen_clear_float_exceptions();
1613 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1614 gen_op_store_FT0_fpr(rd);
1617 gen_op_load_fpr_DT0(DFPREG(rs1));
1618 gen_op_load_fpr_DT1(DFPREG(rs2));
1619 gen_clear_float_exceptions();
1621 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1622 gen_op_store_DT0_fpr(DFPREG(rd));
1624 case 0x4b: /* fmulq */
1625 #if defined(CONFIG_USER_ONLY)
1626 gen_op_load_fpr_QT0(QFPREG(rs1));
1627 gen_op_load_fpr_QT1(QFPREG(rs2));
1628 gen_clear_float_exceptions();
1630 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1631 gen_op_store_QT0_fpr(QFPREG(rd));
1637 gen_op_load_fpr_FT0(rs1);
1638 gen_op_load_fpr_FT1(rs2);
1639 gen_clear_float_exceptions();
1641 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1642 gen_op_store_FT0_fpr(rd);
1645 gen_op_load_fpr_DT0(DFPREG(rs1));
1646 gen_op_load_fpr_DT1(DFPREG(rs2));
1647 gen_clear_float_exceptions();
1649 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1650 gen_op_store_DT0_fpr(DFPREG(rd));
1652 case 0x4f: /* fdivq */
1653 #if defined(CONFIG_USER_ONLY)
1654 gen_op_load_fpr_QT0(QFPREG(rs1));
1655 gen_op_load_fpr_QT1(QFPREG(rs2));
1656 gen_clear_float_exceptions();
1658 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1659 gen_op_store_QT0_fpr(QFPREG(rd));
1665 gen_op_load_fpr_FT0(rs1);
1666 gen_op_load_fpr_FT1(rs2);
1667 gen_clear_float_exceptions();
1669 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1670 gen_op_store_DT0_fpr(DFPREG(rd));
1672 case 0x6e: /* fdmulq */
1673 #if defined(CONFIG_USER_ONLY)
1674 gen_op_load_fpr_DT0(DFPREG(rs1));
1675 gen_op_load_fpr_DT1(DFPREG(rs2));
1676 gen_clear_float_exceptions();
1678 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1679 gen_op_store_QT0_fpr(QFPREG(rd));
1685 gen_op_load_fpr_FT1(rs2);
1686 gen_clear_float_exceptions();
1688 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1689 gen_op_store_FT0_fpr(rd);
1692 gen_op_load_fpr_DT1(DFPREG(rs2));
1693 gen_clear_float_exceptions();
1695 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1696 gen_op_store_FT0_fpr(rd);
1698 case 0xc7: /* fqtos */
1699 #if defined(CONFIG_USER_ONLY)
1700 gen_op_load_fpr_QT1(QFPREG(rs2));
1701 gen_clear_float_exceptions();
1703 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1704 gen_op_store_FT0_fpr(rd);
1710 gen_op_load_fpr_FT1(rs2);
1712 gen_op_store_DT0_fpr(DFPREG(rd));
1715 gen_op_load_fpr_FT1(rs2);
1717 gen_op_store_DT0_fpr(DFPREG(rd));
1719 case 0xcb: /* fqtod */
1720 #if defined(CONFIG_USER_ONLY)
1721 gen_op_load_fpr_QT1(QFPREG(rs2));
1722 gen_clear_float_exceptions();
1724 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1725 gen_op_store_DT0_fpr(DFPREG(rd));
1730 case 0xcc: /* fitoq */
1731 #if defined(CONFIG_USER_ONLY)
1732 gen_op_load_fpr_FT1(rs2);
1734 gen_op_store_QT0_fpr(QFPREG(rd));
1739 case 0xcd: /* fstoq */
1740 #if defined(CONFIG_USER_ONLY)
1741 gen_op_load_fpr_FT1(rs2);
1743 gen_op_store_QT0_fpr(QFPREG(rd));
1748 case 0xce: /* fdtoq */
1749 #if defined(CONFIG_USER_ONLY)
1750 gen_op_load_fpr_DT1(DFPREG(rs2));
1752 gen_op_store_QT0_fpr(QFPREG(rd));
1758 gen_op_load_fpr_FT1(rs2);
1759 gen_clear_float_exceptions();
1761 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1762 gen_op_store_FT0_fpr(rd);
1765 gen_op_load_fpr_DT1(DFPREG(rs2));
1766 gen_clear_float_exceptions();
1768 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1769 gen_op_store_FT0_fpr(rd);
1771 case 0xd3: /* fqtoi */
1772 #if defined(CONFIG_USER_ONLY)
1773 gen_op_load_fpr_QT1(QFPREG(rs2));
1774 gen_clear_float_exceptions();
1776 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1777 gen_op_store_FT0_fpr(rd);
1782 #ifdef TARGET_SPARC64
1783 case 0x2: /* V9 fmovd */
1784 gen_op_load_fpr_DT0(DFPREG(rs2));
1785 gen_op_store_DT0_fpr(DFPREG(rd));
1787 case 0x3: /* V9 fmovq */
1788 #if defined(CONFIG_USER_ONLY)
1789 gen_op_load_fpr_QT0(QFPREG(rs2));
1790 gen_op_store_QT0_fpr(QFPREG(rd));
1795 case 0x6: /* V9 fnegd */
1796 gen_op_load_fpr_DT1(DFPREG(rs2));
1798 gen_op_store_DT0_fpr(DFPREG(rd));
1800 case 0x7: /* V9 fnegq */
1801 #if defined(CONFIG_USER_ONLY)
1802 gen_op_load_fpr_QT1(QFPREG(rs2));
1804 gen_op_store_QT0_fpr(QFPREG(rd));
1809 case 0xa: /* V9 fabsd */
1810 gen_op_load_fpr_DT1(DFPREG(rs2));
1811 tcg_gen_helper_0_0(helper_fabsd);
1812 gen_op_store_DT0_fpr(DFPREG(rd));
1814 case 0xb: /* V9 fabsq */
1815 #if defined(CONFIG_USER_ONLY)
1816 gen_op_load_fpr_QT1(QFPREG(rs2));
1817 tcg_gen_helper_0_0(helper_fabsq);
1818 gen_op_store_QT0_fpr(QFPREG(rd));
1823 case 0x81: /* V9 fstox */
1824 gen_op_load_fpr_FT1(rs2);
1825 gen_clear_float_exceptions();
1827 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1828 gen_op_store_DT0_fpr(DFPREG(rd));
1830 case 0x82: /* V9 fdtox */
1831 gen_op_load_fpr_DT1(DFPREG(rs2));
1832 gen_clear_float_exceptions();
1834 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1835 gen_op_store_DT0_fpr(DFPREG(rd));
1837 case 0x83: /* V9 fqtox */
1838 #if defined(CONFIG_USER_ONLY)
1839 gen_op_load_fpr_QT1(QFPREG(rs2));
1840 gen_clear_float_exceptions();
1842 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1843 gen_op_store_DT0_fpr(DFPREG(rd));
1848 case 0x84: /* V9 fxtos */
1849 gen_op_load_fpr_DT1(DFPREG(rs2));
1850 gen_clear_float_exceptions();
1852 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1853 gen_op_store_FT0_fpr(rd);
1855 case 0x88: /* V9 fxtod */
1856 gen_op_load_fpr_DT1(DFPREG(rs2));
1857 gen_clear_float_exceptions();
1859 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1860 gen_op_store_DT0_fpr(DFPREG(rd));
1862 case 0x8c: /* V9 fxtoq */
1863 #if defined(CONFIG_USER_ONLY)
1864 gen_op_load_fpr_DT1(DFPREG(rs2));
1865 gen_clear_float_exceptions();
1867 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1868 gen_op_store_QT0_fpr(QFPREG(rd));
1877 } else if (xop == 0x35) { /* FPU Operations */
1878 #ifdef TARGET_SPARC64
1881 if (gen_trap_ifnofpu(dc))
1883 gen_op_clear_ieee_excp_and_FTT();
1884 rs1 = GET_FIELD(insn, 13, 17);
1885 rs2 = GET_FIELD(insn, 27, 31);
1886 xop = GET_FIELD(insn, 18, 26);
1887 #ifdef TARGET_SPARC64
1888 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
1892 l1 = gen_new_label();
1893 r_zero = tcg_temp_new(TCG_TYPE_TL);
1894 cond = GET_FIELD_SP(insn, 14, 17);
1895 rs1 = GET_FIELD(insn, 13, 17);
1896 gen_movl_reg_T0(rs1);
1897 tcg_gen_movi_tl(r_zero, 0);
1898 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1899 gen_op_load_fpr_FT1(rs2);
1900 gen_op_store_FT0_fpr(rd);
1903 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
1907 l1 = gen_new_label();
1908 r_zero = tcg_temp_new(TCG_TYPE_TL);
1909 cond = GET_FIELD_SP(insn, 14, 17);
1910 rs1 = GET_FIELD(insn, 13, 17);
1911 gen_movl_reg_T0(rs1);
1912 tcg_gen_movi_tl(r_zero, 0);
1913 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1914 gen_op_load_fpr_DT1(DFPREG(rs2));
1915 gen_op_store_DT0_fpr(DFPREG(rd));
1918 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
1919 #if defined(CONFIG_USER_ONLY)
1923 l1 = gen_new_label();
1924 r_zero = tcg_temp_new(TCG_TYPE_TL);
1925 cond = GET_FIELD_SP(insn, 14, 17);
1926 rs1 = GET_FIELD(insn, 13, 17);
1927 gen_movl_reg_T0(rs1);
1928 tcg_gen_movi_tl(r_zero, 0);
1929 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1930 gen_op_load_fpr_QT1(QFPREG(rs2));
1931 gen_op_store_QT0_fpr(QFPREG(rd));
1940 #ifdef TARGET_SPARC64
1941 case 0x001: /* V9 fmovscc %fcc0 */
1942 cond = GET_FIELD_SP(insn, 14, 17);
1943 gen_op_load_fpr_FT0(rd);
1944 gen_op_load_fpr_FT1(rs2);
1946 gen_fcond[0][cond]();
1948 gen_op_store_FT0_fpr(rd);
1950 case 0x002: /* V9 fmovdcc %fcc0 */
1951 cond = GET_FIELD_SP(insn, 14, 17);
1952 gen_op_load_fpr_DT0(DFPREG(rd));
1953 gen_op_load_fpr_DT1(DFPREG(rs2));
1955 gen_fcond[0][cond]();
1957 gen_op_store_DT0_fpr(DFPREG(rd));
1959 case 0x003: /* V9 fmovqcc %fcc0 */
1960 #if defined(CONFIG_USER_ONLY)
1961 cond = GET_FIELD_SP(insn, 14, 17);
1962 gen_op_load_fpr_QT0(QFPREG(rd));
1963 gen_op_load_fpr_QT1(QFPREG(rs2));
1965 gen_fcond[0][cond]();
1967 gen_op_store_QT0_fpr(QFPREG(rd));
1972 case 0x041: /* V9 fmovscc %fcc1 */
1973 cond = GET_FIELD_SP(insn, 14, 17);
1974 gen_op_load_fpr_FT0(rd);
1975 gen_op_load_fpr_FT1(rs2);
1977 gen_fcond[1][cond]();
1979 gen_op_store_FT0_fpr(rd);
1981 case 0x042: /* V9 fmovdcc %fcc1 */
1982 cond = GET_FIELD_SP(insn, 14, 17);
1983 gen_op_load_fpr_DT0(DFPREG(rd));
1984 gen_op_load_fpr_DT1(DFPREG(rs2));
1986 gen_fcond[1][cond]();
1988 gen_op_store_DT0_fpr(DFPREG(rd));
1990 case 0x043: /* V9 fmovqcc %fcc1 */
1991 #if defined(CONFIG_USER_ONLY)
1992 cond = GET_FIELD_SP(insn, 14, 17);
1993 gen_op_load_fpr_QT0(QFPREG(rd));
1994 gen_op_load_fpr_QT1(QFPREG(rs2));
1996 gen_fcond[1][cond]();
1998 gen_op_store_QT0_fpr(QFPREG(rd));
2003 case 0x081: /* V9 fmovscc %fcc2 */
2004 cond = GET_FIELD_SP(insn, 14, 17);
2005 gen_op_load_fpr_FT0(rd);
2006 gen_op_load_fpr_FT1(rs2);
2008 gen_fcond[2][cond]();
2010 gen_op_store_FT0_fpr(rd);
2012 case 0x082: /* V9 fmovdcc %fcc2 */
2013 cond = GET_FIELD_SP(insn, 14, 17);
2014 gen_op_load_fpr_DT0(DFPREG(rd));
2015 gen_op_load_fpr_DT1(DFPREG(rs2));
2017 gen_fcond[2][cond]();
2019 gen_op_store_DT0_fpr(DFPREG(rd));
2021 case 0x083: /* V9 fmovqcc %fcc2 */
2022 #if defined(CONFIG_USER_ONLY)
2023 cond = GET_FIELD_SP(insn, 14, 17);
2024 gen_op_load_fpr_QT0(rd);
2025 gen_op_load_fpr_QT1(rs2);
2027 gen_fcond[2][cond]();
2029 gen_op_store_QT0_fpr(rd);
2034 case 0x0c1: /* V9 fmovscc %fcc3 */
2035 cond = GET_FIELD_SP(insn, 14, 17);
2036 gen_op_load_fpr_FT0(rd);
2037 gen_op_load_fpr_FT1(rs2);
2039 gen_fcond[3][cond]();
2041 gen_op_store_FT0_fpr(rd);
2043 case 0x0c2: /* V9 fmovdcc %fcc3 */
2044 cond = GET_FIELD_SP(insn, 14, 17);
2045 gen_op_load_fpr_DT0(DFPREG(rd));
2046 gen_op_load_fpr_DT1(DFPREG(rs2));
2048 gen_fcond[3][cond]();
2050 gen_op_store_DT0_fpr(DFPREG(rd));
2052 case 0x0c3: /* V9 fmovqcc %fcc3 */
2053 #if defined(CONFIG_USER_ONLY)
2054 cond = GET_FIELD_SP(insn, 14, 17);
2055 gen_op_load_fpr_QT0(QFPREG(rd));
2056 gen_op_load_fpr_QT1(QFPREG(rs2));
2058 gen_fcond[3][cond]();
2060 gen_op_store_QT0_fpr(QFPREG(rd));
2065 case 0x101: /* V9 fmovscc %icc */
2066 cond = GET_FIELD_SP(insn, 14, 17);
2067 gen_op_load_fpr_FT0(rd);
2068 gen_op_load_fpr_FT1(rs2);
2070 gen_cond[0][cond]();
2072 gen_op_store_FT0_fpr(rd);
2074 case 0x102: /* V9 fmovdcc %icc */
2075 cond = GET_FIELD_SP(insn, 14, 17);
2076 gen_op_load_fpr_DT0(DFPREG(rd));
2077 gen_op_load_fpr_DT1(DFPREG(rs2));
2079 gen_cond[0][cond]();
2081 gen_op_store_DT0_fpr(DFPREG(rd));
2083 case 0x103: /* V9 fmovqcc %icc */
2084 #if defined(CONFIG_USER_ONLY)
2085 cond = GET_FIELD_SP(insn, 14, 17);
2086 gen_op_load_fpr_QT0(rd);
2087 gen_op_load_fpr_QT1(rs2);
2089 gen_cond[0][cond]();
2091 gen_op_store_QT0_fpr(rd);
2096 case 0x181: /* V9 fmovscc %xcc */
2097 cond = GET_FIELD_SP(insn, 14, 17);
2098 gen_op_load_fpr_FT0(rd);
2099 gen_op_load_fpr_FT1(rs2);
2101 gen_cond[1][cond]();
2103 gen_op_store_FT0_fpr(rd);
2105 case 0x182: /* V9 fmovdcc %xcc */
2106 cond = GET_FIELD_SP(insn, 14, 17);
2107 gen_op_load_fpr_DT0(DFPREG(rd));
2108 gen_op_load_fpr_DT1(DFPREG(rs2));
2110 gen_cond[1][cond]();
2112 gen_op_store_DT0_fpr(DFPREG(rd));
2114 case 0x183: /* V9 fmovqcc %xcc */
2115 #if defined(CONFIG_USER_ONLY)
2116 cond = GET_FIELD_SP(insn, 14, 17);
2117 gen_op_load_fpr_QT0(rd);
2118 gen_op_load_fpr_QT1(rs2);
2120 gen_cond[1][cond]();
2122 gen_op_store_QT0_fpr(rd);
2128 case 0x51: /* fcmps, V9 %fcc */
2129 gen_op_load_fpr_FT0(rs1);
2130 gen_op_load_fpr_FT1(rs2);
2131 gen_op_fcmps(rd & 3);
2133 case 0x52: /* fcmpd, V9 %fcc */
2134 gen_op_load_fpr_DT0(DFPREG(rs1));
2135 gen_op_load_fpr_DT1(DFPREG(rs2));
2136 gen_op_fcmpd(rd & 3);
2138 case 0x53: /* fcmpq, V9 %fcc */
2139 #if defined(CONFIG_USER_ONLY)
2140 gen_op_load_fpr_QT0(QFPREG(rs1));
2141 gen_op_load_fpr_QT1(QFPREG(rs2));
2142 gen_op_fcmpq(rd & 3);
2144 #else /* !defined(CONFIG_USER_ONLY) */
2147 case 0x55: /* fcmpes, V9 %fcc */
2148 gen_op_load_fpr_FT0(rs1);
2149 gen_op_load_fpr_FT1(rs2);
2150 gen_op_fcmpes(rd & 3);
2152 case 0x56: /* fcmped, V9 %fcc */
2153 gen_op_load_fpr_DT0(DFPREG(rs1));
2154 gen_op_load_fpr_DT1(DFPREG(rs2));
2155 gen_op_fcmped(rd & 3);
2157 case 0x57: /* fcmpeq, V9 %fcc */
2158 #if defined(CONFIG_USER_ONLY)
2159 gen_op_load_fpr_QT0(QFPREG(rs1));
2160 gen_op_load_fpr_QT1(QFPREG(rs2));
2161 gen_op_fcmpeq(rd & 3);
2163 #else/* !defined(CONFIG_USER_ONLY) */
2170 } else if (xop == 0x2) {
2173 rs1 = GET_FIELD(insn, 13, 17);
2175 // or %g0, x, y -> mov T0, x; mov y, T0
2176 if (IS_IMM) { /* immediate */
2177 rs2 = GET_FIELDs(insn, 19, 31);
2178 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2179 } else { /* register */
2180 rs2 = GET_FIELD(insn, 27, 31);
2181 gen_movl_reg_T0(rs2);
2184 gen_movl_reg_T0(rs1);
2185 if (IS_IMM) { /* immediate */
2186 rs2 = GET_FIELDs(insn, 19, 31);
2187 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2188 } else { /* register */
2189 // or x, %g0, y -> mov T1, x; mov y, T1
2190 rs2 = GET_FIELD(insn, 27, 31);
2192 gen_movl_reg_T1(rs2);
2197 gen_movl_T0_reg(rd);
2199 #ifdef TARGET_SPARC64
2200 } else if (xop == 0x25) { /* sll, V9 sllx */
2201 rs1 = GET_FIELD(insn, 13, 17);
2202 gen_movl_reg_T0(rs1);
2203 if (IS_IMM) { /* immediate */
2204 rs2 = GET_FIELDs(insn, 20, 31);
2205 if (insn & (1 << 12)) {
2206 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2208 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2209 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2211 } else { /* register */
2212 rs2 = GET_FIELD(insn, 27, 31);
2213 gen_movl_reg_T1(rs2);
2214 if (insn & (1 << 12)) {
2215 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2216 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2218 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2219 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2220 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2223 gen_movl_T0_reg(rd);
2224 } else if (xop == 0x26) { /* srl, V9 srlx */
2225 rs1 = GET_FIELD(insn, 13, 17);
2226 gen_movl_reg_T0(rs1);
2227 if (IS_IMM) { /* immediate */
2228 rs2 = GET_FIELDs(insn, 20, 31);
2229 if (insn & (1 << 12)) {
2230 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2232 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2233 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2235 } else { /* register */
2236 rs2 = GET_FIELD(insn, 27, 31);
2237 gen_movl_reg_T1(rs2);
2238 if (insn & (1 << 12)) {
2239 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2240 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2242 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2243 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2244 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2247 gen_movl_T0_reg(rd);
2248 } else if (xop == 0x27) { /* sra, V9 srax */
2249 rs1 = GET_FIELD(insn, 13, 17);
2250 gen_movl_reg_T0(rs1);
2251 if (IS_IMM) { /* immediate */
2252 rs2 = GET_FIELDs(insn, 20, 31);
2253 if (insn & (1 << 12)) {
2254 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2256 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2257 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2258 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2260 } else { /* register */
2261 rs2 = GET_FIELD(insn, 27, 31);
2262 gen_movl_reg_T1(rs2);
2263 if (insn & (1 << 12)) {
2264 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2265 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2267 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2268 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2269 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2272 gen_movl_T0_reg(rd);
2274 } else if (xop < 0x36) {
2275 rs1 = GET_FIELD(insn, 13, 17);
2276 gen_movl_reg_T0(rs1);
2277 if (IS_IMM) { /* immediate */
2278 rs2 = GET_FIELDs(insn, 19, 31);
2279 gen_movl_simm_T1(rs2);
2280 } else { /* register */
2281 rs2 = GET_FIELD(insn, 27, 31);
2282 gen_movl_reg_T1(rs2);
2285 switch (xop & ~0x10) {
2288 gen_op_add_T1_T0_cc();
2293 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2295 gen_op_logic_T0_cc();
2298 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2300 gen_op_logic_T0_cc();
2303 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2305 gen_op_logic_T0_cc();
2309 gen_op_sub_T1_T0_cc();
2311 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2314 gen_op_andn_T1_T0();
2316 gen_op_logic_T0_cc();
2321 gen_op_logic_T0_cc();
2324 gen_op_xnor_T1_T0();
2326 gen_op_logic_T0_cc();
2330 gen_op_addx_T1_T0_cc();
2332 gen_mov_reg_C(cpu_tmp0);
2333 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
2334 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2337 #ifdef TARGET_SPARC64
2338 case 0x9: /* V9 mulx */
2339 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2343 gen_op_umul_T1_T0();
2345 gen_op_logic_T0_cc();
2348 gen_op_smul_T1_T0();
2350 gen_op_logic_T0_cc();
2354 gen_op_subx_T1_T0_cc();
2356 gen_mov_reg_C(cpu_tmp0);
2357 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
2358 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2361 #ifdef TARGET_SPARC64
2362 case 0xd: /* V9 udivx */
2363 gen_op_udivx_T1_T0();
2367 gen_op_udiv_T1_T0();
2372 gen_op_sdiv_T1_T0();
2379 gen_movl_T0_reg(rd);
2382 case 0x20: /* taddcc */
2383 gen_op_tadd_T1_T0_cc();
2384 gen_movl_T0_reg(rd);
2386 case 0x21: /* tsubcc */
2387 gen_op_tsub_T1_T0_cc();
2388 gen_movl_T0_reg(rd);
2390 case 0x22: /* taddcctv */
2392 gen_op_tadd_T1_T0_ccTV();
2393 gen_movl_T0_reg(rd);
2395 case 0x23: /* tsubcctv */
2397 gen_op_tsub_T1_T0_ccTV();
2398 gen_movl_T0_reg(rd);
2400 case 0x24: /* mulscc */
2401 gen_op_mulscc_T1_T0();
2402 gen_movl_T0_reg(rd);
2404 #ifndef TARGET_SPARC64
2405 case 0x25: /* sll */
2406 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2407 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2408 gen_movl_T0_reg(rd);
2410 case 0x26: /* srl */
2411 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2412 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2413 gen_movl_T0_reg(rd);
2415 case 0x27: /* sra */
2416 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2417 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2418 gen_movl_T0_reg(rd);
2426 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
2428 #ifndef TARGET_SPARC64
2429 case 0x01 ... 0x0f: /* undefined in the
2433 case 0x10 ... 0x1f: /* implementation-dependent
2439 case 0x2: /* V9 wrccr */
2443 case 0x3: /* V9 wrasi */
2445 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
2447 case 0x6: /* V9 wrfprs */
2449 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
2455 case 0xf: /* V9 sir, nop if user */
2456 #if !defined(CONFIG_USER_ONLY)
2461 case 0x13: /* Graphics Status */
2462 if (gen_trap_ifnofpu(dc))
2465 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
2467 case 0x17: /* Tick compare */
2468 #if !defined(CONFIG_USER_ONLY)
2469 if (!supervisor(dc))
2476 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2478 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2479 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2480 offsetof(CPUState, tick));
2481 tcg_gen_helper_0_2(helper_tick_set_limit,
2482 r_tickptr, cpu_T[0]);
2485 case 0x18: /* System tick */
2486 #if !defined(CONFIG_USER_ONLY)
2487 if (!supervisor(dc))
2494 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2495 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2496 offsetof(CPUState, stick));
2497 tcg_gen_helper_0_2(helper_tick_set_count,
2498 r_tickptr, cpu_T[0]);
2501 case 0x19: /* System tick compare */
2502 #if !defined(CONFIG_USER_ONLY)
2503 if (!supervisor(dc))
2510 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2512 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2513 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2514 offsetof(CPUState, stick));
2515 tcg_gen_helper_0_2(helper_tick_set_limit,
2516 r_tickptr, cpu_T[0]);
2520 case 0x10: /* Performance Control */
2521 case 0x11: /* Performance Instrumentation Counter */
2522 case 0x12: /* Dispatch Control */
2523 case 0x14: /* Softint set */
2524 case 0x15: /* Softint clear */
2525 case 0x16: /* Softint write */
2532 #if !defined(CONFIG_USER_ONLY)
2533 case 0x31: /* wrpsr, V9 saved, restored */
2535 if (!supervisor(dc))
2537 #ifdef TARGET_SPARC64
2545 case 2: /* UA2005 allclean */
2546 case 3: /* UA2005 otherw */
2547 case 4: /* UA2005 normalw */
2548 case 5: /* UA2005 invalw */
2555 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
2563 case 0x32: /* wrwim, V9 wrpr */
2565 if (!supervisor(dc))
2568 #ifdef TARGET_SPARC64
2574 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2575 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2576 offsetof(CPUState, tsptr));
2577 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2578 offsetof(trap_state, tpc));
2585 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2586 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2587 offsetof(CPUState, tsptr));
2588 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2589 offsetof(trap_state, tnpc));
2596 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2597 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2598 offsetof(CPUState, tsptr));
2599 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2600 offsetof(trap_state, tstate));
2607 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2608 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2609 offsetof(CPUState, tsptr));
2610 tcg_gen_st_i32(cpu_T[0], r_tsptr,
2611 offsetof(trap_state, tt));
2618 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2619 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2620 offsetof(CPUState, tick));
2621 tcg_gen_helper_0_2(helper_tick_set_count,
2622 r_tickptr, cpu_T[0]);
2626 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2630 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
2636 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
2639 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
2645 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
2647 case 11: // canrestore
2648 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
2650 case 12: // cleanwin
2651 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
2653 case 13: // otherwin
2654 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
2657 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
2659 case 16: // UA2005 gl
2660 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
2662 case 26: // UA2005 strand status
2663 if (!hypervisor(dc))
2665 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
2671 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
2672 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
2676 case 0x33: /* wrtbr, UA2005 wrhpr */
2678 #ifndef TARGET_SPARC64
2679 if (!supervisor(dc))
2682 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2684 if (!hypervisor(dc))
2689 // XXX gen_op_wrhpstate();
2696 // XXX gen_op_wrhtstate();
2699 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
2702 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
2704 case 31: // hstick_cmpr
2708 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2710 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2711 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2712 offsetof(CPUState, hstick));
2713 tcg_gen_helper_0_2(helper_tick_set_limit,
2714 r_tickptr, cpu_T[0]);
2717 case 6: // hver readonly
2725 #ifdef TARGET_SPARC64
2726 case 0x2c: /* V9 movcc */
2728 int cc = GET_FIELD_SP(insn, 11, 12);
2729 int cond = GET_FIELD_SP(insn, 14, 17);
2734 if (insn & (1 << 18)) {
2736 gen_cond[0][cond]();
2738 gen_cond[1][cond]();
2742 gen_fcond[cc][cond]();
2745 l1 = gen_new_label();
2747 r_zero = tcg_temp_new(TCG_TYPE_TL);
2748 tcg_gen_movi_tl(r_zero, 0);
2749 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[2], r_zero, l1);
2750 if (IS_IMM) { /* immediate */
2751 rs2 = GET_FIELD_SPs(insn, 0, 10);
2752 gen_movl_simm_T1(rs2);
2754 rs2 = GET_FIELD_SP(insn, 0, 4);
2755 gen_movl_reg_T1(rs2);
2757 gen_movl_T1_reg(rd);
2761 case 0x2d: /* V9 sdivx */
2762 gen_op_sdivx_T1_T0();
2763 gen_movl_T0_reg(rd);
2765 case 0x2e: /* V9 popc */
2767 if (IS_IMM) { /* immediate */
2768 rs2 = GET_FIELD_SPs(insn, 0, 12);
2769 gen_movl_simm_T1(rs2);
2770 // XXX optimize: popc(constant)
2773 rs2 = GET_FIELD_SP(insn, 0, 4);
2774 gen_movl_reg_T1(rs2);
2776 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
2778 gen_movl_T0_reg(rd);
2780 case 0x2f: /* V9 movr */
2782 int cond = GET_FIELD_SP(insn, 10, 12);
2786 rs1 = GET_FIELD(insn, 13, 17);
2787 gen_movl_reg_T0(rs1);
2789 l1 = gen_new_label();
2791 r_zero = tcg_temp_new(TCG_TYPE_TL);
2792 tcg_gen_movi_tl(r_zero, 0);
2793 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2794 if (IS_IMM) { /* immediate */
2795 rs2 = GET_FIELD_SPs(insn, 0, 9);
2796 gen_movl_simm_T1(rs2);
2798 rs2 = GET_FIELD_SP(insn, 0, 4);
2799 gen_movl_reg_T1(rs2);
2801 gen_movl_T1_reg(rd);
2810 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
2811 #ifdef TARGET_SPARC64
2812 int opf = GET_FIELD_SP(insn, 5, 13);
2813 rs1 = GET_FIELD(insn, 13, 17);
2814 rs2 = GET_FIELD(insn, 27, 31);
2815 if (gen_trap_ifnofpu(dc))
2819 case 0x000: /* VIS I edge8cc */
2820 case 0x001: /* VIS II edge8n */
2821 case 0x002: /* VIS I edge8lcc */
2822 case 0x003: /* VIS II edge8ln */
2823 case 0x004: /* VIS I edge16cc */
2824 case 0x005: /* VIS II edge16n */
2825 case 0x006: /* VIS I edge16lcc */
2826 case 0x007: /* VIS II edge16ln */
2827 case 0x008: /* VIS I edge32cc */
2828 case 0x009: /* VIS II edge32n */
2829 case 0x00a: /* VIS I edge32lcc */
2830 case 0x00b: /* VIS II edge32ln */
2833 case 0x010: /* VIS I array8 */
2834 gen_movl_reg_T0(rs1);
2835 gen_movl_reg_T1(rs2);
2837 gen_movl_T0_reg(rd);
2839 case 0x012: /* VIS I array16 */
2840 gen_movl_reg_T0(rs1);
2841 gen_movl_reg_T1(rs2);
2843 gen_movl_T0_reg(rd);
2845 case 0x014: /* VIS I array32 */
2846 gen_movl_reg_T0(rs1);
2847 gen_movl_reg_T1(rs2);
2849 gen_movl_T0_reg(rd);
2851 case 0x018: /* VIS I alignaddr */
2852 gen_movl_reg_T0(rs1);
2853 gen_movl_reg_T1(rs2);
2855 gen_movl_T0_reg(rd);
2857 case 0x019: /* VIS II bmask */
2858 case 0x01a: /* VIS I alignaddrl */
2861 case 0x020: /* VIS I fcmple16 */
2862 gen_op_load_fpr_DT0(DFPREG(rs1));
2863 gen_op_load_fpr_DT1(DFPREG(rs2));
2865 gen_op_store_DT0_fpr(DFPREG(rd));
2867 case 0x022: /* VIS I fcmpne16 */
2868 gen_op_load_fpr_DT0(DFPREG(rs1));
2869 gen_op_load_fpr_DT1(DFPREG(rs2));
2871 gen_op_store_DT0_fpr(DFPREG(rd));
2873 case 0x024: /* VIS I fcmple32 */
2874 gen_op_load_fpr_DT0(DFPREG(rs1));
2875 gen_op_load_fpr_DT1(DFPREG(rs2));
2877 gen_op_store_DT0_fpr(DFPREG(rd));
2879 case 0x026: /* VIS I fcmpne32 */
2880 gen_op_load_fpr_DT0(DFPREG(rs1));
2881 gen_op_load_fpr_DT1(DFPREG(rs2));
2883 gen_op_store_DT0_fpr(DFPREG(rd));
2885 case 0x028: /* VIS I fcmpgt16 */
2886 gen_op_load_fpr_DT0(DFPREG(rs1));
2887 gen_op_load_fpr_DT1(DFPREG(rs2));
2889 gen_op_store_DT0_fpr(DFPREG(rd));
2891 case 0x02a: /* VIS I fcmpeq16 */
2892 gen_op_load_fpr_DT0(DFPREG(rs1));
2893 gen_op_load_fpr_DT1(DFPREG(rs2));
2895 gen_op_store_DT0_fpr(DFPREG(rd));
2897 case 0x02c: /* VIS I fcmpgt32 */
2898 gen_op_load_fpr_DT0(DFPREG(rs1));
2899 gen_op_load_fpr_DT1(DFPREG(rs2));
2901 gen_op_store_DT0_fpr(DFPREG(rd));
2903 case 0x02e: /* VIS I fcmpeq32 */
2904 gen_op_load_fpr_DT0(DFPREG(rs1));
2905 gen_op_load_fpr_DT1(DFPREG(rs2));
2907 gen_op_store_DT0_fpr(DFPREG(rd));
2909 case 0x031: /* VIS I fmul8x16 */
2910 gen_op_load_fpr_DT0(DFPREG(rs1));
2911 gen_op_load_fpr_DT1(DFPREG(rs2));
2913 gen_op_store_DT0_fpr(DFPREG(rd));
2915 case 0x033: /* VIS I fmul8x16au */
2916 gen_op_load_fpr_DT0(DFPREG(rs1));
2917 gen_op_load_fpr_DT1(DFPREG(rs2));
2918 gen_op_fmul8x16au();
2919 gen_op_store_DT0_fpr(DFPREG(rd));
2921 case 0x035: /* VIS I fmul8x16al */
2922 gen_op_load_fpr_DT0(DFPREG(rs1));
2923 gen_op_load_fpr_DT1(DFPREG(rs2));
2924 gen_op_fmul8x16al();
2925 gen_op_store_DT0_fpr(DFPREG(rd));
2927 case 0x036: /* VIS I fmul8sux16 */
2928 gen_op_load_fpr_DT0(DFPREG(rs1));
2929 gen_op_load_fpr_DT1(DFPREG(rs2));
2930 gen_op_fmul8sux16();
2931 gen_op_store_DT0_fpr(DFPREG(rd));
2933 case 0x037: /* VIS I fmul8ulx16 */
2934 gen_op_load_fpr_DT0(DFPREG(rs1));
2935 gen_op_load_fpr_DT1(DFPREG(rs2));
2936 gen_op_fmul8ulx16();
2937 gen_op_store_DT0_fpr(DFPREG(rd));
2939 case 0x038: /* VIS I fmuld8sux16 */
2940 gen_op_load_fpr_DT0(DFPREG(rs1));
2941 gen_op_load_fpr_DT1(DFPREG(rs2));
2942 gen_op_fmuld8sux16();
2943 gen_op_store_DT0_fpr(DFPREG(rd));
2945 case 0x039: /* VIS I fmuld8ulx16 */
2946 gen_op_load_fpr_DT0(DFPREG(rs1));
2947 gen_op_load_fpr_DT1(DFPREG(rs2));
2948 gen_op_fmuld8ulx16();
2949 gen_op_store_DT0_fpr(DFPREG(rd));
2951 case 0x03a: /* VIS I fpack32 */
2952 case 0x03b: /* VIS I fpack16 */
2953 case 0x03d: /* VIS I fpackfix */
2954 case 0x03e: /* VIS I pdist */
2957 case 0x048: /* VIS I faligndata */
2958 gen_op_load_fpr_DT0(DFPREG(rs1));
2959 gen_op_load_fpr_DT1(DFPREG(rs2));
2960 gen_op_faligndata();
2961 gen_op_store_DT0_fpr(DFPREG(rd));
2963 case 0x04b: /* VIS I fpmerge */
2964 gen_op_load_fpr_DT0(DFPREG(rs1));
2965 gen_op_load_fpr_DT1(DFPREG(rs2));
2967 gen_op_store_DT0_fpr(DFPREG(rd));
2969 case 0x04c: /* VIS II bshuffle */
2972 case 0x04d: /* VIS I fexpand */
2973 gen_op_load_fpr_DT0(DFPREG(rs1));
2974 gen_op_load_fpr_DT1(DFPREG(rs2));
2976 gen_op_store_DT0_fpr(DFPREG(rd));
2978 case 0x050: /* VIS I fpadd16 */
2979 gen_op_load_fpr_DT0(DFPREG(rs1));
2980 gen_op_load_fpr_DT1(DFPREG(rs2));
2982 gen_op_store_DT0_fpr(DFPREG(rd));
2984 case 0x051: /* VIS I fpadd16s */
2985 gen_op_load_fpr_FT0(rs1);
2986 gen_op_load_fpr_FT1(rs2);
2988 gen_op_store_FT0_fpr(rd);
2990 case 0x052: /* VIS I fpadd32 */
2991 gen_op_load_fpr_DT0(DFPREG(rs1));
2992 gen_op_load_fpr_DT1(DFPREG(rs2));
2994 gen_op_store_DT0_fpr(DFPREG(rd));
2996 case 0x053: /* VIS I fpadd32s */
2997 gen_op_load_fpr_FT0(rs1);
2998 gen_op_load_fpr_FT1(rs2);
3000 gen_op_store_FT0_fpr(rd);
3002 case 0x054: /* VIS I fpsub16 */
3003 gen_op_load_fpr_DT0(DFPREG(rs1));
3004 gen_op_load_fpr_DT1(DFPREG(rs2));
3006 gen_op_store_DT0_fpr(DFPREG(rd));
3008 case 0x055: /* VIS I fpsub16s */
3009 gen_op_load_fpr_FT0(rs1);
3010 gen_op_load_fpr_FT1(rs2);
3012 gen_op_store_FT0_fpr(rd);
3014 case 0x056: /* VIS I fpsub32 */
3015 gen_op_load_fpr_DT0(DFPREG(rs1));
3016 gen_op_load_fpr_DT1(DFPREG(rs2));
3018 gen_op_store_DT0_fpr(DFPREG(rd));
3020 case 0x057: /* VIS I fpsub32s */
3021 gen_op_load_fpr_FT0(rs1);
3022 gen_op_load_fpr_FT1(rs2);
3024 gen_op_store_FT0_fpr(rd);
3026 case 0x060: /* VIS I fzero */
3027 gen_op_movl_DT0_0();
3028 gen_op_store_DT0_fpr(DFPREG(rd));
3030 case 0x061: /* VIS I fzeros */
3031 gen_op_movl_FT0_0();
3032 gen_op_store_FT0_fpr(rd);
3034 case 0x062: /* VIS I fnor */
3035 gen_op_load_fpr_DT0(DFPREG(rs1));
3036 gen_op_load_fpr_DT1(DFPREG(rs2));
3038 gen_op_store_DT0_fpr(DFPREG(rd));
3040 case 0x063: /* VIS I fnors */
3041 gen_op_load_fpr_FT0(rs1);
3042 gen_op_load_fpr_FT1(rs2);
3044 gen_op_store_FT0_fpr(rd);
3046 case 0x064: /* VIS I fandnot2 */
3047 gen_op_load_fpr_DT1(DFPREG(rs1));
3048 gen_op_load_fpr_DT0(DFPREG(rs2));
3050 gen_op_store_DT0_fpr(DFPREG(rd));
3052 case 0x065: /* VIS I fandnot2s */
3053 gen_op_load_fpr_FT1(rs1);
3054 gen_op_load_fpr_FT0(rs2);
3056 gen_op_store_FT0_fpr(rd);
3058 case 0x066: /* VIS I fnot2 */
3059 gen_op_load_fpr_DT1(DFPREG(rs2));
3061 gen_op_store_DT0_fpr(DFPREG(rd));
3063 case 0x067: /* VIS I fnot2s */
3064 gen_op_load_fpr_FT1(rs2);
3066 gen_op_store_FT0_fpr(rd);
3068 case 0x068: /* VIS I fandnot1 */
3069 gen_op_load_fpr_DT0(DFPREG(rs1));
3070 gen_op_load_fpr_DT1(DFPREG(rs2));
3072 gen_op_store_DT0_fpr(DFPREG(rd));
3074 case 0x069: /* VIS I fandnot1s */
3075 gen_op_load_fpr_FT0(rs1);
3076 gen_op_load_fpr_FT1(rs2);
3078 gen_op_store_FT0_fpr(rd);
3080 case 0x06a: /* VIS I fnot1 */
3081 gen_op_load_fpr_DT1(DFPREG(rs1));
3083 gen_op_store_DT0_fpr(DFPREG(rd));
3085 case 0x06b: /* VIS I fnot1s */
3086 gen_op_load_fpr_FT1(rs1);
3088 gen_op_store_FT0_fpr(rd);
3090 case 0x06c: /* VIS I fxor */
3091 gen_op_load_fpr_DT0(DFPREG(rs1));
3092 gen_op_load_fpr_DT1(DFPREG(rs2));
3094 gen_op_store_DT0_fpr(DFPREG(rd));
3096 case 0x06d: /* VIS I fxors */
3097 gen_op_load_fpr_FT0(rs1);
3098 gen_op_load_fpr_FT1(rs2);
3100 gen_op_store_FT0_fpr(rd);
3102 case 0x06e: /* VIS I fnand */
3103 gen_op_load_fpr_DT0(DFPREG(rs1));
3104 gen_op_load_fpr_DT1(DFPREG(rs2));
3106 gen_op_store_DT0_fpr(DFPREG(rd));
3108 case 0x06f: /* VIS I fnands */
3109 gen_op_load_fpr_FT0(rs1);
3110 gen_op_load_fpr_FT1(rs2);
3112 gen_op_store_FT0_fpr(rd);
3114 case 0x070: /* VIS I fand */
3115 gen_op_load_fpr_DT0(DFPREG(rs1));
3116 gen_op_load_fpr_DT1(DFPREG(rs2));
3118 gen_op_store_DT0_fpr(DFPREG(rd));
3120 case 0x071: /* VIS I fands */
3121 gen_op_load_fpr_FT0(rs1);
3122 gen_op_load_fpr_FT1(rs2);
3124 gen_op_store_FT0_fpr(rd);
3126 case 0x072: /* VIS I fxnor */
3127 gen_op_load_fpr_DT0(DFPREG(rs1));
3128 gen_op_load_fpr_DT1(DFPREG(rs2));
3130 gen_op_store_DT0_fpr(DFPREG(rd));
3132 case 0x073: /* VIS I fxnors */
3133 gen_op_load_fpr_FT0(rs1);
3134 gen_op_load_fpr_FT1(rs2);
3136 gen_op_store_FT0_fpr(rd);
3138 case 0x074: /* VIS I fsrc1 */
3139 gen_op_load_fpr_DT0(DFPREG(rs1));
3140 gen_op_store_DT0_fpr(DFPREG(rd));
3142 case 0x075: /* VIS I fsrc1s */
3143 gen_op_load_fpr_FT0(rs1);
3144 gen_op_store_FT0_fpr(rd);
3146 case 0x076: /* VIS I fornot2 */
3147 gen_op_load_fpr_DT1(DFPREG(rs1));
3148 gen_op_load_fpr_DT0(DFPREG(rs2));
3150 gen_op_store_DT0_fpr(DFPREG(rd));
3152 case 0x077: /* VIS I fornot2s */
3153 gen_op_load_fpr_FT1(rs1);
3154 gen_op_load_fpr_FT0(rs2);
3156 gen_op_store_FT0_fpr(rd);
3158 case 0x078: /* VIS I fsrc2 */
3159 gen_op_load_fpr_DT0(DFPREG(rs2));
3160 gen_op_store_DT0_fpr(DFPREG(rd));
3162 case 0x079: /* VIS I fsrc2s */
3163 gen_op_load_fpr_FT0(rs2);
3164 gen_op_store_FT0_fpr(rd);
3166 case 0x07a: /* VIS I fornot1 */
3167 gen_op_load_fpr_DT0(DFPREG(rs1));
3168 gen_op_load_fpr_DT1(DFPREG(rs2));
3170 gen_op_store_DT0_fpr(DFPREG(rd));
3172 case 0x07b: /* VIS I fornot1s */
3173 gen_op_load_fpr_FT0(rs1);
3174 gen_op_load_fpr_FT1(rs2);
3176 gen_op_store_FT0_fpr(rd);
3178 case 0x07c: /* VIS I for */
3179 gen_op_load_fpr_DT0(DFPREG(rs1));
3180 gen_op_load_fpr_DT1(DFPREG(rs2));
3182 gen_op_store_DT0_fpr(DFPREG(rd));
3184 case 0x07d: /* VIS I fors */
3185 gen_op_load_fpr_FT0(rs1);
3186 gen_op_load_fpr_FT1(rs2);
3188 gen_op_store_FT0_fpr(rd);
3190 case 0x07e: /* VIS I fone */
3191 gen_op_movl_DT0_1();
3192 gen_op_store_DT0_fpr(DFPREG(rd));
3194 case 0x07f: /* VIS I fones */
3195 gen_op_movl_FT0_1();
3196 gen_op_store_FT0_fpr(rd);
3198 case 0x080: /* VIS I shutdown */
3199 case 0x081: /* VIS II siam */
3208 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3209 #ifdef TARGET_SPARC64
3214 #ifdef TARGET_SPARC64
3215 } else if (xop == 0x39) { /* V9 return */
3216 rs1 = GET_FIELD(insn, 13, 17);
3218 gen_movl_reg_T0(rs1);
3219 if (IS_IMM) { /* immediate */
3220 rs2 = GET_FIELDs(insn, 19, 31);
3221 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3222 } else { /* register */
3223 rs2 = GET_FIELD(insn, 27, 31);
3227 gen_movl_reg_T1(rs2);
3235 gen_op_check_align_T0_3();
3236 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3237 dc->npc = DYNAMIC_PC;
3241 rs1 = GET_FIELD(insn, 13, 17);
3242 gen_movl_reg_T0(rs1);
3243 if (IS_IMM) { /* immediate */
3244 rs2 = GET_FIELDs(insn, 19, 31);
3245 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3246 } else { /* register */
3247 rs2 = GET_FIELD(insn, 27, 31);
3251 gen_movl_reg_T1(rs2);
3258 case 0x38: /* jmpl */
3261 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3262 gen_movl_T1_reg(rd);
3265 gen_op_check_align_T0_3();
3266 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3267 dc->npc = DYNAMIC_PC;
3270 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3271 case 0x39: /* rett, V9 return */
3273 if (!supervisor(dc))
3276 gen_op_check_align_T0_3();
3277 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3278 dc->npc = DYNAMIC_PC;
3279 tcg_gen_helper_0_0(helper_rett);
3283 case 0x3b: /* flush */
3284 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3286 case 0x3c: /* save */
3289 gen_movl_T0_reg(rd);
3291 case 0x3d: /* restore */
3294 gen_movl_T0_reg(rd);
3296 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3297 case 0x3e: /* V9 done/retry */
3301 if (!supervisor(dc))
3303 dc->npc = DYNAMIC_PC;
3304 dc->pc = DYNAMIC_PC;
3305 tcg_gen_helper_0_0(helper_done);
3308 if (!supervisor(dc))
3310 dc->npc = DYNAMIC_PC;
3311 dc->pc = DYNAMIC_PC;
3312 tcg_gen_helper_0_0(helper_retry);
3327 case 3: /* load/store instructions */
3329 unsigned int xop = GET_FIELD(insn, 7, 12);
3330 rs1 = GET_FIELD(insn, 13, 17);
3332 gen_movl_reg_T0(rs1);
3333 if (xop == 0x3c || xop == 0x3e)
3335 rs2 = GET_FIELD(insn, 27, 31);
3336 gen_movl_reg_T1(rs2);
3338 else if (IS_IMM) { /* immediate */
3339 rs2 = GET_FIELDs(insn, 19, 31);
3340 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3341 } else { /* register */
3342 rs2 = GET_FIELD(insn, 27, 31);
3346 gen_movl_reg_T1(rs2);
3352 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
3353 (xop > 0x17 && xop <= 0x1d ) ||
3354 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
3356 case 0x0: /* load unsigned word */
3357 gen_op_check_align_T0_3();
3358 ABI32_MASK(cpu_T[0]);
3359 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
3361 case 0x1: /* load unsigned byte */
3362 ABI32_MASK(cpu_T[0]);
3363 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
3365 case 0x2: /* load unsigned halfword */
3366 gen_op_check_align_T0_1();
3367 ABI32_MASK(cpu_T[0]);
3368 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
3370 case 0x3: /* load double word */
3376 r_dword = tcg_temp_new(TCG_TYPE_I64);
3377 gen_op_check_align_T0_7();
3378 ABI32_MASK(cpu_T[0]);
3379 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
3380 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
3381 gen_movl_T0_reg(rd + 1);
3382 tcg_gen_shri_i64(r_dword, r_dword, 32);
3383 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
3386 case 0x9: /* load signed byte */
3387 ABI32_MASK(cpu_T[0]);
3388 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3390 case 0xa: /* load signed halfword */
3391 gen_op_check_align_T0_1();
3392 ABI32_MASK(cpu_T[0]);
3393 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
3395 case 0xd: /* ldstub -- XXX: should be atomically */
3396 tcg_gen_movi_i32(cpu_tmp0, 0xff);
3397 ABI32_MASK(cpu_T[0]);
3398 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3399 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
3401 case 0x0f: /* swap register with memory. Also atomically */
3402 gen_op_check_align_T0_3();
3403 gen_movl_reg_T1(rd);
3404 ABI32_MASK(cpu_T[0]);
3405 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
3406 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3407 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
3409 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3410 case 0x10: /* load word alternate */
3411 #ifndef TARGET_SPARC64
3414 if (!supervisor(dc))
3417 gen_op_check_align_T0_3();
3418 gen_ld_asi(insn, 4, 0);
3420 case 0x11: /* load unsigned byte alternate */
3421 #ifndef TARGET_SPARC64
3424 if (!supervisor(dc))
3427 gen_ld_asi(insn, 1, 0);
3429 case 0x12: /* load unsigned halfword alternate */
3430 #ifndef TARGET_SPARC64
3433 if (!supervisor(dc))
3436 gen_op_check_align_T0_1();
3437 gen_ld_asi(insn, 2, 0);
3439 case 0x13: /* load double word alternate */
3440 #ifndef TARGET_SPARC64
3443 if (!supervisor(dc))
3448 gen_op_check_align_T0_7();
3450 gen_movl_T0_reg(rd + 1);
3452 case 0x19: /* load signed byte alternate */
3453 #ifndef TARGET_SPARC64
3456 if (!supervisor(dc))
3459 gen_ld_asi(insn, 1, 1);
3461 case 0x1a: /* load signed halfword alternate */
3462 #ifndef TARGET_SPARC64
3465 if (!supervisor(dc))
3468 gen_op_check_align_T0_1();
3469 gen_ld_asi(insn, 2, 1);
3471 case 0x1d: /* ldstuba -- XXX: should be atomically */
3472 #ifndef TARGET_SPARC64
3475 if (!supervisor(dc))
3478 gen_ldstub_asi(insn);
3480 case 0x1f: /* swap reg with alt. memory. Also atomically */
3481 #ifndef TARGET_SPARC64
3484 if (!supervisor(dc))
3487 gen_op_check_align_T0_3();
3488 gen_movl_reg_T1(rd);
3492 #ifndef TARGET_SPARC64
3493 case 0x30: /* ldc */
3494 case 0x31: /* ldcsr */
3495 case 0x33: /* lddc */
3499 #ifdef TARGET_SPARC64
3500 case 0x08: /* V9 ldsw */
3501 gen_op_check_align_T0_3();
3502 ABI32_MASK(cpu_T[0]);
3503 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
3505 case 0x0b: /* V9 ldx */
3506 gen_op_check_align_T0_7();
3507 ABI32_MASK(cpu_T[0]);
3508 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
3510 case 0x18: /* V9 ldswa */
3511 gen_op_check_align_T0_3();
3512 gen_ld_asi(insn, 4, 1);
3514 case 0x1b: /* V9 ldxa */
3515 gen_op_check_align_T0_7();
3516 gen_ld_asi(insn, 8, 0);
3518 case 0x2d: /* V9 prefetch, no effect */
3520 case 0x30: /* V9 ldfa */
3521 gen_op_check_align_T0_3();
3522 gen_ldf_asi(insn, 4, rd);
3524 case 0x33: /* V9 lddfa */
3525 gen_op_check_align_T0_3();
3526 gen_ldf_asi(insn, 8, DFPREG(rd));
3528 case 0x3d: /* V9 prefetcha, no effect */
3530 case 0x32: /* V9 ldqfa */
3531 #if defined(CONFIG_USER_ONLY)
3532 gen_op_check_align_T0_3();
3533 gen_ldf_asi(insn, 16, QFPREG(rd));
3542 gen_movl_T1_reg(rd);
3543 #ifdef TARGET_SPARC64
3546 } else if (xop >= 0x20 && xop < 0x24) {
3547 if (gen_trap_ifnofpu(dc))
3550 case 0x20: /* load fpreg */
3551 gen_op_check_align_T0_3();
3553 gen_op_store_FT0_fpr(rd);
3555 case 0x21: /* load fsr */
3556 gen_op_check_align_T0_3();
3559 tcg_gen_helper_0_0(helper_ldfsr);
3561 case 0x22: /* load quad fpreg */
3562 #if defined(CONFIG_USER_ONLY)
3563 gen_op_check_align_T0_7();
3565 gen_op_store_QT0_fpr(QFPREG(rd));
3570 case 0x23: /* load double fpreg */
3571 gen_op_check_align_T0_7();
3573 gen_op_store_DT0_fpr(DFPREG(rd));
3578 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
3579 xop == 0xe || xop == 0x1e) {
3580 gen_movl_reg_T1(rd);
3582 case 0x4: /* store word */
3583 gen_op_check_align_T0_3();
3584 ABI32_MASK(cpu_T[0]);
3585 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3587 case 0x5: /* store byte */
3588 ABI32_MASK(cpu_T[0]);
3589 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
3591 case 0x6: /* store halfword */
3592 gen_op_check_align_T0_1();
3593 ABI32_MASK(cpu_T[0]);
3594 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
3596 case 0x7: /* store double word */
3601 TCGv r_dword, r_low;
3603 gen_op_check_align_T0_7();
3604 r_dword = tcg_temp_new(TCG_TYPE_I64);
3605 r_low = tcg_temp_new(TCG_TYPE_I32);
3606 gen_movl_reg_TN(rd + 1, r_low);
3607 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3609 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
3611 #else /* __i386__ */
3612 gen_op_check_align_T0_7();
3614 gen_movl_reg_T2(rd + 1);
3616 #endif /* __i386__ */
3618 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3619 case 0x14: /* store word alternate */
3620 #ifndef TARGET_SPARC64
3623 if (!supervisor(dc))
3626 gen_op_check_align_T0_3();
3627 gen_st_asi(insn, 4);
3629 case 0x15: /* store byte alternate */
3630 #ifndef TARGET_SPARC64
3633 if (!supervisor(dc))
3636 gen_st_asi(insn, 1);
3638 case 0x16: /* store halfword alternate */
3639 #ifndef TARGET_SPARC64
3642 if (!supervisor(dc))
3645 gen_op_check_align_T0_1();
3646 gen_st_asi(insn, 2);
3648 case 0x17: /* store double word alternate */
3649 #ifndef TARGET_SPARC64
3652 if (!supervisor(dc))
3659 TCGv r_dword, r_temp, r_size;
3661 gen_op_check_align_T0_7();
3662 r_dword = tcg_temp_new(TCG_TYPE_I64);
3663 r_temp = tcg_temp_new(TCG_TYPE_I32);
3664 r_size = tcg_temp_new(TCG_TYPE_I32);
3665 gen_movl_reg_TN(rd + 1, r_temp);
3666 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3668 #ifdef TARGET_SPARC64
3672 offset = GET_FIELD(insn, 25, 31);
3673 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
3674 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
3677 asi = GET_FIELD(insn, 19, 26);
3678 tcg_gen_movi_i32(r_temp, asi);
3679 #ifdef TARGET_SPARC64
3682 tcg_gen_movi_i32(r_size, 8);
3683 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
3687 #ifdef TARGET_SPARC64
3688 case 0x0e: /* V9 stx */
3689 gen_op_check_align_T0_7();
3690 ABI32_MASK(cpu_T[0]);
3691 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
3693 case 0x1e: /* V9 stxa */
3694 gen_op_check_align_T0_7();
3695 gen_st_asi(insn, 8);
3701 } else if (xop > 0x23 && xop < 0x28) {
3702 if (gen_trap_ifnofpu(dc))
3706 gen_op_check_align_T0_3();
3707 gen_op_load_fpr_FT0(rd);
3710 case 0x25: /* stfsr, V9 stxfsr */
3711 #ifdef CONFIG_USER_ONLY
3712 gen_op_check_align_T0_3();
3718 #ifdef TARGET_SPARC64
3719 #if defined(CONFIG_USER_ONLY)
3720 /* V9 stqf, store quad fpreg */
3721 gen_op_check_align_T0_7();
3722 gen_op_load_fpr_QT0(QFPREG(rd));
3728 #else /* !TARGET_SPARC64 */
3729 /* stdfq, store floating point queue */
3730 #if defined(CONFIG_USER_ONLY)
3733 if (!supervisor(dc))
3735 if (gen_trap_ifnofpu(dc))
3741 gen_op_check_align_T0_7();
3742 gen_op_load_fpr_DT0(DFPREG(rd));
3748 } else if (xop > 0x33 && xop < 0x3f) {
3750 #ifdef TARGET_SPARC64
3751 case 0x34: /* V9 stfa */
3752 gen_op_check_align_T0_3();
3753 gen_op_load_fpr_FT0(rd);
3754 gen_stf_asi(insn, 4, rd);
3756 case 0x36: /* V9 stqfa */
3757 #if defined(CONFIG_USER_ONLY)
3758 gen_op_check_align_T0_7();
3759 gen_op_load_fpr_QT0(QFPREG(rd));
3760 gen_stf_asi(insn, 16, QFPREG(rd));
3765 case 0x37: /* V9 stdfa */
3766 gen_op_check_align_T0_3();
3767 gen_op_load_fpr_DT0(DFPREG(rd));
3768 gen_stf_asi(insn, 8, DFPREG(rd));
3770 case 0x3c: /* V9 casa */
3771 gen_op_check_align_T0_3();
3772 gen_cas_asi(insn, rd);
3773 gen_movl_T1_reg(rd);
3775 case 0x3e: /* V9 casxa */
3776 gen_op_check_align_T0_7();
3777 gen_casx_asi(insn, rd);
3778 gen_movl_T1_reg(rd);
3781 case 0x34: /* stc */
3782 case 0x35: /* stcsr */
3783 case 0x36: /* stdcq */
3784 case 0x37: /* stdc */
3796 /* default case for non jump instructions */
3797 if (dc->npc == DYNAMIC_PC) {
3798 dc->pc = DYNAMIC_PC;
3800 } else if (dc->npc == JUMP_PC) {
3801 /* we can do a static jump */
3802 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1]);
3806 dc->npc = dc->npc + 4;
3812 gen_op_exception(TT_ILL_INSN);
3815 #if !defined(CONFIG_USER_ONLY)
3818 gen_op_exception(TT_PRIV_INSN);
3823 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
3826 #ifndef TARGET_SPARC64
3829 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
3834 #ifndef TARGET_SPARC64
3837 gen_op_exception(TT_NCP_INSN);
/* TCG macro-expansion callback registered via tcg_set_macro_func() in
   cpu_sparc_init(). NOTE(review): the function body is elided in this
   excerpt — its behavior cannot be documented from here. */
3843 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/* Translate one guest TranslationBlock into TCG micro-ops.
   tb:  the TranslationBlock being filled in (size is set on exit).
   spc: non-zero when called from gen_intermediate_code_pc() to rebuild
        the opcode/PC mapping used for exception PC search.
   env: CPU state (breakpoints, MMU index, FPU-enabled flag are read here).
   NOTE(review): several lines of this function are elided in this excerpt
   (loop header, breakpoint bookkeeping, epilogue) — comments below only
   describe what the visible lines establish. */
3847 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
3848 int spc, CPUSPARCState *env)
3850 target_ulong pc_start, last_pc;
3851 uint16_t *gen_opc_end;
3852 DisasContext dc1, *dc = &dc1;
/* Fresh per-TB disassembly context; npc seeded from tb->cs_base. */
3855 memset(dc, 0, sizeof(DisasContext));
3860 dc->npc = (target_ulong) tb->cs_base;
3861 dc->mem_idx = cpu_mmu_index(env);
3862 dc->fpu_enabled = cpu_fpu_enabled(env);
3863 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3865 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
3866 cpu_regwptr = tcg_temp_new(TCG_TYPE_PTR); // XXX
/* Emit a debug trap when translation reaches a registered breakpoint. */
3869 if (env->nb_breakpoints > 0) {
3870 for(j = 0; j < env->nb_breakpoints; j++) {
3871 if (env->breakpoints[j] == dc->pc) {
3872 if (dc->pc != pc_start)
3874 tcg_gen_helper_0_0(helper_debug);
/* spc path: record opcode-index -> (pc, npc) mapping for PC search. */
3883 fprintf(logfile, "Search PC...\n");
3884 j = gen_opc_ptr - gen_opc_buf;
3888 gen_opc_instr_start[lj++] = 0;
3889 gen_opc_pc[lj] = dc->pc;
3890 gen_opc_npc[lj] = dc->npc;
3891 gen_opc_instr_start[lj] = 1;
/* Translate one guest instruction; disas_sparc_insn advances dc->pc/npc. */
3895 disas_sparc_insn(dc);
3899 /* if the next PC is different, we abort now */
3900 if (dc->pc != (last_pc + 4))
3902 /* if we reach a page boundary, we stop generation so that the
3903 PC of a TT_TFAULT exception is always in the right page */
3904 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
3906 /* if single step mode, we generate only one instruction and
3907 generate an exception */
3908 if (env->singlestep_enabled) {
/* Stop on opcode-buffer exhaustion or near the end of the guest page. */
3913 } while ((gen_opc_ptr < gen_opc_end) &&
3914 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
3918 if (dc->pc != DYNAMIC_PC &&
3919 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
3920 /* static PC and NPC: we can use direct chaining */
3921 gen_branch(dc, dc->pc, dc->npc);
3923 if (dc->pc != DYNAMIC_PC)
3929 *gen_opc_ptr = INDEX_op_end;
/* spc path: pad the instr_start map and export delayed-jump targets. */
3931 j = gen_opc_ptr - gen_opc_buf;
3934 gen_opc_instr_start[lj++] = 0;
3940 gen_opc_jump_pc[0] = dc->jump_pc[0];
3941 gen_opc_jump_pc[1] = dc->jump_pc[1];
3943 tb->size = last_pc + 4 - pc_start;
/* Optional disassembly dump of the translated guest code. */
3946 if (loglevel & CPU_LOG_TB_IN_ASM) {
3947 fprintf(logfile, "--------------\n");
3948 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3949 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
3950 fprintf(logfile, "\n");
/* Public entry point: translate a TB normally (spc == 0, no PC search). */
3956 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
3958 return gen_intermediate_code_internal(tb, 0, env);
/* Re-translate a TB with spc == 1 so the opcode/PC map is rebuilt,
   used to recover the guest PC after a fault inside generated code. */
3961 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
3963 return gen_intermediate_code_internal(tb, 1, env);
/* Reset CPU state to its architectural power-on values.
   NOTE(review): lines elided in this excerpt; only visible assignments are
   described. Separate paths exist for user-mode-only vs. system emulation,
   each with 32-bit and 64-bit (TARGET_SPARC64) variants. */
3966 void cpu_reset(CPUSPARCState *env)
/* Point the register-window pointer at the current window (16 regs/window). */
3971 env->regwptr = env->regbase + (env->cwp * 16);
3972 #if defined(CONFIG_USER_ONLY)
3973 env->user_mode_only = 1;
3974 #ifdef TARGET_SPARC64
3975 env->cleanwin = NWINDOWS - 2;
3976 env->cansave = NWINDOWS - 2;
3977 env->pstate = PS_RMO | PS_PEF | PS_IE;
3978 env->asi = 0x82; // Primary no-fault
/* System-emulation reset: privileged state, boot PC, trap-state pointer. */
3984 #ifdef TARGET_SPARC64
3985 env->pstate = PS_PRIV;
3986 env->hpstate = HS_PRIV;
3987 env->pc = 0x1fff0000000ULL;
3988 env->tsptr = &env->ts[env->tl];
/* sparc32: clear MMU enable/no-fault bits, restore boot-mode bit. */
3991 env->mmuregs[0] &= ~(MMU_E | MMU_NF);
3992 env->mmuregs[0] |= env->mmu_bm;
3994 env->npc = env->pc + 4;
/* Allocate and initialize a CPUSPARCState for the named CPU model.
   cpu_model: model name looked up in sparc_defs via
   cpu_sparc_find_by_name(). Returns the new state (error/NULL handling is
   elided in this excerpt — TODO confirm against full source).
   Also performs one-time TCG global-register setup. */
3998 CPUSPARCState *cpu_sparc_init(const char *cpu_model)
4001 const sparc_def_t *def;
4004 def = cpu_sparc_find_by_name(cpu_model);
4008 env = qemu_mallocz(sizeof(CPUSPARCState));
/* Copy model-specific IU/FPU version words into the live state. */
4012 env->cpu_model_str = cpu_model;
4013 env->version = def->iu_version;
4014 env->fsr = def->fpu_version;
4015 #if !defined(TARGET_SPARC64)
/* sparc32 only: per-model MMU masks and version bits. */
4016 env->mmu_bm = def->mmu_bm;
4017 env->mmu_ctpr_mask = def->mmu_ctpr_mask;
4018 env->mmu_cxr_mask = def->mmu_cxr_mask;
4019 env->mmu_sfsr_mask = def->mmu_sfsr_mask;
4020 env->mmu_trcr_mask = def->mmu_trcr_mask;
4021 env->mmuregs[0] |= def->mmu_version;
4022 cpu_sparc_set_id(env, 0);
4025 /* init various static tables */
/* One-time TCG setup: env pointer lives in AREG0; T0-T2 live either in
   CPUState memory slots (64-bit target) or in host registers AREG1-3. */
4029 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
4030 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4031 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4032 #ifdef TARGET_SPARC64
4033 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
4034 TCG_AREG0, offsetof(CPUState, t0), "T0");
4035 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
4036 TCG_AREG0, offsetof(CPUState, t1), "T1");
4037 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
4038 TCG_AREG0, offsetof(CPUState, t2), "T2");
4040 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
4041 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
4042 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
/* Set the CPU identity for sparc32 SMP: encode (cpu + 8) & 0xf into the
   MXCC module-ID register field (bits 27:24). No-op on TARGET_SPARC64. */
4051 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4053 #if !defined(TARGET_SPARC64)
4054 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
/* Table of supported CPU models, keyed by .name (case-insensitive lookup
   in cpu_sparc_find_by_name). 64-bit models carry only IU/FPU version
   words; 32-bit models additionally carry per-model MMU version and mask
   fields. NOTE(review): entry braces and some lines are elided in this
   excerpt; field values below are as found. */
4058 static const sparc_def_t sparc_defs[] = {
/* --- 64-bit (TARGET_SPARC64) models --- */
4059 #ifdef TARGET_SPARC64
4061 .name = "Fujitsu Sparc64",
4062 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
4063 | (MAXTL << 8) | (NWINDOWS - 1)),
4064 .fpu_version = 0x00000000,
4068 .name = "Fujitsu Sparc64 III",
4069 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
4070 | (MAXTL << 8) | (NWINDOWS - 1)),
4071 .fpu_version = 0x00000000,
4075 .name = "Fujitsu Sparc64 IV",
4076 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
4077 | (MAXTL << 8) | (NWINDOWS - 1)),
4078 .fpu_version = 0x00000000,
4082 .name = "Fujitsu Sparc64 V",
4083 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
4084 | (MAXTL << 8) | (NWINDOWS - 1)),
4085 .fpu_version = 0x00000000,
4089 .name = "TI UltraSparc I",
4090 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4091 | (MAXTL << 8) | (NWINDOWS - 1)),
4092 .fpu_version = 0x00000000,
4096 .name = "TI UltraSparc II",
4097 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
4098 | (MAXTL << 8) | (NWINDOWS - 1)),
4099 .fpu_version = 0x00000000,
4103 .name = "TI UltraSparc IIi",
4104 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
4105 | (MAXTL << 8) | (NWINDOWS - 1)),
4106 .fpu_version = 0x00000000,
4110 .name = "TI UltraSparc IIe",
4111 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
4112 | (MAXTL << 8) | (NWINDOWS - 1)),
4113 .fpu_version = 0x00000000,
4117 .name = "Sun UltraSparc III",
4118 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
4119 | (MAXTL << 8) | (NWINDOWS - 1)),
4120 .fpu_version = 0x00000000,
4124 .name = "Sun UltraSparc III Cu",
4125 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
4126 | (MAXTL << 8) | (NWINDOWS - 1)),
4127 .fpu_version = 0x00000000,
4131 .name = "Sun UltraSparc IIIi",
4132 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
4133 | (MAXTL << 8) | (NWINDOWS - 1)),
4134 .fpu_version = 0x00000000,
4138 .name = "Sun UltraSparc IV",
4139 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
4140 | (MAXTL << 8) | (NWINDOWS - 1)),
4141 .fpu_version = 0x00000000,
4145 .name = "Sun UltraSparc IV+",
4146 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
4147 | (MAXTL << 8) | (NWINDOWS - 1)),
4148 .fpu_version = 0x00000000,
4152 .name = "Sun UltraSparc IIIi+",
4153 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
4154 | (MAXTL << 8) | (NWINDOWS - 1)),
4155 .fpu_version = 0x00000000,
4159 .name = "NEC UltraSparc I",
4160 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4161 | (MAXTL << 8) | (NWINDOWS - 1)),
4162 .fpu_version = 0x00000000,
/* --- 32-bit (sparc32) models, with per-model MMU parameters --- */
4167 .name = "Fujitsu MB86900",
4168 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
4169 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4170 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
4171 .mmu_bm = 0x00004000,
4172 .mmu_ctpr_mask = 0x007ffff0,
4173 .mmu_cxr_mask = 0x0000003f,
4174 .mmu_sfsr_mask = 0xffffffff,
4175 .mmu_trcr_mask = 0xffffffff,
4178 .name = "Fujitsu MB86904",
4179 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
4180 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4181 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
4182 .mmu_bm = 0x00004000,
4183 .mmu_ctpr_mask = 0x00ffffc0,
4184 .mmu_cxr_mask = 0x000000ff,
4185 .mmu_sfsr_mask = 0x00016fff,
4186 .mmu_trcr_mask = 0x00ffffff,
4189 .name = "Fujitsu MB86907",
4190 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
4191 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4192 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
4193 .mmu_bm = 0x00004000,
4194 .mmu_ctpr_mask = 0xffffffc0,
4195 .mmu_cxr_mask = 0x000000ff,
4196 .mmu_sfsr_mask = 0x00016fff,
4197 .mmu_trcr_mask = 0xffffffff,
4200 .name = "LSI L64811",
4201 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
4202 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
4203 .mmu_version = 0x10 << 24,
4204 .mmu_bm = 0x00004000,
4205 .mmu_ctpr_mask = 0x007ffff0,
4206 .mmu_cxr_mask = 0x0000003f,
4207 .mmu_sfsr_mask = 0xffffffff,
4208 .mmu_trcr_mask = 0xffffffff,
4211 .name = "Cypress CY7C601",
4212 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
4213 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4214 .mmu_version = 0x10 << 24,
4215 .mmu_bm = 0x00004000,
4216 .mmu_ctpr_mask = 0x007ffff0,
4217 .mmu_cxr_mask = 0x0000003f,
4218 .mmu_sfsr_mask = 0xffffffff,
4219 .mmu_trcr_mask = 0xffffffff,
4222 .name = "Cypress CY7C611",
4223 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
4224 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4225 .mmu_version = 0x10 << 24,
4226 .mmu_bm = 0x00004000,
4227 .mmu_ctpr_mask = 0x007ffff0,
4228 .mmu_cxr_mask = 0x0000003f,
4229 .mmu_sfsr_mask = 0xffffffff,
4230 .mmu_trcr_mask = 0xffffffff,
4233 .name = "TI SuperSparc II",
4234 .iu_version = 0x40000000,
4235 .fpu_version = 0 << 17,
4236 .mmu_version = 0x04000000,
4237 .mmu_bm = 0x00002000,
4238 .mmu_ctpr_mask = 0xffffffc0,
4239 .mmu_cxr_mask = 0x0000ffff,
4240 .mmu_sfsr_mask = 0xffffffff,
4241 .mmu_trcr_mask = 0xffffffff,
4244 .name = "TI MicroSparc I",
4245 .iu_version = 0x41000000,
4246 .fpu_version = 4 << 17,
4247 .mmu_version = 0x41000000,
4248 .mmu_bm = 0x00004000,
4249 .mmu_ctpr_mask = 0x007ffff0,
4250 .mmu_cxr_mask = 0x0000003f,
4251 .mmu_sfsr_mask = 0x00016fff,
4252 .mmu_trcr_mask = 0x0000003f,
4255 .name = "TI MicroSparc II",
4256 .iu_version = 0x42000000,
4257 .fpu_version = 4 << 17,
4258 .mmu_version = 0x02000000,
4259 .mmu_bm = 0x00004000,
4260 .mmu_ctpr_mask = 0x00ffffc0,
4261 .mmu_cxr_mask = 0x000000ff,
4262 .mmu_sfsr_mask = 0x00016bff,
4263 .mmu_trcr_mask = 0x00ffffff,
4266 .name = "TI MicroSparc IIep",
4267 .iu_version = 0x42000000,
4268 .fpu_version = 4 << 17,
4269 .mmu_version = 0x04000000,
4270 .mmu_bm = 0x00004000,
4271 .mmu_ctpr_mask = 0x00ffffc0,
4272 .mmu_cxr_mask = 0x000000ff,
4273 .mmu_sfsr_mask = 0x00016bff,
4274 .mmu_trcr_mask = 0x00ffffff,
4277 .name = "TI SuperSparc 51",
4278 .iu_version = 0x43000000,
4279 .fpu_version = 0 << 17,
4280 .mmu_version = 0x04000000,
4281 .mmu_bm = 0x00002000,
4282 .mmu_ctpr_mask = 0xffffffc0,
4283 .mmu_cxr_mask = 0x0000ffff,
4284 .mmu_sfsr_mask = 0xffffffff,
4285 .mmu_trcr_mask = 0xffffffff,
4288 .name = "TI SuperSparc 61",
4289 .iu_version = 0x44000000,
4290 .fpu_version = 0 << 17,
4291 .mmu_version = 0x04000000,
4292 .mmu_bm = 0x00002000,
4293 .mmu_ctpr_mask = 0xffffffc0,
4294 .mmu_cxr_mask = 0x0000ffff,
4295 .mmu_sfsr_mask = 0xffffffff,
4296 .mmu_trcr_mask = 0xffffffff,
4299 .name = "Ross RT625",
4300 .iu_version = 0x1e000000,
4301 .fpu_version = 1 << 17,
4302 .mmu_version = 0x1e000000,
4303 .mmu_bm = 0x00004000,
4304 .mmu_ctpr_mask = 0x007ffff0,
4305 .mmu_cxr_mask = 0x0000003f,
4306 .mmu_sfsr_mask = 0xffffffff,
4307 .mmu_trcr_mask = 0xffffffff,
4310 .name = "Ross RT620",
4311 .iu_version = 0x1f000000,
4312 .fpu_version = 1 << 17,
4313 .mmu_version = 0x1f000000,
4314 .mmu_bm = 0x00004000,
4315 .mmu_ctpr_mask = 0x007ffff0,
4316 .mmu_cxr_mask = 0x0000003f,
4317 .mmu_sfsr_mask = 0xffffffff,
4318 .mmu_trcr_mask = 0xffffffff,
4321 .name = "BIT B5010",
4322 .iu_version = 0x20000000,
4323 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
4324 .mmu_version = 0x20000000,
4325 .mmu_bm = 0x00004000,
4326 .mmu_ctpr_mask = 0x007ffff0,
4327 .mmu_cxr_mask = 0x0000003f,
4328 .mmu_sfsr_mask = 0xffffffff,
4329 .mmu_trcr_mask = 0xffffffff,
4332 .name = "Matsushita MN10501",
4333 .iu_version = 0x50000000,
4334 .fpu_version = 0 << 17,
4335 .mmu_version = 0x50000000,
4336 .mmu_bm = 0x00004000,
4337 .mmu_ctpr_mask = 0x007ffff0,
4338 .mmu_cxr_mask = 0x0000003f,
4339 .mmu_sfsr_mask = 0xffffffff,
4340 .mmu_trcr_mask = 0xffffffff,
4343 .name = "Weitek W8601",
4344 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
4345 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
4346 .mmu_version = 0x10 << 24,
4347 .mmu_bm = 0x00004000,
4348 .mmu_ctpr_mask = 0x007ffff0,
4349 .mmu_cxr_mask = 0x0000003f,
4350 .mmu_sfsr_mask = 0xffffffff,
4351 .mmu_trcr_mask = 0xffffffff,
/* NOTE(review): the .name initializers of the final two entries are not
   visible in this excerpt — confirm against the full source. */
4355 .iu_version = 0xf2000000,
4356 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4357 .mmu_version = 0xf2000000,
4358 .mmu_bm = 0x00004000,
4359 .mmu_ctpr_mask = 0x007ffff0,
4360 .mmu_cxr_mask = 0x0000003f,
4361 .mmu_sfsr_mask = 0xffffffff,
4362 .mmu_trcr_mask = 0xffffffff,
4366 .iu_version = 0xf3000000,
4367 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4368 .mmu_version = 0xf3000000,
4369 .mmu_bm = 0x00004000,
4370 .mmu_ctpr_mask = 0x007ffff0,
4371 .mmu_cxr_mask = 0x0000003f,
4372 .mmu_sfsr_mask = 0xffffffff,
4373 .mmu_trcr_mask = 0xffffffff,
/* Linear, case-insensitive lookup of a CPU model in sparc_defs.
   Returns a pointer to the matching entry; not-found handling is elided
   in this excerpt (presumably returns NULL — TODO confirm). */
4378 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
4382 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4383 if (strcasecmp(name, sparc_defs[i].name) == 0) {
4384 return &sparc_defs[i];
/* Print one line per supported CPU model (name plus IU/FPU/MMU version
   words) through the caller-supplied fprintf-style callback. */
4390 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
4394 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4395 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
4397 sparc_defs[i].iu_version,
4398 sparc_defs[i].fpu_version,
4399 sparc_defs[i].mmu_version);
/* Map a PSR flag bit to its display character, or '-' when clear.
   NOTE(review): relies on a local variable `env` being in scope at the
   expansion site (used only inside cpu_dump_state below); macro arguments
   `a` and `b` are not parenthesized — callers must pass simple tokens. */
4403 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
/* Dump CPU registers/flags for debugging through the supplied callback:
   pc/npc, globals, the current register window, FP registers, and either
   the V9 state (pstate/ccr/asi/tl/window counters) or the V8 PSR/WIM/FSR.
   NOTE(review): loop headers and some statements are elided in this
   excerpt; comments describe only the visible lines. */
4405 void cpu_dump_state(CPUState *env, FILE *f,
4406 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
4411 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
/* Global registers, printed four per row. */
4412 cpu_fprintf(f, "General Registers:\n");
4413 for (i = 0; i < 4; i++)
4414 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4415 cpu_fprintf(f, "\n");
4417 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
/* Current window: x selects the %o / %l / %i group (8 regs each). */
4418 cpu_fprintf(f, "\nCurrent Register Window:\n");
4419 for (x = 0; x < 3; x++) {
4420 for (i = 0; i < 4; i++)
4421 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4422 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
4423 env->regwptr[i + x * 8]);
4424 cpu_fprintf(f, "\n");
4426 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4427 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
4428 env->regwptr[i + x * 8]);
4429 cpu_fprintf(f, "\n");
4431 cpu_fprintf(f, "\nFloating Point Registers:\n");
4432 for (i = 0; i < 32; i++) {
4434 cpu_fprintf(f, "%%f%02d:", i);
4435 cpu_fprintf(f, " %016lf", env->fpr[i]);
4437 cpu_fprintf(f, "\n");
4439 #ifdef TARGET_SPARC64
4440 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
4441 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
4442 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
4443 env->cansave, env->canrestore, env->otherwin, env->wstate,
4444 env->cleanwin, NWINDOWS - 1 - env->cwp);
/* V8 path: PSR condition codes via GET_FLAG, then S/P/E mode bits. */
4446 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
4447 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
4448 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
4449 env->psrs?'S':'-', env->psrps?'P':'-',
4450 env->psret?'E':'-', env->wim);
4452 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
/* Debug (gdbstub) virtual-to-physical translation.
   User-mode build: addresses are identity-mapped (body elided here).
   System build: walks the MMU via get_physical_address(); a second call
   with different rw flags on TARGET_SPARC64 vs sparc32 — the #ifdef
   separating the two calls is elided in this excerpt (TODO confirm). */
4455 #if defined(CONFIG_USER_ONLY)
4456 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4462 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
4463 int *access_index, target_ulong address, int rw,
4466 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4468 target_phys_addr_t phys_addr;
4469 int prot, access_index;
4471 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
4472 MMU_KERNEL_IDX) != 0)
4473 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
4474 0, MMU_KERNEL_IDX) != 0)
/* Unassigned (device-less) physical pages are reported as untranslatable. */
4476 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
/* FLUSH instruction helper: invalidate translated code covering the
   8-byte doubleword at addr, since guest code may have been modified. */
4482 void helper_flush(target_ulong addr)
4485 tb_invalidate_page_range(addr, addr + 8);