4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr;
50 /* local register indexes (only used inside old micro ops) */
53 typedef struct DisasContext {
54 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
55 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
56 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
60 struct TranslationBlock *tb;
63 typedef struct sparc_def_t sparc_def_t;
66 const unsigned char *name;
67 target_ulong iu_version;
71 uint32_t mmu_ctpr_mask;
72 uint32_t mmu_cxr_mask;
73 uint32_t mmu_sfsr_mask;
74 uint32_t mmu_trcr_mask;
77 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
// This function uses non-native bit order: bit FROM..TO are counted with
// bit 0 as the MSB (bit 31 native), matching the SPARC format diagrams.
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extending variants: widen the (b - a + 1)-bit field to a signed int.
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map an instruction's 5-bit FP register field to a double/quad register
   index.  On V9 the field's LSB selects the upper bank of 32 registers;
   on V8 it is simply masked to an even register number.  */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
/* Sign-extend the low LEN bits of X to a full signed int.  Relies on
   arithmetic right shift of signed values, which QEMU assumes of its
   supported compilers.  */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
107 #define IS_IMM (insn & (1<<13))
109 static void disas_sparc_insn(DisasContext * dc);
#ifdef TARGET_SPARC64
/* Expand func(n) into an indexed call through a table of generated
   micro-ops.  V9 has 64 single-precision register slots, but odd-numbered
   registers above 31 do not exist, hence the 0 (NULL) entries.  */
#define GEN32(func, NAME) \
static GenOpFunc * const NAME ## _table [64] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
}; \
static inline void func(int n) \
{ \
    NAME ## _table[n](); \
}
#else
/* V8 variant: only 32 single-precision registers.  */
#define GEN32(func, NAME) \
static GenOpFunc *const NAME ## _table [32] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
}; \
static inline void func(int n) \
{ \
    NAME ## _table[n](); \
}
#endif
/* floating point registers moves */
GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);

GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);

#if defined(CONFIG_USER_ONLY)
/* quad (128-bit) register moves are only generated for user-mode builds */
GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
#endif
#ifdef CONFIG_USER_ONLY
/* User-mode: everything runs unprivileged, loads/stores use the raw ops.  */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#define gen_op_ldst(name)        gen_op_##name##_raw()
#else
/* System mode: privilege level is encoded in dc->mem_idx and selects the
   user/kernel (and on V9, hypervisor) variant of each memory op.  */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#define OP_LD_TABLE(width) \
static GenOpFunc * const gen_op_##width[] = { \
    &gen_op_##width##_user, \
    &gen_op_##width##_kernel, \
    &gen_op_##width##_hypv, \
};
#else
#define OP_LD_TABLE(width) \
static GenOpFunc * const gen_op_##width[] = { \
    &gen_op_##width##_user, \
    &gen_op_##width##_kernel, \
};
#endif
#define gen_op_ldst(name)        (*gen_op_##name[dc->mem_idx])()
#endif
#ifndef CONFIG_USER_ONLY
/* NOTE(review): the OP_LD_TABLE(...) instantiations that originally lived
   in this conditional appear to be missing from this excerpt — restore
   them from the full file before building.  */
#endif

/* Truncate a 64-bit address temp to 32 bits for 32-bit ABI targets;
   NOTE(review): guard macro name assumed to be TARGET_ABI32 — confirm.  */
#ifdef TARGET_ABI32
#define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
#else
#define ABI32_MASK(addr)
#endif
207 static inline void gen_movl_simm_T1(int32_t val)
209 tcg_gen_movi_tl(cpu_T[1], val);
212 static inline void gen_movl_reg_TN(int reg, TCGv tn)
215 tcg_gen_movi_tl(tn, 0);
217 tcg_gen_ld_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
219 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
220 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224 static inline void gen_movl_reg_T0(int reg)
226 gen_movl_reg_TN(reg, cpu_T[0]);
229 static inline void gen_movl_reg_T1(int reg)
231 gen_movl_reg_TN(reg, cpu_T[1]);
234 static inline void gen_movl_TN_reg(int reg, TCGv tn)
239 tcg_gen_st_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
241 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
242 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
246 static inline void gen_movl_T0_reg(int reg)
248 gen_movl_TN_reg(reg, cpu_T[0]);
251 static inline void gen_movl_T1_reg(int reg)
253 gen_movl_TN_reg(reg, cpu_T[1]);
256 static inline void gen_op_movl_T0_env(size_t offset)
258 tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
261 static inline void gen_op_movl_env_T0(size_t offset)
263 tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
266 static inline void gen_op_movtl_T0_env(size_t offset)
268 tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
271 static inline void gen_op_movtl_env_T0(size_t offset)
273 tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
276 static inline void gen_op_add_T1_T0(void)
278 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
281 static inline void gen_op_or_T1_T0(void)
283 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
286 static inline void gen_op_xor_T1_T0(void)
288 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
291 static inline void gen_jmp_im(target_ulong pc)
293 tcg_gen_movi_tl(cpu_tmp0, pc);
294 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
297 static inline void gen_movl_npc_im(target_ulong npc)
299 tcg_gen_movi_tl(cpu_tmp0, npc);
300 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
303 static inline void gen_goto_tb(DisasContext *s, int tb_num,
304 target_ulong pc, target_ulong npc)
306 TranslationBlock *tb;
309 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
310 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
311 /* jump to same page: we can use a direct jump */
312 tcg_gen_goto_tb(tb_num);
314 gen_movl_npc_im(npc);
315 tcg_gen_exit_tb((long)tb + tb_num);
317 /* jump to another page: currently not optimized */
319 gen_movl_npc_im(npc);
324 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
329 l1 = gen_new_label();
331 gen_op_jz_T2_label(l1);
333 gen_goto_tb(dc, 0, pc1, pc1 + 4);
336 gen_goto_tb(dc, 1, pc2, pc2 + 4);
339 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
344 l1 = gen_new_label();
346 gen_op_jz_T2_label(l1);
348 gen_goto_tb(dc, 0, pc2, pc1);
351 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
354 static inline void gen_branch(DisasContext *dc, target_ulong pc,
357 gen_goto_tb(dc, 0, pc, npc);
360 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2)
364 l1 = gen_new_label();
365 l2 = gen_new_label();
366 gen_op_jz_T2_label(l1);
368 gen_movl_npc_im(npc1);
369 gen_op_jmp_label(l2);
372 gen_movl_npc_im(npc2);
376 /* call this function before using T2 as it may have been set for a jump */
377 static inline void flush_T2(DisasContext * dc)
379 if (dc->npc == JUMP_PC) {
380 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
381 dc->npc = DYNAMIC_PC;
385 static inline void save_npc(DisasContext * dc)
387 if (dc->npc == JUMP_PC) {
388 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
389 dc->npc = DYNAMIC_PC;
390 } else if (dc->npc != DYNAMIC_PC) {
391 gen_movl_npc_im(dc->npc);
395 static inline void save_state(DisasContext * dc)
401 static inline void gen_mov_pc_npc(DisasContext * dc)
403 if (dc->npc == JUMP_PC) {
404 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
407 } else if (dc->npc == DYNAMIC_PC) {
415 static GenOpFunc * const gen_cond[2][16] = {
435 #ifdef TARGET_SPARC64
456 static GenOpFunc * const gen_fcond[4][16] = {
475 #ifdef TARGET_SPARC64
478 gen_op_eval_fbne_fcc1,
479 gen_op_eval_fblg_fcc1,
480 gen_op_eval_fbul_fcc1,
481 gen_op_eval_fbl_fcc1,
482 gen_op_eval_fbug_fcc1,
483 gen_op_eval_fbg_fcc1,
484 gen_op_eval_fbu_fcc1,
486 gen_op_eval_fbe_fcc1,
487 gen_op_eval_fbue_fcc1,
488 gen_op_eval_fbge_fcc1,
489 gen_op_eval_fbuge_fcc1,
490 gen_op_eval_fble_fcc1,
491 gen_op_eval_fbule_fcc1,
492 gen_op_eval_fbo_fcc1,
496 gen_op_eval_fbne_fcc2,
497 gen_op_eval_fblg_fcc2,
498 gen_op_eval_fbul_fcc2,
499 gen_op_eval_fbl_fcc2,
500 gen_op_eval_fbug_fcc2,
501 gen_op_eval_fbg_fcc2,
502 gen_op_eval_fbu_fcc2,
504 gen_op_eval_fbe_fcc2,
505 gen_op_eval_fbue_fcc2,
506 gen_op_eval_fbge_fcc2,
507 gen_op_eval_fbuge_fcc2,
508 gen_op_eval_fble_fcc2,
509 gen_op_eval_fbule_fcc2,
510 gen_op_eval_fbo_fcc2,
514 gen_op_eval_fbne_fcc3,
515 gen_op_eval_fblg_fcc3,
516 gen_op_eval_fbul_fcc3,
517 gen_op_eval_fbl_fcc3,
518 gen_op_eval_fbug_fcc3,
519 gen_op_eval_fbg_fcc3,
520 gen_op_eval_fbu_fcc3,
522 gen_op_eval_fbe_fcc3,
523 gen_op_eval_fbue_fcc3,
524 gen_op_eval_fbge_fcc3,
525 gen_op_eval_fbuge_fcc3,
526 gen_op_eval_fble_fcc3,
527 gen_op_eval_fbule_fcc3,
528 gen_op_eval_fbo_fcc3,
535 #ifdef TARGET_SPARC64
536 static void gen_cond_reg(int cond)
562 /* XXX: potentially incorrect if dynamic npc */
563 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
565 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
566 target_ulong target = dc->pc + offset;
569 /* unconditional not taken */
571 dc->pc = dc->npc + 4;
572 dc->npc = dc->pc + 4;
575 dc->npc = dc->pc + 4;
577 } else if (cond == 0x8) {
578 /* unconditional taken */
581 dc->npc = dc->pc + 4;
588 gen_cond[cc][cond]();
590 gen_branch_a(dc, target, dc->npc);
594 dc->jump_pc[0] = target;
595 dc->jump_pc[1] = dc->npc + 4;
601 /* XXX: potentially incorrect if dynamic npc */
602 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
604 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
605 target_ulong target = dc->pc + offset;
608 /* unconditional not taken */
610 dc->pc = dc->npc + 4;
611 dc->npc = dc->pc + 4;
614 dc->npc = dc->pc + 4;
616 } else if (cond == 0x8) {
617 /* unconditional taken */
620 dc->npc = dc->pc + 4;
627 gen_fcond[cc][cond]();
629 gen_branch_a(dc, target, dc->npc);
633 dc->jump_pc[0] = target;
634 dc->jump_pc[1] = dc->npc + 4;
640 #ifdef TARGET_SPARC64
641 /* XXX: potentially incorrect if dynamic npc */
642 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
644 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
645 target_ulong target = dc->pc + offset;
650 gen_branch_a(dc, target, dc->npc);
654 dc->jump_pc[0] = target;
655 dc->jump_pc[1] = dc->npc + 4;
660 static GenOpFunc * const gen_fcmps[4] = {
667 static GenOpFunc * const gen_fcmpd[4] = {
674 #if defined(CONFIG_USER_ONLY)
675 static GenOpFunc * const gen_fcmpq[4] = {
683 static GenOpFunc * const gen_fcmpes[4] = {
690 static GenOpFunc * const gen_fcmped[4] = {
697 #if defined(CONFIG_USER_ONLY)
698 static GenOpFunc * const gen_fcmpeq[4] = {
707 static int gen_trap_ifnofpu(DisasContext * dc)
709 #if !defined(CONFIG_USER_ONLY)
710 if (!dc->fpu_enabled) {
712 gen_op_exception(TT_NFPU_INSN);
721 #ifdef TARGET_SPARC64
722 static inline void gen_ld_asi(int insn, int size, int sign)
727 r_size = tcg_temp_new(TCG_TYPE_I32);
728 r_sign = tcg_temp_new(TCG_TYPE_I32);
729 tcg_gen_movi_i32(r_size, size);
730 tcg_gen_movi_i32(r_sign, sign);
732 offset = GET_FIELD(insn, 25, 31);
733 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
734 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
736 asi = GET_FIELD(insn, 19, 26);
737 tcg_gen_movi_i32(cpu_T[1], asi);
739 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
743 static inline void gen_st_asi(int insn, int size)
748 r_asi = tcg_temp_new(TCG_TYPE_I32);
749 r_size = tcg_temp_new(TCG_TYPE_I32);
750 tcg_gen_movi_i32(r_size, size);
752 offset = GET_FIELD(insn, 25, 31);
753 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
754 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
756 asi = GET_FIELD(insn, 19, 26);
757 tcg_gen_movi_i32(r_asi, asi);
759 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
762 static inline void gen_ldf_asi(int insn, int size, int rd)
765 TCGv r_asi, r_size, r_rd;
767 r_asi = tcg_temp_new(TCG_TYPE_I32);
768 r_size = tcg_temp_new(TCG_TYPE_I32);
769 r_rd = tcg_temp_new(TCG_TYPE_I32);
770 tcg_gen_movi_i32(r_size, size);
771 tcg_gen_movi_i32(r_rd, rd);
773 offset = GET_FIELD(insn, 25, 31);
774 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
775 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
777 asi = GET_FIELD(insn, 19, 26);
778 tcg_gen_movi_i32(r_asi, asi);
780 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
783 static inline void gen_stf_asi(int insn, int size, int rd)
786 TCGv r_asi, r_size, r_rd;
788 r_asi = tcg_temp_new(TCG_TYPE_I32);
789 r_size = tcg_temp_new(TCG_TYPE_I32);
790 r_rd = tcg_temp_new(TCG_TYPE_I32);
791 tcg_gen_movi_i32(r_size, size);
792 tcg_gen_movi_i32(r_rd, rd);
794 offset = GET_FIELD(insn, 25, 31);
795 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
796 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
798 asi = GET_FIELD(insn, 19, 26);
799 tcg_gen_movi_i32(r_asi, asi);
801 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
804 static inline void gen_swap_asi(int insn)
807 TCGv r_size, r_sign, r_temp;
809 r_size = tcg_temp_new(TCG_TYPE_I32);
810 r_sign = tcg_temp_new(TCG_TYPE_I32);
811 r_temp = tcg_temp_new(TCG_TYPE_I32);
812 tcg_gen_movi_i32(r_size, 4);
813 tcg_gen_movi_i32(r_sign, 0);
815 offset = GET_FIELD(insn, 25, 31);
816 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
817 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
819 asi = GET_FIELD(insn, 19, 26);
820 tcg_gen_movi_i32(cpu_T[1], asi);
822 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
824 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
825 tcg_gen_mov_i32(cpu_T[1], r_temp);
828 static inline void gen_ldda_asi(int insn)
831 TCGv r_size, r_sign, r_dword;
833 r_size = tcg_temp_new(TCG_TYPE_I32);
834 r_sign = tcg_temp_new(TCG_TYPE_I32);
835 r_dword = tcg_temp_new(TCG_TYPE_I64);
836 tcg_gen_movi_i32(r_size, 8);
837 tcg_gen_movi_i32(r_sign, 0);
839 offset = GET_FIELD(insn, 25, 31);
840 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
841 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
843 asi = GET_FIELD(insn, 19, 26);
844 tcg_gen_movi_i32(cpu_T[1], asi);
846 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
848 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
849 tcg_gen_shri_i64(r_dword, r_dword, 32);
850 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
853 static inline void gen_cas_asi(int insn, int rd)
858 r_val1 = tcg_temp_new(TCG_TYPE_I32);
859 r_asi = tcg_temp_new(TCG_TYPE_I32);
860 gen_movl_reg_TN(rd, r_val1);
862 offset = GET_FIELD(insn, 25, 31);
863 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
864 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
866 asi = GET_FIELD(insn, 19, 26);
867 tcg_gen_movi_i32(r_asi, asi);
869 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
873 static inline void gen_casx_asi(int insn, int rd)
878 r_val1 = tcg_temp_new(TCG_TYPE_I64);
879 r_asi = tcg_temp_new(TCG_TYPE_I32);
880 gen_movl_reg_TN(rd, r_val1);
882 offset = GET_FIELD(insn, 25, 31);
883 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
884 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
886 asi = GET_FIELD(insn, 19, 26);
887 tcg_gen_movi_i32(r_asi, asi);
889 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
893 #elif !defined(CONFIG_USER_ONLY)
895 static inline void gen_ld_asi(int insn, int size, int sign)
898 TCGv r_size, r_sign, r_dword;
900 r_size = tcg_temp_new(TCG_TYPE_I32);
901 r_sign = tcg_temp_new(TCG_TYPE_I32);
902 r_dword = tcg_temp_new(TCG_TYPE_I64);
903 tcg_gen_movi_i32(r_size, size);
904 tcg_gen_movi_i32(r_sign, sign);
905 asi = GET_FIELD(insn, 19, 26);
906 tcg_gen_movi_i32(cpu_T[1], asi);
907 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
909 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
912 static inline void gen_st_asi(int insn, int size)
915 TCGv r_dword, r_asi, r_size;
917 r_dword = tcg_temp_new(TCG_TYPE_I64);
918 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
919 r_asi = tcg_temp_new(TCG_TYPE_I32);
920 r_size = tcg_temp_new(TCG_TYPE_I32);
921 asi = GET_FIELD(insn, 19, 26);
922 tcg_gen_movi_i32(r_asi, asi);
923 tcg_gen_movi_i32(r_size, size);
924 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
927 static inline void gen_swap_asi(int insn)
930 TCGv r_size, r_sign, r_temp;
932 r_size = tcg_temp_new(TCG_TYPE_I32);
933 r_sign = tcg_temp_new(TCG_TYPE_I32);
934 r_temp = tcg_temp_new(TCG_TYPE_I32);
935 tcg_gen_movi_i32(r_size, 4);
936 tcg_gen_movi_i32(r_sign, 0);
937 asi = GET_FIELD(insn, 19, 26);
938 tcg_gen_movi_i32(cpu_T[1], asi);
939 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
941 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
942 tcg_gen_mov_i32(cpu_T[1], r_temp);
945 static inline void gen_ldda_asi(int insn)
948 TCGv r_size, r_sign, r_dword;
950 r_size = tcg_temp_new(TCG_TYPE_I32);
951 r_sign = tcg_temp_new(TCG_TYPE_I32);
952 r_dword = tcg_temp_new(TCG_TYPE_I64);
953 tcg_gen_movi_i32(r_size, 8);
954 tcg_gen_movi_i32(r_sign, 0);
955 asi = GET_FIELD(insn, 19, 26);
956 tcg_gen_movi_i32(cpu_T[1], asi);
957 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
959 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
960 tcg_gen_shri_i64(r_dword, r_dword, 32);
961 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
965 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
966 static inline void gen_ldstub_asi(int insn)
969 TCGv r_dword, r_asi, r_size;
971 gen_ld_asi(insn, 1, 0);
973 r_dword = tcg_temp_new(TCG_TYPE_I64);
974 r_asi = tcg_temp_new(TCG_TYPE_I32);
975 r_size = tcg_temp_new(TCG_TYPE_I32);
976 asi = GET_FIELD(insn, 19, 26);
977 tcg_gen_movi_i32(r_dword, 0xff);
978 tcg_gen_movi_i32(r_asi, asi);
979 tcg_gen_movi_i32(r_size, 1);
980 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
984 /* before an instruction, dc->pc must be static */
985 static void disas_sparc_insn(DisasContext * dc)
987 unsigned int insn, opc, rs1, rs2, rd;
989 insn = ldl_code(dc->pc);
990 opc = GET_FIELD(insn, 0, 1);
992 rd = GET_FIELD(insn, 2, 6);
994 case 0: /* branches/sethi */
996 unsigned int xop = GET_FIELD(insn, 7, 9);
999 #ifdef TARGET_SPARC64
1000 case 0x1: /* V9 BPcc */
1004 target = GET_FIELD_SP(insn, 0, 18);
1005 target = sign_extend(target, 18);
1007 cc = GET_FIELD_SP(insn, 20, 21);
1009 do_branch(dc, target, insn, 0);
1011 do_branch(dc, target, insn, 1);
1016 case 0x3: /* V9 BPr */
1018 target = GET_FIELD_SP(insn, 0, 13) |
1019 (GET_FIELD_SP(insn, 20, 21) << 14);
1020 target = sign_extend(target, 16);
1022 rs1 = GET_FIELD(insn, 13, 17);
1023 gen_movl_reg_T0(rs1);
1024 do_branch_reg(dc, target, insn);
1027 case 0x5: /* V9 FBPcc */
1029 int cc = GET_FIELD_SP(insn, 20, 21);
1030 if (gen_trap_ifnofpu(dc))
1032 target = GET_FIELD_SP(insn, 0, 18);
1033 target = sign_extend(target, 19);
1035 do_fbranch(dc, target, insn, cc);
1039 case 0x7: /* CBN+x */
1044 case 0x2: /* BN+x */
1046 target = GET_FIELD(insn, 10, 31);
1047 target = sign_extend(target, 22);
1049 do_branch(dc, target, insn, 0);
1052 case 0x6: /* FBN+x */
1054 if (gen_trap_ifnofpu(dc))
1056 target = GET_FIELD(insn, 10, 31);
1057 target = sign_extend(target, 22);
1059 do_fbranch(dc, target, insn, 0);
1062 case 0x4: /* SETHI */
1067 uint32_t value = GET_FIELD(insn, 10, 31);
1068 tcg_gen_movi_tl(cpu_T[0], value << 10);
1069 gen_movl_T0_reg(rd);
1074 case 0x0: /* UNIMPL */
1083 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1085 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1086 gen_movl_T0_reg(15);
1092 case 2: /* FPU & Logical Operations */
1094 unsigned int xop = GET_FIELD(insn, 7, 12);
1095 if (xop == 0x3a) { /* generate trap */
1098 rs1 = GET_FIELD(insn, 13, 17);
1099 gen_movl_reg_T0(rs1);
1101 rs2 = GET_FIELD(insn, 25, 31);
1102 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
1104 rs2 = GET_FIELD(insn, 27, 31);
1108 gen_movl_reg_T1(rs2);
1114 cond = GET_FIELD(insn, 3, 6);
1117 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
1118 } else if (cond != 0) {
1119 #ifdef TARGET_SPARC64
1121 int cc = GET_FIELD_SP(insn, 11, 12);
1125 gen_cond[0][cond]();
1127 gen_cond[1][cond]();
1133 gen_cond[0][cond]();
1135 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], cpu_T[2]);
1141 } else if (xop == 0x28) {
1142 rs1 = GET_FIELD(insn, 13, 17);
1145 #ifndef TARGET_SPARC64
1146 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1147 manual, rdy on the microSPARC
1149 case 0x0f: /* stbar in the SPARCv8 manual,
1150 rdy on the microSPARC II */
1151 case 0x10 ... 0x1f: /* implementation-dependent in the
1152 SPARCv8 manual, rdy on the
1155 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
1156 gen_movl_T0_reg(rd);
1158 #ifdef TARGET_SPARC64
1159 case 0x2: /* V9 rdccr */
1161 gen_movl_T0_reg(rd);
1163 case 0x3: /* V9 rdasi */
1164 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
1165 gen_movl_T0_reg(rd);
1167 case 0x4: /* V9 rdtick */
1169 gen_movl_T0_reg(rd);
1171 case 0x5: /* V9 rdpc */
1172 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1173 gen_movl_T0_reg(rd);
1175 case 0x6: /* V9 rdfprs */
1176 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
1177 gen_movl_T0_reg(rd);
1179 case 0xf: /* V9 membar */
1180 break; /* no effect */
1181 case 0x13: /* Graphics Status */
1182 if (gen_trap_ifnofpu(dc))
1184 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
1185 gen_movl_T0_reg(rd);
1187 case 0x17: /* Tick compare */
1188 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
1189 gen_movl_T0_reg(rd);
1191 case 0x18: /* System tick */
1193 gen_movl_T0_reg(rd);
1195 case 0x19: /* System tick compare */
1196 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
1197 gen_movl_T0_reg(rd);
1199 case 0x10: /* Performance Control */
1200 case 0x11: /* Performance Instrumentation Counter */
1201 case 0x12: /* Dispatch Control */
1202 case 0x14: /* Softint set, WO */
1203 case 0x15: /* Softint clear, WO */
1204 case 0x16: /* Softint write */
1209 #if !defined(CONFIG_USER_ONLY)
1210 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1211 #ifndef TARGET_SPARC64
1212 if (!supervisor(dc))
1214 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
1216 if (!hypervisor(dc))
1218 rs1 = GET_FIELD(insn, 13, 17);
1221 // gen_op_rdhpstate();
1224 // gen_op_rdhtstate();
1227 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
1230 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
1233 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
1235 case 31: // hstick_cmpr
1236 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
1242 gen_movl_T0_reg(rd);
1244 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1245 if (!supervisor(dc))
1247 #ifdef TARGET_SPARC64
1248 rs1 = GET_FIELD(insn, 13, 17);
1266 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1269 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
1272 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
1275 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
1281 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
1283 case 11: // canrestore
1284 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
1286 case 12: // cleanwin
1287 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
1289 case 13: // otherwin
1290 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
1293 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
1295 case 16: // UA2005 gl
1296 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
1298 case 26: // UA2005 strand status
1299 if (!hypervisor(dc))
1301 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
1304 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
1311 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
1313 gen_movl_T0_reg(rd);
1315 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
1316 #ifdef TARGET_SPARC64
1319 if (!supervisor(dc))
1321 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1322 gen_movl_T0_reg(rd);
1326 } else if (xop == 0x34) { /* FPU Operations */
1327 if (gen_trap_ifnofpu(dc))
1329 gen_op_clear_ieee_excp_and_FTT();
1330 rs1 = GET_FIELD(insn, 13, 17);
1331 rs2 = GET_FIELD(insn, 27, 31);
1332 xop = GET_FIELD(insn, 18, 26);
1334 case 0x1: /* fmovs */
1335 gen_op_load_fpr_FT0(rs2);
1336 gen_op_store_FT0_fpr(rd);
1338 case 0x5: /* fnegs */
1339 gen_op_load_fpr_FT1(rs2);
1341 gen_op_store_FT0_fpr(rd);
1343 case 0x9: /* fabss */
1344 gen_op_load_fpr_FT1(rs2);
1346 gen_op_store_FT0_fpr(rd);
1348 case 0x29: /* fsqrts */
1349 gen_op_load_fpr_FT1(rs2);
1351 gen_op_store_FT0_fpr(rd);
1353 case 0x2a: /* fsqrtd */
1354 gen_op_load_fpr_DT1(DFPREG(rs2));
1356 gen_op_store_DT0_fpr(DFPREG(rd));
1358 case 0x2b: /* fsqrtq */
1359 #if defined(CONFIG_USER_ONLY)
1360 gen_op_load_fpr_QT1(QFPREG(rs2));
1362 gen_op_store_QT0_fpr(QFPREG(rd));
1368 gen_op_load_fpr_FT0(rs1);
1369 gen_op_load_fpr_FT1(rs2);
1371 gen_op_store_FT0_fpr(rd);
1374 gen_op_load_fpr_DT0(DFPREG(rs1));
1375 gen_op_load_fpr_DT1(DFPREG(rs2));
1377 gen_op_store_DT0_fpr(DFPREG(rd));
1379 case 0x43: /* faddq */
1380 #if defined(CONFIG_USER_ONLY)
1381 gen_op_load_fpr_QT0(QFPREG(rs1));
1382 gen_op_load_fpr_QT1(QFPREG(rs2));
1384 gen_op_store_QT0_fpr(QFPREG(rd));
1390 gen_op_load_fpr_FT0(rs1);
1391 gen_op_load_fpr_FT1(rs2);
1393 gen_op_store_FT0_fpr(rd);
1396 gen_op_load_fpr_DT0(DFPREG(rs1));
1397 gen_op_load_fpr_DT1(DFPREG(rs2));
1399 gen_op_store_DT0_fpr(DFPREG(rd));
1401 case 0x47: /* fsubq */
1402 #if defined(CONFIG_USER_ONLY)
1403 gen_op_load_fpr_QT0(QFPREG(rs1));
1404 gen_op_load_fpr_QT1(QFPREG(rs2));
1406 gen_op_store_QT0_fpr(QFPREG(rd));
1412 gen_op_load_fpr_FT0(rs1);
1413 gen_op_load_fpr_FT1(rs2);
1415 gen_op_store_FT0_fpr(rd);
1418 gen_op_load_fpr_DT0(DFPREG(rs1));
1419 gen_op_load_fpr_DT1(DFPREG(rs2));
1421 gen_op_store_DT0_fpr(DFPREG(rd));
1423 case 0x4b: /* fmulq */
1424 #if defined(CONFIG_USER_ONLY)
1425 gen_op_load_fpr_QT0(QFPREG(rs1));
1426 gen_op_load_fpr_QT1(QFPREG(rs2));
1428 gen_op_store_QT0_fpr(QFPREG(rd));
1434 gen_op_load_fpr_FT0(rs1);
1435 gen_op_load_fpr_FT1(rs2);
1437 gen_op_store_FT0_fpr(rd);
1440 gen_op_load_fpr_DT0(DFPREG(rs1));
1441 gen_op_load_fpr_DT1(DFPREG(rs2));
1443 gen_op_store_DT0_fpr(DFPREG(rd));
1445 case 0x4f: /* fdivq */
1446 #if defined(CONFIG_USER_ONLY)
1447 gen_op_load_fpr_QT0(QFPREG(rs1));
1448 gen_op_load_fpr_QT1(QFPREG(rs2));
1450 gen_op_store_QT0_fpr(QFPREG(rd));
1456 gen_op_load_fpr_FT0(rs1);
1457 gen_op_load_fpr_FT1(rs2);
1459 gen_op_store_DT0_fpr(DFPREG(rd));
1461 case 0x6e: /* fdmulq */
1462 #if defined(CONFIG_USER_ONLY)
1463 gen_op_load_fpr_DT0(DFPREG(rs1));
1464 gen_op_load_fpr_DT1(DFPREG(rs2));
1466 gen_op_store_QT0_fpr(QFPREG(rd));
1472 gen_op_load_fpr_FT1(rs2);
1474 gen_op_store_FT0_fpr(rd);
1477 gen_op_load_fpr_DT1(DFPREG(rs2));
1479 gen_op_store_FT0_fpr(rd);
1481 case 0xc7: /* fqtos */
1482 #if defined(CONFIG_USER_ONLY)
1483 gen_op_load_fpr_QT1(QFPREG(rs2));
1485 gen_op_store_FT0_fpr(rd);
1491 gen_op_load_fpr_FT1(rs2);
1493 gen_op_store_DT0_fpr(DFPREG(rd));
1496 gen_op_load_fpr_FT1(rs2);
1498 gen_op_store_DT0_fpr(DFPREG(rd));
1500 case 0xcb: /* fqtod */
1501 #if defined(CONFIG_USER_ONLY)
1502 gen_op_load_fpr_QT1(QFPREG(rs2));
1504 gen_op_store_DT0_fpr(DFPREG(rd));
1509 case 0xcc: /* fitoq */
1510 #if defined(CONFIG_USER_ONLY)
1511 gen_op_load_fpr_FT1(rs2);
1513 gen_op_store_QT0_fpr(QFPREG(rd));
1518 case 0xcd: /* fstoq */
1519 #if defined(CONFIG_USER_ONLY)
1520 gen_op_load_fpr_FT1(rs2);
1522 gen_op_store_QT0_fpr(QFPREG(rd));
1527 case 0xce: /* fdtoq */
1528 #if defined(CONFIG_USER_ONLY)
1529 gen_op_load_fpr_DT1(DFPREG(rs2));
1531 gen_op_store_QT0_fpr(QFPREG(rd));
1537 gen_op_load_fpr_FT1(rs2);
1539 gen_op_store_FT0_fpr(rd);
1542 gen_op_load_fpr_DT1(DFPREG(rs2));
1544 gen_op_store_FT0_fpr(rd);
1546 case 0xd3: /* fqtoi */
1547 #if defined(CONFIG_USER_ONLY)
1548 gen_op_load_fpr_QT1(QFPREG(rs2));
1550 gen_op_store_FT0_fpr(rd);
1555 #ifdef TARGET_SPARC64
1556 case 0x2: /* V9 fmovd */
1557 gen_op_load_fpr_DT0(DFPREG(rs2));
1558 gen_op_store_DT0_fpr(DFPREG(rd));
1560 case 0x3: /* V9 fmovq */
1561 #if defined(CONFIG_USER_ONLY)
1562 gen_op_load_fpr_QT0(QFPREG(rs2));
1563 gen_op_store_QT0_fpr(QFPREG(rd));
1568 case 0x6: /* V9 fnegd */
1569 gen_op_load_fpr_DT1(DFPREG(rs2));
1571 gen_op_store_DT0_fpr(DFPREG(rd));
1573 case 0x7: /* V9 fnegq */
1574 #if defined(CONFIG_USER_ONLY)
1575 gen_op_load_fpr_QT1(QFPREG(rs2));
1577 gen_op_store_QT0_fpr(QFPREG(rd));
1582 case 0xa: /* V9 fabsd */
1583 gen_op_load_fpr_DT1(DFPREG(rs2));
1585 gen_op_store_DT0_fpr(DFPREG(rd));
1587 case 0xb: /* V9 fabsq */
1588 #if defined(CONFIG_USER_ONLY)
1589 gen_op_load_fpr_QT1(QFPREG(rs2));
1591 gen_op_store_QT0_fpr(QFPREG(rd));
1596 case 0x81: /* V9 fstox */
1597 gen_op_load_fpr_FT1(rs2);
1599 gen_op_store_DT0_fpr(DFPREG(rd));
1601 case 0x82: /* V9 fdtox */
1602 gen_op_load_fpr_DT1(DFPREG(rs2));
1604 gen_op_store_DT0_fpr(DFPREG(rd));
1606 case 0x83: /* V9 fqtox */
1607 #if defined(CONFIG_USER_ONLY)
1608 gen_op_load_fpr_QT1(QFPREG(rs2));
1610 gen_op_store_DT0_fpr(DFPREG(rd));
1615 case 0x84: /* V9 fxtos */
1616 gen_op_load_fpr_DT1(DFPREG(rs2));
1618 gen_op_store_FT0_fpr(rd);
1620 case 0x88: /* V9 fxtod */
1621 gen_op_load_fpr_DT1(DFPREG(rs2));
1623 gen_op_store_DT0_fpr(DFPREG(rd));
1625 case 0x8c: /* V9 fxtoq */
1626 #if defined(CONFIG_USER_ONLY)
1627 gen_op_load_fpr_DT1(DFPREG(rs2));
1629 gen_op_store_QT0_fpr(QFPREG(rd));
1638 } else if (xop == 0x35) { /* FPU Operations */
1639 #ifdef TARGET_SPARC64
1642 if (gen_trap_ifnofpu(dc))
1644 gen_op_clear_ieee_excp_and_FTT();
1645 rs1 = GET_FIELD(insn, 13, 17);
1646 rs2 = GET_FIELD(insn, 27, 31);
1647 xop = GET_FIELD(insn, 18, 26);
1648 #ifdef TARGET_SPARC64
1649 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
1650 cond = GET_FIELD_SP(insn, 14, 17);
1651 gen_op_load_fpr_FT0(rd);
1652 gen_op_load_fpr_FT1(rs2);
1653 rs1 = GET_FIELD(insn, 13, 17);
1654 gen_movl_reg_T0(rs1);
1658 gen_op_store_FT0_fpr(rd);
1660 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
1661 cond = GET_FIELD_SP(insn, 14, 17);
1662 gen_op_load_fpr_DT0(DFPREG(rd));
1663 gen_op_load_fpr_DT1(DFPREG(rs2));
1665 rs1 = GET_FIELD(insn, 13, 17);
1666 gen_movl_reg_T0(rs1);
1669 gen_op_store_DT0_fpr(DFPREG(rd));
1671 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
1672 #if defined(CONFIG_USER_ONLY)
1673 cond = GET_FIELD_SP(insn, 14, 17);
1674 gen_op_load_fpr_QT0(QFPREG(rd));
1675 gen_op_load_fpr_QT1(QFPREG(rs2));
1677 rs1 = GET_FIELD(insn, 13, 17);
1678 gen_movl_reg_T0(rs1);
1681 gen_op_store_QT0_fpr(QFPREG(rd));
1689 #ifdef TARGET_SPARC64
1690 case 0x001: /* V9 fmovscc %fcc0 */
1691 cond = GET_FIELD_SP(insn, 14, 17);
1692 gen_op_load_fpr_FT0(rd);
1693 gen_op_load_fpr_FT1(rs2);
1695 gen_fcond[0][cond]();
1697 gen_op_store_FT0_fpr(rd);
1699 case 0x002: /* V9 fmovdcc %fcc0 */
1700 cond = GET_FIELD_SP(insn, 14, 17);
1701 gen_op_load_fpr_DT0(DFPREG(rd));
1702 gen_op_load_fpr_DT1(DFPREG(rs2));
1704 gen_fcond[0][cond]();
1706 gen_op_store_DT0_fpr(DFPREG(rd));
1708 case 0x003: /* V9 fmovqcc %fcc0 */
1709 #if defined(CONFIG_USER_ONLY)
1710 cond = GET_FIELD_SP(insn, 14, 17);
1711 gen_op_load_fpr_QT0(QFPREG(rd));
1712 gen_op_load_fpr_QT1(QFPREG(rs2));
1714 gen_fcond[0][cond]();
1716 gen_op_store_QT0_fpr(QFPREG(rd));
1721 case 0x041: /* V9 fmovscc %fcc1 */
1722 cond = GET_FIELD_SP(insn, 14, 17);
1723 gen_op_load_fpr_FT0(rd);
1724 gen_op_load_fpr_FT1(rs2);
1726 gen_fcond[1][cond]();
1728 gen_op_store_FT0_fpr(rd);
1730 case 0x042: /* V9 fmovdcc %fcc1 */
1731 cond = GET_FIELD_SP(insn, 14, 17);
1732 gen_op_load_fpr_DT0(DFPREG(rd));
1733 gen_op_load_fpr_DT1(DFPREG(rs2));
1735 gen_fcond[1][cond]();
1737 gen_op_store_DT0_fpr(DFPREG(rd));
1739 case 0x043: /* V9 fmovqcc %fcc1 */
1740 #if defined(CONFIG_USER_ONLY)
1741 cond = GET_FIELD_SP(insn, 14, 17);
1742 gen_op_load_fpr_QT0(QFPREG(rd));
1743 gen_op_load_fpr_QT1(QFPREG(rs2));
1745 gen_fcond[1][cond]();
1747 gen_op_store_QT0_fpr(QFPREG(rd));
1752 case 0x081: /* V9 fmovscc %fcc2 */
1753 cond = GET_FIELD_SP(insn, 14, 17);
1754 gen_op_load_fpr_FT0(rd);
1755 gen_op_load_fpr_FT1(rs2);
1757 gen_fcond[2][cond]();
1759 gen_op_store_FT0_fpr(rd);
1761 case 0x082: /* V9 fmovdcc %fcc2 */
1762 cond = GET_FIELD_SP(insn, 14, 17);
1763 gen_op_load_fpr_DT0(DFPREG(rd));
1764 gen_op_load_fpr_DT1(DFPREG(rs2));
1766 gen_fcond[2][cond]();
1768 gen_op_store_DT0_fpr(DFPREG(rd));
1770 case 0x083: /* V9 fmovqcc %fcc2 */
1771 #if defined(CONFIG_USER_ONLY)
1772 cond = GET_FIELD_SP(insn, 14, 17);
1773 gen_op_load_fpr_QT0(rd);
1774 gen_op_load_fpr_QT1(rs2);
1776 gen_fcond[2][cond]();
1778 gen_op_store_QT0_fpr(rd);
1783 case 0x0c1: /* V9 fmovscc %fcc3 */
1784 cond = GET_FIELD_SP(insn, 14, 17);
1785 gen_op_load_fpr_FT0(rd);
1786 gen_op_load_fpr_FT1(rs2);
1788 gen_fcond[3][cond]();
1790 gen_op_store_FT0_fpr(rd);
1792 case 0x0c2: /* V9 fmovdcc %fcc3 */
1793 cond = GET_FIELD_SP(insn, 14, 17);
1794 gen_op_load_fpr_DT0(DFPREG(rd));
1795 gen_op_load_fpr_DT1(DFPREG(rs2));
1797 gen_fcond[3][cond]();
1799 gen_op_store_DT0_fpr(DFPREG(rd));
1801 case 0x0c3: /* V9 fmovqcc %fcc3 */
1802 #if defined(CONFIG_USER_ONLY)
1803 cond = GET_FIELD_SP(insn, 14, 17);
1804 gen_op_load_fpr_QT0(QFPREG(rd));
1805 gen_op_load_fpr_QT1(QFPREG(rs2));
1807 gen_fcond[3][cond]();
1809 gen_op_store_QT0_fpr(QFPREG(rd));
1814 case 0x101: /* V9 fmovscc %icc */
1815 cond = GET_FIELD_SP(insn, 14, 17);
1816 gen_op_load_fpr_FT0(rd);
1817 gen_op_load_fpr_FT1(rs2);
1819 gen_cond[0][cond]();
1821 gen_op_store_FT0_fpr(rd);
1823 case 0x102: /* V9 fmovdcc %icc */
1824 cond = GET_FIELD_SP(insn, 14, 17);
1825 gen_op_load_fpr_DT0(DFPREG(rd));
1826 gen_op_load_fpr_DT1(DFPREG(rs2));
1828 gen_cond[0][cond]();
1830 gen_op_store_DT0_fpr(DFPREG(rd));
1832 case 0x103: /* V9 fmovqcc %icc */
1833 #if defined(CONFIG_USER_ONLY)
1834 cond = GET_FIELD_SP(insn, 14, 17);
1835 gen_op_load_fpr_QT0(rd);
1836 gen_op_load_fpr_QT1(rs2);
1838 gen_cond[0][cond]();
1840 gen_op_store_QT0_fpr(rd);
1845 case 0x181: /* V9 fmovscc %xcc */
1846 cond = GET_FIELD_SP(insn, 14, 17);
1847 gen_op_load_fpr_FT0(rd);
1848 gen_op_load_fpr_FT1(rs2);
1850 gen_cond[1][cond]();
1852 gen_op_store_FT0_fpr(rd);
1854 case 0x182: /* V9 fmovdcc %xcc */
1855 cond = GET_FIELD_SP(insn, 14, 17);
1856 gen_op_load_fpr_DT0(DFPREG(rd));
1857 gen_op_load_fpr_DT1(DFPREG(rs2));
1859 gen_cond[1][cond]();
1861 gen_op_store_DT0_fpr(DFPREG(rd));
1863 case 0x183: /* V9 fmovqcc %xcc */
1864 #if defined(CONFIG_USER_ONLY)
1865 cond = GET_FIELD_SP(insn, 14, 17);
1866 gen_op_load_fpr_QT0(rd);
1867 gen_op_load_fpr_QT1(rs2);
1869 gen_cond[1][cond]();
1871 gen_op_store_QT0_fpr(rd);
1877 case 0x51: /* fcmps, V9 %fcc */
1878 gen_op_load_fpr_FT0(rs1);
1879 gen_op_load_fpr_FT1(rs2);
1880 #ifdef TARGET_SPARC64
1881 gen_fcmps[rd & 3]();
1886 case 0x52: /* fcmpd, V9 %fcc */
1887 gen_op_load_fpr_DT0(DFPREG(rs1));
1888 gen_op_load_fpr_DT1(DFPREG(rs2));
1889 #ifdef TARGET_SPARC64
1890 gen_fcmpd[rd & 3]();
1895 case 0x53: /* fcmpq, V9 %fcc */
1896 #if defined(CONFIG_USER_ONLY)
1897 gen_op_load_fpr_QT0(QFPREG(rs1));
1898 gen_op_load_fpr_QT1(QFPREG(rs2));
1899 #ifdef TARGET_SPARC64
1900 gen_fcmpq[rd & 3]();
1905 #else /* !defined(CONFIG_USER_ONLY) */
1908 case 0x55: /* fcmpes, V9 %fcc */
1909 gen_op_load_fpr_FT0(rs1);
1910 gen_op_load_fpr_FT1(rs2);
1911 #ifdef TARGET_SPARC64
1912 gen_fcmpes[rd & 3]();
1917 case 0x56: /* fcmped, V9 %fcc */
1918 gen_op_load_fpr_DT0(DFPREG(rs1));
1919 gen_op_load_fpr_DT1(DFPREG(rs2));
1920 #ifdef TARGET_SPARC64
1921 gen_fcmped[rd & 3]();
1926 case 0x57: /* fcmpeq, V9 %fcc */
1927 #if defined(CONFIG_USER_ONLY)
1928 gen_op_load_fpr_QT0(QFPREG(rs1));
1929 gen_op_load_fpr_QT1(QFPREG(rs2));
1930 #ifdef TARGET_SPARC64
1931 gen_fcmpeq[rd & 3]();
1936 #else/* !defined(CONFIG_USER_ONLY) */
1943 } else if (xop == 0x2) {
1946 rs1 = GET_FIELD(insn, 13, 17);
1948 // or %g0, x, y -> mov T0, x; mov y, T0
1949 if (IS_IMM) { /* immediate */
1950 rs2 = GET_FIELDs(insn, 19, 31);
1951 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
1952 } else { /* register */
1953 rs2 = GET_FIELD(insn, 27, 31);
1954 gen_movl_reg_T0(rs2);
1957 gen_movl_reg_T0(rs1);
1958 if (IS_IMM) { /* immediate */
1959 rs2 = GET_FIELDs(insn, 19, 31);
1960 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
1961 } else { /* register */
1962 // or x, %g0, y -> mov T1, x; mov y, T1
1963 rs2 = GET_FIELD(insn, 27, 31);
1965 gen_movl_reg_T1(rs2);
1970 gen_movl_T0_reg(rd);
1972 #ifdef TARGET_SPARC64
1973 } else if (xop == 0x25) { /* sll, V9 sllx */
1974 rs1 = GET_FIELD(insn, 13, 17);
1975 gen_movl_reg_T0(rs1);
1976 if (IS_IMM) { /* immediate */
1977 rs2 = GET_FIELDs(insn, 20, 31);
1978 if (insn & (1 << 12)) {
1979 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
1981 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
1982 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
1984 } else { /* register */
1985 rs2 = GET_FIELD(insn, 27, 31);
1986 gen_movl_reg_T1(rs2);
1987 if (insn & (1 << 12)) {
1988 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
1989 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1991 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
1992 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
1993 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1996 gen_movl_T0_reg(rd);
1997 } else if (xop == 0x26) { /* srl, V9 srlx */
1998 rs1 = GET_FIELD(insn, 13, 17);
1999 gen_movl_reg_T0(rs1);
2000 if (IS_IMM) { /* immediate */
2001 rs2 = GET_FIELDs(insn, 20, 31);
2002 if (insn & (1 << 12)) {
2003 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2005 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2006 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2008 } else { /* register */
2009 rs2 = GET_FIELD(insn, 27, 31);
2010 gen_movl_reg_T1(rs2);
2011 if (insn & (1 << 12)) {
2012 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2013 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2015 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2016 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2017 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2020 gen_movl_T0_reg(rd);
2021 } else if (xop == 0x27) { /* sra, V9 srax */
2022 rs1 = GET_FIELD(insn, 13, 17);
2023 gen_movl_reg_T0(rs1);
2024 if (IS_IMM) { /* immediate */
2025 rs2 = GET_FIELDs(insn, 20, 31);
2026 if (insn & (1 << 12)) {
2027 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2029 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2030 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2031 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2033 } else { /* register */
2034 rs2 = GET_FIELD(insn, 27, 31);
2035 gen_movl_reg_T1(rs2);
2036 if (insn & (1 << 12)) {
2037 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2038 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2040 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2041 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2042 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2045 gen_movl_T0_reg(rd);
2047 } else if (xop < 0x36) {
2048 rs1 = GET_FIELD(insn, 13, 17);
2049 gen_movl_reg_T0(rs1);
2050 if (IS_IMM) { /* immediate */
2051 rs2 = GET_FIELDs(insn, 19, 31);
2052 gen_movl_simm_T1(rs2);
2053 } else { /* register */
2054 rs2 = GET_FIELD(insn, 27, 31);
2055 gen_movl_reg_T1(rs2);
2058 switch (xop & ~0x10) {
2061 gen_op_add_T1_T0_cc();
2066 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2068 gen_op_logic_T0_cc();
2071 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2073 gen_op_logic_T0_cc();
2076 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2078 gen_op_logic_T0_cc();
2082 gen_op_sub_T1_T0_cc();
2084 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2087 gen_op_andn_T1_T0();
2089 gen_op_logic_T0_cc();
2094 gen_op_logic_T0_cc();
2097 gen_op_xnor_T1_T0();
2099 gen_op_logic_T0_cc();
2103 gen_op_addx_T1_T0_cc();
2105 gen_op_addx_T1_T0();
2107 #ifdef TARGET_SPARC64
2108 case 0x9: /* V9 mulx */
2109 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2113 gen_op_umul_T1_T0();
2115 gen_op_logic_T0_cc();
2118 gen_op_smul_T1_T0();
2120 gen_op_logic_T0_cc();
2124 gen_op_subx_T1_T0_cc();
2126 gen_op_subx_T1_T0();
2128 #ifdef TARGET_SPARC64
2129 case 0xd: /* V9 udivx */
2130 gen_op_udivx_T1_T0();
2134 gen_op_udiv_T1_T0();
2139 gen_op_sdiv_T1_T0();
2146 gen_movl_T0_reg(rd);
2149 case 0x20: /* taddcc */
2150 gen_op_tadd_T1_T0_cc();
2151 gen_movl_T0_reg(rd);
2153 case 0x21: /* tsubcc */
2154 gen_op_tsub_T1_T0_cc();
2155 gen_movl_T0_reg(rd);
2157 case 0x22: /* taddcctv */
2159 gen_op_tadd_T1_T0_ccTV();
2160 gen_movl_T0_reg(rd);
2162 case 0x23: /* tsubcctv */
2164 gen_op_tsub_T1_T0_ccTV();
2165 gen_movl_T0_reg(rd);
2167 case 0x24: /* mulscc */
2168 gen_op_mulscc_T1_T0();
2169 gen_movl_T0_reg(rd);
2171 #ifndef TARGET_SPARC64
2172 case 0x25: /* sll */
2173 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2174 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2175 gen_movl_T0_reg(rd);
2177 case 0x26: /* srl */
2178 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2179 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2180 gen_movl_T0_reg(rd);
2182 case 0x27: /* sra */
2183 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2184 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2185 gen_movl_T0_reg(rd);
2193 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
2195 #ifndef TARGET_SPARC64
2196 case 0x01 ... 0x0f: /* undefined in the
2200 case 0x10 ... 0x1f: /* implementation-dependent
2206 case 0x2: /* V9 wrccr */
2210 case 0x3: /* V9 wrasi */
2212 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
2214 case 0x6: /* V9 wrfprs */
2216 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
2222 case 0xf: /* V9 sir, nop if user */
2223 #if !defined(CONFIG_USER_ONLY)
2228 case 0x13: /* Graphics Status */
2229 if (gen_trap_ifnofpu(dc))
2232 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
2234 case 0x17: /* Tick compare */
2235 #if !defined(CONFIG_USER_ONLY)
2236 if (!supervisor(dc))
2240 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tick_cmpr));
2241 gen_op_wrtick_cmpr();
2243 case 0x18: /* System tick */
2244 #if !defined(CONFIG_USER_ONLY)
2245 if (!supervisor(dc))
2251 case 0x19: /* System tick compare */
2252 #if !defined(CONFIG_USER_ONLY)
2253 if (!supervisor(dc))
2257 gen_op_movtl_env_T0(offsetof(CPUSPARCState, stick_cmpr));
2258 gen_op_wrstick_cmpr();
2261 case 0x10: /* Performance Control */
2262 case 0x11: /* Performance Instrumentation Counter */
2263 case 0x12: /* Dispatch Control */
2264 case 0x14: /* Softint set */
2265 case 0x15: /* Softint clear */
2266 case 0x16: /* Softint write */
2273 #if !defined(CONFIG_USER_ONLY)
2274 case 0x31: /* wrpsr, V9 saved, restored */
2276 if (!supervisor(dc))
2278 #ifdef TARGET_SPARC64
2286 case 2: /* UA2005 allclean */
2287 case 3: /* UA2005 otherw */
2288 case 4: /* UA2005 normalw */
2289 case 5: /* UA2005 invalw */
2296 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
2304 case 0x32: /* wrwim, V9 wrpr */
2306 if (!supervisor(dc))
2309 #ifdef TARGET_SPARC64
2327 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2331 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
2337 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
2340 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
2346 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
2348 case 11: // canrestore
2349 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
2351 case 12: // cleanwin
2352 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
2354 case 13: // otherwin
2355 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
2358 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
2360 case 16: // UA2005 gl
2361 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
2363 case 26: // UA2005 strand status
2364 if (!hypervisor(dc))
2366 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
2372 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
2373 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
2377 case 0x33: /* wrtbr, UA2005 wrhpr */
2379 #ifndef TARGET_SPARC64
2380 if (!supervisor(dc))
2383 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2385 if (!hypervisor(dc))
2390 // XXX gen_op_wrhpstate();
2397 // XXX gen_op_wrhtstate();
2400 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
2403 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
2405 case 31: // hstick_cmpr
2406 gen_op_movtl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2407 gen_op_wrhstick_cmpr();
2409 case 6: // hver readonly
2417 #ifdef TARGET_SPARC64
2418 case 0x2c: /* V9 movcc */
2420 int cc = GET_FIELD_SP(insn, 11, 12);
2421 int cond = GET_FIELD_SP(insn, 14, 17);
2422 if (IS_IMM) { /* immediate */
2423 rs2 = GET_FIELD_SPs(insn, 0, 10);
2424 gen_movl_simm_T1(rs2);
2427 rs2 = GET_FIELD_SP(insn, 0, 4);
2428 gen_movl_reg_T1(rs2);
2430 gen_movl_reg_T0(rd);
2432 if (insn & (1 << 18)) {
2434 gen_cond[0][cond]();
2436 gen_cond[1][cond]();
2440 gen_fcond[cc][cond]();
2443 gen_movl_T0_reg(rd);
2446 case 0x2d: /* V9 sdivx */
2447 gen_op_sdivx_T1_T0();
2448 gen_movl_T0_reg(rd);
2450 case 0x2e: /* V9 popc */
2452 if (IS_IMM) { /* immediate */
2453 rs2 = GET_FIELD_SPs(insn, 0, 12);
2454 gen_movl_simm_T1(rs2);
2455 // XXX optimize: popc(constant)
2458 rs2 = GET_FIELD_SP(insn, 0, 4);
2459 gen_movl_reg_T1(rs2);
2461 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
2463 gen_movl_T0_reg(rd);
2465 case 0x2f: /* V9 movr */
2467 int cond = GET_FIELD_SP(insn, 10, 12);
2468 rs1 = GET_FIELD(insn, 13, 17);
2470 gen_movl_reg_T0(rs1);
2472 if (IS_IMM) { /* immediate */
2473 rs2 = GET_FIELD_SPs(insn, 0, 9);
2474 gen_movl_simm_T1(rs2);
2477 rs2 = GET_FIELD_SP(insn, 0, 4);
2478 gen_movl_reg_T1(rs2);
2480 gen_movl_reg_T0(rd);
2482 gen_movl_T0_reg(rd);
2490 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
2491 #ifdef TARGET_SPARC64
2492 int opf = GET_FIELD_SP(insn, 5, 13);
2493 rs1 = GET_FIELD(insn, 13, 17);
2494 rs2 = GET_FIELD(insn, 27, 31);
2495 if (gen_trap_ifnofpu(dc))
2499 case 0x000: /* VIS I edge8cc */
2500 case 0x001: /* VIS II edge8n */
2501 case 0x002: /* VIS I edge8lcc */
2502 case 0x003: /* VIS II edge8ln */
2503 case 0x004: /* VIS I edge16cc */
2504 case 0x005: /* VIS II edge16n */
2505 case 0x006: /* VIS I edge16lcc */
2506 case 0x007: /* VIS II edge16ln */
2507 case 0x008: /* VIS I edge32cc */
2508 case 0x009: /* VIS II edge32n */
2509 case 0x00a: /* VIS I edge32lcc */
2510 case 0x00b: /* VIS II edge32ln */
2513 case 0x010: /* VIS I array8 */
2514 gen_movl_reg_T0(rs1);
2515 gen_movl_reg_T1(rs2);
2517 gen_movl_T0_reg(rd);
2519 case 0x012: /* VIS I array16 */
2520 gen_movl_reg_T0(rs1);
2521 gen_movl_reg_T1(rs2);
2523 gen_movl_T0_reg(rd);
2525 case 0x014: /* VIS I array32 */
2526 gen_movl_reg_T0(rs1);
2527 gen_movl_reg_T1(rs2);
2529 gen_movl_T0_reg(rd);
2531 case 0x018: /* VIS I alignaddr */
2532 gen_movl_reg_T0(rs1);
2533 gen_movl_reg_T1(rs2);
2535 gen_movl_T0_reg(rd);
2537 case 0x019: /* VIS II bmask */
2538 case 0x01a: /* VIS I alignaddrl */
2541 case 0x020: /* VIS I fcmple16 */
2542 gen_op_load_fpr_DT0(DFPREG(rs1));
2543 gen_op_load_fpr_DT1(DFPREG(rs2));
2545 gen_op_store_DT0_fpr(DFPREG(rd));
2547 case 0x022: /* VIS I fcmpne16 */
2548 gen_op_load_fpr_DT0(DFPREG(rs1));
2549 gen_op_load_fpr_DT1(DFPREG(rs2));
2551 gen_op_store_DT0_fpr(DFPREG(rd));
2553 case 0x024: /* VIS I fcmple32 */
2554 gen_op_load_fpr_DT0(DFPREG(rs1));
2555 gen_op_load_fpr_DT1(DFPREG(rs2));
2557 gen_op_store_DT0_fpr(DFPREG(rd));
2559 case 0x026: /* VIS I fcmpne32 */
2560 gen_op_load_fpr_DT0(DFPREG(rs1));
2561 gen_op_load_fpr_DT1(DFPREG(rs2));
2563 gen_op_store_DT0_fpr(DFPREG(rd));
2565 case 0x028: /* VIS I fcmpgt16 */
2566 gen_op_load_fpr_DT0(DFPREG(rs1));
2567 gen_op_load_fpr_DT1(DFPREG(rs2));
2569 gen_op_store_DT0_fpr(DFPREG(rd));
2571 case 0x02a: /* VIS I fcmpeq16 */
2572 gen_op_load_fpr_DT0(DFPREG(rs1));
2573 gen_op_load_fpr_DT1(DFPREG(rs2));
2575 gen_op_store_DT0_fpr(DFPREG(rd));
2577 case 0x02c: /* VIS I fcmpgt32 */
2578 gen_op_load_fpr_DT0(DFPREG(rs1));
2579 gen_op_load_fpr_DT1(DFPREG(rs2));
2581 gen_op_store_DT0_fpr(DFPREG(rd));
2583 case 0x02e: /* VIS I fcmpeq32 */
2584 gen_op_load_fpr_DT0(DFPREG(rs1));
2585 gen_op_load_fpr_DT1(DFPREG(rs2));
2587 gen_op_store_DT0_fpr(DFPREG(rd));
2589 case 0x031: /* VIS I fmul8x16 */
2590 gen_op_load_fpr_DT0(DFPREG(rs1));
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2593 gen_op_store_DT0_fpr(DFPREG(rd));
2595 case 0x033: /* VIS I fmul8x16au */
2596 gen_op_load_fpr_DT0(DFPREG(rs1));
2597 gen_op_load_fpr_DT1(DFPREG(rs2));
2598 gen_op_fmul8x16au();
2599 gen_op_store_DT0_fpr(DFPREG(rd));
2601 case 0x035: /* VIS I fmul8x16al */
2602 gen_op_load_fpr_DT0(DFPREG(rs1));
2603 gen_op_load_fpr_DT1(DFPREG(rs2));
2604 gen_op_fmul8x16al();
2605 gen_op_store_DT0_fpr(DFPREG(rd));
2607 case 0x036: /* VIS I fmul8sux16 */
2608 gen_op_load_fpr_DT0(DFPREG(rs1));
2609 gen_op_load_fpr_DT1(DFPREG(rs2));
2610 gen_op_fmul8sux16();
2611 gen_op_store_DT0_fpr(DFPREG(rd));
2613 case 0x037: /* VIS I fmul8ulx16 */
2614 gen_op_load_fpr_DT0(DFPREG(rs1));
2615 gen_op_load_fpr_DT1(DFPREG(rs2));
2616 gen_op_fmul8ulx16();
2617 gen_op_store_DT0_fpr(DFPREG(rd));
2619 case 0x038: /* VIS I fmuld8sux16 */
2620 gen_op_load_fpr_DT0(DFPREG(rs1));
2621 gen_op_load_fpr_DT1(DFPREG(rs2));
2622 gen_op_fmuld8sux16();
2623 gen_op_store_DT0_fpr(DFPREG(rd));
2625 case 0x039: /* VIS I fmuld8ulx16 */
2626 gen_op_load_fpr_DT0(DFPREG(rs1));
2627 gen_op_load_fpr_DT1(DFPREG(rs2));
2628 gen_op_fmuld8ulx16();
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2631 case 0x03a: /* VIS I fpack32 */
2632 case 0x03b: /* VIS I fpack16 */
2633 case 0x03d: /* VIS I fpackfix */
2634 case 0x03e: /* VIS I pdist */
2637 case 0x048: /* VIS I faligndata */
2638 gen_op_load_fpr_DT0(DFPREG(rs1));
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_op_faligndata();
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2643 case 0x04b: /* VIS I fpmerge */
2644 gen_op_load_fpr_DT0(DFPREG(rs1));
2645 gen_op_load_fpr_DT1(DFPREG(rs2));
2647 gen_op_store_DT0_fpr(DFPREG(rd));
2649 case 0x04c: /* VIS II bshuffle */
2652 case 0x04d: /* VIS I fexpand */
2653 gen_op_load_fpr_DT0(DFPREG(rs1));
2654 gen_op_load_fpr_DT1(DFPREG(rs2));
2656 gen_op_store_DT0_fpr(DFPREG(rd));
2658 case 0x050: /* VIS I fpadd16 */
2659 gen_op_load_fpr_DT0(DFPREG(rs1));
2660 gen_op_load_fpr_DT1(DFPREG(rs2));
2662 gen_op_store_DT0_fpr(DFPREG(rd));
2664 case 0x051: /* VIS I fpadd16s */
2665 gen_op_load_fpr_FT0(rs1);
2666 gen_op_load_fpr_FT1(rs2);
2668 gen_op_store_FT0_fpr(rd);
2670 case 0x052: /* VIS I fpadd32 */
2671 gen_op_load_fpr_DT0(DFPREG(rs1));
2672 gen_op_load_fpr_DT1(DFPREG(rs2));
2674 gen_op_store_DT0_fpr(DFPREG(rd));
2676 case 0x053: /* VIS I fpadd32s */
2677 gen_op_load_fpr_FT0(rs1);
2678 gen_op_load_fpr_FT1(rs2);
2680 gen_op_store_FT0_fpr(rd);
2682 case 0x054: /* VIS I fpsub16 */
2683 gen_op_load_fpr_DT0(DFPREG(rs1));
2684 gen_op_load_fpr_DT1(DFPREG(rs2));
2686 gen_op_store_DT0_fpr(DFPREG(rd));
2688 case 0x055: /* VIS I fpsub16s */
2689 gen_op_load_fpr_FT0(rs1);
2690 gen_op_load_fpr_FT1(rs2);
2692 gen_op_store_FT0_fpr(rd);
2694 case 0x056: /* VIS I fpsub32 */
2695 gen_op_load_fpr_DT0(DFPREG(rs1));
2696 gen_op_load_fpr_DT1(DFPREG(rs2));
2698 gen_op_store_DT0_fpr(DFPREG(rd));
2700 case 0x057: /* VIS I fpsub32s */
2701 gen_op_load_fpr_FT0(rs1);
2702 gen_op_load_fpr_FT1(rs2);
2704 gen_op_store_FT0_fpr(rd);
2706 case 0x060: /* VIS I fzero */
2707 gen_op_movl_DT0_0();
2708 gen_op_store_DT0_fpr(DFPREG(rd));
2710 case 0x061: /* VIS I fzeros */
2711 gen_op_movl_FT0_0();
2712 gen_op_store_FT0_fpr(rd);
2714 case 0x062: /* VIS I fnor */
2715 gen_op_load_fpr_DT0(DFPREG(rs1));
2716 gen_op_load_fpr_DT1(DFPREG(rs2));
2718 gen_op_store_DT0_fpr(DFPREG(rd));
2720 case 0x063: /* VIS I fnors */
2721 gen_op_load_fpr_FT0(rs1);
2722 gen_op_load_fpr_FT1(rs2);
2724 gen_op_store_FT0_fpr(rd);
2726 case 0x064: /* VIS I fandnot2 */
2727 gen_op_load_fpr_DT1(DFPREG(rs1));
2728 gen_op_load_fpr_DT0(DFPREG(rs2));
2730 gen_op_store_DT0_fpr(DFPREG(rd));
2732 case 0x065: /* VIS I fandnot2s */
2733 gen_op_load_fpr_FT1(rs1);
2734 gen_op_load_fpr_FT0(rs2);
2736 gen_op_store_FT0_fpr(rd);
2738 case 0x066: /* VIS I fnot2 */
2739 gen_op_load_fpr_DT1(DFPREG(rs2));
2741 gen_op_store_DT0_fpr(DFPREG(rd));
2743 case 0x067: /* VIS I fnot2s */
2744 gen_op_load_fpr_FT1(rs2);
2746 gen_op_store_FT0_fpr(rd);
2748 case 0x068: /* VIS I fandnot1 */
2749 gen_op_load_fpr_DT0(DFPREG(rs1));
2750 gen_op_load_fpr_DT1(DFPREG(rs2));
2752 gen_op_store_DT0_fpr(DFPREG(rd));
2754 case 0x069: /* VIS I fandnot1s */
2755 gen_op_load_fpr_FT0(rs1);
2756 gen_op_load_fpr_FT1(rs2);
2758 gen_op_store_FT0_fpr(rd);
2760 case 0x06a: /* VIS I fnot1 */
2761 gen_op_load_fpr_DT1(DFPREG(rs1));
2763 gen_op_store_DT0_fpr(DFPREG(rd));
2765 case 0x06b: /* VIS I fnot1s */
2766 gen_op_load_fpr_FT1(rs1);
2768 gen_op_store_FT0_fpr(rd);
2770 case 0x06c: /* VIS I fxor */
2771 gen_op_load_fpr_DT0(DFPREG(rs1));
2772 gen_op_load_fpr_DT1(DFPREG(rs2));
2774 gen_op_store_DT0_fpr(DFPREG(rd));
2776 case 0x06d: /* VIS I fxors */
2777 gen_op_load_fpr_FT0(rs1);
2778 gen_op_load_fpr_FT1(rs2);
2780 gen_op_store_FT0_fpr(rd);
2782 case 0x06e: /* VIS I fnand */
2783 gen_op_load_fpr_DT0(DFPREG(rs1));
2784 gen_op_load_fpr_DT1(DFPREG(rs2));
2786 gen_op_store_DT0_fpr(DFPREG(rd));
2788 case 0x06f: /* VIS I fnands */
2789 gen_op_load_fpr_FT0(rs1);
2790 gen_op_load_fpr_FT1(rs2);
2792 gen_op_store_FT0_fpr(rd);
2794 case 0x070: /* VIS I fand */
2795 gen_op_load_fpr_DT0(DFPREG(rs1));
2796 gen_op_load_fpr_DT1(DFPREG(rs2));
2798 gen_op_store_DT0_fpr(DFPREG(rd));
2800 case 0x071: /* VIS I fands */
2801 gen_op_load_fpr_FT0(rs1);
2802 gen_op_load_fpr_FT1(rs2);
2804 gen_op_store_FT0_fpr(rd);
2806 case 0x072: /* VIS I fxnor */
2807 gen_op_load_fpr_DT0(DFPREG(rs1));
2808 gen_op_load_fpr_DT1(DFPREG(rs2));
2810 gen_op_store_DT0_fpr(DFPREG(rd));
2812 case 0x073: /* VIS I fxnors */
2813 gen_op_load_fpr_FT0(rs1);
2814 gen_op_load_fpr_FT1(rs2);
2816 gen_op_store_FT0_fpr(rd);
2818 case 0x074: /* VIS I fsrc1 */
2819 gen_op_load_fpr_DT0(DFPREG(rs1));
2820 gen_op_store_DT0_fpr(DFPREG(rd));
2822 case 0x075: /* VIS I fsrc1s */
2823 gen_op_load_fpr_FT0(rs1);
2824 gen_op_store_FT0_fpr(rd);
2826 case 0x076: /* VIS I fornot2 */
2827 gen_op_load_fpr_DT1(DFPREG(rs1));
2828 gen_op_load_fpr_DT0(DFPREG(rs2));
2830 gen_op_store_DT0_fpr(DFPREG(rd));
2832 case 0x077: /* VIS I fornot2s */
2833 gen_op_load_fpr_FT1(rs1);
2834 gen_op_load_fpr_FT0(rs2);
2836 gen_op_store_FT0_fpr(rd);
2838 case 0x078: /* VIS I fsrc2 */
2839 gen_op_load_fpr_DT0(DFPREG(rs2));
2840 gen_op_store_DT0_fpr(DFPREG(rd));
2842 case 0x079: /* VIS I fsrc2s */
2843 gen_op_load_fpr_FT0(rs2);
2844 gen_op_store_FT0_fpr(rd);
2846 case 0x07a: /* VIS I fornot1 */
2847 gen_op_load_fpr_DT0(DFPREG(rs1));
2848 gen_op_load_fpr_DT1(DFPREG(rs2));
2850 gen_op_store_DT0_fpr(DFPREG(rd));
2852 case 0x07b: /* VIS I fornot1s */
2853 gen_op_load_fpr_FT0(rs1);
2854 gen_op_load_fpr_FT1(rs2);
2856 gen_op_store_FT0_fpr(rd);
2858 case 0x07c: /* VIS I for */
2859 gen_op_load_fpr_DT0(DFPREG(rs1));
2860 gen_op_load_fpr_DT1(DFPREG(rs2));
2862 gen_op_store_DT0_fpr(DFPREG(rd));
2864 case 0x07d: /* VIS I fors */
2865 gen_op_load_fpr_FT0(rs1);
2866 gen_op_load_fpr_FT1(rs2);
2868 gen_op_store_FT0_fpr(rd);
2870 case 0x07e: /* VIS I fone */
2871 gen_op_movl_DT0_1();
2872 gen_op_store_DT0_fpr(DFPREG(rd));
2874 case 0x07f: /* VIS I fones */
2875 gen_op_movl_FT0_1();
2876 gen_op_store_FT0_fpr(rd);
2878 case 0x080: /* VIS I shutdown */
2879 case 0x081: /* VIS II siam */
2888 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
2889 #ifdef TARGET_SPARC64
2894 #ifdef TARGET_SPARC64
2895 } else if (xop == 0x39) { /* V9 return */
2896 rs1 = GET_FIELD(insn, 13, 17);
2898 gen_movl_reg_T0(rs1);
2899 if (IS_IMM) { /* immediate */
2900 rs2 = GET_FIELDs(insn, 19, 31);
2901 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
2902 } else { /* register */
2903 rs2 = GET_FIELD(insn, 27, 31);
2907 gen_movl_reg_T1(rs2);
2915 gen_op_check_align_T0_3();
2916 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
2917 dc->npc = DYNAMIC_PC;
2921 rs1 = GET_FIELD(insn, 13, 17);
2922 gen_movl_reg_T0(rs1);
2923 if (IS_IMM) { /* immediate */
2924 rs2 = GET_FIELDs(insn, 19, 31);
2925 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
2926 } else { /* register */
2927 rs2 = GET_FIELD(insn, 27, 31);
2931 gen_movl_reg_T1(rs2);
2938 case 0x38: /* jmpl */
2941 tcg_gen_movi_tl(cpu_T[1], dc->pc);
2942 gen_movl_T1_reg(rd);
2945 gen_op_check_align_T0_3();
2946 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
2947 dc->npc = DYNAMIC_PC;
2950 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
2951 case 0x39: /* rett, V9 return */
2953 if (!supervisor(dc))
2956 gen_op_check_align_T0_3();
2957 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
2958 dc->npc = DYNAMIC_PC;
2959 tcg_gen_helper_0_0(helper_rett);
2963 case 0x3b: /* flush */
2964 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
2966 case 0x3c: /* save */
2969 gen_movl_T0_reg(rd);
2971 case 0x3d: /* restore */
2974 gen_movl_T0_reg(rd);
2976 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
2977 case 0x3e: /* V9 done/retry */
2981 if (!supervisor(dc))
2983 dc->npc = DYNAMIC_PC;
2984 dc->pc = DYNAMIC_PC;
2985 tcg_gen_helper_0_0(helper_done);
2988 if (!supervisor(dc))
2990 dc->npc = DYNAMIC_PC;
2991 dc->pc = DYNAMIC_PC;
2992 tcg_gen_helper_0_0(helper_retry);
3007 case 3: /* load/store instructions */
3009 unsigned int xop = GET_FIELD(insn, 7, 12);
3010 rs1 = GET_FIELD(insn, 13, 17);
3012 gen_movl_reg_T0(rs1);
3013 if (xop == 0x3c || xop == 0x3e)
3015 rs2 = GET_FIELD(insn, 27, 31);
3016 gen_movl_reg_T1(rs2);
3018 else if (IS_IMM) { /* immediate */
3019 rs2 = GET_FIELDs(insn, 19, 31);
3020 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3021 } else { /* register */
3022 rs2 = GET_FIELD(insn, 27, 31);
3026 gen_movl_reg_T1(rs2);
3032 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
3033 (xop > 0x17 && xop <= 0x1d ) ||
3034 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
3036 case 0x0: /* load unsigned word */
3037 gen_op_check_align_T0_3();
3038 ABI32_MASK(cpu_T[0]);
3039 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
3041 case 0x1: /* load unsigned byte */
3042 ABI32_MASK(cpu_T[0]);
3043 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
3045 case 0x2: /* load unsigned halfword */
3046 gen_op_check_align_T0_1();
3047 ABI32_MASK(cpu_T[0]);
3048 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
3050 case 0x3: /* load double word */
3056 r_dword = tcg_temp_new(TCG_TYPE_I64);
3057 gen_op_check_align_T0_7();
3058 ABI32_MASK(cpu_T[0]);
3059 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
3060 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
3061 gen_movl_T0_reg(rd + 1);
3062 tcg_gen_shri_i64(r_dword, r_dword, 32);
3063 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
3066 case 0x9: /* load signed byte */
3067 ABI32_MASK(cpu_T[0]);
3068 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3070 case 0xa: /* load signed halfword */
3071 gen_op_check_align_T0_1();
3072 ABI32_MASK(cpu_T[0]);
3073 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
3075 case 0xd: /* ldstub -- XXX: should be atomically */
3076 tcg_gen_movi_i32(cpu_tmp0, 0xff);
3077 ABI32_MASK(cpu_T[0]);
3078 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3079 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
3081 case 0x0f: /* swap register with memory. Also atomically */
3082 gen_op_check_align_T0_3();
3083 gen_movl_reg_T1(rd);
3084 ABI32_MASK(cpu_T[0]);
3085 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
3086 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3087 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
3089 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3090 case 0x10: /* load word alternate */
3091 #ifndef TARGET_SPARC64
3094 if (!supervisor(dc))
3097 gen_op_check_align_T0_3();
3098 gen_ld_asi(insn, 4, 0);
3100 case 0x11: /* load unsigned byte alternate */
3101 #ifndef TARGET_SPARC64
3104 if (!supervisor(dc))
3107 gen_ld_asi(insn, 1, 0);
3109 case 0x12: /* load unsigned halfword alternate */
3110 #ifndef TARGET_SPARC64
3113 if (!supervisor(dc))
3116 gen_op_check_align_T0_1();
3117 gen_ld_asi(insn, 2, 0);
3119 case 0x13: /* load double word alternate */
3120 #ifndef TARGET_SPARC64
3123 if (!supervisor(dc))
3128 gen_op_check_align_T0_7();
3130 gen_movl_T0_reg(rd + 1);
3132 case 0x19: /* load signed byte alternate */
3133 #ifndef TARGET_SPARC64
3136 if (!supervisor(dc))
3139 gen_ld_asi(insn, 1, 1);
3141 case 0x1a: /* load signed halfword alternate */
3142 #ifndef TARGET_SPARC64
3145 if (!supervisor(dc))
3148 gen_op_check_align_T0_1();
3149 gen_ld_asi(insn, 2, 1);
3151 case 0x1d: /* ldstuba -- XXX: should be atomically */
3152 #ifndef TARGET_SPARC64
3155 if (!supervisor(dc))
3158 gen_ldstub_asi(insn);
3160 case 0x1f: /* swap reg with alt. memory. Also atomically */
3161 #ifndef TARGET_SPARC64
3164 if (!supervisor(dc))
3167 gen_op_check_align_T0_3();
3168 gen_movl_reg_T1(rd);
3172 #ifndef TARGET_SPARC64
3173 case 0x30: /* ldc */
3174 case 0x31: /* ldcsr */
3175 case 0x33: /* lddc */
3179 #ifdef TARGET_SPARC64
3180 case 0x08: /* V9 ldsw */
3181 gen_op_check_align_T0_3();
3182 ABI32_MASK(cpu_T[0]);
3183 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
3185 case 0x0b: /* V9 ldx */
3186 gen_op_check_align_T0_7();
3187 ABI32_MASK(cpu_T[0]);
3188 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
3190 case 0x18: /* V9 ldswa */
3191 gen_op_check_align_T0_3();
3192 gen_ld_asi(insn, 4, 1);
3194 case 0x1b: /* V9 ldxa */
3195 gen_op_check_align_T0_7();
3196 gen_ld_asi(insn, 8, 0);
3198 case 0x2d: /* V9 prefetch, no effect */
3200 case 0x30: /* V9 ldfa */
3201 gen_op_check_align_T0_3();
3202 gen_ldf_asi(insn, 4, rd);
3204 case 0x33: /* V9 lddfa */
3205 gen_op_check_align_T0_3();
3206 gen_ldf_asi(insn, 8, DFPREG(rd));
3208 case 0x3d: /* V9 prefetcha, no effect */
3210 case 0x32: /* V9 ldqfa */
3211 #if defined(CONFIG_USER_ONLY)
3212 gen_op_check_align_T0_3();
3213 gen_ldf_asi(insn, 16, QFPREG(rd));
3222 gen_movl_T1_reg(rd);
3223 #ifdef TARGET_SPARC64
3226 } else if (xop >= 0x20 && xop < 0x24) {
3227 if (gen_trap_ifnofpu(dc))
3230 case 0x20: /* load fpreg */
3231 gen_op_check_align_T0_3();
3233 gen_op_store_FT0_fpr(rd);
3235 case 0x21: /* load fsr */
3236 gen_op_check_align_T0_3();
3240 case 0x22: /* load quad fpreg */
3241 #if defined(CONFIG_USER_ONLY)
3242 gen_op_check_align_T0_7();
3244 gen_op_store_QT0_fpr(QFPREG(rd));
3249 case 0x23: /* load double fpreg */
3250 gen_op_check_align_T0_7();
3252 gen_op_store_DT0_fpr(DFPREG(rd));
3257 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
3258 xop == 0xe || xop == 0x1e) {
3259 gen_movl_reg_T1(rd);
3261 case 0x4: /* store word */
3262 gen_op_check_align_T0_3();
3263 ABI32_MASK(cpu_T[0]);
3264 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3266 case 0x5: /* store byte */
3267 ABI32_MASK(cpu_T[0]);
3268 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
3270 case 0x6: /* store halfword */
3271 gen_op_check_align_T0_1();
3272 ABI32_MASK(cpu_T[0]);
3273 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
3275 case 0x7: /* store double word */
3279 TCGv r_dword, r_low;
3281 gen_op_check_align_T0_7();
3282 r_dword = tcg_temp_new(TCG_TYPE_I64);
3283 r_low = tcg_temp_new(TCG_TYPE_I32);
3284 gen_movl_reg_TN(rd + 1, r_low);
3285 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3287 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
3290 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3291 case 0x14: /* store word alternate */
3292 #ifndef TARGET_SPARC64
3295 if (!supervisor(dc))
3298 gen_op_check_align_T0_3();
3299 gen_st_asi(insn, 4);
3301 case 0x15: /* store byte alternate */
3302 #ifndef TARGET_SPARC64
3305 if (!supervisor(dc))
3308 gen_st_asi(insn, 1);
3310 case 0x16: /* store halfword alternate */
3311 #ifndef TARGET_SPARC64
3314 if (!supervisor(dc))
3317 gen_op_check_align_T0_1();
3318 gen_st_asi(insn, 2);
3320 case 0x17: /* store double word alternate */
3321 #ifndef TARGET_SPARC64
3324 if (!supervisor(dc))
3331 TCGv r_dword, r_temp, r_size;
3333 gen_op_check_align_T0_7();
3334 r_dword = tcg_temp_new(TCG_TYPE_I64);
3335 r_temp = tcg_temp_new(TCG_TYPE_I32);
3336 r_size = tcg_temp_new(TCG_TYPE_I32);
3337 gen_movl_reg_TN(rd + 1, r_temp);
3338 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3340 #ifdef TARGET_SPARC64
3344 offset = GET_FIELD(insn, 25, 31);
3345 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
3346 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
3349 asi = GET_FIELD(insn, 19, 26);
3350 tcg_gen_movi_i32(r_temp, asi);
3351 #ifdef TARGET_SPARC64
3354 tcg_gen_movi_i32(r_size, 8);
3355 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
3359 #ifdef TARGET_SPARC64
3360 case 0x0e: /* V9 stx */
3361 gen_op_check_align_T0_7();
3362 ABI32_MASK(cpu_T[0]);
3363 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
3365 case 0x1e: /* V9 stxa */
3366 gen_op_check_align_T0_7();
3367 gen_st_asi(insn, 8);
3373 } else if (xop > 0x23 && xop < 0x28) {
3374 if (gen_trap_ifnofpu(dc))
3378 gen_op_check_align_T0_3();
3379 gen_op_load_fpr_FT0(rd);
3382 case 0x25: /* stfsr, V9 stxfsr */
3383 #ifdef CONFIG_USER_ONLY
3384 gen_op_check_align_T0_3();
3390 #ifdef TARGET_SPARC64
3391 #if defined(CONFIG_USER_ONLY)
3392 /* V9 stqf, store quad fpreg */
3393 gen_op_check_align_T0_7();
3394 gen_op_load_fpr_QT0(QFPREG(rd));
3400 #else /* !TARGET_SPARC64 */
3401 /* stdfq, store floating point queue */
3402 #if defined(CONFIG_USER_ONLY)
3405 if (!supervisor(dc))
3407 if (gen_trap_ifnofpu(dc))
3413 gen_op_check_align_T0_7();
3414 gen_op_load_fpr_DT0(DFPREG(rd));
3420 } else if (xop > 0x33 && xop < 0x3f) {
3422 #ifdef TARGET_SPARC64
3423 case 0x34: /* V9 stfa */
3424 gen_op_check_align_T0_3();
3425 gen_op_load_fpr_FT0(rd);
3426 gen_stf_asi(insn, 4, rd);
3428 case 0x36: /* V9 stqfa */
3429 #if defined(CONFIG_USER_ONLY)
3430 gen_op_check_align_T0_7();
3431 gen_op_load_fpr_QT0(QFPREG(rd));
3432 gen_stf_asi(insn, 16, QFPREG(rd));
3437 case 0x37: /* V9 stdfa */
3438 gen_op_check_align_T0_3();
3439 gen_op_load_fpr_DT0(DFPREG(rd));
3440 gen_stf_asi(insn, 8, DFPREG(rd));
3442 case 0x3c: /* V9 casa */
3443 gen_op_check_align_T0_3();
3444 gen_cas_asi(insn, rd);
3445 gen_movl_T1_reg(rd);
3447 case 0x3e: /* V9 casxa */
3448 gen_op_check_align_T0_7();
3449 gen_casx_asi(insn, rd);
3450 gen_movl_T1_reg(rd);
3453 case 0x34: /* stc */
3454 case 0x35: /* stcsr */
3455 case 0x36: /* stdcq */
3456 case 0x37: /* stdc */
3468 /* default case for non jump instructions */
3469 if (dc->npc == DYNAMIC_PC) {
3470 dc->pc = DYNAMIC_PC;
3472 } else if (dc->npc == JUMP_PC) {
3473 /* we can do a static jump */
3474 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1]);
3478 dc->npc = dc->npc + 4;
3484 gen_op_exception(TT_ILL_INSN);
3487 #if !defined(CONFIG_USER_ONLY)
3490 gen_op_exception(TT_PRIV_INSN);
3495 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
3498 #ifndef TARGET_SPARC64
3501 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
3506 #ifndef TARGET_SPARC64
3509 gen_op_exception(TT_NCP_INSN);
/* TCG macro-expansion hook registered via tcg_set_macro_func() in
 * cpu_sparc_init(); body elided in this listing — TODO confirm against
 * the full source before relying on its behavior. */
3515 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/*
 * Core translation loop: disassemble guest SPARC instructions starting
 * at tb->pc and emit TCG intermediate ops until control flow becomes
 * dynamic, a page boundary is crossed, a breakpoint hits, or the op
 * buffer is nearly full.
 *
 * @tb:  translation block being filled (pc / cs_base supply PC and NPC)
 * @spc: non-zero selects "search PC" mode, which records the
 *       op-index -> guest pc/npc mapping tables instead of a plain
 *       translation
 * @env: CPU state; supplies MMU index, FPU-enable flag, breakpoints and
 *       single-step mode
 *
 * NOTE(review): several lines are elided in this listing (loop
 * openings, some branches); comments below describe only visible code.
 */
3519 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
3520 int spc, CPUSPARCState *env)
3522 target_ulong pc_start, last_pc;
3523 uint16_t *gen_opc_end;
3524 DisasContext dc1, *dc = &dc1;
/* Per-block disassembly context: zeroed, then seeded from tb and env. */
3527 memset(dc, 0, sizeof(DisasContext));
3532 dc->npc = (target_ulong) tb->cs_base;
3533 dc->mem_idx = cpu_mmu_index(env);
3534 dc->fpu_enabled = cpu_fpu_enabled(env);
3535 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3537 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
3538 cpu_regwptr = tcg_temp_new(TCG_TYPE_PTR); // XXX
/* Emit a debug trap for any guest breakpoint that falls in this block. */
3541 if (env->nb_breakpoints > 0) {
3542 for(j = 0; j < env->nb_breakpoints; j++) {
3543 if (env->breakpoints[j] == dc->pc) {
3544 if (dc->pc != pc_start)
3546 tcg_gen_helper_0_0(helper_debug);
/* Search-PC mode: record guest pc/npc for each generated op index. */
3555 fprintf(logfile, "Search PC...\n");
3556 j = gen_opc_ptr - gen_opc_buf;
3560 gen_opc_instr_start[lj++] = 0;
3561 gen_opc_pc[lj] = dc->pc;
3562 gen_opc_npc[lj] = dc->npc;
3563 gen_opc_instr_start[lj] = 1;
/* Translate one guest instruction; disas_sparc_insn advances dc->pc. */
3567 disas_sparc_insn(dc);
3571 /* if the next PC is different, we abort now */
3572 if (dc->pc != (last_pc + 4))
3574 /* if we reach a page boundary, we stop generation so that the
3575 PC of a TT_TFAULT exception is always in the right page */
3576 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
3578 /* if single step mode, we generate only one instruction and
3579 generate an exception */
3580 if (env->singlestep_enabled) {
/* Stop before the op buffer or the 32-byte page-end margin is exceeded. */
3585 } while ((gen_opc_ptr < gen_opc_end) &&
3586 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
/* Block epilogue: chain directly when both PC and NPC are static. */
3590 if (dc->pc != DYNAMIC_PC &&
3591 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
3592 /* static PC and NPC: we can use direct chaining */
3593 gen_branch(dc, dc->pc, dc->npc);
3595 if (dc->pc != DYNAMIC_PC)
3601 *gen_opc_ptr = INDEX_op_end;
/* Search-PC mode: pad the instr-start table; save conditional-jump
 * targets so the exception path can reconstruct JUMP_PC npc values. */
3603 j = gen_opc_ptr - gen_opc_buf;
3606 gen_opc_instr_start[lj++] = 0;
3612 gen_opc_jump_pc[0] = dc->jump_pc[0];
3613 gen_opc_jump_pc[1] = dc->jump_pc[1];
/* Record how many guest bytes this TB covers. */
3615 tb->size = last_pc + 4 - pc_start;
/* Optional log of the guest code just translated. */
3618 if (loglevel & CPU_LOG_TB_IN_ASM) {
3619 fprintf(logfile, "--------------\n");
3620 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3621 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
3622 fprintf(logfile, "\n");
3628 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
3630 return gen_intermediate_code_internal(tb, 0, env);
3633 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
3635 return gen_intermediate_code_internal(tb, 1, env);
/*
 * Reset CPU state to power-on values.
 * Recomputes the register-window pointer from cwp, then applies either
 * user-mode-only defaults or full-system reset state; sparc64 also
 * initializes window-management counters and PSTATE, while sparc32
 * reinitializes the MMU control register.
 * NOTE(review): this listing elides the surrounding #else/#endif lines;
 * comments below cover only the visible assignments.
 */
3638 void cpu_reset(CPUSPARCState *env)
/* Point regwptr at the current window inside regbase (16 regs/window). */
3643 env->regwptr = env->regbase + (env->cwp * 16);
3644 #if defined(CONFIG_USER_ONLY)
3645 env->user_mode_only = 1;
3646 #ifdef TARGET_SPARC64
/* User mode on sparc64: leave two windows spare, enable FPU + interrupts. */
3647 env->cleanwin = NWINDOWS - 2;
3648 env->cansave = NWINDOWS - 2;
3649 env->pstate = PS_RMO | PS_PEF | PS_IE;
3650 env->asi = 0x82; // Primary no-fault
3656 #ifdef TARGET_SPARC64
/* System emulation: start privileged at the sparc64 reset vector. */
3657 env->pstate = PS_PRIV;
3658 env->hpstate = HS_PRIV;
3659 env->pc = 0x1fff0000000ULL;
/* sparc32 MMU: clear enable/no-fault bits, restore the boot-mode bits. */
3662 env->mmuregs[0] &= ~(MMU_E | MMU_NF);
3663 env->mmuregs[0] |= env->mmu_bm;
/* NPC always trails PC by one instruction after reset. */
3665 env->npc = env->pc + 4;
/*
 * Allocate and initialize a CPUSPARCState for the given model name.
 * Looks the model up in sparc_defs[], copies its version/FPU/MMU
 * parameters into the fresh state, and (first call) registers the TCG
 * globals used by the translator.  Returns the new state; the error
 * paths for an unknown model / failed allocation are elided in this
 * listing — TODO confirm they return NULL in the full source.
 */
3669 CPUSPARCState *cpu_sparc_init(const char *cpu_model)
3672 const sparc_def_t *def;
3675 def = cpu_sparc_find_by_name(cpu_model);
/* Zeroed allocation: all fields start at 0 unless set below. */
3679 env = qemu_mallocz(sizeof(CPUSPARCState));
3683 env->cpu_model_str = cpu_model;
3684 env->version = def->iu_version;
/* FSR initially holds the FPU version field from the model definition. */
3685 env->fsr = def->fpu_version;
3686 #if !defined(TARGET_SPARC64)
/* sparc32 only: per-model MMU boot-mode bits and register write masks. */
3687 env->mmu_bm = def->mmu_bm;
3688 env->mmu_ctpr_mask = def->mmu_ctpr_mask;
3689 env->mmu_cxr_mask = def->mmu_cxr_mask;
3690 env->mmu_sfsr_mask = def->mmu_sfsr_mask;
3691 env->mmu_trcr_mask = def->mmu_trcr_mask;
3692 env->mmuregs[0] |= def->mmu_version;
3693 cpu_sparc_set_id(env, 0);
3696 /* init various static tables */
/* Register the translator's TCG globals (done once; guard elided). */
3700 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
3701 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
3702 //#if TARGET_LONG_BITS > HOST_LONG_BITS
3703 #ifdef TARGET_SPARC64
/* sparc64: T0-T2 live in CPUState memory slots (too wide for host regs). */
3704 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
3705 TCG_AREG0, offsetof(CPUState, t0), "T0");
3706 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
3707 TCG_AREG0, offsetof(CPUState, t1), "T1");
3708 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
3709 TCG_AREG0, offsetof(CPUState, t2), "T2");
/* sparc32: T0-T2 map directly onto reserved host registers AREG1-3. */
3711 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
3712 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
3713 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
3722 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
3724 #if !defined(TARGET_SPARC64)
3725 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
/*
 * Table of supported CPU models, selected by name via
 * cpu_sparc_find_by_name().  iu_version encodes implementation/version
 * fields (and, on sparc64, MAXTL and NWINDOWS-1); the mmu_* masks limit
 * which bits of the sparc32 MMU registers are writable per model.
 * NOTE(review): the per-entry brace lines and the #else separating the
 * 64-bit and 32-bit halves are elided in this listing.
 */
3729 static const sparc_def_t sparc_defs[] = {
/* 64-bit (SPARC V9) models. */
3730 #ifdef TARGET_SPARC64
3732 .name = "Fujitsu Sparc64",
3733 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
3734 | (MAXTL << 8) | (NWINDOWS - 1)),
3735 .fpu_version = 0x00000000,
3739 .name = "Fujitsu Sparc64 III",
3740 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
3741 | (MAXTL << 8) | (NWINDOWS - 1)),
3742 .fpu_version = 0x00000000,
3746 .name = "Fujitsu Sparc64 IV",
3747 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
3748 | (MAXTL << 8) | (NWINDOWS - 1)),
3749 .fpu_version = 0x00000000,
3753 .name = "Fujitsu Sparc64 V",
3754 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
3755 | (MAXTL << 8) | (NWINDOWS - 1)),
3756 .fpu_version = 0x00000000,
3760 .name = "TI UltraSparc I",
3761 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
3762 | (MAXTL << 8) | (NWINDOWS - 1)),
3763 .fpu_version = 0x00000000,
3767 .name = "TI UltraSparc II",
3768 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
3769 | (MAXTL << 8) | (NWINDOWS - 1)),
3770 .fpu_version = 0x00000000,
3774 .name = "TI UltraSparc IIi",
3775 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
3776 | (MAXTL << 8) | (NWINDOWS - 1)),
3777 .fpu_version = 0x00000000,
3781 .name = "TI UltraSparc IIe",
3782 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
3783 | (MAXTL << 8) | (NWINDOWS - 1)),
3784 .fpu_version = 0x00000000,
3788 .name = "Sun UltraSparc III",
3789 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
3790 | (MAXTL << 8) | (NWINDOWS - 1)),
3791 .fpu_version = 0x00000000,
3795 .name = "Sun UltraSparc III Cu",
3796 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
3797 | (MAXTL << 8) | (NWINDOWS - 1)),
3798 .fpu_version = 0x00000000,
3802 .name = "Sun UltraSparc IIIi",
3803 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
3804 | (MAXTL << 8) | (NWINDOWS - 1)),
3805 .fpu_version = 0x00000000,
3809 .name = "Sun UltraSparc IV",
3810 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
3811 | (MAXTL << 8) | (NWINDOWS - 1)),
3812 .fpu_version = 0x00000000,
3816 .name = "Sun UltraSparc IV+",
3817 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
3818 | (MAXTL << 8) | (NWINDOWS - 1)),
3819 .fpu_version = 0x00000000,
3823 .name = "Sun UltraSparc IIIi+",
3824 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
3825 | (MAXTL << 8) | (NWINDOWS - 1)),
3826 .fpu_version = 0x00000000,
3830 .name = "NEC UltraSparc I",
3831 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
3832 | (MAXTL << 8) | (NWINDOWS - 1)),
3833 .fpu_version = 0x00000000,
/* 32-bit (SPARC V8) models — the non-TARGET_SPARC64 branch; the #else
 * line is elided in this listing. */
3838 .name = "Fujitsu MB86900",
3839 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
3840 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3841 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
3842 .mmu_bm = 0x00004000,
3843 .mmu_ctpr_mask = 0x007ffff0,
3844 .mmu_cxr_mask = 0x0000003f,
3845 .mmu_sfsr_mask = 0xffffffff,
3846 .mmu_trcr_mask = 0xffffffff,
3849 .name = "Fujitsu MB86904",
3850 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
3851 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3852 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
3853 .mmu_bm = 0x00004000,
3854 .mmu_ctpr_mask = 0x00ffffc0,
3855 .mmu_cxr_mask = 0x000000ff,
3856 .mmu_sfsr_mask = 0x00016fff,
3857 .mmu_trcr_mask = 0x00ffffff,
3860 .name = "Fujitsu MB86907",
3861 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
3862 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3863 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
3864 .mmu_bm = 0x00004000,
3865 .mmu_ctpr_mask = 0xffffffc0,
3866 .mmu_cxr_mask = 0x000000ff,
3867 .mmu_sfsr_mask = 0x00016fff,
3868 .mmu_trcr_mask = 0xffffffff,
3871 .name = "LSI L64811",
3872 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
3873 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
3874 .mmu_version = 0x10 << 24,
3875 .mmu_bm = 0x00004000,
3876 .mmu_ctpr_mask = 0x007ffff0,
3877 .mmu_cxr_mask = 0x0000003f,
3878 .mmu_sfsr_mask = 0xffffffff,
3879 .mmu_trcr_mask = 0xffffffff,
3882 .name = "Cypress CY7C601",
3883 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
3884 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
3885 .mmu_version = 0x10 << 24,
3886 .mmu_bm = 0x00004000,
3887 .mmu_ctpr_mask = 0x007ffff0,
3888 .mmu_cxr_mask = 0x0000003f,
3889 .mmu_sfsr_mask = 0xffffffff,
3890 .mmu_trcr_mask = 0xffffffff,
3893 .name = "Cypress CY7C611",
3894 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
3895 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
3896 .mmu_version = 0x10 << 24,
3897 .mmu_bm = 0x00004000,
3898 .mmu_ctpr_mask = 0x007ffff0,
3899 .mmu_cxr_mask = 0x0000003f,
3900 .mmu_sfsr_mask = 0xffffffff,
3901 .mmu_trcr_mask = 0xffffffff,
3904 .name = "TI SuperSparc II",
3905 .iu_version = 0x40000000,
3906 .fpu_version = 0 << 17,
3907 .mmu_version = 0x04000000,
3908 .mmu_bm = 0x00002000,
3909 .mmu_ctpr_mask = 0xffffffc0,
3910 .mmu_cxr_mask = 0x0000ffff,
3911 .mmu_sfsr_mask = 0xffffffff,
3912 .mmu_trcr_mask = 0xffffffff,
3915 .name = "TI MicroSparc I",
3916 .iu_version = 0x41000000,
3917 .fpu_version = 4 << 17,
3918 .mmu_version = 0x41000000,
3919 .mmu_bm = 0x00004000,
3920 .mmu_ctpr_mask = 0x007ffff0,
3921 .mmu_cxr_mask = 0x0000003f,
3922 .mmu_sfsr_mask = 0x00016fff,
3923 .mmu_trcr_mask = 0x0000003f,
3926 .name = "TI MicroSparc II",
3927 .iu_version = 0x42000000,
3928 .fpu_version = 4 << 17,
3929 .mmu_version = 0x02000000,
3930 .mmu_bm = 0x00004000,
3931 .mmu_ctpr_mask = 0x00ffffc0,
3932 .mmu_cxr_mask = 0x000000ff,
3933 .mmu_sfsr_mask = 0x00016bff,
3934 .mmu_trcr_mask = 0x00ffffff,
3937 .name = "TI MicroSparc IIep",
3938 .iu_version = 0x42000000,
3939 .fpu_version = 4 << 17,
3940 .mmu_version = 0x04000000,
3941 .mmu_bm = 0x00004000,
3942 .mmu_ctpr_mask = 0x00ffffc0,
3943 .mmu_cxr_mask = 0x000000ff,
3944 .mmu_sfsr_mask = 0x00016bff,
3945 .mmu_trcr_mask = 0x00ffffff,
3948 .name = "TI SuperSparc 51",
3949 .iu_version = 0x43000000,
3950 .fpu_version = 0 << 17,
3951 .mmu_version = 0x04000000,
3952 .mmu_bm = 0x00002000,
3953 .mmu_ctpr_mask = 0xffffffc0,
3954 .mmu_cxr_mask = 0x0000ffff,
3955 .mmu_sfsr_mask = 0xffffffff,
3956 .mmu_trcr_mask = 0xffffffff,
3959 .name = "TI SuperSparc 61",
3960 .iu_version = 0x44000000,
3961 .fpu_version = 0 << 17,
3962 .mmu_version = 0x04000000,
3963 .mmu_bm = 0x00002000,
3964 .mmu_ctpr_mask = 0xffffffc0,
3965 .mmu_cxr_mask = 0x0000ffff,
3966 .mmu_sfsr_mask = 0xffffffff,
3967 .mmu_trcr_mask = 0xffffffff,
3970 .name = "Ross RT625",
3971 .iu_version = 0x1e000000,
3972 .fpu_version = 1 << 17,
3973 .mmu_version = 0x1e000000,
3974 .mmu_bm = 0x00004000,
3975 .mmu_ctpr_mask = 0x007ffff0,
3976 .mmu_cxr_mask = 0x0000003f,
3977 .mmu_sfsr_mask = 0xffffffff,
3978 .mmu_trcr_mask = 0xffffffff,
3981 .name = "Ross RT620",
3982 .iu_version = 0x1f000000,
3983 .fpu_version = 1 << 17,
3984 .mmu_version = 0x1f000000,
3985 .mmu_bm = 0x00004000,
3986 .mmu_ctpr_mask = 0x007ffff0,
3987 .mmu_cxr_mask = 0x0000003f,
3988 .mmu_sfsr_mask = 0xffffffff,
3989 .mmu_trcr_mask = 0xffffffff,
3992 .name = "BIT B5010",
3993 .iu_version = 0x20000000,
3994 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
3995 .mmu_version = 0x20000000,
3996 .mmu_bm = 0x00004000,
3997 .mmu_ctpr_mask = 0x007ffff0,
3998 .mmu_cxr_mask = 0x0000003f,
3999 .mmu_sfsr_mask = 0xffffffff,
4000 .mmu_trcr_mask = 0xffffffff,
4003 .name = "Matsushita MN10501",
4004 .iu_version = 0x50000000,
4005 .fpu_version = 0 << 17,
4006 .mmu_version = 0x50000000,
4007 .mmu_bm = 0x00004000,
4008 .mmu_ctpr_mask = 0x007ffff0,
4009 .mmu_cxr_mask = 0x0000003f,
4010 .mmu_sfsr_mask = 0xffffffff,
4011 .mmu_trcr_mask = 0xffffffff,
4014 .name = "Weitek W8601",
4015 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
4016 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
4017 .mmu_version = 0x10 << 24,
4018 .mmu_bm = 0x00004000,
4019 .mmu_ctpr_mask = 0x007ffff0,
4020 .mmu_cxr_mask = 0x0000003f,
4021 .mmu_sfsr_mask = 0xffffffff,
4022 .mmu_trcr_mask = 0xffffffff,
/* Two further entries whose .name lines are elided in this listing
 * (impl 0xf variants) — TODO confirm names against the full source. */
4026 .iu_version = 0xf2000000,
4027 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4028 .mmu_version = 0xf2000000,
4029 .mmu_bm = 0x00004000,
4030 .mmu_ctpr_mask = 0x007ffff0,
4031 .mmu_cxr_mask = 0x0000003f,
4032 .mmu_sfsr_mask = 0xffffffff,
4033 .mmu_trcr_mask = 0xffffffff,
4037 .iu_version = 0xf3000000,
4038 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4039 .mmu_version = 0xf3000000,
4040 .mmu_bm = 0x00004000,
4041 .mmu_ctpr_mask = 0x007ffff0,
4042 .mmu_cxr_mask = 0x0000003f,
4043 .mmu_sfsr_mask = 0xffffffff,
4044 .mmu_trcr_mask = 0xffffffff,
4049 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
4053 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4054 if (strcasecmp(name, sparc_defs[i].name) == 0) {
4055 return &sparc_defs[i];
4061 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
4065 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4066 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
4068 sparc_defs[i].iu_version,
4069 sparc_defs[i].fpu_version,
4070 sparc_defs[i].mmu_version);
/* Map a PSR condition-code bit to its display character: yields 'b'
 * when bit mask 'a' is set in env->psr, '-' otherwise.  Relies on an
 * `env` variable in the expansion scope (used by cpu_dump_state).
 * Arguments are parenthesized so caller expressions like `X | Y`
 * cannot change precedence inside the expansion. */
#define GET_FLAG(a, b) ((env->psr & (a)) ? (b) : '-')
/*
 * Dump CPU registers and status to a stream for debugging / monitor use.
 * Prints PC/NPC, globals, the current register window (%o/%l/%i via
 * regwptr), FP registers, then either sparc64 state (pstate/ccr/asi/
 * window counters) or the sparc32 PSR flags and WIM.
 * NOTE(review): loop closings and the #else between the two halves are
 * elided in this listing; comments cover only the visible lines.
 */
4076 void cpu_dump_state(CPUState *env, FILE *f,
4077 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
4082 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
/* Global registers, printed four per row. */
4083 cpu_fprintf(f, "General Registers:\n");
4084 for (i = 0; i < 4; i++)
4085 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4086 cpu_fprintf(f, "\n");
4088 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
/* Current window: x selects the %o, %l, %i register groups in turn. */
4089 cpu_fprintf(f, "\nCurrent Register Window:\n");
4090 for (x = 0; x < 3; x++) {
4091 for (i = 0; i < 4; i++)
4092 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4093 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
4094 env->regwptr[i + x * 8]);
4095 cpu_fprintf(f, "\n");
4097 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4098 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
4099 env->regwptr[i + x * 8]);
4100 cpu_fprintf(f, "\n");
/* Floating point register file. */
4102 cpu_fprintf(f, "\nFloating Point Registers:\n");
4103 for (i = 0; i < 32; i++) {
4105 cpu_fprintf(f, "%%f%02d:", i);
4106 cpu_fprintf(f, " %016lf", env->fpr[i]);
4108 cpu_fprintf(f, "\n");
4110 #ifdef TARGET_SPARC64
/* sparc64: processor state, condition codes, and window counters. */
4111 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
4112 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
4113 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
4114 env->cansave, env->canrestore, env->otherwin, env->wstate,
4115 env->cleanwin, NWINDOWS - 1 - env->cwp);
/* sparc32: PSR with decoded ZVNC/S/P/E flags plus the window mask. */
4117 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
4118 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
4119 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
4120 env->psrs?'S':'-', env->psrps?'P':'-',
4121 env->psret?'E':'-', env->wim);
4123 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
/*
 * Return the guest-physical address backing a guest-virtual address,
 * for debugger (gdbstub) accesses.  User-only build: trivial variant
 * (body elided in this listing — presumably identity mapping).
 * Softmmu build: walks the MMU via get_physical_address() with
 * MMU_KERNEL_IDX; the two visible calls (rw=2 vs rw=0) presumably sit
 * under different target #ifdef branches whose preprocessor lines are
 * elided here — TODO confirm against the full source.  Physical pages
 * with no backing RAM/ROM (IO_MEM_UNASSIGNED) are rejected.
 */
4126 #if defined(CONFIG_USER_ONLY)
4127 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4133 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
4134 int *access_index, target_ulong address, int rw,
4137 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4139 target_phys_addr_t phys_addr;
4140 int prot, access_index;
4142 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
4143 MMU_KERNEL_IDX) != 0)
4144 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
4145 0, MMU_KERNEL_IDX) != 0)
4147 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
4153 void helper_flush(target_ulong addr)
4156 tb_invalidate_page_range(addr, addr + 8);