4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
/* Translation-time PC tracking: a real target address is stored directly in
 * DisasContext.pc/npc; these sentinels mark the run-time-only cases. */
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
/* cpu_env: pointer to the CPU state; cpu_T[0..2]: the three scratch
 * "T" registers used by the old micro-op style; cpu_regwptr: pointer to
 * the current register-window slice. */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr;
50 /* local register indexes (only used inside old micro ops) */
/* Per-translation-block disassembly state.  pc/npc follow the SPARC
 * delayed-branch model: npc is the address of the instruction executed
 * after the one at pc. */
53 typedef struct DisasContext {
54 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
55 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
56 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
60 struct TranslationBlock *tb;
/* CPU model description: name, version registers and MMU feature masks.
 * (Remaining fields are not visible in this excerpt.) */
63 typedef struct sparc_def_t sparc_def_t;
66 const unsigned char *name;
67 target_ulong iu_version;
71 uint32_t mmu_ctpr_mask;
72 uint32_t mmu_cxr_mask;
73 uint32_t mmu_sfsr_mask;
74 uint32_t mmu_trcr_mask;
/* Look up a CPU model definition by name; returns NULL-style failure is
 * presumed but not visible here — TODO confirm against the definition. */
77 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
/* Extract an instruction bit-field using the SPARC manual's "non-native"
 * bit numbering, in which bit 0 is the MOST significant bit of the
 * 32-bit word; FROM..TO is inclusive and counts down from the MSB. */
#define GET_FIELD(X, FROM, TO)                                          \
    (((X) >> (31 - (TO))) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Extract a bit-field with conventional numbering (bit 0 == 2^0).
 * Algebraically identical to GET_FIELD(X, 31 - (TO), 31 - (FROM)):
 * shift the low end of the field down, then mask to its width. */
#define GET_FIELD_SP(X, FROM, TO)                                       \
    (((X) >> (FROM)) & ((1 << ((TO) - (FROM) + 1)) - 1))
/* Signed variants: extract the field, then sign-extend it from its own
 * width ((b) - (a) + 1 bits) via sign_extend(). */
90 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
91 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map an architectural double/quad FP register number to the internal
 * index.  Two variants appear below; the surrounding conditional
 * directives are not visible in this excerpt — presumably the first pair
 * is the TARGET_SPARC64 form (odd bit selects the upper bank of 32
 * registers) and the second the 32-bit form.  TODO confirm. */
94 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
95 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
97 #define DFPREG(r) (r & 0x1e)
98 #define QFPREG(r) (r & 0x1c)
/*
 * Sign-extend the low LEN bits of X to a full signed int.
 *
 * Used by GET_FIELDs/GET_FIELD_SPs, which pass the extracted field and
 * its width in bits.  The historical implementation did
 * "(x << (32 - len)) >> (32 - len)", which relies on implementation-
 * defined right shift of negative values and undefined left-shift
 * overflow; this version computes the same result with unsigned
 * arithmetic only.
 *
 * len outside (0, 32) returns x unchanged (len == 32 is a no-op by
 * definition; non-positive widths have no sign bit to propagate).
 */
static int sign_extend(int x, int len)
{
    unsigned int ux = (unsigned int)x;
    unsigned int sign;

    if (len <= 0 || len >= 32) {
        return x;
    }
    sign = 1u << (len - 1);           /* sign bit of the LEN-bit field */
    ux &= (sign << 1) - 1u;           /* keep only the low LEN bits */
    return (int)((ux ^ sign) - sign); /* standard sign-extension identity */
}
/* True when bit 13 (the "i" immediate bit) of the instruction word is set;
 * relies on a variable named `insn` being in scope at the use site. */
107 #define IS_IMM (insn & (1<<13))
/* Forward declaration: translate one instruction at dc->pc. */
109 static void disas_sparc_insn(DisasContext * dc);
/* GEN32(func, NAME): build a dispatch table of per-register micro-ops
 * (NAME ## 0 .. NAME ## N) and a wrapper func(n) that indexes it.  The
 * SPARC64 variant covers 64 FP registers, with odd entries above 31 left
 * as 0 (visible below); the 32-bit variant covers 32 registers. */
111 #ifdef TARGET_SPARC64
112 #define GEN32(func, NAME) \
113 static GenOpFunc * const NAME ## _table [64] = { \
114 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
115 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
116 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
117 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
118 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
119 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
120 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
121 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
122 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
123 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
124 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
125 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
127 static inline void func(int n) \
129 NAME ## _table[n](); \
132 #define GEN32(func, NAME) \
133 static GenOpFunc *const NAME ## _table [32] = { \
134 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
135 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
136 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
137 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
138 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
139 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
140 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
141 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
143 static inline void func(int n) \
145 NAME ## _table[n](); \
149 /* floating point registers moves */
/* Instantiate load/store dispatchers between FP registers and the FT/DT/QT
 * temporaries (single, double and — user-only — quad precision). */
150 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
151 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
152 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
153 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
155 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
156 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
157 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
158 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
160 #if defined(CONFIG_USER_ONLY)
161 GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
162 GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
163 GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
164 GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
/* Privilege predicates and load/store op selection.  In user-only builds
 * there is no supervisor/hypervisor mode and all accesses are "raw"; in
 * system builds dc->mem_idx (0=user, 1=kernel, 2=hypervisor) indexes a
 * per-width table of micro-ops. */
168 #ifdef CONFIG_USER_ONLY
169 #define supervisor(dc) 0
170 #ifdef TARGET_SPARC64
171 #define hypervisor(dc) 0
173 #define gen_op_ldst(name) gen_op_##name##_raw()
175 #define supervisor(dc) (dc->mem_idx >= 1)
176 #ifdef TARGET_SPARC64
177 #define hypervisor(dc) (dc->mem_idx == 2)
178 #define OP_LD_TABLE(width) \
179 static GenOpFunc * const gen_op_##width[] = { \
180 &gen_op_##width##_user, \
181 &gen_op_##width##_kernel, \
182 &gen_op_##width##_hypv, \
185 #define OP_LD_TABLE(width) \
186 static GenOpFunc * const gen_op_##width[] = { \
187 &gen_op_##width##_user, \
188 &gen_op_##width##_kernel, \
191 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
194 #ifndef CONFIG_USER_ONLY
197 #endif /* __i386__ */
/* On SPARC64 running a 32-bit ABI, addresses are truncated to 32 bits;
 * otherwise the macro expands to nothing.  Note the trailing semicolon is
 * part of the macro expansion. */
205 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
207 #define ABI32_MASK(addr)
/* Load a sign-extended 13-bit immediate into T1. */
210 static inline void gen_movl_simm_T1(int32_t val)
212 tcg_gen_movi_tl(cpu_T[1], val);
/* Read architectural register `reg` into TCG value `tn`:
 * %g0 always reads as zero, globals (%g1-%g7) live in env.gregs, and
 * windowed registers (reg >= 8) are reached through the regwptr slice. */
215 static inline void gen_movl_reg_TN(int reg, TCGv tn)
218 tcg_gen_movi_tl(tn, 0);
220 tcg_gen_ld_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
222 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
223 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Convenience wrappers: register -> T0 / T1 / T2. */
227 static inline void gen_movl_reg_T0(int reg)
229 gen_movl_reg_TN(reg, cpu_T[0]);
232 static inline void gen_movl_reg_T1(int reg)
234 gen_movl_reg_TN(reg, cpu_T[1]);
238 static inline void gen_movl_reg_T2(int reg)
240 gen_movl_reg_TN(reg, cpu_T[2]);
243 #endif /* __i386__ */
/* Write TCG value `tn` back to architectural register `reg`; the %g0 and
 * global/windowed split mirrors gen_movl_reg_TN above. */
244 static inline void gen_movl_TN_reg(int reg, TCGv tn)
249 tcg_gen_st_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
251 tcg_gen_ld_ptr(cpu_regwptr, cpu_env, offsetof(CPUState, regwptr)); // XXX
252 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
256 static inline void gen_movl_T0_reg(int reg)
258 gen_movl_TN_reg(reg, cpu_T[0]);
261 static inline void gen_movl_T1_reg(int reg)
263 gen_movl_TN_reg(reg, cpu_T[1]);
/* 32-bit (movl) and target-width (movtl) transfers between T0 and an
 * arbitrary offset inside the CPU state structure. */
266 static inline void gen_op_movl_T0_env(size_t offset)
268 tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
271 static inline void gen_op_movl_env_T0(size_t offset)
273 tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
276 static inline void gen_op_movtl_T0_env(size_t offset)
278 tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
281 static inline void gen_op_movtl_env_T0(size_t offset)
283 tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
/* T0 <- T0 op T1 for the simple ALU operations. */
286 static inline void gen_op_add_T1_T0(void)
288 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
291 static inline void gen_op_or_T1_T0(void)
293 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
296 static inline void gen_op_xor_T1_T0(void)
298 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Store immediate values into env.pc / env.npc via the cpu_tmp0 scratch. */
301 static inline void gen_jmp_im(target_ulong pc)
303 tcg_gen_movi_tl(cpu_tmp0, pc);
304 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
307 static inline void gen_movl_npc_im(target_ulong npc)
309 tcg_gen_movi_tl(cpu_tmp0, npc);
310 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
/* Emit a jump to (pc, npc).  If both targets share the current TB's page
 * a direct chained jump (goto_tb/exit_tb) is used; otherwise fall back to
 * storing pc/npc and exiting unchained. */
313 static inline void gen_goto_tb(DisasContext *s, int tb_num,
314 target_ulong pc, target_ulong npc)
316 TranslationBlock *tb;
319 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
320 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
321 /* jump to same page: we can use a direct jump */
322 tcg_gen_goto_tb(tb_num);
324 gen_movl_npc_im(npc);
325 tcg_gen_exit_tb((long)tb + tb_num);
327 /* jump to another page: currently not optimized */
329 gen_movl_npc_im(npc);
/* Two-way split on the T2 condition: T2 == 0 falls through to pc2,
 * otherwise continue at pc1 (sequential pc+4 as npc in both arms). */
334 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
339 l1 = gen_new_label();
341 gen_op_jz_T2_label(l1);
343 gen_goto_tb(dc, 0, pc1, pc1 + 4);
346 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Branch with annul: taken path executes the delay slot (npc=pc1), the
 * annulled path skips it (pc2+4, pc2+8). */
349 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
354 l1 = gen_new_label();
356 gen_op_jz_T2_label(l1);
358 gen_goto_tb(dc, 0, pc2, pc1);
361 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Unconditional transfer to a statically known (pc, npc) pair. */
364 static inline void gen_branch(DisasContext *dc, target_ulong pc,
367 gen_goto_tb(dc, 0, pc, npc);
/* Resolve a JUMP_PC npc at run time: store npc1 if T2 != 0, else npc2. */
370 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2)
374 l1 = gen_new_label();
375 l2 = gen_new_label();
376 gen_op_jz_T2_label(l1);
378 gen_movl_npc_im(npc1);
379 gen_op_jmp_label(l2);
382 gen_movl_npc_im(npc2);
386 /* call this function before using T2 as it may have been set for a jump */
/* Materialize a pending JUMP_PC npc into env.npc so T2 can be reused. */
387 static inline void flush_T2(DisasContext * dc)
389 if (dc->npc == JUMP_PC) {
390 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
391 dc->npc = DYNAMIC_PC;
/* Ensure env.npc holds the correct value: resolve JUMP_PC via the
 * generic branch, store a static npc directly, and leave DYNAMIC_PC
 * alone (already up to date in env). */
395 static inline void save_npc(DisasContext * dc)
397 if (dc->npc == JUMP_PC) {
398 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
399 dc->npc = DYNAMIC_PC;
400 } else if (dc->npc != DYNAMIC_PC) {
401 gen_movl_npc_im(dc->npc);
/* Flush both pc and npc to the CPU state (body elided in this excerpt). */
405 static inline void save_state(DisasContext * dc)
/* pc <- npc, honouring all three npc states (static/JUMP_PC/DYNAMIC_PC);
 * used by delayed control transfers. */
411 static inline void gen_mov_pc_npc(DisasContext * dc)
413 if (dc->npc == JUMP_PC) {
414 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
415 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
416 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
418 } else if (dc->npc == DYNAMIC_PC) {
419 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
420 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
/* Advance sequentially at run time: pc <- npc, npc <- npc + 4. */
427 static inline void gen_op_next_insn(void)
429 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
430 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
431 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
432 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
/* Condition evaluators indexed by [cc][cond]: gen_cond for the integer
 * condition codes (icc/xcc on SPARC64), gen_fcond for the FP condition
 * fields fcc0..fcc3 (fcc1-3 are SPARC64 only).  Each entry leaves its
 * boolean result in T2.  Table contents are partially elided here. */
435 static GenOpFunc * const gen_cond[2][16] = {
455 #ifdef TARGET_SPARC64
476 static GenOpFunc * const gen_fcond[4][16] = {
495 #ifdef TARGET_SPARC64
498 gen_op_eval_fbne_fcc1,
499 gen_op_eval_fblg_fcc1,
500 gen_op_eval_fbul_fcc1,
501 gen_op_eval_fbl_fcc1,
502 gen_op_eval_fbug_fcc1,
503 gen_op_eval_fbg_fcc1,
504 gen_op_eval_fbu_fcc1,
506 gen_op_eval_fbe_fcc1,
507 gen_op_eval_fbue_fcc1,
508 gen_op_eval_fbge_fcc1,
509 gen_op_eval_fbuge_fcc1,
510 gen_op_eval_fble_fcc1,
511 gen_op_eval_fbule_fcc1,
512 gen_op_eval_fbo_fcc1,
516 gen_op_eval_fbne_fcc2,
517 gen_op_eval_fblg_fcc2,
518 gen_op_eval_fbul_fcc2,
519 gen_op_eval_fbl_fcc2,
520 gen_op_eval_fbug_fcc2,
521 gen_op_eval_fbg_fcc2,
522 gen_op_eval_fbu_fcc2,
524 gen_op_eval_fbe_fcc2,
525 gen_op_eval_fbue_fcc2,
526 gen_op_eval_fbge_fcc2,
527 gen_op_eval_fbuge_fcc2,
528 gen_op_eval_fble_fcc2,
529 gen_op_eval_fbule_fcc2,
530 gen_op_eval_fbo_fcc2,
534 gen_op_eval_fbne_fcc3,
535 gen_op_eval_fblg_fcc3,
536 gen_op_eval_fbul_fcc3,
537 gen_op_eval_fbl_fcc3,
538 gen_op_eval_fbug_fcc3,
539 gen_op_eval_fbg_fcc3,
540 gen_op_eval_fbu_fcc3,
542 gen_op_eval_fbe_fcc3,
543 gen_op_eval_fbue_fcc3,
544 gen_op_eval_fbge_fcc3,
545 gen_op_eval_fbuge_fcc3,
546 gen_op_eval_fble_fcc3,
547 gen_op_eval_fbule_fcc3,
548 gen_op_eval_fbo_fcc3,
/* SPARC64 register-conditional branches (BPr): evaluate a condition on a
 * register value rather than the condition codes. */
555 #ifdef TARGET_SPARC64
556 static void gen_cond_reg(int cond)
582 static const int gen_tcg_cond_reg[8] = {
594 /* XXX: potentially incorrect if dynamic npc */
/* Translate a Bicc/BPcc: cond 0x0 = branch-never (optionally annulling
 * the delay slot), cond 0x8 = branch-always; otherwise evaluate the
 * condition into T2 and either emit an annulled branch now or defer the
 * decision via JUMP_PC/jump_pc[].  `a` is the annul bit (insn bit 29),
 * `cc` selects icc/xcc. */
595 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
597 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
598 target_ulong target = dc->pc + offset;
601 /* unconditional not taken */
603 dc->pc = dc->npc + 4;
604 dc->npc = dc->pc + 4;
607 dc->npc = dc->pc + 4;
609 } else if (cond == 0x8) {
610 /* unconditional taken */
613 dc->npc = dc->pc + 4;
620 gen_cond[cc][cond]();
622 gen_branch_a(dc, target, dc->npc);
626 dc->jump_pc[0] = target;
627 dc->jump_pc[1] = dc->npc + 4;
633 /* XXX: potentially incorrect if dynamic npc */
/* FP branch (FBfcc/FBPfcc): identical control structure to do_branch but
 * conditions come from the gen_fcond table for FP condition field `cc`. */
634 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
636 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
637 target_ulong target = dc->pc + offset;
640 /* unconditional not taken */
642 dc->pc = dc->npc + 4;
643 dc->npc = dc->pc + 4;
646 dc->npc = dc->pc + 4;
648 } else if (cond == 0x8) {
649 /* unconditional taken */
652 dc->npc = dc->pc + 4;
659 gen_fcond[cc][cond]();
661 gen_branch_a(dc, target, dc->npc);
665 dc->jump_pc[0] = target;
666 dc->jump_pc[1] = dc->npc + 4;
672 #ifdef TARGET_SPARC64
673 /* XXX: potentially incorrect if dynamic npc */
/* SPARC64 branch-on-register (BPr); condition evaluation elided here. */
674 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
676 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
677 target_ulong target = dc->pc + offset;
682 gen_branch_a(dc, target, dc->npc);
686 dc->jump_pc[0] = target;
687 dc->jump_pc[1] = dc->npc + 4;
/* FP compare dispatch.  On SPARC64 each compare writes one of four fcc
 * fields, so per-fcc helper tables are indexed by fccno; quad-precision
 * variants exist only for user-only builds.  The "e" variants are the
 * signalling (exception on unordered) compares. */
692 static GenOpFunc * const gen_fcmps[4] = {
699 static GenOpFunc * const gen_fcmpd[4] = {
706 #if defined(CONFIG_USER_ONLY)
707 static GenOpFunc * const gen_fcmpq[4] = {
715 static GenOpFunc * const gen_fcmpes[4] = {
722 static GenOpFunc * const gen_fcmped[4] = {
729 #if defined(CONFIG_USER_ONLY)
730 static GenOpFunc * const gen_fcmpeq[4] = {
738 static inline void gen_op_fcmps(int fccno)
740 tcg_gen_helper_0_0(gen_fcmps[fccno]);
743 static inline void gen_op_fcmpd(int fccno)
745 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
748 #if defined(CONFIG_USER_ONLY)
749 static inline void gen_op_fcmpq(int fccno)
751 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
755 static inline void gen_op_fcmpes(int fccno)
757 tcg_gen_helper_0_0(gen_fcmpes[fccno]);
760 static inline void gen_op_fcmped(int fccno)
762 tcg_gen_helper_0_0(gen_fcmped[fccno]);
765 #if defined(CONFIG_USER_ONLY)
766 static inline void gen_op_fcmpeq(int fccno)
768 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
/* Non-SPARC64 build: a single fcc field, so fccno is ignored and the
 * plain helper is called directly. */
774 static inline void gen_op_fcmps(int fccno)
776 tcg_gen_helper_0_0(helper_fcmps);
779 static inline void gen_op_fcmpd(int fccno)
781 tcg_gen_helper_0_0(helper_fcmpd);
784 #if defined(CONFIG_USER_ONLY)
785 static inline void gen_op_fcmpq(int fccno)
787 tcg_gen_helper_0_0(helper_fcmpq);
791 static inline void gen_op_fcmpes(int fccno)
793 tcg_gen_helper_0_0(helper_fcmpes);
796 static inline void gen_op_fcmped(int fccno)
798 tcg_gen_helper_0_0(helper_fcmped);
801 #if defined(CONFIG_USER_ONLY)
802 static inline void gen_op_fcmpeq(int fccno)
804 tcg_gen_helper_0_0(helper_fcmpeq);
/* Emit a call to raise_exception(exception) with the trap number in a
 * fresh i32 temporary. */
810 static inline void gen_op_exception(int exception)
814 r_except = tcg_temp_new(TCG_TYPE_I32);
815 tcg_gen_movi_i32(r_except, exception);
816 tcg_gen_helper_0_1(raise_exception, r_except);
/* Record an FP trap type in FSR.ftt and raise the FP exception trap. */
819 static inline void gen_op_fpexception_im(int fsr_flags)
821 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
822 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~FSR_FTT_MASK);
823 tcg_gen_ori_tl(cpu_tmp0, cpu_tmp0, fsr_flags);
824 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
825 gen_op_exception(TT_FP_EXCP);
/* Raise fp_disabled when the FPU is off (system builds only); returns
 * non-zero when a trap was emitted so the caller can stop translating. */
828 static int gen_trap_ifnofpu(DisasContext * dc)
830 #if !defined(CONFIG_USER_ONLY)
831 if (!dc->fpu_enabled) {
833 gen_op_exception(TT_NFPU_INSN);
/* Clear FSR.ftt and the current IEEE exception bits before an FP op. */
841 static inline void gen_op_clear_ieee_excp_and_FTT(void)
843 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
844 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
845 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
/* Reset the softfloat accumulated-exception state via a helper call. */
848 static inline void gen_clear_float_exceptions(void)
850 tcg_gen_helper_0_0(helper_clear_float_exceptions);
/* SPARC64 alternate-space (ASI) access generators.  Common pattern: when
 * the immediate form is used the 13-bit offset is added to the address in
 * T0 and the ASI comes from env.asi; otherwise the ASI is an immediate
 * field of the instruction.  Conditional branches between the two forms
 * are elided in this excerpt. */
854 #ifdef TARGET_SPARC64
/* Load `size` bytes (sign-extended if `sign`) from [T0] in the selected
 * ASI, result into T1. */
855 static inline void gen_ld_asi(int insn, int size, int sign)
860 r_size = tcg_temp_new(TCG_TYPE_I32);
861 r_sign = tcg_temp_new(TCG_TYPE_I32);
862 tcg_gen_movi_i32(r_size, size);
863 tcg_gen_movi_i32(r_sign, sign);
865 offset = GET_FIELD(insn, 25, 31);
866 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
867 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
869 asi = GET_FIELD(insn, 19, 26);
870 tcg_gen_movi_i32(cpu_T[1], asi);
872 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
/* Store T1 (`size` bytes) to [T0] in the selected ASI. */
876 static inline void gen_st_asi(int insn, int size)
881 r_asi = tcg_temp_new(TCG_TYPE_I32);
882 r_size = tcg_temp_new(TCG_TYPE_I32);
883 tcg_gen_movi_i32(r_size, size);
885 offset = GET_FIELD(insn, 25, 31);
886 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
887 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
889 asi = GET_FIELD(insn, 19, 26);
890 tcg_gen_movi_i32(r_asi, asi);
892 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
/* FP load from an alternate space into FP register `rd`. */
895 static inline void gen_ldf_asi(int insn, int size, int rd)
898 TCGv r_asi, r_size, r_rd;
900 r_asi = tcg_temp_new(TCG_TYPE_I32);
901 r_size = tcg_temp_new(TCG_TYPE_I32);
902 r_rd = tcg_temp_new(TCG_TYPE_I32);
903 tcg_gen_movi_i32(r_size, size);
904 tcg_gen_movi_i32(r_rd, rd);
906 offset = GET_FIELD(insn, 25, 31);
907 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
908 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
910 asi = GET_FIELD(insn, 19, 26);
911 tcg_gen_movi_i32(r_asi, asi);
913 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
/* FP store of register `rd` to an alternate space. */
916 static inline void gen_stf_asi(int insn, int size, int rd)
919 TCGv r_asi, r_size, r_rd;
921 r_asi = tcg_temp_new(TCG_TYPE_I32);
922 r_size = tcg_temp_new(TCG_TYPE_I32);
923 r_rd = tcg_temp_new(TCG_TYPE_I32);
924 tcg_gen_movi_i32(r_size, size);
925 tcg_gen_movi_i32(r_rd, rd);
927 offset = GET_FIELD(insn, 25, 31);
928 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
929 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
931 asi = GET_FIELD(insn, 19, 26);
932 tcg_gen_movi_i32(r_asi, asi);
934 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
/* SWAPA: atomic-style swap of T1 with a 32-bit word at [T0]. */
937 static inline void gen_swap_asi(int insn)
940 TCGv r_size, r_sign, r_temp;
942 r_size = tcg_temp_new(TCG_TYPE_I32);
943 r_sign = tcg_temp_new(TCG_TYPE_I32);
944 r_temp = tcg_temp_new(TCG_TYPE_I32);
945 tcg_gen_movi_i32(r_size, 4);
946 tcg_gen_movi_i32(r_sign, 0);
948 offset = GET_FIELD(insn, 25, 31);
949 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
950 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
952 asi = GET_FIELD(insn, 19, 26);
953 tcg_gen_movi_i32(cpu_T[1], asi);
955 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
/* NOTE(review): the st_asi call below passes (addr, val, r_size, r_sign),
 * whereas the call at line 892 passes (addr, val, r_asi, r_size) — the
 * argument order looks inconsistent; verify against helper_st_asi's
 * signature. */
957 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
958 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* LDDA: 64-bit alternate-space load split into T0 (low) / T1 (high). */
961 static inline void gen_ldda_asi(int insn)
964 TCGv r_size, r_sign, r_dword;
966 r_size = tcg_temp_new(TCG_TYPE_I32);
967 r_sign = tcg_temp_new(TCG_TYPE_I32);
968 r_dword = tcg_temp_new(TCG_TYPE_I64);
969 tcg_gen_movi_i32(r_size, 8);
970 tcg_gen_movi_i32(r_sign, 0);
972 offset = GET_FIELD(insn, 25, 31);
973 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
974 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
976 asi = GET_FIELD(insn, 19, 26);
977 tcg_gen_movi_i32(cpu_T[1], asi);
979 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
981 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
982 tcg_gen_shri_i64(r_dword, r_dword, 32);
983 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* CASA: 32-bit compare-and-swap with register rd as the compare value. */
986 static inline void gen_cas_asi(int insn, int rd)
991 r_val1 = tcg_temp_new(TCG_TYPE_I32);
992 r_asi = tcg_temp_new(TCG_TYPE_I32);
993 gen_movl_reg_TN(rd, r_val1);
995 offset = GET_FIELD(insn, 25, 31);
996 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
997 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
999 asi = GET_FIELD(insn, 19, 26);
1000 tcg_gen_movi_i32(r_asi, asi);
1002 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
/* CASXA: 64-bit compare-and-swap, same structure as gen_cas_asi. */
1006 static inline void gen_casx_asi(int insn, int rd)
1011 r_val1 = tcg_temp_new(TCG_TYPE_I64);
1012 r_asi = tcg_temp_new(TCG_TYPE_I32);
1013 gen_movl_reg_TN(rd, r_val1);
1015 offset = GET_FIELD(insn, 25, 31);
1016 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1017 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1019 asi = GET_FIELD(insn, 19, 26);
1020 tcg_gen_movi_i32(r_asi, asi);
1022 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
/* 32-bit system-mode ASI helpers: the ASI is always an immediate field
 * (no %asi register), and 64-bit results travel through an i64 temp. */
1026 #elif !defined(CONFIG_USER_ONLY)
/* Load `size` bytes (sign-extended if `sign`) from [T0] into T1. */
1028 static inline void gen_ld_asi(int insn, int size, int sign)
1031 TCGv r_size, r_sign, r_dword;
1033 r_size = tcg_temp_new(TCG_TYPE_I32);
1034 r_sign = tcg_temp_new(TCG_TYPE_I32);
1035 r_dword = tcg_temp_new(TCG_TYPE_I64);
1036 tcg_gen_movi_i32(r_size, size);
1037 tcg_gen_movi_i32(r_sign, sign);
1038 asi = GET_FIELD(insn, 19, 26);
1039 tcg_gen_movi_i32(cpu_T[1], asi);
1040 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1042 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* Store T1 (zero-extended to 64 bits) to [T0] in ASI `asi`. */
1045 static inline void gen_st_asi(int insn, int size)
1048 TCGv r_dword, r_asi, r_size;
1050 r_dword = tcg_temp_new(TCG_TYPE_I64);
1051 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1052 r_asi = tcg_temp_new(TCG_TYPE_I32);
1053 r_size = tcg_temp_new(TCG_TYPE_I32);
1054 asi = GET_FIELD(insn, 19, 26);
1055 tcg_gen_movi_i32(r_asi, asi);
1056 tcg_gen_movi_i32(r_size, size);
1057 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
/* SWAPA: swap T1 with the 32-bit word at [T0]. */
1060 static inline void gen_swap_asi(int insn)
1063 TCGv r_size, r_sign, r_temp;
1065 r_size = tcg_temp_new(TCG_TYPE_I32);
1066 r_sign = tcg_temp_new(TCG_TYPE_I32);
1067 r_temp = tcg_temp_new(TCG_TYPE_I32);
1068 tcg_gen_movi_i32(r_size, 4);
1069 tcg_gen_movi_i32(r_sign, 0);
1070 asi = GET_FIELD(insn, 19, 26);
1071 tcg_gen_movi_i32(cpu_T[1], asi);
1072 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
/* NOTE(review): (r_size, r_sign) here vs (r_asi, r_size) at line 1057 —
 * the st_asi argument order looks inconsistent; verify against
 * helper_st_asi's signature. */
1074 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1075 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* LDDA: 64-bit load split into T0 (low 32 bits) / T1 (high 32 bits). */
1078 static inline void gen_ldda_asi(int insn)
1081 TCGv r_size, r_sign, r_dword;
1083 r_size = tcg_temp_new(TCG_TYPE_I32);
1084 r_sign = tcg_temp_new(TCG_TYPE_I32);
1085 r_dword = tcg_temp_new(TCG_TYPE_I64);
1086 tcg_gen_movi_i32(r_size, 8);
1087 tcg_gen_movi_i32(r_sign, 0);
1088 asi = GET_FIELD(insn, 19, 26);
1089 tcg_gen_movi_i32(cpu_T[1], asi);
1090 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1092 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1093 tcg_gen_shri_i64(r_dword, r_dword, 32);
1094 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* LDSTUBA: load a byte from [T0] then store 0xff back — the SPARC
 * test-and-set primitive (shared by SPARC64 and 32-bit system builds). */
1098 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1099 static inline void gen_ldstub_asi(int insn)
1102 TCGv r_dword, r_asi, r_size;
1104 gen_ld_asi(insn, 1, 0);
1106 r_dword = tcg_temp_new(TCG_TYPE_I64);
1107 r_asi = tcg_temp_new(TCG_TYPE_I32);
1108 r_size = tcg_temp_new(TCG_TYPE_I32);
1109 asi = GET_FIELD(insn, 19, 26);
1110 tcg_gen_movi_i32(r_dword, 0xff);
1111 tcg_gen_movi_i32(r_asi, asi);
1112 tcg_gen_movi_i32(r_size, 1);
1113 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
/* Extract the PSR carry flag (bit 20 of env.psr) into `reg` as 0 or 1. */
1117 static inline void gen_mov_reg_C(TCGv reg)
1119 tcg_gen_ld_i32(reg, cpu_env, offsetof(CPUSPARCState, psr));
1120 tcg_gen_shri_i32(reg, reg, 20);
1121 tcg_gen_andi_i32(reg, reg, 0x1);
1124 /* before an instruction, dc->pc must be static */
1125 static void disas_sparc_insn(DisasContext * dc)
1127 unsigned int insn, opc, rs1, rs2, rd;
1129 insn = ldl_code(dc->pc);
1130 opc = GET_FIELD(insn, 0, 1);
1132 rd = GET_FIELD(insn, 2, 6);
1134 case 0: /* branches/sethi */
1136 unsigned int xop = GET_FIELD(insn, 7, 9);
1139 #ifdef TARGET_SPARC64
1140 case 0x1: /* V9 BPcc */
1144 target = GET_FIELD_SP(insn, 0, 18);
1145 target = sign_extend(target, 18);
1147 cc = GET_FIELD_SP(insn, 20, 21);
1149 do_branch(dc, target, insn, 0);
1151 do_branch(dc, target, insn, 1);
1156 case 0x3: /* V9 BPr */
1158 target = GET_FIELD_SP(insn, 0, 13) |
1159 (GET_FIELD_SP(insn, 20, 21) << 14);
1160 target = sign_extend(target, 16);
1162 rs1 = GET_FIELD(insn, 13, 17);
1163 gen_movl_reg_T0(rs1);
1164 do_branch_reg(dc, target, insn);
1167 case 0x5: /* V9 FBPcc */
1169 int cc = GET_FIELD_SP(insn, 20, 21);
1170 if (gen_trap_ifnofpu(dc))
1172 target = GET_FIELD_SP(insn, 0, 18);
1173 target = sign_extend(target, 19);
1175 do_fbranch(dc, target, insn, cc);
1179 case 0x7: /* CBN+x */
1184 case 0x2: /* BN+x */
1186 target = GET_FIELD(insn, 10, 31);
1187 target = sign_extend(target, 22);
1189 do_branch(dc, target, insn, 0);
1192 case 0x6: /* FBN+x */
1194 if (gen_trap_ifnofpu(dc))
1196 target = GET_FIELD(insn, 10, 31);
1197 target = sign_extend(target, 22);
1199 do_fbranch(dc, target, insn, 0);
1202 case 0x4: /* SETHI */
1207 uint32_t value = GET_FIELD(insn, 10, 31);
1208 tcg_gen_movi_tl(cpu_T[0], value << 10);
1209 gen_movl_T0_reg(rd);
1214 case 0x0: /* UNIMPL */
1223 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1225 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1226 gen_movl_T0_reg(15);
1232 case 2: /* FPU & Logical Operations */
1234 unsigned int xop = GET_FIELD(insn, 7, 12);
1235 if (xop == 0x3a) { /* generate trap */
1238 rs1 = GET_FIELD(insn, 13, 17);
1239 gen_movl_reg_T0(rs1);
1241 rs2 = GET_FIELD(insn, 25, 31);
1242 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
1244 rs2 = GET_FIELD(insn, 27, 31);
1248 gen_movl_reg_T1(rs2);
1254 cond = GET_FIELD(insn, 3, 6);
1257 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
1258 } else if (cond != 0) {
1259 #ifdef TARGET_SPARC64
1261 int cc = GET_FIELD_SP(insn, 11, 12);
1265 gen_cond[0][cond]();
1267 gen_cond[1][cond]();
1273 gen_cond[0][cond]();
1275 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], cpu_T[2]);
1281 } else if (xop == 0x28) {
1282 rs1 = GET_FIELD(insn, 13, 17);
1285 #ifndef TARGET_SPARC64
1286 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1287 manual, rdy on the microSPARC
1289 case 0x0f: /* stbar in the SPARCv8 manual,
1290 rdy on the microSPARC II */
1291 case 0x10 ... 0x1f: /* implementation-dependent in the
1292 SPARCv8 manual, rdy on the
1295 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
1296 gen_movl_T0_reg(rd);
1298 #ifdef TARGET_SPARC64
1299 case 0x2: /* V9 rdccr */
1301 gen_movl_T0_reg(rd);
1303 case 0x3: /* V9 rdasi */
1304 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
1305 gen_movl_T0_reg(rd);
1307 case 0x4: /* V9 rdtick */
1311 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1312 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1313 offsetof(CPUState, tick));
1314 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1316 gen_movl_T0_reg(rd);
1319 case 0x5: /* V9 rdpc */
1320 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1321 gen_movl_T0_reg(rd);
1323 case 0x6: /* V9 rdfprs */
1324 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
1325 gen_movl_T0_reg(rd);
1327 case 0xf: /* V9 membar */
1328 break; /* no effect */
1329 case 0x13: /* Graphics Status */
1330 if (gen_trap_ifnofpu(dc))
1332 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
1333 gen_movl_T0_reg(rd);
1335 case 0x17: /* Tick compare */
1336 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
1337 gen_movl_T0_reg(rd);
1339 case 0x18: /* System tick */
1343 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1344 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1345 offsetof(CPUState, stick));
1346 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1348 gen_movl_T0_reg(rd);
1351 case 0x19: /* System tick compare */
1352 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
1353 gen_movl_T0_reg(rd);
1355 case 0x10: /* Performance Control */
1356 case 0x11: /* Performance Instrumentation Counter */
1357 case 0x12: /* Dispatch Control */
1358 case 0x14: /* Softint set, WO */
1359 case 0x15: /* Softint clear, WO */
1360 case 0x16: /* Softint write */
1365 #if !defined(CONFIG_USER_ONLY)
1366 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1367 #ifndef TARGET_SPARC64
1368 if (!supervisor(dc))
1370 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
1372 if (!hypervisor(dc))
1374 rs1 = GET_FIELD(insn, 13, 17);
1377 // gen_op_rdhpstate();
1380 // gen_op_rdhtstate();
1383 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
1386 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
1389 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
1391 case 31: // hstick_cmpr
1392 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
1398 gen_movl_T0_reg(rd);
1400 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1401 if (!supervisor(dc))
1403 #ifdef TARGET_SPARC64
1404 rs1 = GET_FIELD(insn, 13, 17);
1410 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1411 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1412 offsetof(CPUState, tsptr));
1413 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1414 offsetof(trap_state, tpc));
1421 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1422 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1423 offsetof(CPUState, tsptr));
1424 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1425 offsetof(trap_state, tnpc));
1432 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1433 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1434 offsetof(CPUState, tsptr));
1435 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
1436 offsetof(trap_state, tstate));
1443 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
1444 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1445 offsetof(CPUState, tsptr));
1446 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
1447 offsetof(trap_state, tt));
1454 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1455 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1456 offsetof(CPUState, tick));
1457 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
1459 gen_movl_T0_reg(rd);
1463 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1466 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
1469 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
1472 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
1478 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
1480 case 11: // canrestore
1481 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
1483 case 12: // cleanwin
1484 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
1486 case 13: // otherwin
1487 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
1490 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
1492 case 16: // UA2005 gl
1493 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
1495 case 26: // UA2005 strand status
1496 if (!hypervisor(dc))
1498 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
1501 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
1508 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
1510 gen_movl_T0_reg(rd);
1512 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
1513 #ifdef TARGET_SPARC64
1516 if (!supervisor(dc))
1518 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
1519 gen_movl_T0_reg(rd);
1523 } else if (xop == 0x34) { /* FPU Operations */
1524 if (gen_trap_ifnofpu(dc))
1526 gen_op_clear_ieee_excp_and_FTT();
1527 rs1 = GET_FIELD(insn, 13, 17);
1528 rs2 = GET_FIELD(insn, 27, 31);
1529 xop = GET_FIELD(insn, 18, 26);
1531 case 0x1: /* fmovs */
1532 gen_op_load_fpr_FT0(rs2);
1533 gen_op_store_FT0_fpr(rd);
1535 case 0x5: /* fnegs */
1536 gen_op_load_fpr_FT1(rs2);
1538 gen_op_store_FT0_fpr(rd);
1540 case 0x9: /* fabss */
1541 gen_op_load_fpr_FT1(rs2);
1542 tcg_gen_helper_0_0(helper_fabss);
1543 gen_op_store_FT0_fpr(rd);
1545 case 0x29: /* fsqrts */
1546 gen_op_load_fpr_FT1(rs2);
1547 gen_clear_float_exceptions();
1548 tcg_gen_helper_0_0(helper_fsqrts);
1549 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1550 gen_op_store_FT0_fpr(rd);
1552 case 0x2a: /* fsqrtd */
1553 gen_op_load_fpr_DT1(DFPREG(rs2));
1554 gen_clear_float_exceptions();
1555 tcg_gen_helper_0_0(helper_fsqrtd);
1556 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1557 gen_op_store_DT0_fpr(DFPREG(rd));
1559 case 0x2b: /* fsqrtq */
1560 #if defined(CONFIG_USER_ONLY)
1561 gen_op_load_fpr_QT1(QFPREG(rs2));
1562 gen_clear_float_exceptions();
1563 tcg_gen_helper_0_0(helper_fsqrtq);
1564 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1565 gen_op_store_QT0_fpr(QFPREG(rd));
1571 gen_op_load_fpr_FT0(rs1);
1572 gen_op_load_fpr_FT1(rs2);
1573 gen_clear_float_exceptions();
1575 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1576 gen_op_store_FT0_fpr(rd);
1579 gen_op_load_fpr_DT0(DFPREG(rs1));
1580 gen_op_load_fpr_DT1(DFPREG(rs2));
1581 gen_clear_float_exceptions();
1583 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1584 gen_op_store_DT0_fpr(DFPREG(rd));
1586 case 0x43: /* faddq */
1587 #if defined(CONFIG_USER_ONLY)
1588 gen_op_load_fpr_QT0(QFPREG(rs1));
1589 gen_op_load_fpr_QT1(QFPREG(rs2));
1590 gen_clear_float_exceptions();
1592 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1593 gen_op_store_QT0_fpr(QFPREG(rd));
1599 gen_op_load_fpr_FT0(rs1);
1600 gen_op_load_fpr_FT1(rs2);
1601 gen_clear_float_exceptions();
1603 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1604 gen_op_store_FT0_fpr(rd);
1607 gen_op_load_fpr_DT0(DFPREG(rs1));
1608 gen_op_load_fpr_DT1(DFPREG(rs2));
1609 gen_clear_float_exceptions();
1611 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1612 gen_op_store_DT0_fpr(DFPREG(rd));
1614 case 0x47: /* fsubq */
1615 #if defined(CONFIG_USER_ONLY)
1616 gen_op_load_fpr_QT0(QFPREG(rs1));
1617 gen_op_load_fpr_QT1(QFPREG(rs2));
1618 gen_clear_float_exceptions();
1620 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1621 gen_op_store_QT0_fpr(QFPREG(rd));
1627 gen_op_load_fpr_FT0(rs1);
1628 gen_op_load_fpr_FT1(rs2);
1629 gen_clear_float_exceptions();
1631 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1632 gen_op_store_FT0_fpr(rd);
1635 gen_op_load_fpr_DT0(DFPREG(rs1));
1636 gen_op_load_fpr_DT1(DFPREG(rs2));
1637 gen_clear_float_exceptions();
1639 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1640 gen_op_store_DT0_fpr(DFPREG(rd));
1642 case 0x4b: /* fmulq */
1643 #if defined(CONFIG_USER_ONLY)
1644 gen_op_load_fpr_QT0(QFPREG(rs1));
1645 gen_op_load_fpr_QT1(QFPREG(rs2));
1646 gen_clear_float_exceptions();
1648 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1649 gen_op_store_QT0_fpr(QFPREG(rd));
1655 gen_op_load_fpr_FT0(rs1);
1656 gen_op_load_fpr_FT1(rs2);
1657 gen_clear_float_exceptions();
1659 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1660 gen_op_store_FT0_fpr(rd);
1663 gen_op_load_fpr_DT0(DFPREG(rs1));
1664 gen_op_load_fpr_DT1(DFPREG(rs2));
1665 gen_clear_float_exceptions();
1667 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1668 gen_op_store_DT0_fpr(DFPREG(rd));
1670 case 0x4f: /* fdivq */
1671 #if defined(CONFIG_USER_ONLY)
1672 gen_op_load_fpr_QT0(QFPREG(rs1));
1673 gen_op_load_fpr_QT1(QFPREG(rs2));
1674 gen_clear_float_exceptions();
1676 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1677 gen_op_store_QT0_fpr(QFPREG(rd));
1683 gen_op_load_fpr_FT0(rs1);
1684 gen_op_load_fpr_FT1(rs2);
1685 gen_clear_float_exceptions();
1687 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1688 gen_op_store_DT0_fpr(DFPREG(rd));
1690 case 0x6e: /* fdmulq */
1691 #if defined(CONFIG_USER_ONLY)
1692 gen_op_load_fpr_DT0(DFPREG(rs1));
1693 gen_op_load_fpr_DT1(DFPREG(rs2));
1694 gen_clear_float_exceptions();
1696 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1697 gen_op_store_QT0_fpr(QFPREG(rd));
1703 gen_op_load_fpr_FT1(rs2);
1704 gen_clear_float_exceptions();
1706 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1707 gen_op_store_FT0_fpr(rd);
1710 gen_op_load_fpr_DT1(DFPREG(rs2));
1711 gen_clear_float_exceptions();
1713 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1714 gen_op_store_FT0_fpr(rd);
1716 case 0xc7: /* fqtos */
1717 #if defined(CONFIG_USER_ONLY)
1718 gen_op_load_fpr_QT1(QFPREG(rs2));
1719 gen_clear_float_exceptions();
1721 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1722 gen_op_store_FT0_fpr(rd);
1728 gen_op_load_fpr_FT1(rs2);
1730 gen_op_store_DT0_fpr(DFPREG(rd));
1733 gen_op_load_fpr_FT1(rs2);
1735 gen_op_store_DT0_fpr(DFPREG(rd));
1737 case 0xcb: /* fqtod */
1738 #if defined(CONFIG_USER_ONLY)
1739 gen_op_load_fpr_QT1(QFPREG(rs2));
1740 gen_clear_float_exceptions();
1742 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1743 gen_op_store_DT0_fpr(DFPREG(rd));
1748 case 0xcc: /* fitoq */
1749 #if defined(CONFIG_USER_ONLY)
1750 gen_op_load_fpr_FT1(rs2);
1752 gen_op_store_QT0_fpr(QFPREG(rd));
1757 case 0xcd: /* fstoq */
1758 #if defined(CONFIG_USER_ONLY)
1759 gen_op_load_fpr_FT1(rs2);
1761 gen_op_store_QT0_fpr(QFPREG(rd));
1766 case 0xce: /* fdtoq */
1767 #if defined(CONFIG_USER_ONLY)
1768 gen_op_load_fpr_DT1(DFPREG(rs2));
1770 gen_op_store_QT0_fpr(QFPREG(rd));
1776 gen_op_load_fpr_FT1(rs2);
1777 gen_clear_float_exceptions();
1779 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1780 gen_op_store_FT0_fpr(rd);
1783 gen_op_load_fpr_DT1(DFPREG(rs2));
1784 gen_clear_float_exceptions();
1786 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1787 gen_op_store_FT0_fpr(rd);
1789 case 0xd3: /* fqtoi */
1790 #if defined(CONFIG_USER_ONLY)
1791 gen_op_load_fpr_QT1(QFPREG(rs2));
1792 gen_clear_float_exceptions();
1794 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1795 gen_op_store_FT0_fpr(rd);
1800 #ifdef TARGET_SPARC64
1801 case 0x2: /* V9 fmovd */
1802 gen_op_load_fpr_DT0(DFPREG(rs2));
1803 gen_op_store_DT0_fpr(DFPREG(rd));
1805 case 0x3: /* V9 fmovq */
1806 #if defined(CONFIG_USER_ONLY)
1807 gen_op_load_fpr_QT0(QFPREG(rs2));
1808 gen_op_store_QT0_fpr(QFPREG(rd));
1813 case 0x6: /* V9 fnegd */
1814 gen_op_load_fpr_DT1(DFPREG(rs2));
1816 gen_op_store_DT0_fpr(DFPREG(rd));
1818 case 0x7: /* V9 fnegq */
1819 #if defined(CONFIG_USER_ONLY)
1820 gen_op_load_fpr_QT1(QFPREG(rs2));
1822 gen_op_store_QT0_fpr(QFPREG(rd));
1827 case 0xa: /* V9 fabsd */
1828 gen_op_load_fpr_DT1(DFPREG(rs2));
1829 tcg_gen_helper_0_0(helper_fabsd);
1830 gen_op_store_DT0_fpr(DFPREG(rd));
1832 case 0xb: /* V9 fabsq */
1833 #if defined(CONFIG_USER_ONLY)
1834 gen_op_load_fpr_QT1(QFPREG(rs2));
1835 tcg_gen_helper_0_0(helper_fabsq);
1836 gen_op_store_QT0_fpr(QFPREG(rd));
1841 case 0x81: /* V9 fstox */
1842 gen_op_load_fpr_FT1(rs2);
1843 gen_clear_float_exceptions();
1845 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1846 gen_op_store_DT0_fpr(DFPREG(rd));
1848 case 0x82: /* V9 fdtox */
1849 gen_op_load_fpr_DT1(DFPREG(rs2));
1850 gen_clear_float_exceptions();
1852 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1853 gen_op_store_DT0_fpr(DFPREG(rd));
1855 case 0x83: /* V9 fqtox */
1856 #if defined(CONFIG_USER_ONLY)
1857 gen_op_load_fpr_QT1(QFPREG(rs2));
1858 gen_clear_float_exceptions();
1860 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1861 gen_op_store_DT0_fpr(DFPREG(rd));
1866 case 0x84: /* V9 fxtos */
1867 gen_op_load_fpr_DT1(DFPREG(rs2));
1868 gen_clear_float_exceptions();
1870 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1871 gen_op_store_FT0_fpr(rd);
1873 case 0x88: /* V9 fxtod */
1874 gen_op_load_fpr_DT1(DFPREG(rs2));
1875 gen_clear_float_exceptions();
1877 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1878 gen_op_store_DT0_fpr(DFPREG(rd));
1880 case 0x8c: /* V9 fxtoq */
1881 #if defined(CONFIG_USER_ONLY)
1882 gen_op_load_fpr_DT1(DFPREG(rs2));
1883 gen_clear_float_exceptions();
1885 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
1886 gen_op_store_QT0_fpr(QFPREG(rd));
1895 } else if (xop == 0x35) { /* FPU Operations */
1896 #ifdef TARGET_SPARC64
1899 if (gen_trap_ifnofpu(dc))
1901 gen_op_clear_ieee_excp_and_FTT();
1902 rs1 = GET_FIELD(insn, 13, 17);
1903 rs2 = GET_FIELD(insn, 27, 31);
1904 xop = GET_FIELD(insn, 18, 26);
1905 #ifdef TARGET_SPARC64
1906 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
1910 l1 = gen_new_label();
1911 r_zero = tcg_temp_new(TCG_TYPE_TL);
1912 cond = GET_FIELD_SP(insn, 14, 17);
1913 rs1 = GET_FIELD(insn, 13, 17);
1914 gen_movl_reg_T0(rs1);
1915 tcg_gen_movi_tl(r_zero, 0);
1916 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1917 gen_op_load_fpr_FT1(rs2);
1918 gen_op_store_FT0_fpr(rd);
1921 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
1925 l1 = gen_new_label();
1926 r_zero = tcg_temp_new(TCG_TYPE_TL);
1927 cond = GET_FIELD_SP(insn, 14, 17);
1928 rs1 = GET_FIELD(insn, 13, 17);
1929 gen_movl_reg_T0(rs1);
1930 tcg_gen_movi_tl(r_zero, 0);
1931 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1932 gen_op_load_fpr_DT1(DFPREG(rs2));
1933 gen_op_store_DT0_fpr(DFPREG(rd));
1936 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
1937 #if defined(CONFIG_USER_ONLY)
1941 l1 = gen_new_label();
1942 r_zero = tcg_temp_new(TCG_TYPE_TL);
1943 cond = GET_FIELD_SP(insn, 14, 17);
1944 rs1 = GET_FIELD(insn, 13, 17);
1945 gen_movl_reg_T0(rs1);
1946 tcg_gen_movi_tl(r_zero, 0);
1947 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1948 gen_op_load_fpr_QT1(QFPREG(rs2));
1949 gen_op_store_QT0_fpr(QFPREG(rd));
1958 #ifdef TARGET_SPARC64
1959 case 0x001: /* V9 fmovscc %fcc0 */
1960 cond = GET_FIELD_SP(insn, 14, 17);
1961 gen_op_load_fpr_FT0(rd);
1962 gen_op_load_fpr_FT1(rs2);
1964 gen_fcond[0][cond]();
1966 gen_op_store_FT0_fpr(rd);
1968 case 0x002: /* V9 fmovdcc %fcc0 */
1969 cond = GET_FIELD_SP(insn, 14, 17);
1970 gen_op_load_fpr_DT0(DFPREG(rd));
1971 gen_op_load_fpr_DT1(DFPREG(rs2));
1973 gen_fcond[0][cond]();
1975 gen_op_store_DT0_fpr(DFPREG(rd));
1977 case 0x003: /* V9 fmovqcc %fcc0 */
1978 #if defined(CONFIG_USER_ONLY)
1979 cond = GET_FIELD_SP(insn, 14, 17);
1980 gen_op_load_fpr_QT0(QFPREG(rd));
1981 gen_op_load_fpr_QT1(QFPREG(rs2));
1983 gen_fcond[0][cond]();
1985 gen_op_store_QT0_fpr(QFPREG(rd));
1990 case 0x041: /* V9 fmovscc %fcc1 */
1991 cond = GET_FIELD_SP(insn, 14, 17);
1992 gen_op_load_fpr_FT0(rd);
1993 gen_op_load_fpr_FT1(rs2);
1995 gen_fcond[1][cond]();
1997 gen_op_store_FT0_fpr(rd);
1999 case 0x042: /* V9 fmovdcc %fcc1 */
2000 cond = GET_FIELD_SP(insn, 14, 17);
2001 gen_op_load_fpr_DT0(DFPREG(rd));
2002 gen_op_load_fpr_DT1(DFPREG(rs2));
2004 gen_fcond[1][cond]();
2006 gen_op_store_DT0_fpr(DFPREG(rd));
2008 case 0x043: /* V9 fmovqcc %fcc1 */
2009 #if defined(CONFIG_USER_ONLY)
2010 cond = GET_FIELD_SP(insn, 14, 17);
2011 gen_op_load_fpr_QT0(QFPREG(rd));
2012 gen_op_load_fpr_QT1(QFPREG(rs2));
2014 gen_fcond[1][cond]();
2016 gen_op_store_QT0_fpr(QFPREG(rd));
2021 case 0x081: /* V9 fmovscc %fcc2 */
2022 cond = GET_FIELD_SP(insn, 14, 17);
2023 gen_op_load_fpr_FT0(rd);
2024 gen_op_load_fpr_FT1(rs2);
2026 gen_fcond[2][cond]();
2028 gen_op_store_FT0_fpr(rd);
2030 case 0x082: /* V9 fmovdcc %fcc2 */
2031 cond = GET_FIELD_SP(insn, 14, 17);
2032 gen_op_load_fpr_DT0(DFPREG(rd));
2033 gen_op_load_fpr_DT1(DFPREG(rs2));
2035 gen_fcond[2][cond]();
2037 gen_op_store_DT0_fpr(DFPREG(rd));
2039 case 0x083: /* V9 fmovqcc %fcc2 */
2040 #if defined(CONFIG_USER_ONLY)
2041 cond = GET_FIELD_SP(insn, 14, 17);
2042 gen_op_load_fpr_QT0(rd);
2043 gen_op_load_fpr_QT1(rs2);
2045 gen_fcond[2][cond]();
2047 gen_op_store_QT0_fpr(rd);
2052 case 0x0c1: /* V9 fmovscc %fcc3 */
2053 cond = GET_FIELD_SP(insn, 14, 17);
2054 gen_op_load_fpr_FT0(rd);
2055 gen_op_load_fpr_FT1(rs2);
2057 gen_fcond[3][cond]();
2059 gen_op_store_FT0_fpr(rd);
2061 case 0x0c2: /* V9 fmovdcc %fcc3 */
2062 cond = GET_FIELD_SP(insn, 14, 17);
2063 gen_op_load_fpr_DT0(DFPREG(rd));
2064 gen_op_load_fpr_DT1(DFPREG(rs2));
2066 gen_fcond[3][cond]();
2068 gen_op_store_DT0_fpr(DFPREG(rd));
2070 case 0x0c3: /* V9 fmovqcc %fcc3 */
2071 #if defined(CONFIG_USER_ONLY)
2072 cond = GET_FIELD_SP(insn, 14, 17);
2073 gen_op_load_fpr_QT0(QFPREG(rd));
2074 gen_op_load_fpr_QT1(QFPREG(rs2));
2076 gen_fcond[3][cond]();
2078 gen_op_store_QT0_fpr(QFPREG(rd));
2083 case 0x101: /* V9 fmovscc %icc */
2084 cond = GET_FIELD_SP(insn, 14, 17);
2085 gen_op_load_fpr_FT0(rd);
2086 gen_op_load_fpr_FT1(rs2);
2088 gen_cond[0][cond]();
2090 gen_op_store_FT0_fpr(rd);
2092 case 0x102: /* V9 fmovdcc %icc */
2093 cond = GET_FIELD_SP(insn, 14, 17);
2094 gen_op_load_fpr_DT0(DFPREG(rd));
2095 gen_op_load_fpr_DT1(DFPREG(rs2));
2097 gen_cond[0][cond]();
2099 gen_op_store_DT0_fpr(DFPREG(rd));
2101 case 0x103: /* V9 fmovqcc %icc */
2102 #if defined(CONFIG_USER_ONLY)
2103 cond = GET_FIELD_SP(insn, 14, 17);
2104 gen_op_load_fpr_QT0(rd);
2105 gen_op_load_fpr_QT1(rs2);
2107 gen_cond[0][cond]();
2109 gen_op_store_QT0_fpr(rd);
2114 case 0x181: /* V9 fmovscc %xcc */
2115 cond = GET_FIELD_SP(insn, 14, 17);
2116 gen_op_load_fpr_FT0(rd);
2117 gen_op_load_fpr_FT1(rs2);
2119 gen_cond[1][cond]();
2121 gen_op_store_FT0_fpr(rd);
2123 case 0x182: /* V9 fmovdcc %xcc */
2124 cond = GET_FIELD_SP(insn, 14, 17);
2125 gen_op_load_fpr_DT0(DFPREG(rd));
2126 gen_op_load_fpr_DT1(DFPREG(rs2));
2128 gen_cond[1][cond]();
2130 gen_op_store_DT0_fpr(DFPREG(rd));
2132 case 0x183: /* V9 fmovqcc %xcc */
2133 #if defined(CONFIG_USER_ONLY)
2134 cond = GET_FIELD_SP(insn, 14, 17);
2135 gen_op_load_fpr_QT0(rd);
2136 gen_op_load_fpr_QT1(rs2);
2138 gen_cond[1][cond]();
2140 gen_op_store_QT0_fpr(rd);
2146 case 0x51: /* fcmps, V9 %fcc */
2147 gen_op_load_fpr_FT0(rs1);
2148 gen_op_load_fpr_FT1(rs2);
2149 gen_op_fcmps(rd & 3);
2151 case 0x52: /* fcmpd, V9 %fcc */
2152 gen_op_load_fpr_DT0(DFPREG(rs1));
2153 gen_op_load_fpr_DT1(DFPREG(rs2));
2154 gen_op_fcmpd(rd & 3);
2156 case 0x53: /* fcmpq, V9 %fcc */
2157 #if defined(CONFIG_USER_ONLY)
2158 gen_op_load_fpr_QT0(QFPREG(rs1));
2159 gen_op_load_fpr_QT1(QFPREG(rs2));
2160 gen_op_fcmpq(rd & 3);
2162 #else /* !defined(CONFIG_USER_ONLY) */
2165 case 0x55: /* fcmpes, V9 %fcc */
2166 gen_op_load_fpr_FT0(rs1);
2167 gen_op_load_fpr_FT1(rs2);
2168 gen_op_fcmpes(rd & 3);
2170 case 0x56: /* fcmped, V9 %fcc */
2171 gen_op_load_fpr_DT0(DFPREG(rs1));
2172 gen_op_load_fpr_DT1(DFPREG(rs2));
2173 gen_op_fcmped(rd & 3);
2175 case 0x57: /* fcmpeq, V9 %fcc */
2176 #if defined(CONFIG_USER_ONLY)
2177 gen_op_load_fpr_QT0(QFPREG(rs1));
2178 gen_op_load_fpr_QT1(QFPREG(rs2));
2179 gen_op_fcmpeq(rd & 3);
2181 #else/* !defined(CONFIG_USER_ONLY) */
2188 } else if (xop == 0x2) {
2191 rs1 = GET_FIELD(insn, 13, 17);
2193 // or %g0, x, y -> mov T0, x; mov y, T0
2194 if (IS_IMM) { /* immediate */
2195 rs2 = GET_FIELDs(insn, 19, 31);
2196 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2197 } else { /* register */
2198 rs2 = GET_FIELD(insn, 27, 31);
2199 gen_movl_reg_T0(rs2);
2202 gen_movl_reg_T0(rs1);
2203 if (IS_IMM) { /* immediate */
2204 rs2 = GET_FIELDs(insn, 19, 31);
2205 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2206 } else { /* register */
2207 // or x, %g0, y -> mov T1, x; mov y, T1
2208 rs2 = GET_FIELD(insn, 27, 31);
2210 gen_movl_reg_T1(rs2);
2215 gen_movl_T0_reg(rd);
2217 #ifdef TARGET_SPARC64
2218 } else if (xop == 0x25) { /* sll, V9 sllx */
2219 rs1 = GET_FIELD(insn, 13, 17);
2220 gen_movl_reg_T0(rs1);
2221 if (IS_IMM) { /* immediate */
2222 rs2 = GET_FIELDs(insn, 20, 31);
2223 if (insn & (1 << 12)) {
2224 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2226 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2227 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2229 } else { /* register */
2230 rs2 = GET_FIELD(insn, 27, 31);
2231 gen_movl_reg_T1(rs2);
2232 if (insn & (1 << 12)) {
2233 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2234 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2236 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2237 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2238 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2241 gen_movl_T0_reg(rd);
2242 } else if (xop == 0x26) { /* srl, V9 srlx */
2243 rs1 = GET_FIELD(insn, 13, 17);
2244 gen_movl_reg_T0(rs1);
2245 if (IS_IMM) { /* immediate */
2246 rs2 = GET_FIELDs(insn, 20, 31);
2247 if (insn & (1 << 12)) {
2248 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2250 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2251 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2253 } else { /* register */
2254 rs2 = GET_FIELD(insn, 27, 31);
2255 gen_movl_reg_T1(rs2);
2256 if (insn & (1 << 12)) {
2257 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2258 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2260 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2261 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2262 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2265 gen_movl_T0_reg(rd);
2266 } else if (xop == 0x27) { /* sra, V9 srax */
2267 rs1 = GET_FIELD(insn, 13, 17);
2268 gen_movl_reg_T0(rs1);
2269 if (IS_IMM) { /* immediate */
2270 rs2 = GET_FIELDs(insn, 20, 31);
2271 if (insn & (1 << 12)) {
2272 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2274 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2275 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2276 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2278 } else { /* register */
2279 rs2 = GET_FIELD(insn, 27, 31);
2280 gen_movl_reg_T1(rs2);
2281 if (insn & (1 << 12)) {
2282 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2283 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2285 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2286 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2287 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2290 gen_movl_T0_reg(rd);
2292 } else if (xop < 0x36) {
2293 rs1 = GET_FIELD(insn, 13, 17);
2294 gen_movl_reg_T0(rs1);
2295 if (IS_IMM) { /* immediate */
2296 rs2 = GET_FIELDs(insn, 19, 31);
2297 gen_movl_simm_T1(rs2);
2298 } else { /* register */
2299 rs2 = GET_FIELD(insn, 27, 31);
2300 gen_movl_reg_T1(rs2);
2303 switch (xop & ~0x10) {
2306 gen_op_add_T1_T0_cc();
2311 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2313 gen_op_logic_T0_cc();
2316 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2318 gen_op_logic_T0_cc();
2321 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2323 gen_op_logic_T0_cc();
2327 gen_op_sub_T1_T0_cc();
2329 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2332 gen_op_andn_T1_T0();
2334 gen_op_logic_T0_cc();
2339 gen_op_logic_T0_cc();
2342 gen_op_xnor_T1_T0();
2344 gen_op_logic_T0_cc();
2348 gen_op_addx_T1_T0_cc();
2350 gen_mov_reg_C(cpu_tmp0);
2351 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
2352 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2355 #ifdef TARGET_SPARC64
2356 case 0x9: /* V9 mulx */
2357 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2361 gen_op_umul_T1_T0();
2363 gen_op_logic_T0_cc();
2366 gen_op_smul_T1_T0();
2368 gen_op_logic_T0_cc();
2372 gen_op_subx_T1_T0_cc();
2374 gen_mov_reg_C(cpu_tmp0);
2375 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
2376 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2379 #ifdef TARGET_SPARC64
2380 case 0xd: /* V9 udivx */
2381 gen_op_udivx_T1_T0();
2385 gen_op_udiv_T1_T0();
2390 gen_op_sdiv_T1_T0();
2397 gen_movl_T0_reg(rd);
2400 case 0x20: /* taddcc */
2401 gen_op_tadd_T1_T0_cc();
2402 gen_movl_T0_reg(rd);
2404 case 0x21: /* tsubcc */
2405 gen_op_tsub_T1_T0_cc();
2406 gen_movl_T0_reg(rd);
2408 case 0x22: /* taddcctv */
2410 gen_op_tadd_T1_T0_ccTV();
2411 gen_movl_T0_reg(rd);
2413 case 0x23: /* tsubcctv */
2415 gen_op_tsub_T1_T0_ccTV();
2416 gen_movl_T0_reg(rd);
2418 case 0x24: /* mulscc */
2419 gen_op_mulscc_T1_T0();
2420 gen_movl_T0_reg(rd);
2422 #ifndef TARGET_SPARC64
2423 case 0x25: /* sll */
2424 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2425 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2426 gen_movl_T0_reg(rd);
2428 case 0x26: /* srl */
2429 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2430 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2431 gen_movl_T0_reg(rd);
2433 case 0x27: /* sra */
2434 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
2435 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
2436 gen_movl_T0_reg(rd);
2444 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
2446 #ifndef TARGET_SPARC64
2447 case 0x01 ... 0x0f: /* undefined in the
2451 case 0x10 ... 0x1f: /* implementation-dependent
2457 case 0x2: /* V9 wrccr */
2461 case 0x3: /* V9 wrasi */
2463 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
2465 case 0x6: /* V9 wrfprs */
2467 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
2473 case 0xf: /* V9 sir, nop if user */
2474 #if !defined(CONFIG_USER_ONLY)
2479 case 0x13: /* Graphics Status */
2480 if (gen_trap_ifnofpu(dc))
2483 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
2485 case 0x17: /* Tick compare */
2486 #if !defined(CONFIG_USER_ONLY)
2487 if (!supervisor(dc))
2494 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2496 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2497 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2498 offsetof(CPUState, tick));
2499 tcg_gen_helper_0_2(helper_tick_set_limit,
2500 r_tickptr, cpu_T[0]);
2503 case 0x18: /* System tick */
2504 #if !defined(CONFIG_USER_ONLY)
2505 if (!supervisor(dc))
2512 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2513 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2514 offsetof(CPUState, stick));
2515 tcg_gen_helper_0_2(helper_tick_set_count,
2516 r_tickptr, cpu_T[0]);
2519 case 0x19: /* System tick compare */
2520 #if !defined(CONFIG_USER_ONLY)
2521 if (!supervisor(dc))
2528 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2530 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2531 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2532 offsetof(CPUState, stick));
2533 tcg_gen_helper_0_2(helper_tick_set_limit,
2534 r_tickptr, cpu_T[0]);
2538 case 0x10: /* Performance Control */
2539 case 0x11: /* Performance Instrumentation Counter */
2540 case 0x12: /* Dispatch Control */
2541 case 0x14: /* Softint set */
2542 case 0x15: /* Softint clear */
2543 case 0x16: /* Softint write */
2550 #if !defined(CONFIG_USER_ONLY)
2551 case 0x31: /* wrpsr, V9 saved, restored */
2553 if (!supervisor(dc))
2555 #ifdef TARGET_SPARC64
2563 case 2: /* UA2005 allclean */
2564 case 3: /* UA2005 otherw */
2565 case 4: /* UA2005 normalw */
2566 case 5: /* UA2005 invalw */
2573 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
2581 case 0x32: /* wrwim, V9 wrpr */
2583 if (!supervisor(dc))
2586 #ifdef TARGET_SPARC64
2592 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2593 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2594 offsetof(CPUState, tsptr));
2595 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2596 offsetof(trap_state, tpc));
2603 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2604 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2605 offsetof(CPUState, tsptr));
2606 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2607 offsetof(trap_state, tnpc));
2614 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2615 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2616 offsetof(CPUState, tsptr));
2617 tcg_gen_st_tl(cpu_T[0], r_tsptr,
2618 offsetof(trap_state, tstate));
2625 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2626 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2627 offsetof(CPUState, tsptr));
2628 tcg_gen_st_i32(cpu_T[0], r_tsptr,
2629 offsetof(trap_state, tt));
2636 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2637 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2638 offsetof(CPUState, tick));
2639 tcg_gen_helper_0_2(helper_tick_set_count,
2640 r_tickptr, cpu_T[0]);
2644 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2648 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
2654 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
2657 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
2663 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
2665 case 11: // canrestore
2666 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
2668 case 12: // cleanwin
2669 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
2671 case 13: // otherwin
2672 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
2675 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
2677 case 16: // UA2005 gl
2678 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
2680 case 26: // UA2005 strand status
2681 if (!hypervisor(dc))
2683 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
2689 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
2690 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
2694 case 0x33: /* wrtbr, UA2005 wrhpr */
2696 #ifndef TARGET_SPARC64
2697 if (!supervisor(dc))
2700 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
2702 if (!hypervisor(dc))
2707 // XXX gen_op_wrhpstate();
2714 // XXX gen_op_wrhtstate();
2717 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
2720 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
2722 case 31: // hstick_cmpr
2726 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
2728 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2729 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2730 offsetof(CPUState, hstick));
2731 tcg_gen_helper_0_2(helper_tick_set_limit,
2732 r_tickptr, cpu_T[0]);
2735 case 6: // hver readonly
2743 #ifdef TARGET_SPARC64
2744 case 0x2c: /* V9 movcc */
2746 int cc = GET_FIELD_SP(insn, 11, 12);
2747 int cond = GET_FIELD_SP(insn, 14, 17);
2752 if (insn & (1 << 18)) {
2754 gen_cond[0][cond]();
2756 gen_cond[1][cond]();
2760 gen_fcond[cc][cond]();
2763 l1 = gen_new_label();
2765 r_zero = tcg_temp_new(TCG_TYPE_TL);
2766 tcg_gen_movi_tl(r_zero, 0);
2767 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[2], r_zero, l1);
2768 if (IS_IMM) { /* immediate */
2769 rs2 = GET_FIELD_SPs(insn, 0, 10);
2770 gen_movl_simm_T1(rs2);
2772 rs2 = GET_FIELD_SP(insn, 0, 4);
2773 gen_movl_reg_T1(rs2);
2775 gen_movl_T1_reg(rd);
2779 case 0x2d: /* V9 sdivx */
2780 gen_op_sdivx_T1_T0();
2781 gen_movl_T0_reg(rd);
2783 case 0x2e: /* V9 popc */
2785 if (IS_IMM) { /* immediate */
2786 rs2 = GET_FIELD_SPs(insn, 0, 12);
2787 gen_movl_simm_T1(rs2);
2788 // XXX optimize: popc(constant)
2791 rs2 = GET_FIELD_SP(insn, 0, 4);
2792 gen_movl_reg_T1(rs2);
2794 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
2796 gen_movl_T0_reg(rd);
2798 case 0x2f: /* V9 movr */
2800 int cond = GET_FIELD_SP(insn, 10, 12);
2804 rs1 = GET_FIELD(insn, 13, 17);
2805 gen_movl_reg_T0(rs1);
2807 l1 = gen_new_label();
2809 r_zero = tcg_temp_new(TCG_TYPE_TL);
2810 tcg_gen_movi_tl(r_zero, 0);
2811 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2812 if (IS_IMM) { /* immediate */
2813 rs2 = GET_FIELD_SPs(insn, 0, 9);
2814 gen_movl_simm_T1(rs2);
2816 rs2 = GET_FIELD_SP(insn, 0, 4);
2817 gen_movl_reg_T1(rs2);
2819 gen_movl_T1_reg(rd);
2828 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
2829 #ifdef TARGET_SPARC64
2830 int opf = GET_FIELD_SP(insn, 5, 13);
2831 rs1 = GET_FIELD(insn, 13, 17);
2832 rs2 = GET_FIELD(insn, 27, 31);
2833 if (gen_trap_ifnofpu(dc))
2837 case 0x000: /* VIS I edge8cc */
2838 case 0x001: /* VIS II edge8n */
2839 case 0x002: /* VIS I edge8lcc */
2840 case 0x003: /* VIS II edge8ln */
2841 case 0x004: /* VIS I edge16cc */
2842 case 0x005: /* VIS II edge16n */
2843 case 0x006: /* VIS I edge16lcc */
2844 case 0x007: /* VIS II edge16ln */
2845 case 0x008: /* VIS I edge32cc */
2846 case 0x009: /* VIS II edge32n */
2847 case 0x00a: /* VIS I edge32lcc */
2848 case 0x00b: /* VIS II edge32ln */
2851 case 0x010: /* VIS I array8 */
2852 gen_movl_reg_T0(rs1);
2853 gen_movl_reg_T1(rs2);
2855 gen_movl_T0_reg(rd);
2857 case 0x012: /* VIS I array16 */
2858 gen_movl_reg_T0(rs1);
2859 gen_movl_reg_T1(rs2);
2861 gen_movl_T0_reg(rd);
2863 case 0x014: /* VIS I array32 */
2864 gen_movl_reg_T0(rs1);
2865 gen_movl_reg_T1(rs2);
2867 gen_movl_T0_reg(rd);
2869 case 0x018: /* VIS I alignaddr */
2870 gen_movl_reg_T0(rs1);
2871 gen_movl_reg_T1(rs2);
2873 gen_movl_T0_reg(rd);
2875 case 0x019: /* VIS II bmask */
2876 case 0x01a: /* VIS I alignaddrl */
2879 case 0x020: /* VIS I fcmple16 */
2880 gen_op_load_fpr_DT0(DFPREG(rs1));
2881 gen_op_load_fpr_DT1(DFPREG(rs2));
2883 gen_op_store_DT0_fpr(DFPREG(rd));
2885 case 0x022: /* VIS I fcmpne16 */
2886 gen_op_load_fpr_DT0(DFPREG(rs1));
2887 gen_op_load_fpr_DT1(DFPREG(rs2));
2889 gen_op_store_DT0_fpr(DFPREG(rd));
2891 case 0x024: /* VIS I fcmple32 */
2892 gen_op_load_fpr_DT0(DFPREG(rs1));
2893 gen_op_load_fpr_DT1(DFPREG(rs2));
2895 gen_op_store_DT0_fpr(DFPREG(rd));
2897 case 0x026: /* VIS I fcmpne32 */
2898 gen_op_load_fpr_DT0(DFPREG(rs1));
2899 gen_op_load_fpr_DT1(DFPREG(rs2));
2901 gen_op_store_DT0_fpr(DFPREG(rd));
2903 case 0x028: /* VIS I fcmpgt16 */
2904 gen_op_load_fpr_DT0(DFPREG(rs1));
2905 gen_op_load_fpr_DT1(DFPREG(rs2));
2907 gen_op_store_DT0_fpr(DFPREG(rd));
2909 case 0x02a: /* VIS I fcmpeq16 */
2910 gen_op_load_fpr_DT0(DFPREG(rs1));
2911 gen_op_load_fpr_DT1(DFPREG(rs2));
2913 gen_op_store_DT0_fpr(DFPREG(rd));
2915 case 0x02c: /* VIS I fcmpgt32 */
2916 gen_op_load_fpr_DT0(DFPREG(rs1));
2917 gen_op_load_fpr_DT1(DFPREG(rs2));
2919 gen_op_store_DT0_fpr(DFPREG(rd));
2921 case 0x02e: /* VIS I fcmpeq32 */
2922 gen_op_load_fpr_DT0(DFPREG(rs1));
2923 gen_op_load_fpr_DT1(DFPREG(rs2));
2925 gen_op_store_DT0_fpr(DFPREG(rd));
2927 case 0x031: /* VIS I fmul8x16 */
2928 gen_op_load_fpr_DT0(DFPREG(rs1));
2929 gen_op_load_fpr_DT1(DFPREG(rs2));
2931 gen_op_store_DT0_fpr(DFPREG(rd));
2933 case 0x033: /* VIS I fmul8x16au */
2934 gen_op_load_fpr_DT0(DFPREG(rs1));
2935 gen_op_load_fpr_DT1(DFPREG(rs2));
2936 gen_op_fmul8x16au();
2937 gen_op_store_DT0_fpr(DFPREG(rd));
2939 case 0x035: /* VIS I fmul8x16al */
2940 gen_op_load_fpr_DT0(DFPREG(rs1));
2941 gen_op_load_fpr_DT1(DFPREG(rs2));
2942 gen_op_fmul8x16al();
2943 gen_op_store_DT0_fpr(DFPREG(rd));
2945 case 0x036: /* VIS I fmul8sux16 */
2946 gen_op_load_fpr_DT0(DFPREG(rs1));
2947 gen_op_load_fpr_DT1(DFPREG(rs2));
2948 gen_op_fmul8sux16();
2949 gen_op_store_DT0_fpr(DFPREG(rd));
2951 case 0x037: /* VIS I fmul8ulx16 */
2952 gen_op_load_fpr_DT0(DFPREG(rs1));
2953 gen_op_load_fpr_DT1(DFPREG(rs2));
2954 gen_op_fmul8ulx16();
2955 gen_op_store_DT0_fpr(DFPREG(rd));
2957 case 0x038: /* VIS I fmuld8sux16 */
2958 gen_op_load_fpr_DT0(DFPREG(rs1));
2959 gen_op_load_fpr_DT1(DFPREG(rs2));
2960 gen_op_fmuld8sux16();
2961 gen_op_store_DT0_fpr(DFPREG(rd));
2963 case 0x039: /* VIS I fmuld8ulx16 */
2964 gen_op_load_fpr_DT0(DFPREG(rs1));
2965 gen_op_load_fpr_DT1(DFPREG(rs2));
2966 gen_op_fmuld8ulx16();
2967 gen_op_store_DT0_fpr(DFPREG(rd));
2969 case 0x03a: /* VIS I fpack32 */
2970 case 0x03b: /* VIS I fpack16 */
2971 case 0x03d: /* VIS I fpackfix */
2972 case 0x03e: /* VIS I pdist */
2975 case 0x048: /* VIS I faligndata */
2976 gen_op_load_fpr_DT0(DFPREG(rs1));
2977 gen_op_load_fpr_DT1(DFPREG(rs2));
2978 gen_op_faligndata();
2979 gen_op_store_DT0_fpr(DFPREG(rd));
2981 case 0x04b: /* VIS I fpmerge */
2982 gen_op_load_fpr_DT0(DFPREG(rs1));
2983 gen_op_load_fpr_DT1(DFPREG(rs2));
2985 gen_op_store_DT0_fpr(DFPREG(rd));
2987 case 0x04c: /* VIS II bshuffle */
2990 case 0x04d: /* VIS I fexpand */
2991 gen_op_load_fpr_DT0(DFPREG(rs1));
2992 gen_op_load_fpr_DT1(DFPREG(rs2));
2994 gen_op_store_DT0_fpr(DFPREG(rd));
2996 case 0x050: /* VIS I fpadd16 */
2997 gen_op_load_fpr_DT0(DFPREG(rs1));
2998 gen_op_load_fpr_DT1(DFPREG(rs2));
3000 gen_op_store_DT0_fpr(DFPREG(rd));
3002 case 0x051: /* VIS I fpadd16s */
3003 gen_op_load_fpr_FT0(rs1);
3004 gen_op_load_fpr_FT1(rs2);
3006 gen_op_store_FT0_fpr(rd);
3008 case 0x052: /* VIS I fpadd32 */
3009 gen_op_load_fpr_DT0(DFPREG(rs1));
3010 gen_op_load_fpr_DT1(DFPREG(rs2));
3012 gen_op_store_DT0_fpr(DFPREG(rd));
3014 case 0x053: /* VIS I fpadd32s */
3015 gen_op_load_fpr_FT0(rs1);
3016 gen_op_load_fpr_FT1(rs2);
3018 gen_op_store_FT0_fpr(rd);
3020 case 0x054: /* VIS I fpsub16 */
3021 gen_op_load_fpr_DT0(DFPREG(rs1));
3022 gen_op_load_fpr_DT1(DFPREG(rs2));
3024 gen_op_store_DT0_fpr(DFPREG(rd));
3026 case 0x055: /* VIS I fpsub16s */
3027 gen_op_load_fpr_FT0(rs1);
3028 gen_op_load_fpr_FT1(rs2);
3030 gen_op_store_FT0_fpr(rd);
3032 case 0x056: /* VIS I fpsub32 */
3033 gen_op_load_fpr_DT0(DFPREG(rs1));
3034 gen_op_load_fpr_DT1(DFPREG(rs2));
3036 gen_op_store_DT0_fpr(DFPREG(rd));
3038 case 0x057: /* VIS I fpsub32s */
3039 gen_op_load_fpr_FT0(rs1);
3040 gen_op_load_fpr_FT1(rs2);
3042 gen_op_store_FT0_fpr(rd);
3044 case 0x060: /* VIS I fzero */
3045 gen_op_movl_DT0_0();
3046 gen_op_store_DT0_fpr(DFPREG(rd));
3048 case 0x061: /* VIS I fzeros */
3049 gen_op_movl_FT0_0();
3050 gen_op_store_FT0_fpr(rd);
3052 case 0x062: /* VIS I fnor */
3053 gen_op_load_fpr_DT0(DFPREG(rs1));
3054 gen_op_load_fpr_DT1(DFPREG(rs2));
3056 gen_op_store_DT0_fpr(DFPREG(rd));
3058 case 0x063: /* VIS I fnors */
3059 gen_op_load_fpr_FT0(rs1);
3060 gen_op_load_fpr_FT1(rs2);
3062 gen_op_store_FT0_fpr(rd);
3064 case 0x064: /* VIS I fandnot2 */
3065 gen_op_load_fpr_DT1(DFPREG(rs1));
3066 gen_op_load_fpr_DT0(DFPREG(rs2));
3068 gen_op_store_DT0_fpr(DFPREG(rd));
3070 case 0x065: /* VIS I fandnot2s */
3071 gen_op_load_fpr_FT1(rs1);
3072 gen_op_load_fpr_FT0(rs2);
3074 gen_op_store_FT0_fpr(rd);
3076 case 0x066: /* VIS I fnot2 */
3077 gen_op_load_fpr_DT1(DFPREG(rs2));
3079 gen_op_store_DT0_fpr(DFPREG(rd));
3081 case 0x067: /* VIS I fnot2s */
3082 gen_op_load_fpr_FT1(rs2);
3084 gen_op_store_FT0_fpr(rd);
3086 case 0x068: /* VIS I fandnot1 */
3087 gen_op_load_fpr_DT0(DFPREG(rs1));
3088 gen_op_load_fpr_DT1(DFPREG(rs2));
3090 gen_op_store_DT0_fpr(DFPREG(rd));
3092 case 0x069: /* VIS I fandnot1s */
3093 gen_op_load_fpr_FT0(rs1);
3094 gen_op_load_fpr_FT1(rs2);
3096 gen_op_store_FT0_fpr(rd);
3098 case 0x06a: /* VIS I fnot1 */
3099 gen_op_load_fpr_DT1(DFPREG(rs1));
3101 gen_op_store_DT0_fpr(DFPREG(rd));
3103 case 0x06b: /* VIS I fnot1s */
3104 gen_op_load_fpr_FT1(rs1);
3106 gen_op_store_FT0_fpr(rd);
3108 case 0x06c: /* VIS I fxor */
3109 gen_op_load_fpr_DT0(DFPREG(rs1));
3110 gen_op_load_fpr_DT1(DFPREG(rs2));
3112 gen_op_store_DT0_fpr(DFPREG(rd));
3114 case 0x06d: /* VIS I fxors */
3115 gen_op_load_fpr_FT0(rs1);
3116 gen_op_load_fpr_FT1(rs2);
3118 gen_op_store_FT0_fpr(rd);
3120 case 0x06e: /* VIS I fnand */
3121 gen_op_load_fpr_DT0(DFPREG(rs1));
3122 gen_op_load_fpr_DT1(DFPREG(rs2));
3124 gen_op_store_DT0_fpr(DFPREG(rd));
3126 case 0x06f: /* VIS I fnands */
3127 gen_op_load_fpr_FT0(rs1);
3128 gen_op_load_fpr_FT1(rs2);
3130 gen_op_store_FT0_fpr(rd);
3132 case 0x070: /* VIS I fand */
3133 gen_op_load_fpr_DT0(DFPREG(rs1));
3134 gen_op_load_fpr_DT1(DFPREG(rs2));
3136 gen_op_store_DT0_fpr(DFPREG(rd));
3138 case 0x071: /* VIS I fands */
3139 gen_op_load_fpr_FT0(rs1);
3140 gen_op_load_fpr_FT1(rs2);
3142 gen_op_store_FT0_fpr(rd);
3144 case 0x072: /* VIS I fxnor */
3145 gen_op_load_fpr_DT0(DFPREG(rs1));
3146 gen_op_load_fpr_DT1(DFPREG(rs2));
3148 gen_op_store_DT0_fpr(DFPREG(rd));
3150 case 0x073: /* VIS I fxnors */
3151 gen_op_load_fpr_FT0(rs1);
3152 gen_op_load_fpr_FT1(rs2);
3154 gen_op_store_FT0_fpr(rd);
3156 case 0x074: /* VIS I fsrc1 */
3157 gen_op_load_fpr_DT0(DFPREG(rs1));
3158 gen_op_store_DT0_fpr(DFPREG(rd));
3160 case 0x075: /* VIS I fsrc1s */
3161 gen_op_load_fpr_FT0(rs1);
3162 gen_op_store_FT0_fpr(rd);
3164 case 0x076: /* VIS I fornot2 */
3165 gen_op_load_fpr_DT1(DFPREG(rs1));
3166 gen_op_load_fpr_DT0(DFPREG(rs2));
3168 gen_op_store_DT0_fpr(DFPREG(rd));
3170 case 0x077: /* VIS I fornot2s */
3171 gen_op_load_fpr_FT1(rs1);
3172 gen_op_load_fpr_FT0(rs2);
3174 gen_op_store_FT0_fpr(rd);
3176 case 0x078: /* VIS I fsrc2 */
3177 gen_op_load_fpr_DT0(DFPREG(rs2));
3178 gen_op_store_DT0_fpr(DFPREG(rd));
3180 case 0x079: /* VIS I fsrc2s */
3181 gen_op_load_fpr_FT0(rs2);
3182 gen_op_store_FT0_fpr(rd);
3184 case 0x07a: /* VIS I fornot1 */
3185 gen_op_load_fpr_DT0(DFPREG(rs1));
3186 gen_op_load_fpr_DT1(DFPREG(rs2));
3188 gen_op_store_DT0_fpr(DFPREG(rd));
3190 case 0x07b: /* VIS I fornot1s */
3191 gen_op_load_fpr_FT0(rs1);
3192 gen_op_load_fpr_FT1(rs2);
3194 gen_op_store_FT0_fpr(rd);
3196 case 0x07c: /* VIS I for */
3197 gen_op_load_fpr_DT0(DFPREG(rs1));
3198 gen_op_load_fpr_DT1(DFPREG(rs2));
3200 gen_op_store_DT0_fpr(DFPREG(rd));
3202 case 0x07d: /* VIS I fors */
3203 gen_op_load_fpr_FT0(rs1);
3204 gen_op_load_fpr_FT1(rs2);
3206 gen_op_store_FT0_fpr(rd);
3208 case 0x07e: /* VIS I fone */
3209 gen_op_movl_DT0_1();
3210 gen_op_store_DT0_fpr(DFPREG(rd));
3212 case 0x07f: /* VIS I fones */
3213 gen_op_movl_FT0_1();
3214 gen_op_store_FT0_fpr(rd);
3216 case 0x080: /* VIS I shutdown */
3217 case 0x081: /* VIS II siam */
3226 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3227 #ifdef TARGET_SPARC64
3232 #ifdef TARGET_SPARC64
3233 } else if (xop == 0x39) { /* V9 return */
3234 rs1 = GET_FIELD(insn, 13, 17);
3236 gen_movl_reg_T0(rs1);
3237 if (IS_IMM) { /* immediate */
3238 rs2 = GET_FIELDs(insn, 19, 31);
3239 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3240 } else { /* register */
3241 rs2 = GET_FIELD(insn, 27, 31);
3245 gen_movl_reg_T1(rs2);
3253 gen_op_check_align_T0_3();
3254 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3255 dc->npc = DYNAMIC_PC;
3259 rs1 = GET_FIELD(insn, 13, 17);
3260 gen_movl_reg_T0(rs1);
3261 if (IS_IMM) { /* immediate */
3262 rs2 = GET_FIELDs(insn, 19, 31);
3263 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3264 } else { /* register */
3265 rs2 = GET_FIELD(insn, 27, 31);
3269 gen_movl_reg_T1(rs2);
3276 case 0x38: /* jmpl */
3279 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3280 gen_movl_T1_reg(rd);
3283 gen_op_check_align_T0_3();
3284 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3285 dc->npc = DYNAMIC_PC;
3288 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3289 case 0x39: /* rett, V9 return */
3291 if (!supervisor(dc))
3294 gen_op_check_align_T0_3();
3295 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3296 dc->npc = DYNAMIC_PC;
3297 tcg_gen_helper_0_0(helper_rett);
3301 case 0x3b: /* flush */
3302 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3304 case 0x3c: /* save */
3307 gen_movl_T0_reg(rd);
3309 case 0x3d: /* restore */
3312 gen_movl_T0_reg(rd);
3314 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3315 case 0x3e: /* V9 done/retry */
3319 if (!supervisor(dc))
3321 dc->npc = DYNAMIC_PC;
3322 dc->pc = DYNAMIC_PC;
3323 tcg_gen_helper_0_0(helper_done);
3326 if (!supervisor(dc))
3328 dc->npc = DYNAMIC_PC;
3329 dc->pc = DYNAMIC_PC;
3330 tcg_gen_helper_0_0(helper_retry);
3345 case 3: /* load/store instructions */
3347 unsigned int xop = GET_FIELD(insn, 7, 12);
3348 rs1 = GET_FIELD(insn, 13, 17);
3350 gen_movl_reg_T0(rs1);
3351 if (xop == 0x3c || xop == 0x3e)
3353 rs2 = GET_FIELD(insn, 27, 31);
3354 gen_movl_reg_T1(rs2);
3356 else if (IS_IMM) { /* immediate */
3357 rs2 = GET_FIELDs(insn, 19, 31);
3358 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3359 } else { /* register */
3360 rs2 = GET_FIELD(insn, 27, 31);
3364 gen_movl_reg_T1(rs2);
3370 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
3371 (xop > 0x17 && xop <= 0x1d ) ||
3372 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
3374 case 0x0: /* load unsigned word */
3375 gen_op_check_align_T0_3();
3376 ABI32_MASK(cpu_T[0]);
3377 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
3379 case 0x1: /* load unsigned byte */
3380 ABI32_MASK(cpu_T[0]);
3381 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
3383 case 0x2: /* load unsigned halfword */
3384 gen_op_check_align_T0_1();
3385 ABI32_MASK(cpu_T[0]);
3386 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
3388 case 0x3: /* load double word */
3394 r_dword = tcg_temp_new(TCG_TYPE_I64);
3395 gen_op_check_align_T0_7();
3396 ABI32_MASK(cpu_T[0]);
3397 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
3398 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
3399 gen_movl_T0_reg(rd + 1);
3400 tcg_gen_shri_i64(r_dword, r_dword, 32);
3401 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
3404 case 0x9: /* load signed byte */
3405 ABI32_MASK(cpu_T[0]);
3406 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3408 case 0xa: /* load signed halfword */
3409 gen_op_check_align_T0_1();
3410 ABI32_MASK(cpu_T[0]);
3411 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
3413 case 0xd: /* ldstub -- XXX: should be atomically */
3414 tcg_gen_movi_i32(cpu_tmp0, 0xff);
3415 ABI32_MASK(cpu_T[0]);
3416 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
3417 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
3419 case 0x0f: /* swap register with memory. Also atomically */
3420 gen_op_check_align_T0_3();
3421 gen_movl_reg_T1(rd);
3422 ABI32_MASK(cpu_T[0]);
3423 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
3424 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3425 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
3427 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3428 case 0x10: /* load word alternate */
3429 #ifndef TARGET_SPARC64
3432 if (!supervisor(dc))
3435 gen_op_check_align_T0_3();
3436 gen_ld_asi(insn, 4, 0);
3438 case 0x11: /* load unsigned byte alternate */
3439 #ifndef TARGET_SPARC64
3442 if (!supervisor(dc))
3445 gen_ld_asi(insn, 1, 0);
3447 case 0x12: /* load unsigned halfword alternate */
3448 #ifndef TARGET_SPARC64
3451 if (!supervisor(dc))
3454 gen_op_check_align_T0_1();
3455 gen_ld_asi(insn, 2, 0);
3457 case 0x13: /* load double word alternate */
3458 #ifndef TARGET_SPARC64
3461 if (!supervisor(dc))
3466 gen_op_check_align_T0_7();
3468 gen_movl_T0_reg(rd + 1);
3470 case 0x19: /* load signed byte alternate */
3471 #ifndef TARGET_SPARC64
3474 if (!supervisor(dc))
3477 gen_ld_asi(insn, 1, 1);
3479 case 0x1a: /* load signed halfword alternate */
3480 #ifndef TARGET_SPARC64
3483 if (!supervisor(dc))
3486 gen_op_check_align_T0_1();
3487 gen_ld_asi(insn, 2, 1);
3489 case 0x1d: /* ldstuba -- XXX: should be atomically */
3490 #ifndef TARGET_SPARC64
3493 if (!supervisor(dc))
3496 gen_ldstub_asi(insn);
3498 case 0x1f: /* swap reg with alt. memory. Also atomically */
3499 #ifndef TARGET_SPARC64
3502 if (!supervisor(dc))
3505 gen_op_check_align_T0_3();
3506 gen_movl_reg_T1(rd);
3510 #ifndef TARGET_SPARC64
3511 case 0x30: /* ldc */
3512 case 0x31: /* ldcsr */
3513 case 0x33: /* lddc */
3517 #ifdef TARGET_SPARC64
3518 case 0x08: /* V9 ldsw */
3519 gen_op_check_align_T0_3();
3520 ABI32_MASK(cpu_T[0]);
3521 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
3523 case 0x0b: /* V9 ldx */
3524 gen_op_check_align_T0_7();
3525 ABI32_MASK(cpu_T[0]);
3526 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
3528 case 0x18: /* V9 ldswa */
3529 gen_op_check_align_T0_3();
3530 gen_ld_asi(insn, 4, 1);
3532 case 0x1b: /* V9 ldxa */
3533 gen_op_check_align_T0_7();
3534 gen_ld_asi(insn, 8, 0);
3536 case 0x2d: /* V9 prefetch, no effect */
3538 case 0x30: /* V9 ldfa */
3539 gen_op_check_align_T0_3();
3540 gen_ldf_asi(insn, 4, rd);
3542 case 0x33: /* V9 lddfa */
3543 gen_op_check_align_T0_3();
3544 gen_ldf_asi(insn, 8, DFPREG(rd));
3546 case 0x3d: /* V9 prefetcha, no effect */
3548 case 0x32: /* V9 ldqfa */
3549 #if defined(CONFIG_USER_ONLY)
3550 gen_op_check_align_T0_3();
3551 gen_ldf_asi(insn, 16, QFPREG(rd));
3560 gen_movl_T1_reg(rd);
3561 #ifdef TARGET_SPARC64
3564 } else if (xop >= 0x20 && xop < 0x24) {
3565 if (gen_trap_ifnofpu(dc))
3568 case 0x20: /* load fpreg */
3569 gen_op_check_align_T0_3();
3571 gen_op_store_FT0_fpr(rd);
3573 case 0x21: /* load fsr */
3574 gen_op_check_align_T0_3();
3577 tcg_gen_helper_0_0(helper_ldfsr);
3579 case 0x22: /* load quad fpreg */
3580 #if defined(CONFIG_USER_ONLY)
3581 gen_op_check_align_T0_7();
3583 gen_op_store_QT0_fpr(QFPREG(rd));
3588 case 0x23: /* load double fpreg */
3589 gen_op_check_align_T0_7();
3591 gen_op_store_DT0_fpr(DFPREG(rd));
3596 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
3597 xop == 0xe || xop == 0x1e) {
3598 gen_movl_reg_T1(rd);
3600 case 0x4: /* store word */
3601 gen_op_check_align_T0_3();
3602 ABI32_MASK(cpu_T[0]);
3603 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
3605 case 0x5: /* store byte */
3606 ABI32_MASK(cpu_T[0]);
3607 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
3609 case 0x6: /* store halfword */
3610 gen_op_check_align_T0_1();
3611 ABI32_MASK(cpu_T[0]);
3612 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
3614 case 0x7: /* store double word */
3619 TCGv r_dword, r_low;
3621 gen_op_check_align_T0_7();
3622 r_dword = tcg_temp_new(TCG_TYPE_I64);
3623 r_low = tcg_temp_new(TCG_TYPE_I32);
3624 gen_movl_reg_TN(rd + 1, r_low);
3625 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3627 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
3629 #else /* __i386__ */
3630 gen_op_check_align_T0_7();
3632 gen_movl_reg_T2(rd + 1);
3634 #endif /* __i386__ */
3636 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3637 case 0x14: /* store word alternate */
3638 #ifndef TARGET_SPARC64
3641 if (!supervisor(dc))
3644 gen_op_check_align_T0_3();
3645 gen_st_asi(insn, 4);
3647 case 0x15: /* store byte alternate */
3648 #ifndef TARGET_SPARC64
3651 if (!supervisor(dc))
3654 gen_st_asi(insn, 1);
3656 case 0x16: /* store halfword alternate */
3657 #ifndef TARGET_SPARC64
3660 if (!supervisor(dc))
3663 gen_op_check_align_T0_1();
3664 gen_st_asi(insn, 2);
3666 case 0x17: /* store double word alternate */
3667 #ifndef TARGET_SPARC64
3670 if (!supervisor(dc))
3677 TCGv r_dword, r_temp, r_size;
3679 gen_op_check_align_T0_7();
3680 r_dword = tcg_temp_new(TCG_TYPE_I64);
3681 r_temp = tcg_temp_new(TCG_TYPE_I32);
3682 r_size = tcg_temp_new(TCG_TYPE_I32);
3683 gen_movl_reg_TN(rd + 1, r_temp);
3684 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
3686 #ifdef TARGET_SPARC64
3690 offset = GET_FIELD(insn, 25, 31);
3691 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
3692 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
3695 asi = GET_FIELD(insn, 19, 26);
3696 tcg_gen_movi_i32(r_temp, asi);
3697 #ifdef TARGET_SPARC64
3700 tcg_gen_movi_i32(r_size, 8);
3701 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
3705 #ifdef TARGET_SPARC64
3706 case 0x0e: /* V9 stx */
3707 gen_op_check_align_T0_7();
3708 ABI32_MASK(cpu_T[0]);
3709 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
3711 case 0x1e: /* V9 stxa */
3712 gen_op_check_align_T0_7();
3713 gen_st_asi(insn, 8);
3719 } else if (xop > 0x23 && xop < 0x28) {
3720 if (gen_trap_ifnofpu(dc))
3724 gen_op_check_align_T0_3();
3725 gen_op_load_fpr_FT0(rd);
3728 case 0x25: /* stfsr, V9 stxfsr */
3729 #ifdef CONFIG_USER_ONLY
3730 gen_op_check_align_T0_3();
3736 #ifdef TARGET_SPARC64
3737 #if defined(CONFIG_USER_ONLY)
3738 /* V9 stqf, store quad fpreg */
3739 gen_op_check_align_T0_7();
3740 gen_op_load_fpr_QT0(QFPREG(rd));
3746 #else /* !TARGET_SPARC64 */
3747 /* stdfq, store floating point queue */
3748 #if defined(CONFIG_USER_ONLY)
3751 if (!supervisor(dc))
3753 if (gen_trap_ifnofpu(dc))
3759 gen_op_check_align_T0_7();
3760 gen_op_load_fpr_DT0(DFPREG(rd));
3766 } else if (xop > 0x33 && xop < 0x3f) {
3768 #ifdef TARGET_SPARC64
3769 case 0x34: /* V9 stfa */
3770 gen_op_check_align_T0_3();
3771 gen_op_load_fpr_FT0(rd);
3772 gen_stf_asi(insn, 4, rd);
3774 case 0x36: /* V9 stqfa */
3775 #if defined(CONFIG_USER_ONLY)
3776 gen_op_check_align_T0_7();
3777 gen_op_load_fpr_QT0(QFPREG(rd));
3778 gen_stf_asi(insn, 16, QFPREG(rd));
3783 case 0x37: /* V9 stdfa */
3784 gen_op_check_align_T0_3();
3785 gen_op_load_fpr_DT0(DFPREG(rd));
3786 gen_stf_asi(insn, 8, DFPREG(rd));
3788 case 0x3c: /* V9 casa */
3789 gen_op_check_align_T0_3();
3790 gen_cas_asi(insn, rd);
3791 gen_movl_T1_reg(rd);
3793 case 0x3e: /* V9 casxa */
3794 gen_op_check_align_T0_7();
3795 gen_casx_asi(insn, rd);
3796 gen_movl_T1_reg(rd);
3799 case 0x34: /* stc */
3800 case 0x35: /* stcsr */
3801 case 0x36: /* stdcq */
3802 case 0x37: /* stdc */
3814 /* default case for non jump instructions */
3815 if (dc->npc == DYNAMIC_PC) {
3816 dc->pc = DYNAMIC_PC;
3818 } else if (dc->npc == JUMP_PC) {
3819 /* we can do a static jump */
3820 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1]);
3824 dc->npc = dc->npc + 4;
3830 gen_op_exception(TT_ILL_INSN);
3833 #if !defined(CONFIG_USER_ONLY)
3836 gen_op_exception(TT_PRIV_INSN);
3841 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
3844 #ifndef TARGET_SPARC64
3847 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
3852 #ifndef TARGET_SPARC64
3855 gen_op_exception(TT_NCP_INSN);
/* TCG macro-op expansion callback; registered with tcg_set_macro_func()
   in cpu_sparc_init() below. */
3861 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/*
 * Core translator: disassemble guest SPARC instructions starting at the
 * TB's PC and emit TCG intermediate ops until control flow becomes
 * dynamic, a page boundary is crossed, or the op buffer fills up.
 *
 * tb:  the TranslationBlock being filled
 * spc: non-zero = "search PC" mode, which records the guest PC/NPC of
 *      every emitted op (gen_opc_pc/gen_opc_npc) so a host PC can later
 *      be mapped back to a guest instruction
 * env: CPU state, consulted for breakpoints, MMU index and FPU enable
 */
3865 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
3866 int spc, CPUSPARCState *env)
3868 target_ulong pc_start, last_pc;
3869 uint16_t *gen_opc_end;
3870 DisasContext dc1, *dc = &dc1;
/* Start from a zeroed disassembly context. */
3873 memset(dc, 0, sizeof(DisasContext));
/* On SPARC (a delay-slot architecture) cs_base carries the guest NPC. */
3878 dc->npc = (target_ulong) tb->cs_base;
3879 dc->mem_idx = cpu_mmu_index(env);
3880 dc->fpu_enabled = cpu_fpu_enabled(env);
3881 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3883 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
3884 cpu_regwptr = tcg_temp_new(TCG_TYPE_PTR); // XXX
/* If a debugger breakpoint sits on the current PC, emit a debug trap
   instead of continuing translation. */
3887 if (env->nb_breakpoints > 0) {
3888 for(j = 0; j < env->nb_breakpoints; j++) {
3889 if (env->breakpoints[j] == dc->pc) {
3890 if (dc->pc != pc_start)
3892 tcg_gen_helper_0_0(helper_debug);
3901 fprintf(logfile, "Search PC...\n");
/* Search-PC bookkeeping: pad skipped op slots, then record the guest
   PC/NPC of the op about to be generated. */
3902 j = gen_opc_ptr - gen_opc_buf;
3906 gen_opc_instr_start[lj++] = 0;
3907 gen_opc_pc[lj] = dc->pc;
3908 gen_opc_npc[lj] = dc->npc;
3909 gen_opc_instr_start[lj] = 1;
/* Translate one guest instruction; updates dc->pc/dc->npc. */
3913 disas_sparc_insn(dc);
3917 /* if the next PC is different, we abort now */
3918 if (dc->pc != (last_pc + 4))
3920 /* if we reach a page boundary, we stop generation so that the
3921 PC of a TT_TFAULT exception is always in the right page */
3922 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
3924 /* if single step mode, we generate only one instruction and
3925 generate an exception */
3926 if (env->singlestep_enabled) {
/* Stop before the op buffer overflows and keep the TB within one page
   (32-byte safety margin). */
3931 } while ((gen_opc_ptr < gen_opc_end) &&
3932 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
3936 if (dc->pc != DYNAMIC_PC &&
3937 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
3938 /* static PC and NPC: we can use direct chaining */
3939 gen_branch(dc, dc->pc, dc->npc);
3941 if (dc->pc != DYNAMIC_PC)
3947 *gen_opc_ptr = INDEX_op_end;
/* In search-PC mode, zero the instr_start slots past the last op and
   export the conditional-jump targets. */
3949 j = gen_opc_ptr - gen_opc_buf;
3952 gen_opc_instr_start[lj++] = 0;
3958 gen_opc_jump_pc[0] = dc->jump_pc[0];
3959 gen_opc_jump_pc[1] = dc->jump_pc[1];
3961 tb->size = last_pc + 4 - pc_start;
/* Optional disassembly dump of the translated guest code. */
3964 if (loglevel & CPU_LOG_TB_IN_ASM) {
3965 fprintf(logfile, "--------------\n");
3966 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3967 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
3968 fprintf(logfile, "\n");
/* Public entry point: normal translation (spc = 0, no search-PC records). */
3974 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
3976 return gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: search-PC translation (spc = 1); used to map a host
   PC inside a TB back to the faulting guest PC. */
3979 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
3981 return gen_intermediate_code_internal(tb, 1, env);
/*
 * Reset the CPU to its architectural power-on state.
 * Behavior differs between user-mode emulation (start unprivileged, all
 * features enabled) and full-system emulation (start privileged, boot PC).
 */
3984 void cpu_reset(CPUSPARCState *env)
/* Point the register-window pointer at the current window (16 regs each). */
3989 env->regwptr = env->regbase + (env->cwp * 16);
3990 #if defined(CONFIG_USER_ONLY)
3991 env->user_mode_only = 1;
3992 #ifdef TARGET_SPARC64
/* Give user-mode code the maximum usable windows. */
3993 env->cleanwin = NWINDOWS - 2;
3994 env->cansave = NWINDOWS - 2;
3995 env->pstate = PS_RMO | PS_PEF | PS_IE;
3996 env->asi = 0x82; // Primary no-fault
4002 #ifdef TARGET_SPARC64
/* System emulation: start in (hyper)privileged mode at the boot address. */
4003 env->pstate = PS_PRIV;
4004 env->hpstate = HS_PRIV;
4005 env->pc = 0x1fff0000000ULL;
4006 env->tsptr = &env->ts[env->tl];
/* sparc32: disable MMU translation/no-fault, restore boot-mode bits. */
4009 env->mmuregs[0] &= ~(MMU_E | MMU_NF);
4010 env->mmuregs[0] |= env->mmu_bm;
4012 env->npc = env->pc + 4;
/*
 * Allocate and initialize a CPUSPARCState for the CPU model named by
 * cpu_model (looked up in sparc_defs[] via cpu_sparc_find_by_name), and
 * set up the TCG global registers on first use.
 */
4016 CPUSPARCState *cpu_sparc_init(const char *cpu_model)
4019 const sparc_def_t *def;
4022 def = cpu_sparc_find_by_name(cpu_model);
4026 env = qemu_mallocz(sizeof(CPUSPARCState));
/* Copy the per-model version/feature words into the CPU state. */
4030 env->cpu_model_str = cpu_model;
4031 env->version = def->iu_version;
4032 env->fsr = def->fpu_version;
4033 #if !defined(TARGET_SPARC64)
/* sparc32 only: per-model MMU identification and register masks. */
4034 env->mmu_bm = def->mmu_bm;
4035 env->mmu_ctpr_mask = def->mmu_ctpr_mask;
4036 env->mmu_cxr_mask = def->mmu_cxr_mask;
4037 env->mmu_sfsr_mask = def->mmu_sfsr_mask;
4038 env->mmu_trcr_mask = def->mmu_trcr_mask;
4039 env->mmuregs[0] |= def->mmu_version;
4040 cpu_sparc_set_id(env, 0);
4043 /* init various static tables */
/* One-time TCG setup: register the env pointer and the T0-T2 temporaries
   as TCG globals (memory-backed on sparc64 hosts, host regs otherwise). */
4047 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
4048 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4049 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4050 #ifdef TARGET_SPARC64
4051 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
4052 TCG_AREG0, offsetof(CPUState, t0), "T0");
4053 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
4054 TCG_AREG0, offsetof(CPUState, t1), "T1");
4055 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
4056 TCG_AREG0, offsetof(CPUState, t2), "T2");
4058 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
4059 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
4060 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
/* Set this CPU's identity (sparc32 only): stores an id derived from the
   CPU index in MXCC register 7; presumably the MBus module id (CPUs
   numbered from 8) -- confirm against the MXCC documentation. */
4069 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4071 #if !defined(TARGET_SPARC64)
4072 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
/*
 * Table of supported CPU models, keyed by name (see
 * cpu_sparc_find_by_name).  iu_version/fpu_version/mmu_version encode the
 * implementation and version fields reported to the guest; the mmu_* masks
 * (sparc32 only) describe per-model MMU register behavior and are copied
 * into the CPU state by cpu_sparc_init().
 */
4076 static const sparc_def_t sparc_defs[] = {
4077 #ifdef TARGET_SPARC64
/* 64-bit (SPARC V9) models: iu_version packs impl/ver plus MAXTL and the
   window count; the sparc32 MMU mask fields are unused here. */
4079 .name = "Fujitsu Sparc64",
4080 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
4081 | (MAXTL << 8) | (NWINDOWS - 1)),
4082 .fpu_version = 0x00000000,
4086 .name = "Fujitsu Sparc64 III",
4087 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
4088 | (MAXTL << 8) | (NWINDOWS - 1)),
4089 .fpu_version = 0x00000000,
4093 .name = "Fujitsu Sparc64 IV",
4094 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
4095 | (MAXTL << 8) | (NWINDOWS - 1)),
4096 .fpu_version = 0x00000000,
4100 .name = "Fujitsu Sparc64 V",
4101 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
4102 | (MAXTL << 8) | (NWINDOWS - 1)),
4103 .fpu_version = 0x00000000,
4107 .name = "TI UltraSparc I",
4108 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4109 | (MAXTL << 8) | (NWINDOWS - 1)),
4110 .fpu_version = 0x00000000,
4114 .name = "TI UltraSparc II",
4115 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
4116 | (MAXTL << 8) | (NWINDOWS - 1)),
4117 .fpu_version = 0x00000000,
4121 .name = "TI UltraSparc IIi",
4122 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
4123 | (MAXTL << 8) | (NWINDOWS - 1)),
4124 .fpu_version = 0x00000000,
4128 .name = "TI UltraSparc IIe",
4129 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
4130 | (MAXTL << 8) | (NWINDOWS - 1)),
4131 .fpu_version = 0x00000000,
4135 .name = "Sun UltraSparc III",
4136 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
4137 | (MAXTL << 8) | (NWINDOWS - 1)),
4138 .fpu_version = 0x00000000,
4142 .name = "Sun UltraSparc III Cu",
4143 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
4144 | (MAXTL << 8) | (NWINDOWS - 1)),
4145 .fpu_version = 0x00000000,
4149 .name = "Sun UltraSparc IIIi",
4150 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
4151 | (MAXTL << 8) | (NWINDOWS - 1)),
4152 .fpu_version = 0x00000000,
4156 .name = "Sun UltraSparc IV",
4157 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
4158 | (MAXTL << 8) | (NWINDOWS - 1)),
4159 .fpu_version = 0x00000000,
4163 .name = "Sun UltraSparc IV+",
4164 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
4165 | (MAXTL << 8) | (NWINDOWS - 1)),
4166 .fpu_version = 0x00000000,
4170 .name = "Sun UltraSparc IIIi+",
4171 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
4172 | (MAXTL << 8) | (NWINDOWS - 1)),
4173 .fpu_version = 0x00000000,
4177 .name = "NEC UltraSparc I",
4178 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4179 | (MAXTL << 8) | (NWINDOWS - 1)),
4180 .fpu_version = 0x00000000,
/* 32-bit (SPARC V8) models below: impl/ver live in bits 31:24 of
   iu_version; each entry also carries its MMU id and register masks. */
4185 .name = "Fujitsu MB86900",
4186 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
4187 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4188 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
4189 .mmu_bm = 0x00004000,
4190 .mmu_ctpr_mask = 0x007ffff0,
4191 .mmu_cxr_mask = 0x0000003f,
4192 .mmu_sfsr_mask = 0xffffffff,
4193 .mmu_trcr_mask = 0xffffffff,
4196 .name = "Fujitsu MB86904",
4197 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
4198 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4199 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
4200 .mmu_bm = 0x00004000,
4201 .mmu_ctpr_mask = 0x00ffffc0,
4202 .mmu_cxr_mask = 0x000000ff,
4203 .mmu_sfsr_mask = 0x00016fff,
4204 .mmu_trcr_mask = 0x00ffffff,
4207 .name = "Fujitsu MB86907",
4208 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
4209 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4210 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
4211 .mmu_bm = 0x00004000,
4212 .mmu_ctpr_mask = 0xffffffc0,
4213 .mmu_cxr_mask = 0x000000ff,
4214 .mmu_sfsr_mask = 0x00016fff,
4215 .mmu_trcr_mask = 0xffffffff,
4218 .name = "LSI L64811",
4219 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
4220 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
4221 .mmu_version = 0x10 << 24,
4222 .mmu_bm = 0x00004000,
4223 .mmu_ctpr_mask = 0x007ffff0,
4224 .mmu_cxr_mask = 0x0000003f,
4225 .mmu_sfsr_mask = 0xffffffff,
4226 .mmu_trcr_mask = 0xffffffff,
4229 .name = "Cypress CY7C601",
4230 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
4231 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4232 .mmu_version = 0x10 << 24,
4233 .mmu_bm = 0x00004000,
4234 .mmu_ctpr_mask = 0x007ffff0,
4235 .mmu_cxr_mask = 0x0000003f,
4236 .mmu_sfsr_mask = 0xffffffff,
4237 .mmu_trcr_mask = 0xffffffff,
4240 .name = "Cypress CY7C611",
4241 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
4242 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4243 .mmu_version = 0x10 << 24,
4244 .mmu_bm = 0x00004000,
4245 .mmu_ctpr_mask = 0x007ffff0,
4246 .mmu_cxr_mask = 0x0000003f,
4247 .mmu_sfsr_mask = 0xffffffff,
4248 .mmu_trcr_mask = 0xffffffff,
4251 .name = "TI SuperSparc II",
4252 .iu_version = 0x40000000,
4253 .fpu_version = 0 << 17,
4254 .mmu_version = 0x04000000,
4255 .mmu_bm = 0x00002000,
4256 .mmu_ctpr_mask = 0xffffffc0,
4257 .mmu_cxr_mask = 0x0000ffff,
4258 .mmu_sfsr_mask = 0xffffffff,
4259 .mmu_trcr_mask = 0xffffffff,
4262 .name = "TI MicroSparc I",
4263 .iu_version = 0x41000000,
4264 .fpu_version = 4 << 17,
4265 .mmu_version = 0x41000000,
4266 .mmu_bm = 0x00004000,
4267 .mmu_ctpr_mask = 0x007ffff0,
4268 .mmu_cxr_mask = 0x0000003f,
4269 .mmu_sfsr_mask = 0x00016fff,
4270 .mmu_trcr_mask = 0x0000003f,
4273 .name = "TI MicroSparc II",
4274 .iu_version = 0x42000000,
4275 .fpu_version = 4 << 17,
4276 .mmu_version = 0x02000000,
4277 .mmu_bm = 0x00004000,
4278 .mmu_ctpr_mask = 0x00ffffc0,
4279 .mmu_cxr_mask = 0x000000ff,
4280 .mmu_sfsr_mask = 0x00016fff,
4281 .mmu_trcr_mask = 0x00ffffff,
4284 .name = "TI MicroSparc IIep",
4285 .iu_version = 0x42000000,
4286 .fpu_version = 4 << 17,
4287 .mmu_version = 0x04000000,
4288 .mmu_bm = 0x00004000,
4289 .mmu_ctpr_mask = 0x00ffffc0,
4290 .mmu_cxr_mask = 0x000000ff,
4291 .mmu_sfsr_mask = 0x00016bff,
4292 .mmu_trcr_mask = 0x00ffffff,
4295 .name = "TI SuperSparc 51",
4296 .iu_version = 0x43000000,
4297 .fpu_version = 0 << 17,
4298 .mmu_version = 0x04000000,
4299 .mmu_bm = 0x00002000,
4300 .mmu_ctpr_mask = 0xffffffc0,
4301 .mmu_cxr_mask = 0x0000ffff,
4302 .mmu_sfsr_mask = 0xffffffff,
4303 .mmu_trcr_mask = 0xffffffff,
4306 .name = "TI SuperSparc 61",
4307 .iu_version = 0x44000000,
4308 .fpu_version = 0 << 17,
4309 .mmu_version = 0x04000000,
4310 .mmu_bm = 0x00002000,
4311 .mmu_ctpr_mask = 0xffffffc0,
4312 .mmu_cxr_mask = 0x0000ffff,
4313 .mmu_sfsr_mask = 0xffffffff,
4314 .mmu_trcr_mask = 0xffffffff,
4317 .name = "Ross RT625",
4318 .iu_version = 0x1e000000,
4319 .fpu_version = 1 << 17,
4320 .mmu_version = 0x1e000000,
4321 .mmu_bm = 0x00004000,
4322 .mmu_ctpr_mask = 0x007ffff0,
4323 .mmu_cxr_mask = 0x0000003f,
4324 .mmu_sfsr_mask = 0xffffffff,
4325 .mmu_trcr_mask = 0xffffffff,
4328 .name = "Ross RT620",
4329 .iu_version = 0x1f000000,
4330 .fpu_version = 1 << 17,
4331 .mmu_version = 0x1f000000,
4332 .mmu_bm = 0x00004000,
4333 .mmu_ctpr_mask = 0x007ffff0,
4334 .mmu_cxr_mask = 0x0000003f,
4335 .mmu_sfsr_mask = 0xffffffff,
4336 .mmu_trcr_mask = 0xffffffff,
4339 .name = "BIT B5010",
4340 .iu_version = 0x20000000,
4341 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
4342 .mmu_version = 0x20000000,
4343 .mmu_bm = 0x00004000,
4344 .mmu_ctpr_mask = 0x007ffff0,
4345 .mmu_cxr_mask = 0x0000003f,
4346 .mmu_sfsr_mask = 0xffffffff,
4347 .mmu_trcr_mask = 0xffffffff,
4350 .name = "Matsushita MN10501",
4351 .iu_version = 0x50000000,
4352 .fpu_version = 0 << 17,
4353 .mmu_version = 0x50000000,
4354 .mmu_bm = 0x00004000,
4355 .mmu_ctpr_mask = 0x007ffff0,
4356 .mmu_cxr_mask = 0x0000003f,
4357 .mmu_sfsr_mask = 0xffffffff,
4358 .mmu_trcr_mask = 0xffffffff,
4361 .name = "Weitek W8601",
4362 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
4363 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
4364 .mmu_version = 0x10 << 24,
4365 .mmu_bm = 0x00004000,
4366 .mmu_ctpr_mask = 0x007ffff0,
4367 .mmu_cxr_mask = 0x0000003f,
4368 .mmu_sfsr_mask = 0xffffffff,
4369 .mmu_trcr_mask = 0xffffffff,
4373 .iu_version = 0xf2000000,
4374 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4375 .mmu_version = 0xf2000000,
4376 .mmu_bm = 0x00004000,
4377 .mmu_ctpr_mask = 0x007ffff0,
4378 .mmu_cxr_mask = 0x0000003f,
4379 .mmu_sfsr_mask = 0xffffffff,
4380 .mmu_trcr_mask = 0xffffffff,
4384 .iu_version = 0xf3000000,
4385 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4386 .mmu_version = 0xf3000000,
4387 .mmu_bm = 0x00004000,
4388 .mmu_ctpr_mask = 0x007ffff0,
4389 .mmu_cxr_mask = 0x0000003f,
4390 .mmu_sfsr_mask = 0xffffffff,
4391 .mmu_trcr_mask = 0xffffffff,
/* Linear, case-insensitive lookup of a CPU model in sparc_defs[] by its
   .name field; returns a pointer to the matching entry. */
4396 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
4400 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4401 if (strcasecmp(name, sparc_defs[i].name) == 0) {
4402 return &sparc_defs[i];
/* Print one line per supported CPU model (name plus IU/FPU/MMU version
   words) to f via the supplied fprintf-like callback. */
4408 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
4412 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4413 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
4415 sparc_defs[i].iu_version,
4416 sparc_defs[i].fpu_version,
4417 sparc_defs[i].mmu_version);
/* Condition-code flag helper for cpu_dump_state: evaluates to the
   character b when PSR bit mask a is set in env->psr, '-' otherwise.
   Arguments are parenthesized so a composite mask such as X | Y binds
   correctly against the higher-precedence '&'. */
#define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
/*
 * Dump the CPU state (PC/NPC, global registers, current register window,
 * FP registers and the status words) to f via the fprintf-like callback.
 */
4423 void cpu_dump_state(CPUState *env, FILE *f,
4424 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
4429 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
4430 cpu_fprintf(f, "General Registers:\n");
/* %g0-%g7 printed four per row. */
4431 for (i = 0; i < 4; i++)
4432 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4433 cpu_fprintf(f, "\n");
4435 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4436 cpu_fprintf(f, "\nCurrent Register Window:\n");
/* x selects the out (0), local (1) and in (2) groups of the window. */
4437 for (x = 0; x < 3; x++) {
4438 for (i = 0; i < 4; i++)
4439 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4440 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
4441 env->regwptr[i + x * 8]);
4442 cpu_fprintf(f, "\n");
4444 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4445 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
4446 env->regwptr[i + x * 8]);
4447 cpu_fprintf(f, "\n");
4449 cpu_fprintf(f, "\nFloating Point Registers:\n");
4450 for (i = 0; i < 32; i++) {
4452 cpu_fprintf(f, "%%f%02d:", i);
/* NOTE(review): "%016lf" looks dubious for fpr[] -- confirm the element
   type matches the conversion, printf UB otherwise. */
4453 cpu_fprintf(f, " %016lf", env->fpr[i]);
4455 cpu_fprintf(f, "\n");
4457 #ifdef TARGET_SPARC64
/* V9 state: pstate/ccr/asi/trap level/fprs plus window bookkeeping. */
4458 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
4459 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
4460 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
4461 env->cansave, env->canrestore, env->otherwin, env->wstate,
4462 env->cleanwin, NWINDOWS - 1 - env->cwp);
/* V8 state: PSR with decoded icc flags and mode bits, then WIM. */
4464 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
4465 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
4466 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
4467 env->psrs?'S':'-', env->psrps?'P':'-',
4468 env->psret?'E':'-', env->wim);
4470 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
/*
 * Debugger address translation: map a guest virtual address to a physical
 * address.  Trivial in user-mode emulation; in system emulation it walks
 * the MMU via get_physical_address(), retrying a data access (rw=0) after
 * a failed instruction access (rw=2).
 */
4473 #if defined(CONFIG_USER_ONLY)
4474 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4480 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
4481 int *access_index, target_ulong address, int rw,
4484 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4486 target_phys_addr_t phys_addr;
4487 int prot, access_index;
/* First try as a code fetch (rw = 2), then as a data read (rw = 0). */
4489 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
4490 MMU_KERNEL_IDX) != 0)
4491 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
4492 0, MMU_KERNEL_IDX) != 0)
/* Reject pages that map to no backing RAM/ROM/IO region. */
4494 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
/* Implements the SPARC 'flush' instruction: invalidate any translated
   code covering the 8-byte doubleword at addr, since the guest may have
   modified instructions there. */
4500 void helper_flush(target_ulong addr)
4503 tb_invalidate_page_range(addr, addr + 8);