4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
/* Sentinel values for DisasContext.pc/npc: any other value is a known,
   compile-time-constant program counter. */
#define DYNAMIC_PC 1 /* dynamic pc value */
#define JUMP_PC 2 /* dynamic pc value which takes only two values
according to jump_pc[T2] */
/* global register indexes */
static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
/* local register indexes (only used inside old micro ops) */
/* Per-translation-block disassembly state.
   NOTE(review): additional members and the closing brace of this struct
   appear to be missing from this extract — confirm against the full file. */
typedef struct DisasContext {
target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
struct TranslationBlock *tb;
/* CPU model description table entry.
   NOTE(review): the struct body below is truncated in this extract (the
   "struct sparc_def_t {" line and closing brace are missing). */
typedef struct sparc_def_t sparc_def_t;
const unsigned char *name;
target_ulong iu_version;
uint32_t mmu_ctpr_mask;
uint32_t mmu_cxr_mask;
uint32_t mmu_sfsr_mask;
uint32_t mmu_trcr_mask;
static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
/* Extract instruction bit field [FROM,TO] in SPARC-manual (MSB-first)
   numbering.  NOTE(review): "1 << ((TO) - (FROM) + 1)" overflows for a
   32-bit-wide field and is signed-shift UB for width 31 — consider 1U. */
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
GET_FIELD(X, 31 - (TO), 31 - (FROM))
/* Sign-extended variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* FP register number remapping (single/double/quad).
   NOTE(review): two alternative DFPREG/QFPREG definitions are visible;
   the #ifdef TARGET_SPARC64 / #else / #endif lines that select between
   them appear to be missing from this extract. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
/* Sign-extend the low 'len' bits of 'x' to a full int.
 *
 * The extract showed only "(x << len) >> len", which extends the wrong
 * bit: the canonical shift-pair idiom needs len = 32 - len first.  This
 * version restores that behavior and does the extension with unsigned
 * arithmetic, avoiding the undefined left shift of a negative value.
 * For len <= 0 or len >= 32 the value is returned unchanged.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32)
        return x;
    unsigned int sign = 1u << (len - 1);
    unsigned int v = (unsigned int)x & ((sign << 1) - 1);
    return (int)(v ^ sign) - (int)sign;
}
/* Bit 13 of the instruction word selects the immediate addressing form. */
#define IS_IMM (insn & (1<<13))
/* Forward declaration of the main instruction decoder. */
static void disas_sparc_insn(DisasContext * dc);
/* GEN32(func, NAME): build a dispatch table of per-FP-register micro-ops
   (NAME ## 0, NAME ## 1, ...) plus a wrapper func(n) that indexes it.
   The SPARC64 variant has 64 slots (odd upper-bank entries are NULL);
   the 32-bit variant has 32 slots.
   NOTE(review): the closing "};", the wrapper braces, and the #else /
   #endif separating the two variants appear to be missing from this
   extract — confirm against the full file before editing. */
#ifdef TARGET_SPARC64
#define GEN32(func, NAME) \
static GenOpFunc * const NAME ## _table [64] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
static inline void func(int n) \
NAME ## _table[n](); \
#define GEN32(func, NAME) \
static GenOpFunc *const NAME ## _table [32] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
static inline void func(int n) \
NAME ## _table[n](); \
/* Instantiate the load/store dispatchers for each FP temporary.
   Quad-precision accessors exist only for user-mode builds. */
/* floating point registers moves */
GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
#if defined(CONFIG_USER_ONLY)
GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
/* Privilege-level predicates and load/store dispatch.
   User-only builds have no supervisor/hypervisor state and use the _raw
   memory accessors; system builds dispatch on dc->mem_idx.
   NOTE(review): the #else / #endif lines pairing these alternatives, and
   the closing "};" of the OP_LD_TABLE initializers, appear to be missing
   from this extract. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#define gen_op_ldst(name) gen_op_##name##_raw()
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#define OP_LD_TABLE(width) \
static GenOpFunc * const gen_op_##width[] = { \
&gen_op_##width##_user, \
&gen_op_##width##_kernel, \
&gen_op_##width##_hypv, \
#define OP_LD_TABLE(width) \
static GenOpFunc * const gen_op_##width[] = { \
&gen_op_##width##_user, \
&gen_op_##width##_kernel, \
#define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
#ifndef CONFIG_USER_ONLY
#endif /* __i386__ */
/* On 32-bit ABIs, truncate a 64-bit address register to 32 bits. */
#define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
#define ABI32_MASK(addr)
216 static inline void gen_movl_simm_T1(int32_t val)
218 tcg_gen_movi_tl(cpu_T[1], val);
221 static inline void gen_movl_reg_TN(int reg, TCGv tn)
224 tcg_gen_movi_tl(tn, 0);
226 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
228 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
232 static inline void gen_movl_reg_T0(int reg)
234 gen_movl_reg_TN(reg, cpu_T[0]);
237 static inline void gen_movl_reg_T1(int reg)
239 gen_movl_reg_TN(reg, cpu_T[1]);
243 static inline void gen_movl_reg_T2(int reg)
245 gen_movl_reg_TN(reg, cpu_T[2]);
248 #endif /* __i386__ */
249 static inline void gen_movl_TN_reg(int reg, TCGv tn)
254 tcg_gen_mov_tl(cpu_gregs[reg], tn);
256 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
260 static inline void gen_movl_T0_reg(int reg)
262 gen_movl_TN_reg(reg, cpu_T[0]);
265 static inline void gen_movl_T1_reg(int reg)
267 gen_movl_TN_reg(reg, cpu_T[1]);
270 static inline void gen_op_movl_T0_env(size_t offset)
272 tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
275 static inline void gen_op_movl_env_T0(size_t offset)
277 tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
280 static inline void gen_op_movtl_T0_env(size_t offset)
282 tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
285 static inline void gen_op_movtl_env_T0(size_t offset)
287 tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
290 static inline void gen_op_add_T1_T0(void)
292 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
295 static inline void gen_op_or_T1_T0(void)
297 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
300 static inline void gen_op_xor_T1_T0(void)
302 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
305 static inline void gen_jmp_im(target_ulong pc)
307 tcg_gen_movi_tl(cpu_pc, pc);
310 static inline void gen_movl_npc_im(target_ulong npc)
312 tcg_gen_movi_tl(cpu_npc, npc);
/* Emit a jump to (pc, npc), chaining TBs directly when the target lies
   on the same guest page as the current TB.
   NOTE(review): several lines are missing from this extract — at least
   the "tb = s->tb;" initialization ('tb' otherwise reads uninitialized),
   the gen_jmp_im(pc) calls, the else-branch braces, and the trailing
   tcg_gen_exit_tb(0).  Do not edit from this view alone. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
target_ulong pc, target_ulong npc)
TranslationBlock *tb;
if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
(npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
/* jump to same page: we can use a direct jump */
tcg_gen_goto_tb(tb_num);
gen_movl_npc_im(npc);
/* the low bits of the exit code identify which chain slot was taken */
tcg_gen_exit_tb((long)tb + tb_num);
/* jump to another page: currently not optimized */
gen_movl_npc_im(npc);
337 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
339 tcg_gen_shri_i32(reg, src, 23);
340 tcg_gen_andi_tl(reg, reg, 0x1);
343 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
345 tcg_gen_shri_i32(reg, src, 22);
346 tcg_gen_andi_tl(reg, reg, 0x1);
349 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
351 tcg_gen_shri_i32(reg, src, 21);
352 tcg_gen_andi_tl(reg, reg, 0x1);
355 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
357 tcg_gen_shri_i32(reg, src, 20);
358 tcg_gen_andi_tl(reg, reg, 0x1);
361 static inline void gen_op_exception(int exception)
363 tcg_gen_movi_i32(cpu_tmp0, exception);
364 tcg_gen_helper_0_1(raise_exception, cpu_tmp0);
/* Reset all icc (and, on SPARC64, xcc) condition-code bits to zero. */
static inline void gen_cc_clear(void)
tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
tcg_gen_movi_i32(cpu_xcc, 0);
/* NOTE(review): the two lines below look like fragments of an old
   reference-implementation block comment whose delimiters were lost in
   this extract — as bare code they would not compile. */
env->psr |= PSR_ZERO;
if ((int32_t) T0 < 0)
/* Set the N and Z bits from result 'dst': Z if dst == 0, N if dst < 0.
   On SPARC64 the same is done for xcc using the full 64-bit value.
   NOTE(review): the label declarations, gen_set_label() calls, braces
   and #endif are missing from this extract. */
static inline void gen_cc_NZ(TCGv dst)
l1 = gen_new_label();
l2 = gen_new_label();
tcg_gen_brcond_i32(TCG_COND_NE, dst, tcg_const_i32(0), l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
tcg_gen_brcond_i32(TCG_COND_GE, dst, tcg_const_i32(0), l2);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
#ifdef TARGET_SPARC64
l3 = gen_new_label();
l4 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_NE, dst, tcg_const_tl(0), l3);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
tcg_gen_brcond_tl(TCG_COND_GE, dst, tcg_const_tl(0), l4);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
/* NOTE(review): stray fragment of an old block comment (delimiters lost). */
env->psr |= PSR_CARRY;
/* Set the carry bit after an addition: C if dst < src1 (unsigned), i.e.
   the add wrapped around.  SPARC64 repeats the test at 64 bits for xcc.
   NOTE(review): labels/gen_set_label()/braces/#endif missing from this
   extract. */
static inline void gen_cc_C_add(TCGv dst, TCGv src1)
l1 = gen_new_label();
tcg_gen_brcond_i32(TCG_COND_GEU, dst, src1, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l2);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
/* NOTE(review): stray fragment of an old block comment (delimiters lost). */
if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
/* Set the overflow bit after dst = src1 + src2: V when the operands
   share a sign and the result's sign differs, tested on bit 31 for icc
   and bit 63 for xcc.
   NOTE(review): "(1 << 31)" is signed-shift UB; should be (1U << 31).
   Labels/gen_set_label()/braces/#endif are missing from this extract. */
static inline void gen_cc_V_add(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new(TCG_TYPE_TL);
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xori_tl(r_temp, r_temp, -1);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
tcg_gen_brcond_i32(TCG_COND_EQ, r_temp, tcg_const_i32(0), l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xori_tl(r_temp, r_temp, -1);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
tcg_gen_discard_tl(r_temp);
/* Trap with TT_TOVF if dst = src1 + src2 overflowed (tagged add trap
   variant): same overflow predicate as gen_cc_V_add, but raises an
   exception instead of setting V.
   NOTE(review): "(1 << 31)" is signed-shift UB (use 1U << 31); labels,
   gen_set_label(), braces and #endif are missing from this extract. */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new(TCG_TYPE_TL);
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xori_tl(r_temp, r_temp, -1);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
tcg_gen_brcond_i32(TCG_COND_EQ, r_temp, tcg_const_i32(0), l1);
gen_op_exception(TT_TOVF);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xori_tl(r_temp, r_temp, -1);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
gen_op_exception(TT_TOVF);
tcg_gen_discard_tl(r_temp);
/* Tagged-arithmetic tag check: if either operand has a nonzero tag
   (low 2 bits), set V.  NOTE(review): label/gen_set_label()/braces are
   missing from this extract. */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
l1 = gen_new_label();
tcg_gen_or_tl(cpu_tmp0, src1, src2);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
/* Same tag check, but trap with TT_TOVF instead of setting V. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
l1 = gen_new_label();
tcg_gen_or_tl(cpu_tmp0, src1, src2);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
gen_op_exception(TT_TOVF);
/* T0 := T0 + T1 with full condition codes (ADDcc).
   NOTE(review): throughout this group the gen_cc_clear()/gen_cc_NZ()
   calls and function braces appear to be missing from this extract
   (original line numbering shows gaps). */
static inline void gen_op_add_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
/* T0 := T0 + T1 + carry-in, with condition codes (ADDXcc).  Carry from
   both partial additions is accumulated. */
static inline void gen_op_addx_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
/* Tagged add with condition codes (TADDcc): also sets V on tag error. */
static inline void gen_op_tadd_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
/* Tagged add, trapping variant (TADDccTV): traps on tag error or
   signed overflow instead of setting V. */
static inline void gen_op_tadd_T1_T0_ccTV(void)
gen_tag_tv(cpu_T[0], cpu_T[1]);
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_add_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
/* NOTE(review): stray fragment of an old block comment (delimiters lost). */
env->psr |= PSR_CARRY;
/* Set the carry (borrow) bit after a subtraction: C if src1 < src2
   (unsigned).  SPARC64 repeats the test at 64 bits for xcc.
   NOTE(review): labels/gen_set_label()/braces/#endif missing from this
   extract. */
static inline void gen_cc_C_sub(TCGv src1, TCGv src2)
l1 = gen_new_label();
tcg_gen_brcond_i32(TCG_COND_GEU, src1, src2, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l2);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
/* NOTE(review): stray fragment of an old block comment (delimiters lost). */
if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
/* Set the overflow bit after dst = src1 - src2: V when the operands'
   signs differ and the result's sign differs from src1's.
   NOTE(review): "(1 << 31)" is signed-shift UB (use 1U << 31); labels,
   gen_set_label(), braces and #endif are missing from this extract. */
static inline void gen_cc_V_sub(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new(TCG_TYPE_TL);
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
tcg_gen_brcond_i32(TCG_COND_EQ, r_temp, tcg_const_i32(0), l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
tcg_gen_discard_tl(r_temp);
/* Trap with TT_TOVF if dst = src1 - src2 overflowed (tagged subtract
   trap variant): same predicate as gen_cc_V_sub, but raises instead of
   setting V.  NOTE(review): "(1 << 31)" is signed-shift UB; labels,
   gen_set_label(), braces and #endif are missing from this extract. */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new(TCG_TYPE_TL);
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
tcg_gen_brcond_i32(TCG_COND_EQ, r_temp, tcg_const_i32(0), l1);
gen_op_exception(TT_TOVF);
#ifdef TARGET_SPARC64
l2 = gen_new_label();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
gen_op_exception(TT_TOVF);
tcg_gen_discard_tl(r_temp);
/* T0 := T0 - T1 with full condition codes (SUBcc).
   NOTE(review): as with the add group, the gen_cc_clear()/gen_cc_NZ()
   calls and function braces appear to be missing from this extract. */
static inline void gen_op_sub_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
/* T0 := T0 - T1 - borrow, with condition codes (SUBXcc). */
static inline void gen_op_subx_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
gen_cc_C_sub(cpu_T[0], cpu_cc_src);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_sub(cpu_T[0], cpu_cc_src);
gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
/* Tagged subtract with condition codes (TSUBcc). */
static inline void gen_op_tsub_T1_T0_cc(void)
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
/* Tagged subtract, trapping variant (TSUBccTV). */
static inline void gen_op_tsub_T1_T0_ccTV(void)
gen_tag_tv(cpu_T[0], cpu_T[1]);
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_sub_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
/* One step of the SPARC MULScc multiply-step instruction: shifts Y and
   T0 right by one, conditionally adds T1 based on Y's LSB, and updates
   the condition codes on the result.
   NOTE(review): label placements (gen_set_label), braces and some
   intermediate statements are missing from this extract. */
static inline void gen_op_mulscc_T1_T0(void)
l1 = gen_new_label();
l2 = gen_new_label();
r_temp = tcg_temp_new(TCG_TYPE_TL);
/* cc_src2 := (Y & 1) ? T1 : 0 — the conditionally-added multiplicand */
tcg_gen_ld_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
tcg_gen_andi_i32(r_temp, r_temp, 0x1);
tcg_gen_brcond_i32(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
tcg_gen_mov_tl(cpu_cc_src2, cpu_T[1]);
gen_op_jmp_label(l2);
tcg_gen_movi_tl(cpu_cc_src2, 0);
// env->y = (b2 << 31) | (env->y >> 1);
tcg_gen_shli_i32(r_temp, cpu_T[0], 31);
tcg_gen_ld_i32(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, y));
tcg_gen_shri_i32(cpu_tmp0, cpu_tmp0, 1);
tcg_gen_or_i32(cpu_tmp0, cpu_tmp0, r_temp);
tcg_gen_st_i32(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, y));
/* b1 = N ^ V — shifted into T0's top bit below */
gen_mov_reg_N(cpu_tmp0, cpu_psr);
gen_mov_reg_V(r_temp, cpu_psr);
tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
// T0 = (b1 << 31) | (T0 >> 1);
tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
tcg_gen_shri_tl(cpu_cc_src, cpu_T[0], 1);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
/* do addition and update flags */
tcg_gen_add_tl(cpu_T[0], cpu_cc_src, cpu_cc_src2);
tcg_gen_discard_tl(r_temp);
gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_cc_src2);
gen_cc_C_add(cpu_T[0], cpu_cc_src);
/* T0 := T0 * T1 as a 64-bit unsigned product; the high 32 bits are
   stored in the Y register.  On SPARC64 the full product stays in T0,
   otherwise only the low 32 bits do.
   NOTE(review): braces and the #else/#endif around the TARGET_SPARC64
   alternatives are missing from this extract. */
static inline void gen_op_umul_T1_T0(void)
TCGv r_temp, r_temp2;
r_temp = tcg_temp_new(TCG_TYPE_I64);
r_temp2 = tcg_temp_new(TCG_TYPE_I64);
tcg_gen_extu_i32_i64(r_temp, cpu_T[1]);
tcg_gen_extu_i32_i64(r_temp2, cpu_T[0]);
tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_i32(r_temp, r_temp);
tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(cpu_T[0], r_temp2);
tcg_gen_trunc_i64_i32(cpu_T[0], r_temp2);
tcg_gen_discard_i64(r_temp);
tcg_gen_discard_i64(r_temp2);
/* Signed variant of the above: 64-bit signed product, high half to Y. */
static inline void gen_op_smul_T1_T0(void)
TCGv r_temp, r_temp2;
r_temp = tcg_temp_new(TCG_TYPE_I64);
r_temp2 = tcg_temp_new(TCG_TYPE_I64);
tcg_gen_ext_i32_i64(r_temp, cpu_T[1]);
tcg_gen_ext_i32_i64(r_temp2, cpu_T[0]);
tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_i32(r_temp, r_temp);
tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(cpu_T[0], r_temp2);
tcg_gen_trunc_i64_i32(cpu_T[0], r_temp2);
tcg_gen_discard_i64(r_temp);
tcg_gen_discard_i64(r_temp2);
810 static inline void gen_op_udiv_T1_T0(void)
812 tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]);
815 static inline void gen_op_sdiv_T1_T0(void)
817 tcg_gen_helper_1_2(helper_sdiv, cpu_T[0], cpu_T[0], cpu_T[1]);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if 'divisor' is zero.
   NOTE(review): throughout this group the gen_set_label() calls, braces
   and some statements are missing from this extract. */
static inline void gen_trap_ifdivzero_i64(TCGv divisor)
l1 = gen_new_label();
tcg_gen_brcond_i64(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
gen_op_exception(TT_DIV_ZERO);
/* 64-bit signed divide T0 / T1, handling the INT64_MIN / -1 overflow
   case explicitly (result pinned to INT64_MIN). */
static inline void gen_op_sdivx_T1_T0(void)
l1 = gen_new_label();
l2 = gen_new_label();
gen_trap_ifdivzero_i64(cpu_T[1]);
tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[0], tcg_const_i64(INT64_MIN), l1);
tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[1], tcg_const_i64(-1), l1);
tcg_gen_movi_i64(cpu_T[0], INT64_MIN);
gen_op_jmp_label(l2);
tcg_gen_div_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* After a helper divide: set V if the helper recorded an overflow in
   cc_src2. */
static inline void gen_op_div_cc(void)
l1 = gen_new_label();
tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, cc_src2));
tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
/* Update N/Z from T0 after a logic op.
   NOTE(review): this function's body (presumably gen_cc_clear() and
   gen_cc_NZ(cpu_T[0])) is missing from this extract. */
static inline void gen_op_logic_T0_cc(void)
/* Integer condition-code evaluators: each computes the named branch
   condition from the psr-style flag word 'src' into 'dst' (0 or 1).
   NOTE(review): function braces are missing throughout this extract. */
// 1 (always)
static inline void gen_op_eval_ba(TCGv dst)
tcg_gen_movi_tl(dst, 1);
// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
gen_mov_reg_Z(dst, src);
// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
gen_mov_reg_Z(cpu_tmp0, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
gen_mov_reg_C(dst, src);
// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
gen_mov_reg_V(dst, src);
// 0 (never)
static inline void gen_op_eval_bn(TCGv dst)
tcg_gen_movi_tl(dst, 0);
// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
gen_mov_reg_N(dst, src);
// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
gen_mov_reg_Z(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
gen_mov_reg_Z(cpu_tmp0, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
gen_mov_reg_C(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
gen_mov_reg_N(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
gen_mov_reg_V(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
/* NOTE(review): the line below is a fragment of a block comment whose
   delimiters (and the 0..3 condition table it introduced) are missing
   from this extract.  FCC encodes: 0 = equal, 1 = less, 2 = greater,
   3 = unordered. */
FPSR bit field FCC1 | FCC0:
/* Extract FCC0 (FSR bit 10 + fcc_offset) into 'reg'. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
unsigned int fcc_offset)
tcg_gen_shri_i32(reg, src, 10 + fcc_offset);
tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract FCC1 (FSR bit 11 + fcc_offset) into 'reg'. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
unsigned int fcc_offset)
tcg_gen_shri_i32(reg, src, 11 + fcc_offset);
tcg_gen_andi_tl(reg, reg, 0x1);
// 1, 2 or 3: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC1(dst, src, fcc_offset);
// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC1(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* Conditional branch to pc1 (taken) or pc2 (fall-through): chains to TB
   slot 0 or 1 based on 'r_cond'.
   NOTE(review): gen_set_label() calls, braces, and parts of some
   signatures are missing throughout this extract. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
target_ulong pc2, TCGv r_cond)
l1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);
gen_goto_tb(dc, 0, pc1, pc1 + 4);
gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Annulled conditional branch: taken path executes the delay slot at
   pc2 then jumps to pc1; untaken path skips the delay slot. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
target_ulong pc2, TCGv r_cond)
l1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);
gen_goto_tb(dc, 0, pc2, pc1);
gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Unconditional branch to (pc, npc).
   NOTE(review): the second signature line ("target_ulong npc)") appears
   to be missing from this extract. */
static inline void gen_branch(DisasContext *dc, target_ulong pc,
gen_goto_tb(dc, 0, pc, npc);
/* Pick npc1 or npc2 at runtime based on 'r_cond' and store it in npc.
   NOTE(review): the "TCGv r_cond)" signature line and gen_set_label()
   calls are missing from this extract. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
l1 = gen_new_label();
l2 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);
gen_movl_npc_im(npc1);
gen_op_jmp_label(l2);
gen_movl_npc_im(npc2);
1190 /* call this function before using T2 as it may have been set for a jump */
1191 static inline void flush_T2(DisasContext * dc)
1193 if (dc->npc == JUMP_PC) {
1194 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1195 dc->npc = DYNAMIC_PC;
1199 static inline void save_npc(DisasContext * dc)
1201 if (dc->npc == JUMP_PC) {
1202 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1203 dc->npc = DYNAMIC_PC;
1204 } else if (dc->npc != DYNAMIC_PC) {
1205 gen_movl_npc_im(dc->npc);
/* Flush all tracked translation state into the CPU registers.
   NOTE(review): this function's body (presumably gen_jmp_im(dc->pc) and
   save_npc(dc)) is missing from this extract. */
static inline void save_state(DisasContext * dc)
/* pc := npc, resolving JUMP_PC/DYNAMIC_PC states through the registers.
   NOTE(review): the final else branch (static npc case, presumably
   "dc->pc = dc->npc;") and closing braces are missing from this
   extract. */
static inline void gen_mov_pc_npc(DisasContext * dc)
if (dc->npc == JUMP_PC) {
gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
tcg_gen_mov_tl(cpu_pc, cpu_npc);
dc->pc = DYNAMIC_PC;
} else if (dc->npc == DYNAMIC_PC) {
tcg_gen_mov_tl(cpu_pc, cpu_npc);
dc->pc = DYNAMIC_PC;
1229 static inline void gen_op_next_insn(void)
1231 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1232 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition 'cond' (0..15) of condition-code set 'cc'
   (icc or, on SPARC64, xcc) into r_dst.
   NOTE(review): the switch statement skeleton (case labels, breaks,
   r_src selection, braces) is missing from this extract — only the
   per-case evaluator calls survive, in SPARC cond-field order. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
#ifdef TARGET_SPARC64
gen_op_eval_bn(r_dst);
gen_op_eval_be(r_dst, r_src);
gen_op_eval_ble(r_dst, r_src);
gen_op_eval_bl(r_dst, r_src);
gen_op_eval_bleu(r_dst, r_src);
gen_op_eval_bcs(r_dst, r_src);
gen_op_eval_bneg(r_dst, r_src);
gen_op_eval_bvs(r_dst, r_src);
gen_op_eval_ba(r_dst);
gen_op_eval_bne(r_dst, r_src);
gen_op_eval_bg(r_dst, r_src);
gen_op_eval_bge(r_dst, r_src);
gen_op_eval_bgu(r_dst, r_src);
gen_op_eval_bcc(r_dst, r_src);
gen_op_eval_bpos(r_dst, r_src);
gen_op_eval_bvc(r_dst, r_src);
/* Evaluate floating-point condition 'cond' (0..15) of FCC field 'cc'
   into r_dst, using 'offset' as the FCC bit offset within the FSR.
   NOTE(review): the offset computation and switch skeleton are missing
   from this extract — only the per-case evaluator calls survive. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
unsigned int offset;
gen_op_eval_bn(r_dst);
gen_op_eval_fbne(r_dst, cpu_fsr, offset);
gen_op_eval_fblg(r_dst, cpu_fsr, offset);
gen_op_eval_fbul(r_dst, cpu_fsr, offset);
gen_op_eval_fbl(r_dst, cpu_fsr, offset);
gen_op_eval_fbug(r_dst, cpu_fsr, offset);
gen_op_eval_fbg(r_dst, cpu_fsr, offset);
gen_op_eval_fbu(r_dst, cpu_fsr, offset);
gen_op_eval_ba(r_dst);
gen_op_eval_fbe(r_dst, cpu_fsr, offset);
gen_op_eval_fbue(r_dst, cpu_fsr, offset);
gen_op_eval_fbge(r_dst, cpu_fsr, offset);
gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
gen_op_eval_fble(r_dst, cpu_fsr, offset);
gen_op_eval_fbule(r_dst, cpu_fsr, offset);
gen_op_eval_fbo(r_dst, cpu_fsr, offset);
#ifdef TARGET_SPARC64
/* Map SPARC64 register-condition encodings to TCG comparison codes.
   NOTE(review): the table entries and closing "};" are missing from
   this extract. */
static const int gen_tcg_cond_reg[8] = {
/* r_dst := (T0 <cond> 0) for register-based conditional branches.
   NOTE(review): gen_set_label() and braces are missing from this
   extract. */
static inline void gen_cond_reg(TCGv r_dst, int cond)
l1 = gen_new_label();
tcg_gen_movi_tl(r_dst, 0);
tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], tcg_const_tl(0), l1);
tcg_gen_movi_tl(r_dst, 1);
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch: handles the never/always
   special cases (with or without the annul bit 'a') statically, and
   emits a condition evaluation into T2 otherwise.
   NOTE(review): several statements (annulled-path handling, gen_branch
   calls, dc->npc = JUMP_PC assignment, braces) are missing from this
   extract. */
static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
/* unconditional not taken */
dc->pc = dc->npc + 4;
dc->npc = dc->pc + 4;
dc->npc = dc->pc + 4;
} else if (cond == 0x8) {
/* unconditional taken */
dc->npc = dc->pc + 4;
gen_cond(cpu_T[2], cc, cond);
gen_branch_a(dc, target, dc->npc, cpu_T[2]);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch; same structure as
   do_branch but evaluating FCC conditions.
   NOTE(review): as with do_branch, several statements and braces are
   missing from this extract. */
static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
/* unconditional not taken */
dc->pc = dc->npc + 4;
dc->npc = dc->pc + 4;
dc->npc = dc->pc + 4;
} else if (cond == 0x8) {
/* unconditional taken */
dc->npc = dc->pc + 4;
gen_fcond(cpu_T[2], cc, cond);
gen_branch_a(dc, target, dc->npc, cpu_T[2]);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a SPARC64 branch-on-register-condition (BPr).
   NOTE(review): the annulled/non-annulled branching statements and
   braces are missing from this extract. */
static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
gen_cond_reg(cpu_T[2], cond);
gen_branch_a(dc, target, dc->npc, cpu_T[2]);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
/* Per-FCC-field FP compare dispatch tables (SPARC64 has four FCC
   fields; quad tables exist only for user-mode builds).
   NOTE(review): the table initializer entries and closing "};" lines
   are missing from this extract. */
static GenOpFunc * const gen_fcmps[4] = {
static GenOpFunc * const gen_fcmpd[4] = {
#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpq[4] = {
static GenOpFunc * const gen_fcmpes[4] = {
static GenOpFunc * const gen_fcmped[4] = {
#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpeq[4] = {
/* FP compares dispatched on the target FCC field number 'fccno'.
   NOTE(review): braces and #endif lines are missing from this extract. */
static inline void gen_op_fcmps(int fccno)
tcg_gen_helper_0_0(gen_fcmps[fccno]);
static inline void gen_op_fcmpd(int fccno)
tcg_gen_helper_0_0(gen_fcmpd[fccno]);
#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
tcg_gen_helper_0_0(gen_fcmpq[fccno]);
static inline void gen_op_fcmpes(int fccno)
tcg_gen_helper_0_0(gen_fcmpes[fccno]);
static inline void gen_op_fcmped(int fccno)
tcg_gen_helper_0_0(gen_fcmped[fccno]);
#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
/* Non-V9 build: only %fcc0 exists, so `fccno` is ignored and the plain
   helper is invoked directly.
   NOTE(review): braces and #endif lines are elided in this listing. */
1576 static inline void gen_op_fcmps(int fccno)
1578 tcg_gen_helper_0_0(helper_fcmps);
1581 static inline void gen_op_fcmpd(int fccno)
1583 tcg_gen_helper_0_0(helper_fcmpd);
1586 #if defined(CONFIG_USER_ONLY)
1587 static inline void gen_op_fcmpq(int fccno)
1589 tcg_gen_helper_0_0(helper_fcmpq);
1593 static inline void gen_op_fcmpes(int fccno)
1595 tcg_gen_helper_0_0(helper_fcmpes);
1598 static inline void gen_op_fcmped(int fccno)
1600 tcg_gen_helper_0_0(helper_fcmped);
1603 #if defined(CONFIG_USER_ONLY)
1604 static inline void gen_op_fcmpeq(int fccno)
1606 tcg_gen_helper_0_0(helper_fcmpeq);
/* Raise an FP exception trap: replace the FSR ftt field with
   `fsr_flags` and generate a TT_FP_EXCP exception. */
1612 static inline void gen_op_fpexception_im(int fsr_flags)
1614 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK); /* clear old ftt */
1615 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags); /* set the new trap type */
1616 gen_op_exception(TT_FP_EXCP);
/* If the FPU is disabled (system emulation only), emit a TT_NFPU_INSN
   exception.  Callers treat a nonzero return as "trap generated, skip
   translating this insn" (see the `if (gen_trap_ifnofpu(dc))` uses
   below).  NOTE(review): the return statements are elided in this
   listing. */
1619 static int gen_trap_ifnofpu(DisasContext * dc)
1621 #if !defined(CONFIG_USER_ONLY)
1622 if (!dc->fpu_enabled) {
1624 gen_op_exception(TT_NFPU_INSN);
/* Clear the FSR ftt (trap type) and cexc (current IEEE exception)
   fields before a new FP operation. */
1632 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1634 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
/* Reset the softfloat accrued-exception state via a helper call. */
1637 static inline void gen_clear_float_exceptions(void)
1639 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1643 #ifdef TARGET_SPARC64
/* Return a TCGv (i32) holding the ASI for an alternate-space access.
   Two forms: register form — load env->asi and fold the immediate
   displacement into the address; immediate form — take the 8-bit ASI
   straight from the insn.  NOTE(review): the if/else head selecting
   between the two forms is elided in this listing; presumably it tests
   the insn's immediate bit — confirm against the full source. */
1644 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1650 r_asi = tcg_temp_new(TCG_TYPE_I32);
1651 offset = GET_FIELD(insn, 25, 31); /* displacement folded into address */
1652 tcg_gen_addi_tl(r_addr, r_addr, offset);
1653 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi)); /* %asi register */
1655 asi = GET_FIELD(insn, 19, 26); /* immediate 8-bit ASI field */
1656 r_asi = tcg_const_i32(asi);
/* Alternate-space load (V9): load `size` bytes at [cpu_T[0]] with the
   given sign extension into cpu_T[1] via helper_ld_asi. */
1661 static inline void gen_ld_asi(int insn, int size, int sign)
1665 r_asi = gen_get_asi(insn, cpu_T[0]);
1666 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], r_asi,
1667 tcg_const_i32(size), tcg_const_i32(sign));
1668 tcg_gen_discard_i32(r_asi); /* release the temp */
/* Alternate-space store (V9): store `size` bytes of cpu_T[1] to
   [cpu_T[0]] via helper_st_asi. */
1671 static inline void gen_st_asi(int insn, int size)
1675 r_asi = gen_get_asi(insn, cpu_T[0]);
1676 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi,
1677 tcg_const_i32(size));
1678 tcg_gen_discard_i32(r_asi);
/* Alternate-space FP-register load: helper_ldf_asi reads `size` bytes
   at [cpu_T[0]] into FP register `rd` (rd passed as the elided fourth
   argument on line 1687 of the full source). */
1681 static inline void gen_ldf_asi(int insn, int size, int rd)
1685 r_asi = gen_get_asi(insn, cpu_T[0]);
1686 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, tcg_const_i32(size),
1688 tcg_gen_discard_i32(r_asi);
/* Alternate-space FP-register store: mirror of gen_ldf_asi, writing FP
   register `rd` to [cpu_T[0]]. */
1691 static inline void gen_stf_asi(int insn, int size, int rd)
1695 r_asi = gen_get_asi(insn, cpu_T[0]);
1696 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, tcg_const_i32(size),
1698 tcg_gen_discard_i32(r_asi);
/* SWAPA (V9 path): atomically exchange register cpu_T[1] with the
   32-bit word at [cpu_T[0]] in the address space named by the insn's
   ASI.  Load the old memory word into a temp, store the register
   value, then move the old memory word into cpu_T[1]. */
1701 static inline void gen_swap_asi(int insn)
1705 r_temp = tcg_temp_new(TCG_TYPE_I32);
1706 r_asi = gen_get_asi(insn, cpu_T[0]);
1707 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], r_asi,
1708 tcg_const_i32(4), tcg_const_i32(0));
/* BUG FIX: store the register value (cpu_T[1]), not r_temp.  Storing
   r_temp — the word just loaded — would write the memory value straight
   back and lose the register half of the swap.  The !TARGET_SPARC64
   variant of this function correctly stores cpu_T[1]. */
1709 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi,
1711 tcg_gen_mov_i32(cpu_T[1], r_temp); /* old memory word -> rd */
1712 tcg_gen_discard_i32(r_asi);
1713 tcg_gen_discard_i32(r_temp);
/* LDDA (V9 path): 64-bit alternate-space load split across a register
   pair — truncate the low 32 bits into cpu_T[0], then shift right 32
   and truncate the high 32 bits into cpu_T[1]. */
1716 static inline void gen_ldda_asi(int insn)
1718 TCGv r_dword, r_asi;
1720 r_dword = tcg_temp_new(TCG_TYPE_I64);
1721 r_asi = gen_get_asi(insn, cpu_T[0]);
1722 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], r_asi,
1723 tcg_const_i32(8), tcg_const_i32(0));
1724 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword); /* low word */
1725 tcg_gen_shri_i64(r_dword, r_dword, 32);
1726 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword); /* high word */
1727 tcg_gen_discard_i32(r_asi);
1728 tcg_gen_discard_i64(r_dword);
/* STDA (V9 path): 64-bit alternate-space store from a register pair —
   fetch register rd+1, pack it with cpu_T[1] into one 64-bit value via
   helper_pack64 (second argument elided in this listing), then store
   it through helper_st_asi. */
1731 static inline void gen_stda_asi(int insn, int rd)
1733 TCGv r_dword, r_temp, r_asi;
1735 r_dword = tcg_temp_new(TCG_TYPE_I64);
1736 r_temp = tcg_temp_new(TCG_TYPE_I32);
1737 gen_movl_reg_TN(rd + 1, r_temp); /* odd half of the pair */
1738 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
1740 r_asi = gen_get_asi(insn, cpu_T[0]);
1741 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi,
1743 tcg_gen_discard_i32(r_asi);
1744 tcg_gen_discard_i32(r_temp);
1745 tcg_gen_discard_i64(r_dword);
/* CASA: 32-bit compare-and-swap in an alternate space.  r_val1 holds
   the comparison value from register rd; helper_cas_asi exchanges
   cpu_T[1] with [cpu_T[0]] when they match and returns the old memory
   value in cpu_T[1]. */
1748 static inline void gen_cas_asi(int insn, int rd)
1752 r_val1 = tcg_temp_new(TCG_TYPE_I32);
1753 gen_movl_reg_TN(rd, r_val1); /* value to compare against memory */
1754 r_asi = gen_get_asi(insn, cpu_T[0]);
1755 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1757 tcg_gen_discard_i32(r_asi);
1758 tcg_gen_discard_i32(r_val1);
/* CASXA: 64-bit variant of gen_cas_asi (I64 temp, helper_casx_asi);
   otherwise identical structure. */
1761 static inline void gen_casx_asi(int insn, int rd)
1765 r_val1 = tcg_temp_new(TCG_TYPE_I64);
1766 gen_movl_reg_TN(rd, r_val1);
1767 r_asi = gen_get_asi(insn, cpu_T[0]);
1768 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1770 tcg_gen_discard_i32(r_asi);
1771 tcg_gen_discard_i32(r_val1);
1774 #elif !defined(CONFIG_USER_ONLY)
/* Non-V9 system emulation: ASI is always the 8-bit immediate field.
   helper_ld_asi returns a 64-bit value; only the low 32 bits are kept
   in cpu_T[1]. */
1776 static inline void gen_ld_asi(int insn, int size, int sign)
1781 r_dword = tcg_temp_new(TCG_TYPE_I64);
1782 asi = GET_FIELD(insn, 19, 26); /* immediate ASI */
1783 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], tcg_const_i32(asi),
1784 tcg_const_i32(size), tcg_const_i32(sign));
1785 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1786 tcg_gen_discard_i64(r_dword);
/* Non-V9 alternate-space store: zero-extend cpu_T[1] to 64 bits (the
   helper takes a 64-bit value) and store `size` bytes at [cpu_T[0]]. */
1789 static inline void gen_st_asi(int insn, int size)
1794 r_dword = tcg_temp_new(TCG_TYPE_I64);
1795 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1796 asi = GET_FIELD(insn, 19, 26);
1797 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, tcg_const_i32(asi),
1798 tcg_const_i32(size));
1799 tcg_gen_discard_i64(r_dword);
/* SWAPA (non-V9): load the old memory word into a temp, store the
   register value cpu_T[1], then move the old word into cpu_T[1]. */
1802 static inline void gen_swap_asi(int insn)
1807 r_temp = tcg_temp_new(TCG_TYPE_I32);
1808 asi = GET_FIELD(insn, 19, 26);
1809 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], tcg_const_i32(asi),
1810 tcg_const_i32(4), tcg_const_i32(0));
1811 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], tcg_const_i32(asi),
1813 tcg_gen_mov_i32(cpu_T[1], r_temp); /* old memory word -> rd */
1814 tcg_gen_discard_i32(r_temp);
/* LDDA (non-V9): 64-bit alternate-space load split into cpu_T[0] (low
   word) and cpu_T[1] (high word), same layout as the V9 variant. */
1817 static inline void gen_ldda_asi(int insn)
1822 r_dword = tcg_temp_new(TCG_TYPE_I64);
1823 asi = GET_FIELD(insn, 19, 26);
1824 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], tcg_const_i32(asi),
1825 tcg_const_i32(8), tcg_const_i32(0));
1826 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword); /* low word */
1827 tcg_gen_shri_i64(r_dword, r_dword, 32);
1828 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword); /* high word */
1829 tcg_gen_discard_i64(r_dword);
/* STDA (non-V9): pack cpu_T[1] and register rd+1 into one 64-bit value
   and store it with the immediate ASI. */
1832 static inline void gen_stda_asi(int insn, int rd)
1835 TCGv r_dword, r_temp;
1837 r_dword = tcg_temp_new(TCG_TYPE_I64);
1838 r_temp = tcg_temp_new(TCG_TYPE_I32);
1839 gen_movl_reg_TN(rd + 1, r_temp); /* odd half of the pair */
1840 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1], r_temp);
1841 asi = GET_FIELD(insn, 19, 26);
1842 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, tcg_const_i32(asi),
1844 tcg_gen_discard_i64(r_dword);
1848 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: atomic load-store-unsigned-byte in an alternate space —
   load the byte at [cpu_T[0]] into cpu_T[1] (via gen_ld_asi), then
   store 0xff back to the same location. */
1849 static inline void gen_ldstub_asi(int insn)
1853 gen_ld_asi(insn, 1, 0); /* 1 byte, unsigned */
1855 asi = GET_FIELD(insn, 19, 26);
1856 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], tcg_const_i64(0xff),
1857 tcg_const_i32(asi), tcg_const_i32(1));
1861 /* before an instruction, dc->pc must be static */
1862 static void disas_sparc_insn(DisasContext * dc)
1864 unsigned int insn, opc, rs1, rs2, rd;
1866 insn = ldl_code(dc->pc);
1867 opc = GET_FIELD(insn, 0, 1);
1869 rd = GET_FIELD(insn, 2, 6);
1871 case 0: /* branches/sethi */
1873 unsigned int xop = GET_FIELD(insn, 7, 9);
1876 #ifdef TARGET_SPARC64
1877 case 0x1: /* V9 BPcc */
1881 target = GET_FIELD_SP(insn, 0, 18);
1882 target = sign_extend(target, 18);
1884 cc = GET_FIELD_SP(insn, 20, 21);
1886 do_branch(dc, target, insn, 0);
1888 do_branch(dc, target, insn, 1);
1893 case 0x3: /* V9 BPr */
1895 target = GET_FIELD_SP(insn, 0, 13) |
1896 (GET_FIELD_SP(insn, 20, 21) << 14);
1897 target = sign_extend(target, 16);
1899 rs1 = GET_FIELD(insn, 13, 17);
1900 gen_movl_reg_T0(rs1);
1901 do_branch_reg(dc, target, insn);
1904 case 0x5: /* V9 FBPcc */
1906 int cc = GET_FIELD_SP(insn, 20, 21);
1907 if (gen_trap_ifnofpu(dc))
1909 target = GET_FIELD_SP(insn, 0, 18);
1910 target = sign_extend(target, 19);
1912 do_fbranch(dc, target, insn, cc);
1916 case 0x7: /* CBN+x */
1921 case 0x2: /* BN+x */
1923 target = GET_FIELD(insn, 10, 31);
1924 target = sign_extend(target, 22);
1926 do_branch(dc, target, insn, 0);
1929 case 0x6: /* FBN+x */
1931 if (gen_trap_ifnofpu(dc))
1933 target = GET_FIELD(insn, 10, 31);
1934 target = sign_extend(target, 22);
1936 do_fbranch(dc, target, insn, 0);
1939 case 0x4: /* SETHI */
1944 uint32_t value = GET_FIELD(insn, 10, 31);
1945 tcg_gen_movi_tl(cpu_T[0], value << 10);
1946 gen_movl_T0_reg(rd);
1951 case 0x0: /* UNIMPL */
1960 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1962 gen_movl_TN_reg(15, tcg_const_tl(dc->pc));
1968 case 2: /* FPU & Logical Operations */
1970 unsigned int xop = GET_FIELD(insn, 7, 12);
1971 if (xop == 0x3a) { /* generate trap */
1974 rs1 = GET_FIELD(insn, 13, 17);
1975 gen_movl_reg_T0(rs1);
1977 rs2 = GET_FIELD(insn, 25, 31);
1978 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
1980 rs2 = GET_FIELD(insn, 27, 31);
1984 gen_movl_reg_T1(rs2);
1990 cond = GET_FIELD(insn, 3, 6);
1993 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
1994 } else if (cond != 0) {
1995 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
1996 #ifdef TARGET_SPARC64
1998 int cc = GET_FIELD_SP(insn, 11, 12);
2002 gen_cond(r_cond, 0, cond);
2004 gen_cond(r_cond, 1, cond);
2009 gen_cond(r_cond, 0, cond);
2011 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], r_cond);
2012 tcg_gen_discard_tl(r_cond);
2018 } else if (xop == 0x28) {
2019 rs1 = GET_FIELD(insn, 13, 17);
2022 #ifndef TARGET_SPARC64
2023 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2024 manual, rdy on the microSPARC
2026 case 0x0f: /* stbar in the SPARCv8 manual,
2027 rdy on the microSPARC II */
2028 case 0x10 ... 0x1f: /* implementation-dependent in the
2029 SPARCv8 manual, rdy on the
2032 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
2033 gen_movl_T0_reg(rd);
2035 #ifdef TARGET_SPARC64
2036 case 0x2: /* V9 rdccr */
2037 tcg_gen_helper_1_0(helper_rdccr, cpu_T[0]);
2038 gen_movl_T0_reg(rd);
2040 case 0x3: /* V9 rdasi */
2041 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
2042 gen_movl_T0_reg(rd);
2044 case 0x4: /* V9 rdtick */
2048 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2049 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2050 offsetof(CPUState, tick));
2051 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2053 gen_movl_T0_reg(rd);
2054 tcg_gen_discard_ptr(r_tickptr);
2057 case 0x5: /* V9 rdpc */
2058 tcg_gen_movi_tl(cpu_T[0], dc->pc);
2059 gen_movl_T0_reg(rd);
2061 case 0x6: /* V9 rdfprs */
2062 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
2063 gen_movl_T0_reg(rd);
2065 case 0xf: /* V9 membar */
2066 break; /* no effect */
2067 case 0x13: /* Graphics Status */
2068 if (gen_trap_ifnofpu(dc))
2070 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
2071 gen_movl_T0_reg(rd);
2073 case 0x17: /* Tick compare */
2074 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
2075 gen_movl_T0_reg(rd);
2077 case 0x18: /* System tick */
2081 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2082 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2083 offsetof(CPUState, stick));
2084 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2086 gen_movl_T0_reg(rd);
2087 tcg_gen_discard_ptr(r_tickptr);
2090 case 0x19: /* System tick compare */
2091 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
2092 gen_movl_T0_reg(rd);
2094 case 0x10: /* Performance Control */
2095 case 0x11: /* Performance Instrumentation Counter */
2096 case 0x12: /* Dispatch Control */
2097 case 0x14: /* Softint set, WO */
2098 case 0x15: /* Softint clear, WO */
2099 case 0x16: /* Softint write */
2104 #if !defined(CONFIG_USER_ONLY)
2105 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2106 #ifndef TARGET_SPARC64
2107 if (!supervisor(dc))
2109 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
2111 if (!hypervisor(dc))
2113 rs1 = GET_FIELD(insn, 13, 17);
2116 // gen_op_rdhpstate();
2119 // gen_op_rdhtstate();
2122 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
2125 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
2128 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
2130 case 31: // hstick_cmpr
2131 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2137 gen_movl_T0_reg(rd);
2139 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2140 if (!supervisor(dc))
2142 #ifdef TARGET_SPARC64
2143 rs1 = GET_FIELD(insn, 13, 17);
2149 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2150 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2151 offsetof(CPUState, tsptr));
2152 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2153 offsetof(trap_state, tpc));
2154 tcg_gen_discard_ptr(r_tsptr);
2161 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2162 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2163 offsetof(CPUState, tsptr));
2164 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2165 offsetof(trap_state, tnpc));
2166 tcg_gen_discard_ptr(r_tsptr);
2173 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2174 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2175 offsetof(CPUState, tsptr));
2176 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2177 offsetof(trap_state, tstate));
2178 tcg_gen_discard_ptr(r_tsptr);
2185 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2186 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2187 offsetof(CPUState, tsptr));
2188 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
2189 offsetof(trap_state, tt));
2190 tcg_gen_discard_ptr(r_tsptr);
2197 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2198 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2199 offsetof(CPUState, tick));
2200 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2202 gen_movl_T0_reg(rd);
2203 tcg_gen_discard_ptr(r_tickptr);
2207 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2210 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
2213 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
2216 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
2219 tcg_gen_helper_1_0(helper_rdcwp, cpu_T[0]);
2222 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
2224 case 11: // canrestore
2225 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
2227 case 12: // cleanwin
2228 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
2230 case 13: // otherwin
2231 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
2234 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
2236 case 16: // UA2005 gl
2237 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
2239 case 26: // UA2005 strand status
2240 if (!hypervisor(dc))
2242 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
2245 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
2252 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
2254 gen_movl_T0_reg(rd);
2256 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2257 #ifdef TARGET_SPARC64
2260 if (!supervisor(dc))
2262 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2263 gen_movl_T0_reg(rd);
2267 } else if (xop == 0x34) { /* FPU Operations */
2268 if (gen_trap_ifnofpu(dc))
2270 gen_op_clear_ieee_excp_and_FTT();
2271 rs1 = GET_FIELD(insn, 13, 17);
2272 rs2 = GET_FIELD(insn, 27, 31);
2273 xop = GET_FIELD(insn, 18, 26);
2275 case 0x1: /* fmovs */
2276 gen_op_load_fpr_FT0(rs2);
2277 gen_op_store_FT0_fpr(rd);
2279 case 0x5: /* fnegs */
2280 gen_op_load_fpr_FT1(rs2);
2282 gen_op_store_FT0_fpr(rd);
2284 case 0x9: /* fabss */
2285 gen_op_load_fpr_FT1(rs2);
2286 tcg_gen_helper_0_0(helper_fabss);
2287 gen_op_store_FT0_fpr(rd);
2289 case 0x29: /* fsqrts */
2290 gen_op_load_fpr_FT1(rs2);
2291 gen_clear_float_exceptions();
2292 tcg_gen_helper_0_0(helper_fsqrts);
2293 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2294 gen_op_store_FT0_fpr(rd);
2296 case 0x2a: /* fsqrtd */
2297 gen_op_load_fpr_DT1(DFPREG(rs2));
2298 gen_clear_float_exceptions();
2299 tcg_gen_helper_0_0(helper_fsqrtd);
2300 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2301 gen_op_store_DT0_fpr(DFPREG(rd));
2303 case 0x2b: /* fsqrtq */
2304 #if defined(CONFIG_USER_ONLY)
2305 gen_op_load_fpr_QT1(QFPREG(rs2));
2306 gen_clear_float_exceptions();
2307 tcg_gen_helper_0_0(helper_fsqrtq);
2308 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2309 gen_op_store_QT0_fpr(QFPREG(rd));
2315 gen_op_load_fpr_FT0(rs1);
2316 gen_op_load_fpr_FT1(rs2);
2317 gen_clear_float_exceptions();
2319 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2320 gen_op_store_FT0_fpr(rd);
2323 gen_op_load_fpr_DT0(DFPREG(rs1));
2324 gen_op_load_fpr_DT1(DFPREG(rs2));
2325 gen_clear_float_exceptions();
2327 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2328 gen_op_store_DT0_fpr(DFPREG(rd));
2330 case 0x43: /* faddq */
2331 #if defined(CONFIG_USER_ONLY)
2332 gen_op_load_fpr_QT0(QFPREG(rs1));
2333 gen_op_load_fpr_QT1(QFPREG(rs2));
2334 gen_clear_float_exceptions();
2336 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2337 gen_op_store_QT0_fpr(QFPREG(rd));
2343 gen_op_load_fpr_FT0(rs1);
2344 gen_op_load_fpr_FT1(rs2);
2345 gen_clear_float_exceptions();
2347 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2348 gen_op_store_FT0_fpr(rd);
2351 gen_op_load_fpr_DT0(DFPREG(rs1));
2352 gen_op_load_fpr_DT1(DFPREG(rs2));
2353 gen_clear_float_exceptions();
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2356 gen_op_store_DT0_fpr(DFPREG(rd));
2358 case 0x47: /* fsubq */
2359 #if defined(CONFIG_USER_ONLY)
2360 gen_op_load_fpr_QT0(QFPREG(rs1));
2361 gen_op_load_fpr_QT1(QFPREG(rs2));
2362 gen_clear_float_exceptions();
2364 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2365 gen_op_store_QT0_fpr(QFPREG(rd));
2371 gen_op_load_fpr_FT0(rs1);
2372 gen_op_load_fpr_FT1(rs2);
2373 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2376 gen_op_store_FT0_fpr(rd);
2379 gen_op_load_fpr_DT0(DFPREG(rs1));
2380 gen_op_load_fpr_DT1(DFPREG(rs2));
2381 gen_clear_float_exceptions();
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 gen_op_store_DT0_fpr(DFPREG(rd));
2386 case 0x4b: /* fmulq */
2387 #if defined(CONFIG_USER_ONLY)
2388 gen_op_load_fpr_QT0(QFPREG(rs1));
2389 gen_op_load_fpr_QT1(QFPREG(rs2));
2390 gen_clear_float_exceptions();
2392 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2393 gen_op_store_QT0_fpr(QFPREG(rd));
2399 gen_op_load_fpr_FT0(rs1);
2400 gen_op_load_fpr_FT1(rs2);
2401 gen_clear_float_exceptions();
2403 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2404 gen_op_store_FT0_fpr(rd);
2407 gen_op_load_fpr_DT0(DFPREG(rs1));
2408 gen_op_load_fpr_DT1(DFPREG(rs2));
2409 gen_clear_float_exceptions();
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_DT0_fpr(DFPREG(rd));
2414 case 0x4f: /* fdivq */
2415 #if defined(CONFIG_USER_ONLY)
2416 gen_op_load_fpr_QT0(QFPREG(rs1));
2417 gen_op_load_fpr_QT1(QFPREG(rs2));
2418 gen_clear_float_exceptions();
2420 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2421 gen_op_store_QT0_fpr(QFPREG(rd));
2427 gen_op_load_fpr_FT0(rs1);
2428 gen_op_load_fpr_FT1(rs2);
2429 gen_clear_float_exceptions();
2431 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2432 gen_op_store_DT0_fpr(DFPREG(rd));
2434 case 0x6e: /* fdmulq */
2435 #if defined(CONFIG_USER_ONLY)
2436 gen_op_load_fpr_DT0(DFPREG(rs1));
2437 gen_op_load_fpr_DT1(DFPREG(rs2));
2438 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2441 gen_op_store_QT0_fpr(QFPREG(rd));
2447 gen_op_load_fpr_FT1(rs2);
2448 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2451 gen_op_store_FT0_fpr(rd);
2454 gen_op_load_fpr_DT1(DFPREG(rs2));
2455 gen_clear_float_exceptions();
2457 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2458 gen_op_store_FT0_fpr(rd);
2460 case 0xc7: /* fqtos */
2461 #if defined(CONFIG_USER_ONLY)
2462 gen_op_load_fpr_QT1(QFPREG(rs2));
2463 gen_clear_float_exceptions();
2465 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2466 gen_op_store_FT0_fpr(rd);
2472 gen_op_load_fpr_FT1(rs2);
2474 gen_op_store_DT0_fpr(DFPREG(rd));
2477 gen_op_load_fpr_FT1(rs2);
2479 gen_op_store_DT0_fpr(DFPREG(rd));
2481 case 0xcb: /* fqtod */
2482 #if defined(CONFIG_USER_ONLY)
2483 gen_op_load_fpr_QT1(QFPREG(rs2));
2484 gen_clear_float_exceptions();
2486 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2487 gen_op_store_DT0_fpr(DFPREG(rd));
2492 case 0xcc: /* fitoq */
2493 #if defined(CONFIG_USER_ONLY)
2494 gen_op_load_fpr_FT1(rs2);
2496 gen_op_store_QT0_fpr(QFPREG(rd));
2501 case 0xcd: /* fstoq */
2502 #if defined(CONFIG_USER_ONLY)
2503 gen_op_load_fpr_FT1(rs2);
2505 gen_op_store_QT0_fpr(QFPREG(rd));
2510 case 0xce: /* fdtoq */
2511 #if defined(CONFIG_USER_ONLY)
2512 gen_op_load_fpr_DT1(DFPREG(rs2));
2514 gen_op_store_QT0_fpr(QFPREG(rd));
2520 gen_op_load_fpr_FT1(rs2);
2521 gen_clear_float_exceptions();
2523 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2524 gen_op_store_FT0_fpr(rd);
2527 gen_op_load_fpr_DT1(DFPREG(rs2));
2528 gen_clear_float_exceptions();
2530 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2531 gen_op_store_FT0_fpr(rd);
2533 case 0xd3: /* fqtoi */
2534 #if defined(CONFIG_USER_ONLY)
2535 gen_op_load_fpr_QT1(QFPREG(rs2));
2536 gen_clear_float_exceptions();
2538 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2539 gen_op_store_FT0_fpr(rd);
2544 #ifdef TARGET_SPARC64
2545 case 0x2: /* V9 fmovd */
2546 gen_op_load_fpr_DT0(DFPREG(rs2));
2547 gen_op_store_DT0_fpr(DFPREG(rd));
2549 case 0x3: /* V9 fmovq */
2550 #if defined(CONFIG_USER_ONLY)
2551 gen_op_load_fpr_QT0(QFPREG(rs2));
2552 gen_op_store_QT0_fpr(QFPREG(rd));
2557 case 0x6: /* V9 fnegd */
2558 gen_op_load_fpr_DT1(DFPREG(rs2));
2560 gen_op_store_DT0_fpr(DFPREG(rd));
2562 case 0x7: /* V9 fnegq */
2563 #if defined(CONFIG_USER_ONLY)
2564 gen_op_load_fpr_QT1(QFPREG(rs2));
2566 gen_op_store_QT0_fpr(QFPREG(rd));
2571 case 0xa: /* V9 fabsd */
2572 gen_op_load_fpr_DT1(DFPREG(rs2));
2573 tcg_gen_helper_0_0(helper_fabsd);
2574 gen_op_store_DT0_fpr(DFPREG(rd));
2576 case 0xb: /* V9 fabsq */
2577 #if defined(CONFIG_USER_ONLY)
2578 gen_op_load_fpr_QT1(QFPREG(rs2));
2579 tcg_gen_helper_0_0(helper_fabsq);
2580 gen_op_store_QT0_fpr(QFPREG(rd));
2585 case 0x81: /* V9 fstox */
2586 gen_op_load_fpr_FT1(rs2);
2587 gen_clear_float_exceptions();
2589 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2590 gen_op_store_DT0_fpr(DFPREG(rd));
2592 case 0x82: /* V9 fdtox */
2593 gen_op_load_fpr_DT1(DFPREG(rs2));
2594 gen_clear_float_exceptions();
2596 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2597 gen_op_store_DT0_fpr(DFPREG(rd));
2599 case 0x83: /* V9 fqtox */
2600 #if defined(CONFIG_USER_ONLY)
2601 gen_op_load_fpr_QT1(QFPREG(rs2));
2602 gen_clear_float_exceptions();
2604 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2605 gen_op_store_DT0_fpr(DFPREG(rd));
2610 case 0x84: /* V9 fxtos */
2611 gen_op_load_fpr_DT1(DFPREG(rs2));
2612 gen_clear_float_exceptions();
2614 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2615 gen_op_store_FT0_fpr(rd);
2617 case 0x88: /* V9 fxtod */
2618 gen_op_load_fpr_DT1(DFPREG(rs2));
2619 gen_clear_float_exceptions();
2621 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2622 gen_op_store_DT0_fpr(DFPREG(rd));
2624 case 0x8c: /* V9 fxtoq */
2625 #if defined(CONFIG_USER_ONLY)
2626 gen_op_load_fpr_DT1(DFPREG(rs2));
2627 gen_clear_float_exceptions();
2629 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2630 gen_op_store_QT0_fpr(QFPREG(rd));
2639 } else if (xop == 0x35) { /* FPU Operations */
2640 #ifdef TARGET_SPARC64
2643 if (gen_trap_ifnofpu(dc))
2645 gen_op_clear_ieee_excp_and_FTT();
2646 rs1 = GET_FIELD(insn, 13, 17);
2647 rs2 = GET_FIELD(insn, 27, 31);
2648 xop = GET_FIELD(insn, 18, 26);
2649 #ifdef TARGET_SPARC64
2650 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2653 l1 = gen_new_label();
2654 cond = GET_FIELD_SP(insn, 14, 17);
2655 rs1 = GET_FIELD(insn, 13, 17);
2656 gen_movl_reg_T0(rs1);
2657 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2658 tcg_const_tl(0), l1);
2659 gen_op_load_fpr_FT0(rs2);
2660 gen_op_store_FT0_fpr(rd);
2663 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2666 l1 = gen_new_label();
2667 cond = GET_FIELD_SP(insn, 14, 17);
2668 rs1 = GET_FIELD(insn, 13, 17);
2669 gen_movl_reg_T0(rs1);
2670 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2671 tcg_const_tl(0), l1);
2672 gen_op_load_fpr_DT0(DFPREG(rs2));
2673 gen_op_store_DT0_fpr(DFPREG(rd));
2676 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2677 #if defined(CONFIG_USER_ONLY)
2680 l1 = gen_new_label();
2681 cond = GET_FIELD_SP(insn, 14, 17);
2682 rs1 = GET_FIELD(insn, 13, 17);
2683 gen_movl_reg_T0(rs1);
2684 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2685 tcg_const_tl(0), l1);
2686 gen_op_load_fpr_QT0(QFPREG(rs2));
2687 gen_op_store_QT0_fpr(QFPREG(rd));
2696 #ifdef TARGET_SPARC64
2697 #define FMOVCC(size_FDQ, fcc) \
2702 l1 = gen_new_label(); \
2703 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2704 cond = GET_FIELD_SP(insn, 14, 17); \
2705 gen_fcond(r_cond, fcc, cond); \
2706 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, \
2707 tcg_const_tl(0), l1); \
2708 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2709 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2710 gen_set_label(l1); \
2711 tcg_gen_discard_tl(r_cond); \
2713 case 0x001: /* V9 fmovscc %fcc0 */
2716 case 0x002: /* V9 fmovdcc %fcc0 */
2719 case 0x003: /* V9 fmovqcc %fcc0 */
2720 #if defined(CONFIG_USER_ONLY)
2726 case 0x041: /* V9 fmovscc %fcc1 */
2729 case 0x042: /* V9 fmovdcc %fcc1 */
2732 case 0x043: /* V9 fmovqcc %fcc1 */
2733 #if defined(CONFIG_USER_ONLY)
2739 case 0x081: /* V9 fmovscc %fcc2 */
2742 case 0x082: /* V9 fmovdcc %fcc2 */
2745 case 0x083: /* V9 fmovqcc %fcc2 */
2746 #if defined(CONFIG_USER_ONLY)
2752 case 0x0c1: /* V9 fmovscc %fcc3 */
2755 case 0x0c2: /* V9 fmovdcc %fcc3 */
2758 case 0x0c3: /* V9 fmovqcc %fcc3 */
2759 #if defined(CONFIG_USER_ONLY)
2766 #define FMOVCC(size_FDQ, icc) \
2771 l1 = gen_new_label(); \
2772 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2773 cond = GET_FIELD_SP(insn, 14, 17); \
2774 gen_cond(r_cond, icc, cond); \
2775 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, \
2776 tcg_const_tl(0), l1); \
2777 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2778 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2779 gen_set_label(l1); \
2780 tcg_gen_discard_tl(r_cond); \
2783 case 0x101: /* V9 fmovscc %icc */
2786 case 0x102: /* V9 fmovdcc %icc */
2788 case 0x103: /* V9 fmovqcc %icc */
2789 #if defined(CONFIG_USER_ONLY)
2795 case 0x181: /* V9 fmovscc %xcc */
2798 case 0x182: /* V9 fmovdcc %xcc */
2801 case 0x183: /* V9 fmovqcc %xcc */
2802 #if defined(CONFIG_USER_ONLY)
2810 case 0x51: /* fcmps, V9 %fcc */
2811 gen_op_load_fpr_FT0(rs1);
2812 gen_op_load_fpr_FT1(rs2);
2813 gen_op_fcmps(rd & 3);
2815 case 0x52: /* fcmpd, V9 %fcc */
2816 gen_op_load_fpr_DT0(DFPREG(rs1));
2817 gen_op_load_fpr_DT1(DFPREG(rs2));
2818 gen_op_fcmpd(rd & 3);
2820 case 0x53: /* fcmpq, V9 %fcc */
2821 #if defined(CONFIG_USER_ONLY)
2822 gen_op_load_fpr_QT0(QFPREG(rs1));
2823 gen_op_load_fpr_QT1(QFPREG(rs2));
2824 gen_op_fcmpq(rd & 3);
2826 #else /* !defined(CONFIG_USER_ONLY) */
2829 case 0x55: /* fcmpes, V9 %fcc */
2830 gen_op_load_fpr_FT0(rs1);
2831 gen_op_load_fpr_FT1(rs2);
2832 gen_op_fcmpes(rd & 3);
2834 case 0x56: /* fcmped, V9 %fcc */
2835 gen_op_load_fpr_DT0(DFPREG(rs1));
2836 gen_op_load_fpr_DT1(DFPREG(rs2));
2837 gen_op_fcmped(rd & 3);
2839 case 0x57: /* fcmpeq, V9 %fcc */
2840 #if defined(CONFIG_USER_ONLY)
2841 gen_op_load_fpr_QT0(QFPREG(rs1));
2842 gen_op_load_fpr_QT1(QFPREG(rs2));
2843 gen_op_fcmpeq(rd & 3);
2845 #else/* !defined(CONFIG_USER_ONLY) */
2852 } else if (xop == 0x2) {
2855 rs1 = GET_FIELD(insn, 13, 17);
2857 // or %g0, x, y -> mov T0, x; mov y, T0
2858 if (IS_IMM) { /* immediate */
2859 rs2 = GET_FIELDs(insn, 19, 31);
2860 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2861 } else { /* register */
2862 rs2 = GET_FIELD(insn, 27, 31);
2863 gen_movl_reg_T0(rs2);
2866 gen_movl_reg_T0(rs1);
2867 if (IS_IMM) { /* immediate */
2868 rs2 = GET_FIELDs(insn, 19, 31);
2869 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2870 } else { /* register */
2871 // or x, %g0, y -> mov T1, x; mov y, T1
2872 rs2 = GET_FIELD(insn, 27, 31);
2874 gen_movl_reg_T1(rs2);
2879 gen_movl_T0_reg(rd);
2881 #ifdef TARGET_SPARC64
2882 } else if (xop == 0x25) { /* sll, V9 sllx */
2883 rs1 = GET_FIELD(insn, 13, 17);
2884 gen_movl_reg_T0(rs1);
2885 if (IS_IMM) { /* immediate */
2886 rs2 = GET_FIELDs(insn, 20, 31);
2887 if (insn & (1 << 12)) {
2888 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2890 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2891 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2893 } else { /* register */
2894 rs2 = GET_FIELD(insn, 27, 31);
2895 gen_movl_reg_T1(rs2);
2896 if (insn & (1 << 12)) {
2897 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2898 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2900 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2901 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2902 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2905 gen_movl_T0_reg(rd);
2906 } else if (xop == 0x26) { /* srl, V9 srlx */
2907 rs1 = GET_FIELD(insn, 13, 17);
2908 gen_movl_reg_T0(rs1);
2909 if (IS_IMM) { /* immediate */
2910 rs2 = GET_FIELDs(insn, 20, 31);
2911 if (insn & (1 << 12)) {
2912 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2914 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2915 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2917 } else { /* register */
2918 rs2 = GET_FIELD(insn, 27, 31);
2919 gen_movl_reg_T1(rs2);
2920 if (insn & (1 << 12)) {
2921 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2922 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2924 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2925 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2926 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2929 gen_movl_T0_reg(rd);
2930 } else if (xop == 0x27) { /* sra, V9 srax */
2931 rs1 = GET_FIELD(insn, 13, 17);
2932 gen_movl_reg_T0(rs1);
2933 if (IS_IMM) { /* immediate */
2934 rs2 = GET_FIELDs(insn, 20, 31);
2935 if (insn & (1 << 12)) {
2936 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2938 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2939 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2940 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2942 } else { /* register */
2943 rs2 = GET_FIELD(insn, 27, 31);
2944 gen_movl_reg_T1(rs2);
2945 if (insn & (1 << 12)) {
2946 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2947 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2949 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2950 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2951 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2954 gen_movl_T0_reg(rd);
2956 } else if (xop < 0x36) {
2957 rs1 = GET_FIELD(insn, 13, 17);
2958 gen_movl_reg_T0(rs1);
2959 if (IS_IMM) { /* immediate */
2960 rs2 = GET_FIELDs(insn, 19, 31);
2961 gen_movl_simm_T1(rs2);
2962 } else { /* register */
2963 rs2 = GET_FIELD(insn, 27, 31);
2964 gen_movl_reg_T1(rs2);
2967 switch (xop & ~0x10) {
2970 gen_op_add_T1_T0_cc();
2975 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2977 gen_op_logic_T0_cc();
2980 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2982 gen_op_logic_T0_cc();
2985 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2987 gen_op_logic_T0_cc();
2991 gen_op_sub_T1_T0_cc();
2993 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2996 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
2997 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2999 gen_op_logic_T0_cc();
3002 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3003 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3005 gen_op_logic_T0_cc();
3008 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3009 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3011 gen_op_logic_T0_cc();
3015 gen_op_addx_T1_T0_cc();
3017 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3018 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3019 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3022 #ifdef TARGET_SPARC64
3023 case 0x9: /* V9 mulx */
3024 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3028 gen_op_umul_T1_T0();
3030 gen_op_logic_T0_cc();
3033 gen_op_smul_T1_T0();
3035 gen_op_logic_T0_cc();
3039 gen_op_subx_T1_T0_cc();
3041 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3042 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3043 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3046 #ifdef TARGET_SPARC64
3047 case 0xd: /* V9 udivx */
3048 gen_trap_ifdivzero_i64(cpu_T[1]);
3049 tcg_gen_divu_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3053 gen_op_udiv_T1_T0();
3058 gen_op_sdiv_T1_T0();
3065 gen_movl_T0_reg(rd);
3068 case 0x20: /* taddcc */
3069 gen_op_tadd_T1_T0_cc();
3070 gen_movl_T0_reg(rd);
3072 case 0x21: /* tsubcc */
3073 gen_op_tsub_T1_T0_cc();
3074 gen_movl_T0_reg(rd);
3076 case 0x22: /* taddcctv */
3078 gen_op_tadd_T1_T0_ccTV();
3079 gen_movl_T0_reg(rd);
3081 case 0x23: /* tsubcctv */
3083 gen_op_tsub_T1_T0_ccTV();
3084 gen_movl_T0_reg(rd);
3086 case 0x24: /* mulscc */
3087 gen_op_mulscc_T1_T0();
3088 gen_movl_T0_reg(rd);
3090 #ifndef TARGET_SPARC64
3091 case 0x25: /* sll */
3092 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3093 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3094 gen_movl_T0_reg(rd);
3096 case 0x26: /* srl */
3097 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3098 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3099 gen_movl_T0_reg(rd);
3101 case 0x27: /* sra */
3102 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3103 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3104 gen_movl_T0_reg(rd);
3112 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3114 #ifndef TARGET_SPARC64
3115 case 0x01 ... 0x0f: /* undefined in the
3119 case 0x10 ... 0x1f: /* implementation-dependent
3125 case 0x2: /* V9 wrccr */
3127 tcg_gen_helper_0_1(helper_wrccr, cpu_T[0]);
3129 case 0x3: /* V9 wrasi */
3131 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
3133 case 0x6: /* V9 wrfprs */
3135 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
3141 case 0xf: /* V9 sir, nop if user */
3142 #if !defined(CONFIG_USER_ONLY)
3147 case 0x13: /* Graphics Status */
3148 if (gen_trap_ifnofpu(dc))
3151 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
3153 case 0x17: /* Tick compare */
3154 #if !defined(CONFIG_USER_ONLY)
3155 if (!supervisor(dc))
3162 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3164 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3165 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3166 offsetof(CPUState, tick));
3167 tcg_gen_helper_0_2(helper_tick_set_limit,
3168 r_tickptr, cpu_T[0]);
3169 tcg_gen_discard_ptr(r_tickptr);
3172 case 0x18: /* System tick */
3173 #if !defined(CONFIG_USER_ONLY)
3174 if (!supervisor(dc))
3181 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3182 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3183 offsetof(CPUState, stick));
3184 tcg_gen_helper_0_2(helper_tick_set_count,
3185 r_tickptr, cpu_T[0]);
3186 tcg_gen_discard_ptr(r_tickptr);
3189 case 0x19: /* System tick compare */
3190 #if !defined(CONFIG_USER_ONLY)
3191 if (!supervisor(dc))
3198 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3200 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3201 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3202 offsetof(CPUState, stick));
3203 tcg_gen_helper_0_2(helper_tick_set_limit,
3204 r_tickptr, cpu_T[0]);
3205 tcg_gen_discard_ptr(r_tickptr);
3209 case 0x10: /* Performance Control */
3210 case 0x11: /* Performance Instrumentation Counter */
3211 case 0x12: /* Dispatch Control */
3212 case 0x14: /* Softint set */
3213 case 0x15: /* Softint clear */
3214 case 0x16: /* Softint write */
3221 #if !defined(CONFIG_USER_ONLY)
3222 case 0x31: /* wrpsr, V9 saved, restored */
3224 if (!supervisor(dc))
3226 #ifdef TARGET_SPARC64
3234 case 2: /* UA2005 allclean */
3235 case 3: /* UA2005 otherw */
3236 case 4: /* UA2005 normalw */
3237 case 5: /* UA2005 invalw */
3244 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
3252 case 0x32: /* wrwim, V9 wrpr */
3254 if (!supervisor(dc))
3257 #ifdef TARGET_SPARC64
3263 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3264 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3265 offsetof(CPUState, tsptr));
3266 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3267 offsetof(trap_state, tpc));
3268 tcg_gen_discard_ptr(r_tsptr);
3275 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3276 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3277 offsetof(CPUState, tsptr));
3278 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3279 offsetof(trap_state, tnpc));
3280 tcg_gen_discard_ptr(r_tsptr);
3287 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3288 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3289 offsetof(CPUState, tsptr));
3290 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3291 offsetof(trap_state, tstate));
3292 tcg_gen_discard_ptr(r_tsptr);
3299 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3300 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3301 offsetof(CPUState, tsptr));
3302 tcg_gen_st_i32(cpu_T[0], r_tsptr,
3303 offsetof(trap_state, tt));
3304 tcg_gen_discard_ptr(r_tsptr);
3311 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3312 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3313 offsetof(CPUState, tick));
3314 tcg_gen_helper_0_2(helper_tick_set_count,
3315 r_tickptr, cpu_T[0]);
3316 tcg_gen_discard_ptr(r_tickptr);
3320 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3324 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
3330 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
3333 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
3336 tcg_gen_helper_0_1(helper_wrcwp, cpu_T[0]);
3339 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
3341 case 11: // canrestore
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
3344 case 12: // cleanwin
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
3347 case 13: // otherwin
3348 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
3353 case 16: // UA2005 gl
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
3356 case 26: // UA2005 strand status
3357 if (!hypervisor(dc))
3359 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
3365 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
3366 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
3370 case 0x33: /* wrtbr, UA2005 wrhpr */
3372 #ifndef TARGET_SPARC64
3373 if (!supervisor(dc))
3376 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3378 if (!hypervisor(dc))
3383 // XXX gen_op_wrhpstate();
3390 // XXX gen_op_wrhtstate();
3393 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
3396 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
3398 case 31: // hstick_cmpr
3402 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3404 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3405 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3406 offsetof(CPUState, hstick));
3407 tcg_gen_helper_0_2(helper_tick_set_limit,
3408 r_tickptr, cpu_T[0]);
3409 tcg_gen_discard_ptr(r_tickptr);
3412 case 6: // hver readonly
3420 #ifdef TARGET_SPARC64
3421 case 0x2c: /* V9 movcc */
3423 int cc = GET_FIELD_SP(insn, 11, 12);
3424 int cond = GET_FIELD_SP(insn, 14, 17);
3428 r_cond = tcg_temp_new(TCG_TYPE_TL);
3429 if (insn & (1 << 18)) {
3431 gen_cond(r_cond, 0, cond);
3433 gen_cond(r_cond, 1, cond);
3437 gen_fcond(r_cond, cc, cond);
3440 l1 = gen_new_label();
3442 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
3443 tcg_const_tl(0), l1);
3444 if (IS_IMM) { /* immediate */
3445 rs2 = GET_FIELD_SPs(insn, 0, 10);
3446 gen_movl_simm_T1(rs2);
3448 rs2 = GET_FIELD_SP(insn, 0, 4);
3449 gen_movl_reg_T1(rs2);
3451 gen_movl_T1_reg(rd);
3453 tcg_gen_discard_tl(r_cond);
3456 case 0x2d: /* V9 sdivx */
3457 gen_op_sdivx_T1_T0();
3458 gen_movl_T0_reg(rd);
3460 case 0x2e: /* V9 popc */
3462 if (IS_IMM) { /* immediate */
3463 rs2 = GET_FIELD_SPs(insn, 0, 12);
3464 gen_movl_simm_T1(rs2);
3465 // XXX optimize: popc(constant)
3468 rs2 = GET_FIELD_SP(insn, 0, 4);
3469 gen_movl_reg_T1(rs2);
3471 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
3473 gen_movl_T0_reg(rd);
3475 case 0x2f: /* V9 movr */
3477 int cond = GET_FIELD_SP(insn, 10, 12);
3480 rs1 = GET_FIELD(insn, 13, 17);
3481 gen_movl_reg_T0(rs1);
3483 l1 = gen_new_label();
3485 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
3486 tcg_const_tl(0), l1);
3487 if (IS_IMM) { /* immediate */
3488 rs2 = GET_FIELD_SPs(insn, 0, 9);
3489 gen_movl_simm_T1(rs2);
3491 rs2 = GET_FIELD_SP(insn, 0, 4);
3492 gen_movl_reg_T1(rs2);
3494 gen_movl_T1_reg(rd);
3503 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3504 #ifdef TARGET_SPARC64
3505 int opf = GET_FIELD_SP(insn, 5, 13);
3506 rs1 = GET_FIELD(insn, 13, 17);
3507 rs2 = GET_FIELD(insn, 27, 31);
3508 if (gen_trap_ifnofpu(dc))
3512 case 0x000: /* VIS I edge8cc */
3513 case 0x001: /* VIS II edge8n */
3514 case 0x002: /* VIS I edge8lcc */
3515 case 0x003: /* VIS II edge8ln */
3516 case 0x004: /* VIS I edge16cc */
3517 case 0x005: /* VIS II edge16n */
3518 case 0x006: /* VIS I edge16lcc */
3519 case 0x007: /* VIS II edge16ln */
3520 case 0x008: /* VIS I edge32cc */
3521 case 0x009: /* VIS II edge32n */
3522 case 0x00a: /* VIS I edge32lcc */
3523 case 0x00b: /* VIS II edge32ln */
3526 case 0x010: /* VIS I array8 */
3527 gen_movl_reg_T0(rs1);
3528 gen_movl_reg_T1(rs2);
3529 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3531 gen_movl_T0_reg(rd);
3533 case 0x012: /* VIS I array16 */
3534 gen_movl_reg_T0(rs1);
3535 gen_movl_reg_T1(rs2);
3536 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3538 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 1);
3539 gen_movl_T0_reg(rd);
3541 case 0x014: /* VIS I array32 */
3542 gen_movl_reg_T0(rs1);
3543 gen_movl_reg_T1(rs2);
3544 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3546 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
3547 gen_movl_T0_reg(rd);
3549 case 0x018: /* VIS I alignaddr */
3550 gen_movl_reg_T0(rs1);
3551 gen_movl_reg_T1(rs2);
3552 tcg_gen_helper_1_2(helper_alignaddr, cpu_T[0], cpu_T[0],
3554 gen_movl_T0_reg(rd);
3556 case 0x019: /* VIS II bmask */
3557 case 0x01a: /* VIS I alignaddrl */
3560 case 0x020: /* VIS I fcmple16 */
3561 gen_op_load_fpr_DT0(DFPREG(rs1));
3562 gen_op_load_fpr_DT1(DFPREG(rs2));
3564 gen_op_store_DT0_fpr(DFPREG(rd));
3566 case 0x022: /* VIS I fcmpne16 */
3567 gen_op_load_fpr_DT0(DFPREG(rs1));
3568 gen_op_load_fpr_DT1(DFPREG(rs2));
3570 gen_op_store_DT0_fpr(DFPREG(rd));
3572 case 0x024: /* VIS I fcmple32 */
3573 gen_op_load_fpr_DT0(DFPREG(rs1));
3574 gen_op_load_fpr_DT1(DFPREG(rs2));
3576 gen_op_store_DT0_fpr(DFPREG(rd));
3578 case 0x026: /* VIS I fcmpne32 */
3579 gen_op_load_fpr_DT0(DFPREG(rs1));
3580 gen_op_load_fpr_DT1(DFPREG(rs2));
3582 gen_op_store_DT0_fpr(DFPREG(rd));
3584 case 0x028: /* VIS I fcmpgt16 */
3585 gen_op_load_fpr_DT0(DFPREG(rs1));
3586 gen_op_load_fpr_DT1(DFPREG(rs2));
3588 gen_op_store_DT0_fpr(DFPREG(rd));
3590 case 0x02a: /* VIS I fcmpeq16 */
3591 gen_op_load_fpr_DT0(DFPREG(rs1));
3592 gen_op_load_fpr_DT1(DFPREG(rs2));
3594 gen_op_store_DT0_fpr(DFPREG(rd));
3596 case 0x02c: /* VIS I fcmpgt32 */
3597 gen_op_load_fpr_DT0(DFPREG(rs1));
3598 gen_op_load_fpr_DT1(DFPREG(rs2));
3600 gen_op_store_DT0_fpr(DFPREG(rd));
3602 case 0x02e: /* VIS I fcmpeq32 */
3603 gen_op_load_fpr_DT0(DFPREG(rs1));
3604 gen_op_load_fpr_DT1(DFPREG(rs2));
3606 gen_op_store_DT0_fpr(DFPREG(rd));
3608 case 0x031: /* VIS I fmul8x16 */
3609 gen_op_load_fpr_DT0(DFPREG(rs1));
3610 gen_op_load_fpr_DT1(DFPREG(rs2));
3612 gen_op_store_DT0_fpr(DFPREG(rd));
3614 case 0x033: /* VIS I fmul8x16au */
3615 gen_op_load_fpr_DT0(DFPREG(rs1));
3616 gen_op_load_fpr_DT1(DFPREG(rs2));
3617 gen_op_fmul8x16au();
3618 gen_op_store_DT0_fpr(DFPREG(rd));
3620 case 0x035: /* VIS I fmul8x16al */
3621 gen_op_load_fpr_DT0(DFPREG(rs1));
3622 gen_op_load_fpr_DT1(DFPREG(rs2));
3623 gen_op_fmul8x16al();
3624 gen_op_store_DT0_fpr(DFPREG(rd));
3626 case 0x036: /* VIS I fmul8sux16 */
3627 gen_op_load_fpr_DT0(DFPREG(rs1));
3628 gen_op_load_fpr_DT1(DFPREG(rs2));
3629 gen_op_fmul8sux16();
3630 gen_op_store_DT0_fpr(DFPREG(rd));
3632 case 0x037: /* VIS I fmul8ulx16 */
3633 gen_op_load_fpr_DT0(DFPREG(rs1));
3634 gen_op_load_fpr_DT1(DFPREG(rs2));
3635 gen_op_fmul8ulx16();
3636 gen_op_store_DT0_fpr(DFPREG(rd));
3638 case 0x038: /* VIS I fmuld8sux16 */
3639 gen_op_load_fpr_DT0(DFPREG(rs1));
3640 gen_op_load_fpr_DT1(DFPREG(rs2));
3641 gen_op_fmuld8sux16();
3642 gen_op_store_DT0_fpr(DFPREG(rd));
3644 case 0x039: /* VIS I fmuld8ulx16 */
3645 gen_op_load_fpr_DT0(DFPREG(rs1));
3646 gen_op_load_fpr_DT1(DFPREG(rs2));
3647 gen_op_fmuld8ulx16();
3648 gen_op_store_DT0_fpr(DFPREG(rd));
3650 case 0x03a: /* VIS I fpack32 */
3651 case 0x03b: /* VIS I fpack16 */
3652 case 0x03d: /* VIS I fpackfix */
3653 case 0x03e: /* VIS I pdist */
3656 case 0x048: /* VIS I faligndata */
3657 gen_op_load_fpr_DT0(DFPREG(rs1));
3658 gen_op_load_fpr_DT1(DFPREG(rs2));
3659 gen_op_faligndata();
3660 gen_op_store_DT0_fpr(DFPREG(rd));
3662 case 0x04b: /* VIS I fpmerge */
3663 gen_op_load_fpr_DT0(DFPREG(rs1));
3664 gen_op_load_fpr_DT1(DFPREG(rs2));
3666 gen_op_store_DT0_fpr(DFPREG(rd));
3668 case 0x04c: /* VIS II bshuffle */
3671 case 0x04d: /* VIS I fexpand */
3672 gen_op_load_fpr_DT0(DFPREG(rs1));
3673 gen_op_load_fpr_DT1(DFPREG(rs2));
3675 gen_op_store_DT0_fpr(DFPREG(rd));
3677 case 0x050: /* VIS I fpadd16 */
3678 gen_op_load_fpr_DT0(DFPREG(rs1));
3679 gen_op_load_fpr_DT1(DFPREG(rs2));
3681 gen_op_store_DT0_fpr(DFPREG(rd));
3683 case 0x051: /* VIS I fpadd16s */
3684 gen_op_load_fpr_FT0(rs1);
3685 gen_op_load_fpr_FT1(rs2);
3687 gen_op_store_FT0_fpr(rd);
3689 case 0x052: /* VIS I fpadd32 */
3690 gen_op_load_fpr_DT0(DFPREG(rs1));
3691 gen_op_load_fpr_DT1(DFPREG(rs2));
3693 gen_op_store_DT0_fpr(DFPREG(rd));
3695 case 0x053: /* VIS I fpadd32s */
3696 gen_op_load_fpr_FT0(rs1);
3697 gen_op_load_fpr_FT1(rs2);
3699 gen_op_store_FT0_fpr(rd);
3701 case 0x054: /* VIS I fpsub16 */
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3707 case 0x055: /* VIS I fpsub16s */
3708 gen_op_load_fpr_FT0(rs1);
3709 gen_op_load_fpr_FT1(rs2);
3711 gen_op_store_FT0_fpr(rd);
3713 case 0x056: /* VIS I fpsub32 */
3714 gen_op_load_fpr_DT0(DFPREG(rs1));
3715 gen_op_load_fpr_DT1(DFPREG(rs2));
3717 gen_op_store_DT0_fpr(DFPREG(rd));
3719 case 0x057: /* VIS I fpsub32s */
3720 gen_op_load_fpr_FT0(rs1);
3721 gen_op_load_fpr_FT1(rs2);
3723 gen_op_store_FT0_fpr(rd);
3725 case 0x060: /* VIS I fzero */
3726 gen_op_movl_DT0_0();
3727 gen_op_store_DT0_fpr(DFPREG(rd));
3729 case 0x061: /* VIS I fzeros */
3730 gen_op_movl_FT0_0();
3731 gen_op_store_FT0_fpr(rd);
3733 case 0x062: /* VIS I fnor */
3734 gen_op_load_fpr_DT0(DFPREG(rs1));
3735 gen_op_load_fpr_DT1(DFPREG(rs2));
3737 gen_op_store_DT0_fpr(DFPREG(rd));
3739 case 0x063: /* VIS I fnors */
3740 gen_op_load_fpr_FT0(rs1);
3741 gen_op_load_fpr_FT1(rs2);
3743 gen_op_store_FT0_fpr(rd);
3745 case 0x064: /* VIS I fandnot2 */
3746 gen_op_load_fpr_DT1(DFPREG(rs1));
3747 gen_op_load_fpr_DT0(DFPREG(rs2));
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3751 case 0x065: /* VIS I fandnot2s */
3752 gen_op_load_fpr_FT1(rs1);
3753 gen_op_load_fpr_FT0(rs2);
3755 gen_op_store_FT0_fpr(rd);
3757 case 0x066: /* VIS I fnot2 */
3758 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 gen_op_store_DT0_fpr(DFPREG(rd));
3762 case 0x067: /* VIS I fnot2s */
3763 gen_op_load_fpr_FT1(rs2);
3765 gen_op_store_FT0_fpr(rd);
3767 case 0x068: /* VIS I fandnot1 */
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3773 case 0x069: /* VIS I fandnot1s */
3774 gen_op_load_fpr_FT0(rs1);
3775 gen_op_load_fpr_FT1(rs2);
3777 gen_op_store_FT0_fpr(rd);
3779 case 0x06a: /* VIS I fnot1 */
3780 gen_op_load_fpr_DT1(DFPREG(rs1));
3782 gen_op_store_DT0_fpr(DFPREG(rd));
3784 case 0x06b: /* VIS I fnot1s */
3785 gen_op_load_fpr_FT1(rs1);
3787 gen_op_store_FT0_fpr(rd);
3789 case 0x06c: /* VIS I fxor */
3790 gen_op_load_fpr_DT0(DFPREG(rs1));
3791 gen_op_load_fpr_DT1(DFPREG(rs2));
3793 gen_op_store_DT0_fpr(DFPREG(rd));
3795 case 0x06d: /* VIS I fxors */
3796 gen_op_load_fpr_FT0(rs1);
3797 gen_op_load_fpr_FT1(rs2);
3799 gen_op_store_FT0_fpr(rd);
3801 case 0x06e: /* VIS I fnand */
3802 gen_op_load_fpr_DT0(DFPREG(rs1));
3803 gen_op_load_fpr_DT1(DFPREG(rs2));
3805 gen_op_store_DT0_fpr(DFPREG(rd));
3807 case 0x06f: /* VIS I fnands */
3808 gen_op_load_fpr_FT0(rs1);
3809 gen_op_load_fpr_FT1(rs2);
3811 gen_op_store_FT0_fpr(rd);
3813 case 0x070: /* VIS I fand */
3814 gen_op_load_fpr_DT0(DFPREG(rs1));
3815 gen_op_load_fpr_DT1(DFPREG(rs2));
3817 gen_op_store_DT0_fpr(DFPREG(rd));
3819 case 0x071: /* VIS I fands */
3820 gen_op_load_fpr_FT0(rs1);
3821 gen_op_load_fpr_FT1(rs2);
3823 gen_op_store_FT0_fpr(rd);
3825 case 0x072: /* VIS I fxnor */
3826 gen_op_load_fpr_DT0(DFPREG(rs1));
3827 gen_op_load_fpr_DT1(DFPREG(rs2));
3829 gen_op_store_DT0_fpr(DFPREG(rd));
3831 case 0x073: /* VIS I fxnors */
3832 gen_op_load_fpr_FT0(rs1);
3833 gen_op_load_fpr_FT1(rs2);
3835 gen_op_store_FT0_fpr(rd);
3837 case 0x074: /* VIS I fsrc1 */
3838 gen_op_load_fpr_DT0(DFPREG(rs1));
3839 gen_op_store_DT0_fpr(DFPREG(rd));
3841 case 0x075: /* VIS I fsrc1s */
3842 gen_op_load_fpr_FT0(rs1);
3843 gen_op_store_FT0_fpr(rd);
3845 case 0x076: /* VIS I fornot2 */
3846 gen_op_load_fpr_DT1(DFPREG(rs1));
3847 gen_op_load_fpr_DT0(DFPREG(rs2));
3849 gen_op_store_DT0_fpr(DFPREG(rd));
3851 case 0x077: /* VIS I fornot2s */
3852 gen_op_load_fpr_FT1(rs1);
3853 gen_op_load_fpr_FT0(rs2);
3855 gen_op_store_FT0_fpr(rd);
3857 case 0x078: /* VIS I fsrc2 */
3858 gen_op_load_fpr_DT0(DFPREG(rs2));
3859 gen_op_store_DT0_fpr(DFPREG(rd));
3861 case 0x079: /* VIS I fsrc2s */
3862 gen_op_load_fpr_FT0(rs2);
3863 gen_op_store_FT0_fpr(rd);
3865 case 0x07a: /* VIS I fornot1 */
3866 gen_op_load_fpr_DT0(DFPREG(rs1));
3867 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_op_store_DT0_fpr(DFPREG(rd));
3871 case 0x07b: /* VIS I fornot1s */
3872 gen_op_load_fpr_FT0(rs1);
3873 gen_op_load_fpr_FT1(rs2);
3875 gen_op_store_FT0_fpr(rd);
3877 case 0x07c: /* VIS I for */
3878 gen_op_load_fpr_DT0(DFPREG(rs1));
3879 gen_op_load_fpr_DT1(DFPREG(rs2));
3881 gen_op_store_DT0_fpr(DFPREG(rd));
3883 case 0x07d: /* VIS I fors */
3884 gen_op_load_fpr_FT0(rs1);
3885 gen_op_load_fpr_FT1(rs2);
3887 gen_op_store_FT0_fpr(rd);
3889 case 0x07e: /* VIS I fone */
3890 gen_op_movl_DT0_1();
3891 gen_op_store_DT0_fpr(DFPREG(rd));
3893 case 0x07f: /* VIS I fones */
3894 gen_op_movl_FT0_1();
3895 gen_op_store_FT0_fpr(rd);
3897 case 0x080: /* VIS I shutdown */
3898 case 0x081: /* VIS II siam */
3907 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3908 #ifdef TARGET_SPARC64
3913 #ifdef TARGET_SPARC64
3914 } else if (xop == 0x39) { /* V9 return */
3915 rs1 = GET_FIELD(insn, 13, 17);
3917 gen_movl_reg_T0(rs1);
3918 if (IS_IMM) { /* immediate */
3919 rs2 = GET_FIELDs(insn, 19, 31);
3920 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3921 } else { /* register */
3922 rs2 = GET_FIELD(insn, 27, 31);
3926 gen_movl_reg_T1(rs2);
3934 gen_op_check_align_T0_3();
3935 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
3936 dc->npc = DYNAMIC_PC;
3940 rs1 = GET_FIELD(insn, 13, 17);
3941 gen_movl_reg_T0(rs1);
3942 if (IS_IMM) { /* immediate */
3943 rs2 = GET_FIELDs(insn, 19, 31);
3944 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3945 } else { /* register */
3946 rs2 = GET_FIELD(insn, 27, 31);
3950 gen_movl_reg_T1(rs2);
3957 case 0x38: /* jmpl */
3960 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3961 gen_movl_T1_reg(rd);
3964 gen_op_check_align_T0_3();
3965 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
3966 dc->npc = DYNAMIC_PC;
3969 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3970 case 0x39: /* rett, V9 return */
3972 if (!supervisor(dc))
3975 gen_op_check_align_T0_3();
3976 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
3977 dc->npc = DYNAMIC_PC;
3978 tcg_gen_helper_0_0(helper_rett);
3982 case 0x3b: /* flush */
3983 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3985 case 0x3c: /* save */
3988 gen_movl_T0_reg(rd);
3990 case 0x3d: /* restore */
3993 gen_movl_T0_reg(rd);
3995 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3996 case 0x3e: /* V9 done/retry */
4000 if (!supervisor(dc))
4002 dc->npc = DYNAMIC_PC;
4003 dc->pc = DYNAMIC_PC;
4004 tcg_gen_helper_0_0(helper_done);
4007 if (!supervisor(dc))
4009 dc->npc = DYNAMIC_PC;
4010 dc->pc = DYNAMIC_PC;
4011 tcg_gen_helper_0_0(helper_retry);
4026 case 3: /* load/store instructions */
4028 unsigned int xop = GET_FIELD(insn, 7, 12);
4029 rs1 = GET_FIELD(insn, 13, 17);
4031 gen_movl_reg_T0(rs1);
4032 if (xop == 0x3c || xop == 0x3e)
4034 rs2 = GET_FIELD(insn, 27, 31);
4035 gen_movl_reg_T1(rs2);
4037 else if (IS_IMM) { /* immediate */
4038 rs2 = GET_FIELDs(insn, 19, 31);
4039 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
4040 } else { /* register */
4041 rs2 = GET_FIELD(insn, 27, 31);
4045 gen_movl_reg_T1(rs2);
4051 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4052 (xop > 0x17 && xop <= 0x1d ) ||
4053 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4055 case 0x0: /* load unsigned word */
4056 gen_op_check_align_T0_3();
4057 ABI32_MASK(cpu_T[0]);
4058 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
4060 case 0x1: /* load unsigned byte */
4061 ABI32_MASK(cpu_T[0]);
4062 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
4064 case 0x2: /* load unsigned halfword */
4065 gen_op_check_align_T0_1();
4066 ABI32_MASK(cpu_T[0]);
4067 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
4069 case 0x3: /* load double word */
4075 r_dword = tcg_temp_new(TCG_TYPE_I64);
4076 gen_op_check_align_T0_7();
4077 ABI32_MASK(cpu_T[0]);
4078 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
4079 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
4080 gen_movl_T0_reg(rd + 1);
4081 tcg_gen_shri_i64(r_dword, r_dword, 32);
4082 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
4083 tcg_gen_discard_i64(r_dword);
4086 case 0x9: /* load signed byte */
4087 ABI32_MASK(cpu_T[0]);
4088 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4090 case 0xa: /* load signed halfword */
4091 gen_op_check_align_T0_1();
4092 ABI32_MASK(cpu_T[0]);
4093 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
4095 case 0xd: /* ldstub -- XXX: should be atomically */
4096 tcg_gen_movi_i32(cpu_tmp0, 0xff);
4097 ABI32_MASK(cpu_T[0]);
4098 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4099 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
4101 case 0x0f: /* swap register with memory. Also atomically */
4102 gen_op_check_align_T0_3();
4103 gen_movl_reg_T1(rd);
4104 ABI32_MASK(cpu_T[0]);
4105 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
4106 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4107 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
4109 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4110 case 0x10: /* load word alternate */
4111 #ifndef TARGET_SPARC64
4114 if (!supervisor(dc))
4117 gen_op_check_align_T0_3();
4118 gen_ld_asi(insn, 4, 0);
4120 case 0x11: /* load unsigned byte alternate */
4121 #ifndef TARGET_SPARC64
4124 if (!supervisor(dc))
4127 gen_ld_asi(insn, 1, 0);
4129 case 0x12: /* load unsigned halfword alternate */
4130 #ifndef TARGET_SPARC64
4133 if (!supervisor(dc))
4136 gen_op_check_align_T0_1();
4137 gen_ld_asi(insn, 2, 0);
4139 case 0x13: /* load double word alternate */
4140 #ifndef TARGET_SPARC64
4143 if (!supervisor(dc))
4148 gen_op_check_align_T0_7();
4150 gen_movl_T0_reg(rd + 1);
4152 case 0x19: /* load signed byte alternate */
4153 #ifndef TARGET_SPARC64
4156 if (!supervisor(dc))
4159 gen_ld_asi(insn, 1, 1);
4161 case 0x1a: /* load signed halfword alternate */
4162 #ifndef TARGET_SPARC64
4165 if (!supervisor(dc))
4168 gen_op_check_align_T0_1();
4169 gen_ld_asi(insn, 2, 1);
4171 case 0x1d: /* ldstuba -- XXX: should be atomically */
4172 #ifndef TARGET_SPARC64
4175 if (!supervisor(dc))
4178 gen_ldstub_asi(insn);
4180 case 0x1f: /* swap reg with alt. memory. Also atomically */
4181 #ifndef TARGET_SPARC64
4184 if (!supervisor(dc))
4187 gen_op_check_align_T0_3();
4188 gen_movl_reg_T1(rd);
4192 #ifndef TARGET_SPARC64
4193 case 0x30: /* ldc */
4194 case 0x31: /* ldcsr */
4195 case 0x33: /* lddc */
4199 #ifdef TARGET_SPARC64
4200 case 0x08: /* V9 ldsw */
4201 gen_op_check_align_T0_3();
4202 ABI32_MASK(cpu_T[0]);
4203 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
4205 case 0x0b: /* V9 ldx */
4206 gen_op_check_align_T0_7();
4207 ABI32_MASK(cpu_T[0]);
4208 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
4210 case 0x18: /* V9 ldswa */
4211 gen_op_check_align_T0_3();
4212 gen_ld_asi(insn, 4, 1);
4214 case 0x1b: /* V9 ldxa */
4215 gen_op_check_align_T0_7();
4216 gen_ld_asi(insn, 8, 0);
4218 case 0x2d: /* V9 prefetch, no effect */
4220 case 0x30: /* V9 ldfa */
4221 gen_op_check_align_T0_3();
4222 gen_ldf_asi(insn, 4, rd);
4224 case 0x33: /* V9 lddfa */
4225 gen_op_check_align_T0_3();
4226 gen_ldf_asi(insn, 8, DFPREG(rd));
4228 case 0x3d: /* V9 prefetcha, no effect */
4230 case 0x32: /* V9 ldqfa */
4231 #if defined(CONFIG_USER_ONLY)
4232 gen_op_check_align_T0_3();
4233 gen_ldf_asi(insn, 16, QFPREG(rd));
4242 gen_movl_T1_reg(rd);
4243 #ifdef TARGET_SPARC64
4246 } else if (xop >= 0x20 && xop < 0x24) {
4247 if (gen_trap_ifnofpu(dc))
4250 case 0x20: /* load fpreg */
4251 gen_op_check_align_T0_3();
4253 gen_op_store_FT0_fpr(rd);
4255 case 0x21: /* load fsr */
4256 gen_op_check_align_T0_3();
4258 tcg_gen_helper_0_0(helper_ldfsr);
4260 case 0x22: /* load quad fpreg */
4261 #if defined(CONFIG_USER_ONLY)
4262 gen_op_check_align_T0_7();
4264 gen_op_store_QT0_fpr(QFPREG(rd));
4269 case 0x23: /* load double fpreg */
4270 gen_op_check_align_T0_7();
4272 gen_op_store_DT0_fpr(DFPREG(rd));
4277 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4278 xop == 0xe || xop == 0x1e) {
4279 gen_movl_reg_T1(rd);
4281 case 0x4: /* store word */
4282 gen_op_check_align_T0_3();
4283 ABI32_MASK(cpu_T[0]);
4284 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4286 case 0x5: /* store byte */
4287 ABI32_MASK(cpu_T[0]);
4288 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
4290 case 0x6: /* store halfword */
4291 gen_op_check_align_T0_1();
4292 ABI32_MASK(cpu_T[0]);
4293 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
4295 case 0x7: /* store double word */
4300 TCGv r_dword, r_low;
4302 gen_op_check_align_T0_7();
4303 r_dword = tcg_temp_new(TCG_TYPE_I64);
4304 r_low = tcg_temp_new(TCG_TYPE_I32);
4305 gen_movl_reg_TN(rd + 1, r_low);
4306 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4308 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
4309 tcg_gen_discard_i64(r_dword);
4311 #else /* __i386__ */
4312 gen_op_check_align_T0_7();
4314 gen_movl_reg_T2(rd + 1);
4316 #endif /* __i386__ */
4318 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4319 case 0x14: /* store word alternate */
4320 #ifndef TARGET_SPARC64
4323 if (!supervisor(dc))
4326 gen_op_check_align_T0_3();
4327 gen_st_asi(insn, 4);
4329 case 0x15: /* store byte alternate */
4330 #ifndef TARGET_SPARC64
4333 if (!supervisor(dc))
4336 gen_st_asi(insn, 1);
4338 case 0x16: /* store halfword alternate */
4339 #ifndef TARGET_SPARC64
4342 if (!supervisor(dc))
4345 gen_op_check_align_T0_1();
4346 gen_st_asi(insn, 2);
4348 case 0x17: /* store double word alternate */
4349 #ifndef TARGET_SPARC64
4352 if (!supervisor(dc))
4358 gen_op_check_align_T0_7();
4359 gen_stda_asi(insn, rd);
4363 #ifdef TARGET_SPARC64
4364 case 0x0e: /* V9 stx */
4365 gen_op_check_align_T0_7();
4366 ABI32_MASK(cpu_T[0]);
4367 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
4369 case 0x1e: /* V9 stxa */
4370 gen_op_check_align_T0_7();
4371 gen_st_asi(insn, 8);
4377 } else if (xop > 0x23 && xop < 0x28) {
4378 if (gen_trap_ifnofpu(dc))
4382 gen_op_check_align_T0_3();
4383 gen_op_load_fpr_FT0(rd);
4386 case 0x25: /* stfsr, V9 stxfsr */
4387 #ifdef CONFIG_USER_ONLY
4388 gen_op_check_align_T0_3();
4390 tcg_gen_helper_0_0(helper_stfsr);
4394 #ifdef TARGET_SPARC64
4395 #if defined(CONFIG_USER_ONLY)
4396 /* V9 stqf, store quad fpreg */
4397 gen_op_check_align_T0_7();
4398 gen_op_load_fpr_QT0(QFPREG(rd));
4404 #else /* !TARGET_SPARC64 */
4405 /* stdfq, store floating point queue */
4406 #if defined(CONFIG_USER_ONLY)
4409 if (!supervisor(dc))
4411 if (gen_trap_ifnofpu(dc))
4417 gen_op_check_align_T0_7();
4418 gen_op_load_fpr_DT0(DFPREG(rd));
4424 } else if (xop > 0x33 && xop < 0x3f) {
4426 #ifdef TARGET_SPARC64
4427 case 0x34: /* V9 stfa */
4428 gen_op_check_align_T0_3();
4429 gen_op_load_fpr_FT0(rd);
4430 gen_stf_asi(insn, 4, rd);
4432 case 0x36: /* V9 stqfa */
4433 #if defined(CONFIG_USER_ONLY)
4434 gen_op_check_align_T0_7();
4435 gen_op_load_fpr_QT0(QFPREG(rd));
4436 gen_stf_asi(insn, 16, QFPREG(rd));
4441 case 0x37: /* V9 stdfa */
4442 gen_op_check_align_T0_3();
4443 gen_op_load_fpr_DT0(DFPREG(rd));
4444 gen_stf_asi(insn, 8, DFPREG(rd));
4446 case 0x3c: /* V9 casa */
4447 gen_op_check_align_T0_3();
4448 gen_cas_asi(insn, rd);
4449 gen_movl_T1_reg(rd);
4451 case 0x3e: /* V9 casxa */
4452 gen_op_check_align_T0_7();
4453 gen_casx_asi(insn, rd);
4454 gen_movl_T1_reg(rd);
4457 case 0x34: /* stc */
4458 case 0x35: /* stcsr */
4459 case 0x36: /* stdcq */
4460 case 0x37: /* stdc */
4472 /* default case for non jump instructions */
4473 if (dc->npc == DYNAMIC_PC) {
4474 dc->pc = DYNAMIC_PC;
4476 } else if (dc->npc == JUMP_PC) {
4477 /* we can do a static jump */
4478 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
4482 dc->npc = dc->npc + 4;
4488 gen_op_exception(TT_ILL_INSN);
4491 #if !defined(CONFIG_USER_ONLY)
4494 gen_op_exception(TT_PRIV_INSN);
4499 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4502 #ifndef TARGET_SPARC64
4505 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4510 #ifndef TARGET_SPARC64
4513 gen_op_exception(TT_NCP_INSN);
4519 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
4523 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
4524 int spc, CPUSPARCState *env)
4526 target_ulong pc_start, last_pc;
4527 uint16_t *gen_opc_end;
4528 DisasContext dc1, *dc = &dc1;
4531 memset(dc, 0, sizeof(DisasContext));
4536 dc->npc = (target_ulong) tb->cs_base;
4537 dc->mem_idx = cpu_mmu_index(env);
4538 dc->fpu_enabled = cpu_fpu_enabled(env);
4539 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4541 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4544 if (env->nb_breakpoints > 0) {
4545 for(j = 0; j < env->nb_breakpoints; j++) {
4546 if (env->breakpoints[j] == dc->pc) {
4547 if (dc->pc != pc_start)
4549 tcg_gen_helper_0_0(helper_debug);
4558 fprintf(logfile, "Search PC...\n");
4559 j = gen_opc_ptr - gen_opc_buf;
4563 gen_opc_instr_start[lj++] = 0;
4564 gen_opc_pc[lj] = dc->pc;
4565 gen_opc_npc[lj] = dc->npc;
4566 gen_opc_instr_start[lj] = 1;
4570 disas_sparc_insn(dc);
4574 /* if the next PC is different, we abort now */
4575 if (dc->pc != (last_pc + 4))
4577 /* if we reach a page boundary, we stop generation so that the
4578 PC of a TT_TFAULT exception is always in the right page */
4579 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4581 /* if single step mode, we generate only one instruction and
4582 generate an exception */
4583 if (env->singlestep_enabled) {
4588 } while ((gen_opc_ptr < gen_opc_end) &&
4589 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
4593 if (dc->pc != DYNAMIC_PC &&
4594 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4595 /* static PC and NPC: we can use direct chaining */
4596 gen_branch(dc, dc->pc, dc->npc);
4598 if (dc->pc != DYNAMIC_PC)
4604 *gen_opc_ptr = INDEX_op_end;
4606 j = gen_opc_ptr - gen_opc_buf;
4609 gen_opc_instr_start[lj++] = 0;
4615 gen_opc_jump_pc[0] = dc->jump_pc[0];
4616 gen_opc_jump_pc[1] = dc->jump_pc[1];
4618 tb->size = last_pc + 4 - pc_start;
4621 if (loglevel & CPU_LOG_TB_IN_ASM) {
4622 fprintf(logfile, "--------------\n");
4623 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4624 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4625 fprintf(logfile, "\n");
4631 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4633 return gen_intermediate_code_internal(tb, 0, env);
4636 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4638 return gen_intermediate_code_internal(tb, 1, env);
4641 void cpu_reset(CPUSPARCState *env)
4646 env->regwptr = env->regbase + (env->cwp * 16);
4647 #if defined(CONFIG_USER_ONLY)
4648 env->user_mode_only = 1;
4649 #ifdef TARGET_SPARC64
4650 env->cleanwin = NWINDOWS - 2;
4651 env->cansave = NWINDOWS - 2;
4652 env->pstate = PS_RMO | PS_PEF | PS_IE;
4653 env->asi = 0x82; // Primary no-fault
4659 #ifdef TARGET_SPARC64
4660 env->pstate = PS_PRIV;
4661 env->hpstate = HS_PRIV;
4662 env->pc = 0x1fff0000000ULL;
4663 env->tsptr = &env->ts[env->tl];
4666 env->mmuregs[0] &= ~(MMU_E | MMU_NF);
4667 env->mmuregs[0] |= env->mmu_bm;
4669 env->npc = env->pc + 4;
/* Allocate and initialize a CPUSPARCState for the CPU model named by
   cpu_model, copying the model's version/MMU parameters from sparc_defs
   and registering the translator's TCG global variables.
   NOTE(review): error-handling lines, the remaining gregnames entries and
   several closing braces are elided in this excerpt. */
4673 CPUSPARCState *cpu_sparc_init(const char *cpu_model)
4676 const sparc_def_t *def;
/* Names used for the global-register TCG globals created below. */
4679 static const char * const gregnames[8] = {
4680 NULL, // g0 not used
/* Look up the model; the NULL check is elided in this excerpt. */
4690 def = cpu_sparc_find_by_name(cpu_model);
4694 env = qemu_mallocz(sizeof(CPUSPARCState));
/* Seed the CPU state from the model definition. */
4698 env->cpu_model_str = cpu_model;
4699 env->version = def->iu_version;
4700 env->fsr = def->fpu_version;
4701 #if !defined(TARGET_SPARC64)
/* Sparc32 only: per-model MMU masks and version. */
4702 env->mmu_bm = def->mmu_bm;
4703 env->mmu_ctpr_mask = def->mmu_ctpr_mask;
4704 env->mmu_cxr_mask = def->mmu_cxr_mask;
4705 env->mmu_sfsr_mask = def->mmu_sfsr_mask;
4706 env->mmu_trcr_mask = def->mmu_trcr_mask;
4707 env->mmuregs[0] |= def->mmu_version;
4708 cpu_sparc_set_id(env, 0);
4711 /* init various static tables */
/* Register the TCG globals used by the translator (see file-scope TCGv
   declarations): env pointer, regwptr, temporaries, condition codes,
   psr/fsr/pc/npc and the global registers g1..g7. */
4715 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
4716 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4717 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4718 offsetof(CPUState, regwptr),
4720 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4721 #ifdef TARGET_SPARC64
/* 64-bit target: keep T0-T2 (and xcc) in memory rather than host regs. */
4722 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
4723 TCG_AREG0, offsetof(CPUState, t0), "T0");
4724 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
4725 TCG_AREG0, offsetof(CPUState, t1), "T1");
4726 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
4727 TCG_AREG0, offsetof(CPUState, t2), "T2");
4728 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4729 TCG_AREG0, offsetof(CPUState, xcc),
/* 32-bit target (#else elided): map T0-T2 onto fixed host registers. */
4732 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
4733 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
4734 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
4736 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4737 TCG_AREG0, offsetof(CPUState, cc_src),
4739 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4740 offsetof(CPUState, cc_src2),
4742 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4743 TCG_AREG0, offsetof(CPUState, cc_dst),
4745 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4746 TCG_AREG0, offsetof(CPUState, psr),
4748 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
4749 TCG_AREG0, offsetof(CPUState, fsr),
4751 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
4752 TCG_AREG0, offsetof(CPUState, pc),
4754 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
4755 TCG_AREG0, offsetof(CPUState, npc),
/* g0 is hardwired to zero, so only g1..g7 become TCG globals. */
4757 for (i = 1; i < 8; i++)
4758 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4759 offsetof(CPUState, gregs[i]),
/* Record the CPU id in the Sparc32 MXCC control register: (cpu + 8),
   masked to 4 bits, stored in bits 27..24 of mxccregs[7].  No-op on
   sparc64 (the #endif and braces are elided in this excerpt). */
4768 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4770 #if !defined(TARGET_SPARC64)
4771 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
/* Table of supported SPARC CPU models.  Each entry supplies the IU/FPU
   (and, for sparc32, MMU) version registers plus the sparc32 MMU register
   masks; cpu_sparc_find_by_name() selects an entry by name.
   NOTE(review): the per-entry braces (and an #else separating the two
   halves) are elided in this excerpt. */
4775 static const sparc_def_t sparc_defs[] = {
4776 #ifdef TARGET_SPARC64
/* 64-bit (SPARC V9) models: iu_version encodes manufacturer (bits 63..48),
   implementation (47..32), mask (31..24), MAXTL and NWINDOWS-1. */
4778 .name = "Fujitsu Sparc64",
4779 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
4780 | (MAXTL << 8) | (NWINDOWS - 1)),
4781 .fpu_version = 0x00000000,
4785 .name = "Fujitsu Sparc64 III",
4786 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
4787 | (MAXTL << 8) | (NWINDOWS - 1)),
4788 .fpu_version = 0x00000000,
4792 .name = "Fujitsu Sparc64 IV",
4793 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
4794 | (MAXTL << 8) | (NWINDOWS - 1)),
4795 .fpu_version = 0x00000000,
4799 .name = "Fujitsu Sparc64 V",
4800 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
4801 | (MAXTL << 8) | (NWINDOWS - 1)),
4802 .fpu_version = 0x00000000,
4806 .name = "TI UltraSparc I",
4807 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4808 | (MAXTL << 8) | (NWINDOWS - 1)),
4809 .fpu_version = 0x00000000,
4813 .name = "TI UltraSparc II",
4814 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
4815 | (MAXTL << 8) | (NWINDOWS - 1)),
4816 .fpu_version = 0x00000000,
4820 .name = "TI UltraSparc IIi",
4821 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
4822 | (MAXTL << 8) | (NWINDOWS - 1)),
4823 .fpu_version = 0x00000000,
4827 .name = "TI UltraSparc IIe",
4828 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
4829 | (MAXTL << 8) | (NWINDOWS - 1)),
4830 .fpu_version = 0x00000000,
4834 .name = "Sun UltraSparc III",
4835 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
4836 | (MAXTL << 8) | (NWINDOWS - 1)),
4837 .fpu_version = 0x00000000,
4841 .name = "Sun UltraSparc III Cu",
4842 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
4843 | (MAXTL << 8) | (NWINDOWS - 1)),
4844 .fpu_version = 0x00000000,
4848 .name = "Sun UltraSparc IIIi",
4849 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
4850 | (MAXTL << 8) | (NWINDOWS - 1)),
4851 .fpu_version = 0x00000000,
4855 .name = "Sun UltraSparc IV",
4856 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
4857 | (MAXTL << 8) | (NWINDOWS - 1)),
4858 .fpu_version = 0x00000000,
4862 .name = "Sun UltraSparc IV+",
4863 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
4864 | (MAXTL << 8) | (NWINDOWS - 1)),
4865 .fpu_version = 0x00000000,
4869 .name = "Sun UltraSparc IIIi+",
4870 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
4871 | (MAXTL << 8) | (NWINDOWS - 1)),
4872 .fpu_version = 0x00000000,
4876 .name = "NEC UltraSparc I",
4877 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4878 | (MAXTL << 8) | (NWINDOWS - 1)),
4879 .fpu_version = 0x00000000,
/* 32-bit (SPARC V8) models (the #else is elided): iu_version/mmu_version
   encode implementation (bits 31..28) and version (27..24). */
4884 .name = "Fujitsu MB86900",
4885 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
4886 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4887 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
4888 .mmu_bm = 0x00004000,
4889 .mmu_ctpr_mask = 0x007ffff0,
4890 .mmu_cxr_mask = 0x0000003f,
4891 .mmu_sfsr_mask = 0xffffffff,
4892 .mmu_trcr_mask = 0xffffffff,
4895 .name = "Fujitsu MB86904",
4896 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
4897 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4898 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
4899 .mmu_bm = 0x00004000,
4900 .mmu_ctpr_mask = 0x00ffffc0,
4901 .mmu_cxr_mask = 0x000000ff,
4902 .mmu_sfsr_mask = 0x00016fff,
4903 .mmu_trcr_mask = 0x00ffffff,
4906 .name = "Fujitsu MB86907",
4907 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
4908 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4909 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
4910 .mmu_bm = 0x00004000,
4911 .mmu_ctpr_mask = 0xffffffc0,
4912 .mmu_cxr_mask = 0x000000ff,
4913 .mmu_sfsr_mask = 0x00016fff,
4914 .mmu_trcr_mask = 0xffffffff,
4917 .name = "LSI L64811",
4918 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
4919 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
4920 .mmu_version = 0x10 << 24,
4921 .mmu_bm = 0x00004000,
4922 .mmu_ctpr_mask = 0x007ffff0,
4923 .mmu_cxr_mask = 0x0000003f,
4924 .mmu_sfsr_mask = 0xffffffff,
4925 .mmu_trcr_mask = 0xffffffff,
4928 .name = "Cypress CY7C601",
4929 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
4930 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4931 .mmu_version = 0x10 << 24,
4932 .mmu_bm = 0x00004000,
4933 .mmu_ctpr_mask = 0x007ffff0,
4934 .mmu_cxr_mask = 0x0000003f,
4935 .mmu_sfsr_mask = 0xffffffff,
4936 .mmu_trcr_mask = 0xffffffff,
4939 .name = "Cypress CY7C611",
4940 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
4941 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4942 .mmu_version = 0x10 << 24,
4943 .mmu_bm = 0x00004000,
4944 .mmu_ctpr_mask = 0x007ffff0,
4945 .mmu_cxr_mask = 0x0000003f,
4946 .mmu_sfsr_mask = 0xffffffff,
4947 .mmu_trcr_mask = 0xffffffff,
4950 .name = "TI SuperSparc II",
4951 .iu_version = 0x40000000,
4952 .fpu_version = 0 << 17,
4953 .mmu_version = 0x04000000,
4954 .mmu_bm = 0x00002000,
4955 .mmu_ctpr_mask = 0xffffffc0,
4956 .mmu_cxr_mask = 0x0000ffff,
4957 .mmu_sfsr_mask = 0xffffffff,
4958 .mmu_trcr_mask = 0xffffffff,
4961 .name = "TI MicroSparc I",
4962 .iu_version = 0x41000000,
4963 .fpu_version = 4 << 17,
4964 .mmu_version = 0x41000000,
4965 .mmu_bm = 0x00004000,
4966 .mmu_ctpr_mask = 0x007ffff0,
4967 .mmu_cxr_mask = 0x0000003f,
4968 .mmu_sfsr_mask = 0x00016fff,
4969 .mmu_trcr_mask = 0x0000003f,
4972 .name = "TI MicroSparc II",
4973 .iu_version = 0x42000000,
4974 .fpu_version = 4 << 17,
4975 .mmu_version = 0x02000000,
4976 .mmu_bm = 0x00004000,
4977 .mmu_ctpr_mask = 0x00ffffc0,
4978 .mmu_cxr_mask = 0x000000ff,
4979 .mmu_sfsr_mask = 0x00016fff,
4980 .mmu_trcr_mask = 0x00ffffff,
4983 .name = "TI MicroSparc IIep",
4984 .iu_version = 0x42000000,
4985 .fpu_version = 4 << 17,
4986 .mmu_version = 0x04000000,
4987 .mmu_bm = 0x00004000,
4988 .mmu_ctpr_mask = 0x00ffffc0,
4989 .mmu_cxr_mask = 0x000000ff,
4990 .mmu_sfsr_mask = 0x00016bff,
4991 .mmu_trcr_mask = 0x00ffffff,
4994 .name = "TI SuperSparc 51",
4995 .iu_version = 0x43000000,
4996 .fpu_version = 0 << 17,
4997 .mmu_version = 0x04000000,
4998 .mmu_bm = 0x00002000,
4999 .mmu_ctpr_mask = 0xffffffc0,
5000 .mmu_cxr_mask = 0x0000ffff,
5001 .mmu_sfsr_mask = 0xffffffff,
5002 .mmu_trcr_mask = 0xffffffff,
5005 .name = "TI SuperSparc 61",
5006 .iu_version = 0x44000000,
5007 .fpu_version = 0 << 17,
5008 .mmu_version = 0x04000000,
5009 .mmu_bm = 0x00002000,
5010 .mmu_ctpr_mask = 0xffffffc0,
5011 .mmu_cxr_mask = 0x0000ffff,
5012 .mmu_sfsr_mask = 0xffffffff,
5013 .mmu_trcr_mask = 0xffffffff,
5016 .name = "Ross RT625",
5017 .iu_version = 0x1e000000,
5018 .fpu_version = 1 << 17,
5019 .mmu_version = 0x1e000000,
5020 .mmu_bm = 0x00004000,
5021 .mmu_ctpr_mask = 0x007ffff0,
5022 .mmu_cxr_mask = 0x0000003f,
5023 .mmu_sfsr_mask = 0xffffffff,
5024 .mmu_trcr_mask = 0xffffffff,
5027 .name = "Ross RT620",
5028 .iu_version = 0x1f000000,
5029 .fpu_version = 1 << 17,
5030 .mmu_version = 0x1f000000,
5031 .mmu_bm = 0x00004000,
5032 .mmu_ctpr_mask = 0x007ffff0,
5033 .mmu_cxr_mask = 0x0000003f,
5034 .mmu_sfsr_mask = 0xffffffff,
5035 .mmu_trcr_mask = 0xffffffff,
5038 .name = "BIT B5010",
5039 .iu_version = 0x20000000,
5040 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
5041 .mmu_version = 0x20000000,
5042 .mmu_bm = 0x00004000,
5043 .mmu_ctpr_mask = 0x007ffff0,
5044 .mmu_cxr_mask = 0x0000003f,
5045 .mmu_sfsr_mask = 0xffffffff,
5046 .mmu_trcr_mask = 0xffffffff,
5049 .name = "Matsushita MN10501",
5050 .iu_version = 0x50000000,
5051 .fpu_version = 0 << 17,
5052 .mmu_version = 0x50000000,
5053 .mmu_bm = 0x00004000,
5054 .mmu_ctpr_mask = 0x007ffff0,
5055 .mmu_cxr_mask = 0x0000003f,
5056 .mmu_sfsr_mask = 0xffffffff,
5057 .mmu_trcr_mask = 0xffffffff,
5060 .name = "Weitek W8601",
5061 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
5062 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
5063 .mmu_version = 0x10 << 24,
5064 .mmu_bm = 0x00004000,
5065 .mmu_ctpr_mask = 0x007ffff0,
5066 .mmu_cxr_mask = 0x0000003f,
5067 .mmu_sfsr_mask = 0xffffffff,
5068 .mmu_trcr_mask = 0xffffffff,
/* NOTE(review): the .name line for this entry is not visible in this
   excerpt (impl/ver 0xf2). */
5072 .iu_version = 0xf2000000,
5073 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5074 .mmu_version = 0xf2000000,
5075 .mmu_bm = 0x00004000,
5076 .mmu_ctpr_mask = 0x007ffff0,
5077 .mmu_cxr_mask = 0x0000003f,
5078 .mmu_sfsr_mask = 0xffffffff,
5079 .mmu_trcr_mask = 0xffffffff,
/* NOTE(review): the .name line for this entry is not visible in this
   excerpt (impl/ver 0xf3). */
5083 .iu_version = 0xf3000000,
5084 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5085 .mmu_version = 0xf3000000,
5086 .mmu_bm = 0x00004000,
5087 .mmu_ctpr_mask = 0x007ffff0,
5088 .mmu_cxr_mask = 0x0000003f,
5089 .mmu_sfsr_mask = 0xffffffff,
5090 .mmu_trcr_mask = 0xffffffff,
/* Look up a CPU model in sparc_defs by case-insensitive name match;
   returns a pointer to the matching entry.  NOTE(review): the no-match
   return path and closing braces are elided in this excerpt. */
5095 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
5099 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5100 if (strcasecmp(name, sparc_defs[i].name) == 0) {
5101 return &sparc_defs[i];
/* Print every supported CPU model (name plus IU/FPU/MMU version words)
   through the caller-supplied fprintf-style callback.  NOTE(review): the
   sparc_defs[i].name argument line is elided in this excerpt. */
5107 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
5111 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5112 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
5114 sparc_defs[i].iu_version,
5115 sparc_defs[i].fpu_version,
5116 sparc_defs[i].mmu_version);
/* Map a PSR condition-code bit to its mnemonic character for
   cpu_dump_state(): yields B when mask A is set in env->psr, '-'
   otherwise.  `env` must be in scope at the call site.  Both macro
   parameters are parenthesized so that compound arguments such as
   GET_FLAG(PSR_OVF | PSR_ZERO, c) expand with the intended precedence
   (CERT PRE01-C). */
#define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
/* Dump the CPU state (pc/npc, global registers, current register window,
   FP registers and status registers) through the supplied callback.
   NOTE(review): loop headers for the second register halves and several
   closing braces are elided in this excerpt. */
5122 void cpu_dump_state(CPUState *env, FILE *f,
5123 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
5128 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
5129 cpu_fprintf(f, "General Registers:\n");
/* g0..g3 on the first line (the g4..g7 loop header is elided). */
5130 for (i = 0; i < 4; i++)
5131 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
5132 cpu_fprintf(f, "\n");
5134 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
/* Window registers via regwptr: x selects %o (0), %l (1) or %i (2). */
5135 cpu_fprintf(f, "\nCurrent Register Window:\n");
5136 for (x = 0; x < 3; x++) {
5137 for (i = 0; i < 4; i++)
5138 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5139 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
5140 env->regwptr[i + x * 8]);
5141 cpu_fprintf(f, "\n");
5143 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5144 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
5145 env->regwptr[i + x * 8]);
5146 cpu_fprintf(f, "\n");
5148 cpu_fprintf(f, "\nFloating Point Registers:\n");
5149 for (i = 0; i < 32; i++) {
5151 cpu_fprintf(f, "%%f%02d:", i);
5152 cpu_fprintf(f, " %016lf", env->fpr[i]);
5154 cpu_fprintf(f, "\n");
5156 #ifdef TARGET_SPARC64
/* V9 status: pstate/ccr/asi/tl/fprs plus the window-management counters. */
5157 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
5158 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
5159 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
5160 env->cansave, env->canrestore, env->otherwin, env->wstate,
5161 env->cleanwin, NWINDOWS - 1 - env->cwp);
/* V8 status (the #else is elided): PSR with per-flag mnemonics, then WIM. */
5163 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
5164 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
5165 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
5166 env->psrs?'S':'-', env->psrps?'P':'-',
5167 env->psret?'E':'-', env->wim);
5169 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
/* Debugger physical-address lookup.  User-mode build: trivial (body elided
   in this excerpt).  System build: walk the MMU via get_physical_address();
   the first call uses rw = 2 and on failure a second call retries with
   rw = 0 — NOTE(review): the return statements between the calls are
   elided here; confirm the retry semantics against the full source. */
5172 #if defined(CONFIG_USER_ONLY)
5173 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
/* System-emulation variant (the #else is elided in this excerpt). */
5179 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
5180 int *access_index, target_ulong address, int rw,
5183 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
5185 target_phys_addr_t phys_addr;
5186 int prot, access_index;
5188 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
5189 MMU_KERNEL_IDX) != 0)
5190 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
5191 0, MMU_KERNEL_IDX) != 0)
/* Reject addresses that map to no backing RAM/ROM or device. */
5193 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
5199 void helper_flush(target_ulong addr)
5202 tb_invalidate_page_range(addr, addr + 8);