   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X (1 <= len <= 32).
   Implemented with unsigned arithmetic so that no left shift of a
   negative value occurs (that is undefined behaviour in C); the final
   cast relies on two's-complement int, as the rest of QEMU does. */
static int sign_extend(int x, int len)
{
    uint32_t v = (uint32_t)x;
    uint32_t sign = 1u << (len - 1);

    v &= (sign << 1) - 1;          /* keep only the low LEN bits */
    return (int)((v ^ sign) - sign);
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
189 #ifdef TARGET_SPARC64
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
193 #define AM_CHECK(dc) (1)
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 tcg_gen_movi_tl(tn, 0);
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr, 0);
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc, 0);
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
295 static inline void gen_cc_NZ_icc(TCGv dst)
300 l1 = gen_new_label();
301 l2 = gen_new_label();
302 r_temp = tcg_temp_new();
303 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
307 tcg_gen_ext32s_tl(r_temp, dst);
308 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
311 tcg_temp_free(r_temp);
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
324 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
332 env->psr |= PSR_CARRY;
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
336 TCGv r_temp1, r_temp2;
339 l1 = gen_new_label();
340 r_temp1 = tcg_temp_new();
341 r_temp2 = tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
347 tcg_temp_free(r_temp1);
348 tcg_temp_free(r_temp2);
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
356 l1 = gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
371 r_temp = tcg_temp_new();
372 tcg_gen_xor_tl(r_temp, src1, src2);
373 tcg_gen_not_tl(r_temp, r_temp);
374 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379 tcg_temp_free(r_temp);
380 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
388 r_temp = tcg_temp_new();
389 tcg_gen_xor_tl(r_temp, src1, src2);
390 tcg_gen_not_tl(r_temp, r_temp);
391 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396 tcg_temp_free(r_temp);
397 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
407 l1 = gen_new_label();
409 r_temp = tcg_temp_new();
410 tcg_gen_xor_tl(r_temp, src1, src2);
411 tcg_gen_not_tl(r_temp, r_temp);
412 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416 r_const = tcg_const_i32(TT_TOVF);
417 gen_helper_raise_exception(r_const);
418 tcg_temp_free_i32(r_const);
420 tcg_temp_free(r_temp);
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
427 l1 = gen_new_label();
428 tcg_gen_or_tl(cpu_tmp0, src1, src2);
429 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
435 static inline void gen_op_logic_cc(TCGv dst)
437 tcg_gen_mov_tl(cpu_cc_dst, dst);
440 gen_cc_NZ_icc(cpu_cc_dst);
441 #ifdef TARGET_SPARC64
443 gen_cc_NZ_xcc(cpu_cc_dst);
447 static inline void gen_tag_tv(TCGv src1, TCGv src2)
452 l1 = gen_new_label();
453 tcg_gen_or_tl(cpu_tmp0, src1, src2);
454 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
455 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
456 r_const = tcg_const_i32(TT_TOVF);
457 gen_helper_raise_exception(r_const);
458 tcg_temp_free_i32(r_const);
462 static inline void gen_op_add_cc2(TCGv dst)
465 gen_cc_NZ_icc(cpu_cc_dst);
466 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
467 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
468 #ifdef TARGET_SPARC64
470 gen_cc_NZ_xcc(cpu_cc_dst);
471 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
472 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
474 tcg_gen_mov_tl(dst, cpu_cc_dst);
477 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
479 tcg_gen_mov_tl(cpu_cc_src, src1);
480 tcg_gen_movi_tl(cpu_cc_src2, src2);
481 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
485 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
487 tcg_gen_mov_tl(cpu_cc_src, src1);
488 tcg_gen_mov_tl(cpu_cc_src2, src2);
489 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
493 static inline void gen_op_addx_cc2(TCGv dst)
495 gen_cc_NZ_icc(cpu_cc_dst);
496 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
497 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
498 #ifdef TARGET_SPARC64
499 gen_cc_NZ_xcc(cpu_cc_dst);
500 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
501 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
503 tcg_gen_mov_tl(dst, cpu_cc_dst);
506 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
508 tcg_gen_mov_tl(cpu_cc_src, src1);
509 tcg_gen_movi_tl(cpu_cc_src2, src2);
510 gen_mov_reg_C(cpu_tmp0, cpu_psr);
511 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
513 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
514 #ifdef TARGET_SPARC64
516 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
518 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
519 gen_op_addx_cc2(dst);
522 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
524 tcg_gen_mov_tl(cpu_cc_src, src1);
525 tcg_gen_mov_tl(cpu_cc_src2, src2);
526 gen_mov_reg_C(cpu_tmp0, cpu_psr);
527 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
529 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
530 #ifdef TARGET_SPARC64
532 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
534 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
535 gen_op_addx_cc2(dst);
538 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
540 tcg_gen_mov_tl(cpu_cc_src, src1);
541 tcg_gen_mov_tl(cpu_cc_src2, src2);
542 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
544 gen_cc_NZ_icc(cpu_cc_dst);
545 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
546 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
547 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
548 #ifdef TARGET_SPARC64
550 gen_cc_NZ_xcc(cpu_cc_dst);
551 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
552 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
554 tcg_gen_mov_tl(dst, cpu_cc_dst);
557 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
559 tcg_gen_mov_tl(cpu_cc_src, src1);
560 tcg_gen_mov_tl(cpu_cc_src2, src2);
561 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
562 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
563 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
565 gen_cc_NZ_icc(cpu_cc_dst);
566 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
567 #ifdef TARGET_SPARC64
569 gen_cc_NZ_xcc(cpu_cc_dst);
570 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
571 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573 tcg_gen_mov_tl(dst, cpu_cc_dst);
578 env->psr |= PSR_CARRY;
580 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
582 TCGv r_temp1, r_temp2;
585 l1 = gen_new_label();
586 r_temp1 = tcg_temp_new();
587 r_temp2 = tcg_temp_new();
588 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
589 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
590 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
591 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
593 tcg_temp_free(r_temp1);
594 tcg_temp_free(r_temp2);
597 #ifdef TARGET_SPARC64
598 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
602 l1 = gen_new_label();
603 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
604 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
610 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
613 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
617 r_temp = tcg_temp_new();
618 tcg_gen_xor_tl(r_temp, src1, src2);
619 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
620 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
621 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
622 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
623 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
624 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
625 tcg_temp_free(r_temp);
628 #ifdef TARGET_SPARC64
629 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
633 r_temp = tcg_temp_new();
634 tcg_gen_xor_tl(r_temp, src1, src2);
635 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
636 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
637 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
638 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
639 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
640 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
641 tcg_temp_free(r_temp);
645 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
651 l1 = gen_new_label();
653 r_temp = tcg_temp_new();
654 tcg_gen_xor_tl(r_temp, src1, src2);
655 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
656 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
657 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
658 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
659 r_const = tcg_const_i32(TT_TOVF);
660 gen_helper_raise_exception(r_const);
661 tcg_temp_free_i32(r_const);
663 tcg_temp_free(r_temp);
666 static inline void gen_op_sub_cc2(TCGv dst)
669 gen_cc_NZ_icc(cpu_cc_dst);
670 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
671 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
672 #ifdef TARGET_SPARC64
674 gen_cc_NZ_xcc(cpu_cc_dst);
675 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
676 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
678 tcg_gen_mov_tl(dst, cpu_cc_dst);
681 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
683 tcg_gen_mov_tl(cpu_cc_src, src1);
684 tcg_gen_movi_tl(cpu_cc_src2, src2);
686 tcg_gen_mov_tl(dst, src1);
687 gen_op_logic_cc(dst);
689 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
694 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
696 tcg_gen_mov_tl(cpu_cc_src, src1);
697 tcg_gen_mov_tl(cpu_cc_src2, src2);
698 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
702 static inline void gen_op_subx_cc2(TCGv dst)
704 gen_cc_NZ_icc(cpu_cc_dst);
705 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
706 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
707 #ifdef TARGET_SPARC64
708 gen_cc_NZ_xcc(cpu_cc_dst);
709 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
710 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
712 tcg_gen_mov_tl(dst, cpu_cc_dst);
715 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
717 tcg_gen_mov_tl(cpu_cc_src, src1);
718 tcg_gen_movi_tl(cpu_cc_src2, src2);
719 gen_mov_reg_C(cpu_tmp0, cpu_psr);
720 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
722 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
723 #ifdef TARGET_SPARC64
725 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
727 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
728 gen_op_subx_cc2(dst);
731 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
733 tcg_gen_mov_tl(cpu_cc_src, src1);
734 tcg_gen_mov_tl(cpu_cc_src2, src2);
735 gen_mov_reg_C(cpu_tmp0, cpu_psr);
736 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
738 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
739 #ifdef TARGET_SPARC64
741 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
743 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
744 gen_op_subx_cc2(dst);
747 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
749 tcg_gen_mov_tl(cpu_cc_src, src1);
750 tcg_gen_mov_tl(cpu_cc_src2, src2);
751 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
753 gen_cc_NZ_icc(cpu_cc_dst);
754 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
755 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
756 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
757 #ifdef TARGET_SPARC64
759 gen_cc_NZ_xcc(cpu_cc_dst);
760 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
761 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
763 tcg_gen_mov_tl(dst, cpu_cc_dst);
766 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
768 tcg_gen_mov_tl(cpu_cc_src, src1);
769 tcg_gen_mov_tl(cpu_cc_src2, src2);
770 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
771 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
772 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
774 gen_cc_NZ_icc(cpu_cc_dst);
775 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
776 #ifdef TARGET_SPARC64
778 gen_cc_NZ_xcc(cpu_cc_dst);
779 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
780 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
782 tcg_gen_mov_tl(dst, cpu_cc_dst);
785 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
790 l1 = gen_new_label();
791 r_temp = tcg_temp_new();
797 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
798 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
799 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
800 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
801 tcg_gen_movi_tl(cpu_cc_src2, 0);
805 // env->y = (b2 << 31) | (env->y >> 1);
806 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
807 tcg_gen_shli_tl(r_temp, r_temp, 31);
808 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
809 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
810 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
811 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
814 gen_mov_reg_N(cpu_tmp0, cpu_psr);
815 gen_mov_reg_V(r_temp, cpu_psr);
816 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
817 tcg_temp_free(r_temp);
819 // T0 = (b1 << 31) | (T0 >> 1);
821 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
822 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
823 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
825 /* do addition and update flags */
826 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
829 gen_cc_NZ_icc(cpu_cc_dst);
830 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
831 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
832 tcg_gen_mov_tl(dst, cpu_cc_dst);
835 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
837 TCGv_i64 r_temp, r_temp2;
839 r_temp = tcg_temp_new_i64();
840 r_temp2 = tcg_temp_new_i64();
842 tcg_gen_extu_tl_i64(r_temp, src2);
843 tcg_gen_extu_tl_i64(r_temp2, src1);
844 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
846 tcg_gen_shri_i64(r_temp, r_temp2, 32);
847 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
848 tcg_temp_free_i64(r_temp);
849 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
850 #ifdef TARGET_SPARC64
851 tcg_gen_mov_i64(dst, r_temp2);
853 tcg_gen_trunc_i64_tl(dst, r_temp2);
855 tcg_temp_free_i64(r_temp2);
858 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
860 TCGv_i64 r_temp, r_temp2;
862 r_temp = tcg_temp_new_i64();
863 r_temp2 = tcg_temp_new_i64();
865 tcg_gen_ext_tl_i64(r_temp, src2);
866 tcg_gen_ext_tl_i64(r_temp2, src1);
867 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
869 tcg_gen_shri_i64(r_temp, r_temp2, 32);
870 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
871 tcg_temp_free_i64(r_temp);
872 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
873 #ifdef TARGET_SPARC64
874 tcg_gen_mov_i64(dst, r_temp2);
876 tcg_gen_trunc_i64_tl(dst, r_temp2);
878 tcg_temp_free_i64(r_temp2);
881 #ifdef TARGET_SPARC64
882 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
887 l1 = gen_new_label();
888 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
889 r_const = tcg_const_i32(TT_DIV_ZERO);
890 gen_helper_raise_exception(r_const);
891 tcg_temp_free_i32(r_const);
895 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
899 l1 = gen_new_label();
900 l2 = gen_new_label();
901 tcg_gen_mov_tl(cpu_cc_src, src1);
902 tcg_gen_mov_tl(cpu_cc_src2, src2);
903 gen_trap_ifdivzero_tl(cpu_cc_src2);
904 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
905 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
906 tcg_gen_movi_i64(dst, INT64_MIN);
909 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
914 static inline void gen_op_div_cc(TCGv dst)
918 tcg_gen_mov_tl(cpu_cc_dst, dst);
920 gen_cc_NZ_icc(cpu_cc_dst);
921 l1 = gen_new_label();
922 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
923 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
928 static inline void gen_op_eval_ba(TCGv dst)
930 tcg_gen_movi_tl(dst, 1);
934 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
936 gen_mov_reg_Z(dst, src);
940 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
942 gen_mov_reg_N(cpu_tmp0, src);
943 gen_mov_reg_V(dst, src);
944 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
945 gen_mov_reg_Z(cpu_tmp0, src);
946 tcg_gen_or_tl(dst, dst, cpu_tmp0);
950 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
952 gen_mov_reg_V(cpu_tmp0, src);
953 gen_mov_reg_N(dst, src);
954 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
958 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
960 gen_mov_reg_Z(cpu_tmp0, src);
961 gen_mov_reg_C(dst, src);
962 tcg_gen_or_tl(dst, dst, cpu_tmp0);
966 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
968 gen_mov_reg_C(dst, src);
972 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
974 gen_mov_reg_V(dst, src);
978 static inline void gen_op_eval_bn(TCGv dst)
980 tcg_gen_movi_tl(dst, 0);
984 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
986 gen_mov_reg_N(dst, src);
990 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
992 gen_mov_reg_Z(dst, src);
993 tcg_gen_xori_tl(dst, dst, 0x1);
997 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
999 gen_mov_reg_N(cpu_tmp0, src);
1000 gen_mov_reg_V(dst, src);
1001 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1002 gen_mov_reg_Z(cpu_tmp0, src);
1003 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1004 tcg_gen_xori_tl(dst, dst, 0x1);
1008 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
1010 gen_mov_reg_V(cpu_tmp0, src);
1011 gen_mov_reg_N(dst, src);
1012 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1013 tcg_gen_xori_tl(dst, dst, 0x1);
1017 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
1019 gen_mov_reg_Z(cpu_tmp0, src);
1020 gen_mov_reg_C(dst, src);
1021 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1022 tcg_gen_xori_tl(dst, dst, 0x1);
1026 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
1028 gen_mov_reg_C(dst, src);
1029 tcg_gen_xori_tl(dst, dst, 0x1);
1033 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
1035 gen_mov_reg_N(dst, src);
1036 tcg_gen_xori_tl(dst, dst, 0x1);
1040 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
1042 gen_mov_reg_V(dst, src);
1043 tcg_gen_xori_tl(dst, dst, 0x1);
1047 FPSR bit field FCC1 | FCC0:
1053 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1054 unsigned int fcc_offset)
1056 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1057 tcg_gen_andi_tl(reg, reg, 0x1);
1060 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1061 unsigned int fcc_offset)
1063 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1064 tcg_gen_andi_tl(reg, reg, 0x1);
1068 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1069 unsigned int fcc_offset)
1071 gen_mov_reg_FCC0(dst, src, fcc_offset);
1072 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1073 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1076 // 1 or 2: FCC0 ^ FCC1
1077 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1082 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1086 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1087 unsigned int fcc_offset)
1089 gen_mov_reg_FCC0(dst, src, fcc_offset);
1093 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1094 unsigned int fcc_offset)
1096 gen_mov_reg_FCC0(dst, src, fcc_offset);
1097 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1098 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1099 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1103 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1104 unsigned int fcc_offset)
1106 gen_mov_reg_FCC1(dst, src, fcc_offset);
1110 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1111 unsigned int fcc_offset)
1113 gen_mov_reg_FCC0(dst, src, fcc_offset);
1114 tcg_gen_xori_tl(dst, dst, 0x1);
1115 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1116 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1120 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1121 unsigned int fcc_offset)
1123 gen_mov_reg_FCC0(dst, src, fcc_offset);
1124 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1125 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1128 // 0: !(FCC0 | FCC1)
1129 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1130 unsigned int fcc_offset)
1132 gen_mov_reg_FCC0(dst, src, fcc_offset);
1133 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1134 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1135 tcg_gen_xori_tl(dst, dst, 0x1);
1138 // 0 or 3: !(FCC0 ^ FCC1)
1139 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1140 unsigned int fcc_offset)
1142 gen_mov_reg_FCC0(dst, src, fcc_offset);
1143 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1144 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1145 tcg_gen_xori_tl(dst, dst, 0x1);
1149 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1150 unsigned int fcc_offset)
1152 gen_mov_reg_FCC0(dst, src, fcc_offset);
1153 tcg_gen_xori_tl(dst, dst, 0x1);
1156 // !1: !(FCC0 & !FCC1)
1157 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1158 unsigned int fcc_offset)
1160 gen_mov_reg_FCC0(dst, src, fcc_offset);
1161 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1162 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1163 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1164 tcg_gen_xori_tl(dst, dst, 0x1);
1168 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1169 unsigned int fcc_offset)
1171 gen_mov_reg_FCC1(dst, src, fcc_offset);
1172 tcg_gen_xori_tl(dst, dst, 0x1);
1175 // !2: !(!FCC0 & FCC1)
1176 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1177 unsigned int fcc_offset)
1179 gen_mov_reg_FCC0(dst, src, fcc_offset);
1180 tcg_gen_xori_tl(dst, dst, 0x1);
1181 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1182 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1183 tcg_gen_xori_tl(dst, dst, 0x1);
1186 // !3: !(FCC0 & FCC1)
1187 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1188 unsigned int fcc_offset)
1190 gen_mov_reg_FCC0(dst, src, fcc_offset);
1191 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1192 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1193 tcg_gen_xori_tl(dst, dst, 0x1);
1196 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1197 target_ulong pc2, TCGv r_cond)
1201 l1 = gen_new_label();
1203 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1205 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1208 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1211 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1212 target_ulong pc2, TCGv r_cond)
1216 l1 = gen_new_label();
1218 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1220 gen_goto_tb(dc, 0, pc2, pc1);
1223 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1226 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1231 l1 = gen_new_label();
1232 l2 = gen_new_label();
1234 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1236 tcg_gen_movi_tl(cpu_npc, npc1);
1240 tcg_gen_movi_tl(cpu_npc, npc2);
1244 /* call this function before using the condition register as it may
1245 have been set for a jump */
1246 static inline void flush_cond(DisasContext *dc, TCGv cond)
1248 if (dc->npc == JUMP_PC) {
1249 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1250 dc->npc = DYNAMIC_PC;
1254 static inline void save_npc(DisasContext *dc, TCGv cond)
1256 if (dc->npc == JUMP_PC) {
1257 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1258 dc->npc = DYNAMIC_PC;
1259 } else if (dc->npc != DYNAMIC_PC) {
1260 tcg_gen_movi_tl(cpu_npc, dc->npc);
1264 static inline void save_state(DisasContext *dc, TCGv cond)
1266 tcg_gen_movi_tl(cpu_pc, dc->pc);
1270 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1272 if (dc->npc == JUMP_PC) {
1273 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1274 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1275 dc->pc = DYNAMIC_PC;
1276 } else if (dc->npc == DYNAMIC_PC) {
1277 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1278 dc->pc = DYNAMIC_PC;
1284 static inline void gen_op_next_insn(void)
1286 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1287 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition code `cond` (Bicc/Tcc encoding) against the
   selected condition-code set `cc` and leave a boolean (0/1) in r_dst.
   Under TARGET_SPARC64 the elided prologue picks xcc vs icc via `cc`. */
1290 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1295 #ifdef TARGET_SPARC64
1303 switch (dc->cc_op) {
/* Lazy condition codes are not in FLAGS form: force them into the psr
   now so the eval helpers below can read them. */
1307 gen_helper_compute_psr();
1308 dc->cc_op = CC_OP_FLAGS;
/* cond 0x0..0x7: never / eq / le / lt / leu / carry-set / neg / ovf-set */
1313 gen_op_eval_bn(r_dst);
1316 gen_op_eval_be(r_dst, r_src);
1319 gen_op_eval_ble(r_dst, r_src);
1322 gen_op_eval_bl(r_dst, r_src);
1325 gen_op_eval_bleu(r_dst, r_src);
1328 gen_op_eval_bcs(r_dst, r_src);
1331 gen_op_eval_bneg(r_dst, r_src);
1334 gen_op_eval_bvs(r_dst, r_src);
/* cond 0x8..0xf: always / ne / gt / ge / gtu / carry-clr / pos / ovf-clr */
1337 gen_op_eval_ba(r_dst);
1340 gen_op_eval_bne(r_dst, r_src);
1343 gen_op_eval_bg(r_dst, r_src);
1346 gen_op_eval_bge(r_dst, r_src);
1349 gen_op_eval_bgu(r_dst, r_src);
1352 gen_op_eval_bcc(r_dst, r_src);
1355 gen_op_eval_bpos(r_dst, r_src);
1358 gen_op_eval_bvc(r_dst, r_src);
/* Evaluate floating-point condition `cond` (FBfcc encoding) for FP
   condition-code field `cc` and leave a boolean (0/1) in r_dst.  The
   elided prologue computes `offset`, the bit offset of the selected fcc
   field within the FSR, which each eval helper uses to extract it. */
1363 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1365 unsigned int offset;
/* cond 0x0..0x7: never / ne / lg / ul / l / ug / g / unordered */
1385 gen_op_eval_bn(r_dst);
1388 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1391 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1394 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1397 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1400 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1403 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1406 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
/* cond 0x8..0xf: always / e / ue / ge / uge / le / ule / ordered */
1409 gen_op_eval_ba(r_dst);
1412 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1415 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1418 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1421 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1424 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1427 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1430 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1435 #ifdef TARGET_SPARC64
/* Per-cond TCG comparison used by the register-conditional (BPr/FMOVr)
   instructions; entries elided here.  NOTE(review): judging by the use
   below, the table appears to hold the condition that *skips* the set,
   i.e. the negation of the architectural register condition — confirm
   against the full table. */
1437 static const int gen_tcg_cond_reg[8] = {
/* r_dst <- (register condition `cond` holds for r_src) ? 1 : 0,
   implemented as: preset 0, conditionally branch over the set-to-1. */
1448 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1452 l1 = gen_new_label();
1453 tcg_gen_movi_tl(r_dst, 0);
1454 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1455 tcg_gen_movi_tl(r_dst, 1);
1460 /* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  `offset` is the
   sign-extended, word-scaled displacement; `cc` selects icc/xcc; bit 29
   of insn is the annul bit `a`.
   - cond 0x0 (BN): never taken; with annul the delay slot is skipped.
   - cond 0x8 (BA): always taken.
   - otherwise: evaluate the condition; with annul emit an annulled branch
     (gen_branch_a), else record the two candidate npcs as JUMP_PC state. */
1461 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1464 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1465 target_ulong target = dc->pc + offset;
1468 /* unconditional not taken */
/* annulled: skip the delay slot entirely */
1470 dc->pc = dc->npc + 4;
1471 dc->npc = dc->pc + 4;
/* not annulled: delay slot executes, then fall through */
1474 dc->npc = dc->pc + 4;
1476 } else if (cond == 0x8) {
1477 /* unconditional taken */
1480 dc->npc = dc->pc + 4;
/* conditional: resolve any pending JUMP_PC, then evaluate this branch */
1486 flush_cond(dc, r_cond);
1487 gen_cond(r_cond, cc, cond, dc);
1489 gen_branch_a(dc, target, dc->npc, r_cond);
/* non-annulled conditional: defer the choice via JUMP_PC */
1493 dc->jump_pc[0] = target;
1494 dc->jump_pc[1] = dc->npc + 4;
1500 /* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch, but the condition comes from the FSR fcc field
   selected by `cc` via gen_fcond. */
1501 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1504 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1505 target_ulong target = dc->pc + offset;
1508 /* unconditional not taken */
/* annulled: skip the delay slot entirely */
1510 dc->pc = dc->npc + 4;
1511 dc->npc = dc->pc + 4;
/* not annulled: delay slot executes, then fall through */
1514 dc->npc = dc->pc + 4;
1516 } else if (cond == 0x8) {
1517 /* unconditional taken */
1520 dc->npc = dc->pc + 4;
/* conditional: resolve any pending JUMP_PC, then evaluate the fcc */
1526 flush_cond(dc, r_cond);
1527 gen_fcond(r_cond, cc, cond);
1529 gen_branch_a(dc, target, dc->npc, r_cond);
/* non-annulled conditional: defer the choice via JUMP_PC */
1533 dc->jump_pc[0] = target;
1534 dc->jump_pc[1] = dc->npc + 4;
1540 #ifdef TARGET_SPARC64
1541 /* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-condition (BPr): the condition tests
   the value of r_reg directly (via gen_cond_reg) rather than the condition
   codes.  With the annul bit set, emit an annulled branch; otherwise
   record the two candidate npcs as JUMP_PC state. */
1542 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1543 TCGv r_cond, TCGv r_reg)
1545 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1546 target_ulong target = dc->pc + offset;
1548 flush_cond(dc, r_cond);
1549 gen_cond_reg(r_cond, cond, r_reg);
1551 gen_branch_a(dc, target, dc->npc, r_cond);
1555 dc->jump_pc[0] = target;
1556 dc->jump_pc[1] = dc->npc + 4;
/* TARGET_SPARC64 FP compare dispatchers: each picks the helper variant
   for the FP condition-code field `fccno` (0..3).  The fcmp* forms do an
   ordinary compare; the fcmpe* forms additionally signal on unordered
   operands.  Single-precision takes the operands explicitly; the
   double/quad forms operate on the preloaded DT0/DT1 or QT0/QT1 slots. */
1561 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1565 gen_helper_fcmps(r_rs1, r_rs2);
1568 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1571 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1574 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
/* double-precision compare on DT0/DT1, result into fcc[fccno] */
1579 static inline void gen_op_fcmpd(int fccno)
1586 gen_helper_fcmpd_fcc1();
1589 gen_helper_fcmpd_fcc2();
1592 gen_helper_fcmpd_fcc3();
/* quad-precision compare on QT0/QT1, result into fcc[fccno] */
1597 static inline void gen_op_fcmpq(int fccno)
1604 gen_helper_fcmpq_fcc1();
1607 gen_helper_fcmpq_fcc2();
1610 gen_helper_fcmpq_fcc3();
/* signaling single-precision compare */
1615 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1619 gen_helper_fcmpes(r_rs1, r_rs2);
1622 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1625 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1628 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
/* signaling double-precision compare */
1633 static inline void gen_op_fcmped(int fccno)
1637 gen_helper_fcmped();
1640 gen_helper_fcmped_fcc1();
1643 gen_helper_fcmped_fcc2();
1646 gen_helper_fcmped_fcc3();
/* signaling quad-precision compare */
1651 static inline void gen_op_fcmpeq(int fccno)
1655 gen_helper_fcmpeq();
1658 gen_helper_fcmpeq_fcc1();
1661 gen_helper_fcmpeq_fcc2();
1664 gen_helper_fcmpeq_fcc3();
/* Pre-V9 FP compare dispatchers: only a single fcc field exists, so the
   fccno argument is ignored and each function calls the one helper. */
1671 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1673 gen_helper_fcmps(r_rs1, r_rs2);
/* double-precision compare on DT0/DT1 */
1676 static inline void gen_op_fcmpd(int fccno)
/* quad-precision compare on QT0/QT1 */
1681 static inline void gen_op_fcmpq(int fccno)
/* signaling single-precision compare */
1686 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1688 gen_helper_fcmpes(r_rs1, r_rs2);
/* signaling double-precision compare */
1691 static inline void gen_op_fcmped(int fccno)
1693 gen_helper_fcmped();
/* signaling quad-precision compare */
1696 static inline void gen_op_fcmpeq(int fccno)
1698 gen_helper_fcmpeq();
/* Raise a floating-point exception with the given FTT flags: replace the
   FSR trap-type field with fsr_flags, then raise TT_FP_EXCP. */
1702 static inline void gen_op_fpexception_im(int fsr_flags)
1706 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1707 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1708 r_const = tcg_const_i32(TT_FP_EXCP);
1709 gen_helper_raise_exception(r_const);
1710 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), save state and raise an
   fp-disabled trap (TT_NFPU_INSN).  Returns nonzero when the trap was
   emitted so the caller can abandon the instruction — presumably returns 0
   otherwise / in user-only builds; the return statements are elided here. */
1713 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1715 #if !defined(CONFIG_USER_ONLY)
1716 if (!dc->fpu_enabled) {
1719 save_state(dc, r_cond);
1720 r_const = tcg_const_i32(TT_NFPU_INSN);
1721 gen_helper_raise_exception(r_const);
1722 tcg_temp_free_i32(r_const);
/* Clear the FSR trap-type and current-exception (cexc) fields before a
   new FP operation. */
1730 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1732 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset softfloat's accumulated exception flags via helper. */
1735 static inline void gen_clear_float_exceptions(void)
1737 gen_helper_clear_float_exceptions();
1741 #ifdef TARGET_SPARC64
/* Return the ASI for a V9 memory access as an i32 temp: the implicit
   %asi register when the insn uses the immediate form, otherwise the
   8-bit ASI field encoded in the instruction.  Caller frees the temp. */
1742 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1748 r_asi = tcg_temp_new_i32();
1749 tcg_gen_mov_i32(r_asi, cpu_asi);
1751 asi = GET_FIELD(insn, 19, 26);
1752 r_asi = tcg_const_i32(asi);
/* V9 alternate-space load: dst <- mem[addr] of `size` bytes through the
   insn's ASI, sign-extended when `sign` is set.  All work is done by the
   ld_asi helper; the const temps are freed immediately after. */
1757 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1760 TCGv_i32 r_asi, r_size, r_sign;
1762 r_asi = gen_get_asi(insn, addr);
1763 r_size = tcg_const_i32(size);
1764 r_sign = tcg_const_i32(sign);
1765 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1766 tcg_temp_free_i32(r_sign);
1767 tcg_temp_free_i32(r_size);
1768 tcg_temp_free_i32(r_asi);
/* V9 alternate-space store: mem[addr] <- src, `size` bytes, through the
   insn's ASI. */
1771 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1773 TCGv_i32 r_asi, r_size;
1775 r_asi = gen_get_asi(insn, addr);
1776 r_size = tcg_const_i32(size);
1777 gen_helper_st_asi(addr, src, r_asi, r_size);
1778 tcg_temp_free_i32(r_size);
1779 tcg_temp_free_i32(r_asi);
/* V9 alternate-space FP-register load: the helper loads `size` bytes from
   addr through the insn's ASI directly into FP register number rd. */
1782 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1784 TCGv_i32 r_asi, r_size, r_rd;
1786 r_asi = gen_get_asi(insn, addr);
1787 r_size = tcg_const_i32(size);
1788 r_rd = tcg_const_i32(rd);
1789 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1790 tcg_temp_free_i32(r_rd);
1791 tcg_temp_free_i32(r_size);
1792 tcg_temp_free_i32(r_asi);
/* V9 alternate-space FP-register store: mirror of gen_ldf_asi. */
1795 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1797 TCGv_i32 r_asi, r_size, r_rd;
1799 r_asi = gen_get_asi(insn, addr);
1800 r_size = tcg_const_i32(size);
1801 r_rd = tcg_const_i32(rd);
1802 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1803 tcg_temp_free_i32(r_rd);
1804 tcg_temp_free_i32(r_size);
1805 tcg_temp_free_i32(r_asi);
/* V9 SWAPA: atomically-modelled 32-bit swap through an ASI — load the old
   word into cpu_tmp64, store dst's value, then return the old word
   (truncated to target width) in dst. */
1808 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1810 TCGv_i32 r_asi, r_size, r_sign;
1812 r_asi = gen_get_asi(insn, addr);
1813 r_size = tcg_const_i32(4);
1814 r_sign = tcg_const_i32(0);
1815 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1816 tcg_temp_free_i32(r_sign)
1817 gen_helper_st_asi(addr, dst, r_asi, r_size);
1818 tcg_temp_free_i32(r_size);
1819 tcg_temp_free_i32(r_asi);
1820 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* V9 LDDA: doubleword alternate-space load; the helper writes the
   register pair rd/rd+1 itself. */
1823 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1825 TCGv_i32 r_asi, r_rd;
1827 r_asi = gen_get_asi(insn, addr);
1828 r_rd = tcg_const_i32(rd);
1829 gen_helper_ldda_asi(addr, r_asi, r_rd);
1830 tcg_temp_free_i32(r_rd);
1831 tcg_temp_free_i32(r_asi);
/* V9 STDA: concatenate rd+1 (low word) with hi (high word) into a 64-bit
   value and store it through the insn's ASI. */
1834 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1836 TCGv_i32 r_asi, r_size;
1838 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1839 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1840 r_asi = gen_get_asi(insn, addr);
1841 r_size = tcg_const_i32(8);
1842 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1843 tcg_temp_free_i32(r_size);
1844 tcg_temp_free_i32(r_asi);
/* V9 CASA: 32-bit compare-and-swap through an ASI — compare mem[addr]
   with r[rd] (r_val1), conditionally store val2, old value into dst. */
1847 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1853 r_val1 = tcg_temp_new();
1854 gen_movl_reg_TN(rd, r_val1);
1855 r_asi = gen_get_asi(insn, addr);
1856 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1857 tcg_temp_free_i32(r_asi);
1858 tcg_temp_free(r_val1);
/* V9 CASXA: 64-bit variant of gen_cas_asi; comparand goes via cpu_tmp64. */
1861 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1866 gen_movl_reg_TN(rd, cpu_tmp64);
1867 r_asi = gen_get_asi(insn, addr);
1868 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1869 tcg_temp_free_i32(r_asi);
1872 #elif !defined(CONFIG_USER_ONLY)
/* Pre-V9 (system-mode) alternate-space load: the ASI is always taken from
   the instruction's immediate field.  The helper loads into cpu_tmp64 and
   the result is truncated to target width.
   NOTE(review): the i32 temps here are released with tcg_temp_free
   (target-width) while the TARGET_SPARC64 variant uses tcg_temp_free_i32
   — confirm which is intended and make them consistent. */
1874 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1877 TCGv_i32 r_asi, r_size, r_sign;
1879 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1880 r_size = tcg_const_i32(size);
1881 r_sign = tcg_const_i32(sign);
1882 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1883 tcg_temp_free(r_sign);
1884 tcg_temp_free(r_size);
1885 tcg_temp_free(r_asi);
1886 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* Pre-V9 alternate-space store: widen src to 64 bits and store `size`
   bytes through the immediate ASI field.  (Same tcg_temp_free vs
   tcg_temp_free_i32 inconsistency as gen_ld_asi above.) */
1889 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1891 TCGv_i32 r_asi, r_size;
1893 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1894 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1895 r_size = tcg_const_i32(size);
1896 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1897 tcg_temp_free(r_size);
1898 tcg_temp_free(r_asi);
/* Pre-V9 SWAPA: load old 32-bit word into cpu_tmp64, store dst's value
   (widened through r_val), return the old word in dst. */
1901 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1903 TCGv_i32 r_asi, r_size, r_sign;
1906 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1907 r_size = tcg_const_i32(4);
1908 r_sign = tcg_const_i32(0);
1909 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1910 tcg_temp_free(r_sign);
1911 r_val = tcg_temp_new_i64();
1912 tcg_gen_extu_tl_i64(r_val, dst);
1913 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1914 tcg_temp_free_i64(r_val);
1915 tcg_temp_free(r_size);
1916 tcg_temp_free(r_asi);
1917 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* Pre-V9 LDDA: load a 64-bit doubleword and split it — low 32 bits into
   register rd+1, high 32 bits into rd (via `hi`), matching big-endian
   SPARC doubleword register-pair layout. */
1920 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1922 TCGv_i32 r_asi, r_size, r_sign;
1924 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1925 r_size = tcg_const_i32(8);
1926 r_sign = tcg_const_i32(0);
1927 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1928 tcg_temp_free(r_sign);
1929 tcg_temp_free(r_size);
1930 tcg_temp_free(r_asi);
/* low half -> rd+1 */
1931 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1932 gen_movl_TN_reg(rd + 1, cpu_tmp0);
/* high half -> rd */
1933 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1934 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1935 gen_movl_TN_reg(rd, hi);
/* Pre-V9 STDA: concatenate rd+1 (low) with hi (high) into 64 bits and
   store through the immediate ASI field. */
1938 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1940 TCGv_i32 r_asi, r_size;
1942 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1943 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1944 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1945 r_size = tcg_const_i32(8);
1946 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1947 tcg_temp_free(r_size);
1948 tcg_temp_free(r_asi);
1952 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: atomic-style load-store-unsigned-byte through an ASI — load
   the old byte into dst, then store 0xff at addr. */
1953 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1956 TCGv_i32 r_asi, r_size;
1958 gen_ld_asi(dst, addr, insn, 1, 0);
1960 r_val = tcg_const_i64(0xffULL);
1961 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1962 r_size = tcg_const_i32(1);
1963 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1964 tcg_temp_free_i32(r_size);
1965 tcg_temp_free_i32(r_asi);
1966 tcg_temp_free_i64(r_val);
/* Return a TCGv holding the rs1 operand: %g0 reads as a constant 0,
   globals come straight from cpu_gregs, and windowed registers (rs1 >= 8)
   are loaded from the register-window pointer into the caller-supplied
   scratch `def`, which is then returned (elided here). */
1970 static inline TCGv get_src1(unsigned int insn, TCGv def)
1975 rs1 = GET_FIELD(insn, 13, 17);
1977 r_rs1 = tcg_const_tl(0); // XXX how to free?
1979 r_rs1 = cpu_gregs[rs1];
1981 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Return a TCGv holding the rs2 operand: a sign-extended 13-bit immediate
   when the i-bit is set, otherwise a register read with the same %g0 /
   global / windowed handling as get_src1. */
1985 static inline TCGv get_src2(unsigned int insn, TCGv def)
1989 if (IS_IMM) { /* immediate */
1992 simm = GET_FIELDs(insn, 19, 31);
1993 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1994 } else { /* register */
1997 rs2 = GET_FIELD(insn, 27, 31);
1999 r_rs2 = tcg_const_tl(0); // XXX how to free?
2001 r_rs2 = cpu_gregs[rs2];
2003 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* Guard macros: bail out of the current instruction (the elided
   continuation lines jump to the illegal/unimplemented-insn path) when
   the CPU model lacks the given integer-unit or FPU feature bit. */
2008 #define CHECK_IU_FEATURE(dc, FEATURE) \
2009 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2011 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2012 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2015 /* before an instruction, dc->pc must be static */
2016 static void disas_sparc_insn(DisasContext * dc)
2018 unsigned int insn, opc, rs1, rs2, rd;
2021 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2022 tcg_gen_debug_insn_start(dc->pc);
2023 insn = ldl_code(dc->pc);
2024 opc = GET_FIELD(insn, 0, 1);
2026 rd = GET_FIELD(insn, 2, 6);
2028 cpu_src1 = tcg_temp_new(); // const
2029 cpu_src2 = tcg_temp_new(); // const
2032 case 0: /* branches/sethi */
2034 unsigned int xop = GET_FIELD(insn, 7, 9);
2037 #ifdef TARGET_SPARC64
2038 case 0x1: /* V9 BPcc */
2042 target = GET_FIELD_SP(insn, 0, 18);
2043 target = sign_extend(target, 18);
2045 cc = GET_FIELD_SP(insn, 20, 21);
2047 do_branch(dc, target, insn, 0, cpu_cond);
2049 do_branch(dc, target, insn, 1, cpu_cond);
2054 case 0x3: /* V9 BPr */
2056 target = GET_FIELD_SP(insn, 0, 13) |
2057 (GET_FIELD_SP(insn, 20, 21) << 14);
2058 target = sign_extend(target, 16);
2060 cpu_src1 = get_src1(insn, cpu_src1);
2061 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2064 case 0x5: /* V9 FBPcc */
2066 int cc = GET_FIELD_SP(insn, 20, 21);
2067 if (gen_trap_ifnofpu(dc, cpu_cond))
2069 target = GET_FIELD_SP(insn, 0, 18);
2070 target = sign_extend(target, 19);
2072 do_fbranch(dc, target, insn, cc, cpu_cond);
2076 case 0x7: /* CBN+x */
2081 case 0x2: /* BN+x */
2083 target = GET_FIELD(insn, 10, 31);
2084 target = sign_extend(target, 22);
2086 do_branch(dc, target, insn, 0, cpu_cond);
2089 case 0x6: /* FBN+x */
2091 if (gen_trap_ifnofpu(dc, cpu_cond))
2093 target = GET_FIELD(insn, 10, 31);
2094 target = sign_extend(target, 22);
2096 do_fbranch(dc, target, insn, 0, cpu_cond);
2099 case 0x4: /* SETHI */
2101 uint32_t value = GET_FIELD(insn, 10, 31);
2104 r_const = tcg_const_tl(value << 10);
2105 gen_movl_TN_reg(rd, r_const);
2106 tcg_temp_free(r_const);
2109 case 0x0: /* UNIMPL */
2118 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2121 r_const = tcg_const_tl(dc->pc);
2122 gen_movl_TN_reg(15, r_const);
2123 tcg_temp_free(r_const);
2125 gen_mov_pc_npc(dc, cpu_cond);
2129 case 2: /* FPU & Logical Operations */
2131 unsigned int xop = GET_FIELD(insn, 7, 12);
2132 if (xop == 0x3a) { /* generate trap */
2135 cpu_src1 = get_src1(insn, cpu_src1);
2137 rs2 = GET_FIELD(insn, 25, 31);
2138 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2140 rs2 = GET_FIELD(insn, 27, 31);
2142 gen_movl_reg_TN(rs2, cpu_src2);
2143 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2145 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2147 cond = GET_FIELD(insn, 3, 6);
2149 save_state(dc, cpu_cond);
2150 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2152 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2154 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2155 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2156 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2157 gen_helper_raise_exception(cpu_tmp32);
2158 } else if (cond != 0) {
2159 TCGv r_cond = tcg_temp_new();
2161 #ifdef TARGET_SPARC64
2163 int cc = GET_FIELD_SP(insn, 11, 12);
2165 save_state(dc, cpu_cond);
2167 gen_cond(r_cond, 0, cond, dc);
2169 gen_cond(r_cond, 1, cond, dc);
2173 save_state(dc, cpu_cond);
2174 gen_cond(r_cond, 0, cond, dc);
2176 l1 = gen_new_label();
2177 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2179 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2181 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2183 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2184 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2185 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2186 gen_helper_raise_exception(cpu_tmp32);
2189 tcg_temp_free(r_cond);
2195 } else if (xop == 0x28) {
2196 rs1 = GET_FIELD(insn, 13, 17);
2199 #ifndef TARGET_SPARC64
2200 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2201 manual, rdy on the microSPARC
2203 case 0x0f: /* stbar in the SPARCv8 manual,
2204 rdy on the microSPARC II */
2205 case 0x10 ... 0x1f: /* implementation-dependent in the
2206 SPARCv8 manual, rdy on the
2209 gen_movl_TN_reg(rd, cpu_y);
2211 #ifdef TARGET_SPARC64
2212 case 0x2: /* V9 rdccr */
2213 gen_helper_compute_psr();
2214 gen_helper_rdccr(cpu_dst);
2215 gen_movl_TN_reg(rd, cpu_dst);
2217 case 0x3: /* V9 rdasi */
2218 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2219 gen_movl_TN_reg(rd, cpu_dst);
2221 case 0x4: /* V9 rdtick */
2225 r_tickptr = tcg_temp_new_ptr();
2226 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2227 offsetof(CPUState, tick));
2228 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2229 tcg_temp_free_ptr(r_tickptr);
2230 gen_movl_TN_reg(rd, cpu_dst);
2233 case 0x5: /* V9 rdpc */
2237 r_const = tcg_const_tl(dc->pc);
2238 gen_movl_TN_reg(rd, r_const);
2239 tcg_temp_free(r_const);
2242 case 0x6: /* V9 rdfprs */
2243 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2244 gen_movl_TN_reg(rd, cpu_dst);
2246 case 0xf: /* V9 membar */
2247 break; /* no effect */
2248 case 0x13: /* Graphics Status */
2249 if (gen_trap_ifnofpu(dc, cpu_cond))
2251 gen_movl_TN_reg(rd, cpu_gsr);
2253 case 0x16: /* Softint */
2254 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2255 gen_movl_TN_reg(rd, cpu_dst);
2257 case 0x17: /* Tick compare */
2258 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2260 case 0x18: /* System tick */
2264 r_tickptr = tcg_temp_new_ptr();
2265 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2266 offsetof(CPUState, stick));
2267 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2268 tcg_temp_free_ptr(r_tickptr);
2269 gen_movl_TN_reg(rd, cpu_dst);
2272 case 0x19: /* System tick compare */
2273 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2275 case 0x10: /* Performance Control */
2276 case 0x11: /* Performance Instrumentation Counter */
2277 case 0x12: /* Dispatch Control */
2278 case 0x14: /* Softint set, WO */
2279 case 0x15: /* Softint clear, WO */
2284 #if !defined(CONFIG_USER_ONLY)
2285 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2286 #ifndef TARGET_SPARC64
2287 if (!supervisor(dc))
2289 gen_helper_compute_psr();
2290 dc->cc_op = CC_OP_FLAGS;
2291 gen_helper_rdpsr(cpu_dst);
2293 CHECK_IU_FEATURE(dc, HYPV);
2294 if (!hypervisor(dc))
2296 rs1 = GET_FIELD(insn, 13, 17);
2299 // gen_op_rdhpstate();
2302 // gen_op_rdhtstate();
2305 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2308 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2311 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2313 case 31: // hstick_cmpr
2314 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2320 gen_movl_TN_reg(rd, cpu_dst);
2322 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2323 if (!supervisor(dc))
2325 #ifdef TARGET_SPARC64
2326 rs1 = GET_FIELD(insn, 13, 17);
2332 r_tsptr = tcg_temp_new_ptr();
2333 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2334 offsetof(CPUState, tsptr));
2335 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2336 offsetof(trap_state, tpc));
2337 tcg_temp_free_ptr(r_tsptr);
2344 r_tsptr = tcg_temp_new_ptr();
2345 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2346 offsetof(CPUState, tsptr));
2347 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2348 offsetof(trap_state, tnpc));
2349 tcg_temp_free_ptr(r_tsptr);
2356 r_tsptr = tcg_temp_new_ptr();
2357 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2358 offsetof(CPUState, tsptr));
2359 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2360 offsetof(trap_state, tstate));
2361 tcg_temp_free_ptr(r_tsptr);
2368 r_tsptr = tcg_temp_new_ptr();
2369 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2370 offsetof(CPUState, tsptr));
2371 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2372 offsetof(trap_state, tt));
2373 tcg_temp_free_ptr(r_tsptr);
2374 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2381 r_tickptr = tcg_temp_new_ptr();
2382 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2383 offsetof(CPUState, tick));
2384 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2385 gen_movl_TN_reg(rd, cpu_tmp0);
2386 tcg_temp_free_ptr(r_tickptr);
2390 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2393 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2394 offsetof(CPUSPARCState, pstate));
2395 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2398 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2399 offsetof(CPUSPARCState, tl));
2400 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2403 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2404 offsetof(CPUSPARCState, psrpil));
2405 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2408 gen_helper_rdcwp(cpu_tmp0);
2411 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2412 offsetof(CPUSPARCState, cansave));
2413 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2415 case 11: // canrestore
2416 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2417 offsetof(CPUSPARCState, canrestore));
2418 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2420 case 12: // cleanwin
2421 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2422 offsetof(CPUSPARCState, cleanwin));
2423 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2425 case 13: // otherwin
2426 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2427 offsetof(CPUSPARCState, otherwin));
2428 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2431 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2432 offsetof(CPUSPARCState, wstate));
2433 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2435 case 16: // UA2005 gl
2436 CHECK_IU_FEATURE(dc, GL);
2437 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2438 offsetof(CPUSPARCState, gl));
2439 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2441 case 26: // UA2005 strand status
2442 CHECK_IU_FEATURE(dc, HYPV);
2443 if (!hypervisor(dc))
2445 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2448 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2455 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2457 gen_movl_TN_reg(rd, cpu_tmp0);
2459 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2460 #ifdef TARGET_SPARC64
2461 save_state(dc, cpu_cond);
2462 gen_helper_flushw();
2464 if (!supervisor(dc))
2466 gen_movl_TN_reg(rd, cpu_tbr);
2470 } else if (xop == 0x34) { /* FPU Operations */
2471 if (gen_trap_ifnofpu(dc, cpu_cond))
2473 gen_op_clear_ieee_excp_and_FTT();
2474 rs1 = GET_FIELD(insn, 13, 17);
2475 rs2 = GET_FIELD(insn, 27, 31);
2476 xop = GET_FIELD(insn, 18, 26);
2478 case 0x1: /* fmovs */
2479 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2481 case 0x5: /* fnegs */
2482 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2484 case 0x9: /* fabss */
2485 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2487 case 0x29: /* fsqrts */
2488 CHECK_FPU_FEATURE(dc, FSQRT);
2489 gen_clear_float_exceptions();
2490 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2491 gen_helper_check_ieee_exceptions();
2492 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2494 case 0x2a: /* fsqrtd */
2495 CHECK_FPU_FEATURE(dc, FSQRT);
2496 gen_op_load_fpr_DT1(DFPREG(rs2));
2497 gen_clear_float_exceptions();
2498 gen_helper_fsqrtd();
2499 gen_helper_check_ieee_exceptions();
2500 gen_op_store_DT0_fpr(DFPREG(rd));
2502 case 0x2b: /* fsqrtq */
2503 CHECK_FPU_FEATURE(dc, FLOAT128);
2504 gen_op_load_fpr_QT1(QFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 gen_helper_fsqrtq();
2507 gen_helper_check_ieee_exceptions();
2508 gen_op_store_QT0_fpr(QFPREG(rd));
2510 case 0x41: /* fadds */
2511 gen_clear_float_exceptions();
2512 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2513 gen_helper_check_ieee_exceptions();
2514 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2516 case 0x42: /* faddd */
2517 gen_op_load_fpr_DT0(DFPREG(rs1));
2518 gen_op_load_fpr_DT1(DFPREG(rs2));
2519 gen_clear_float_exceptions();
2521 gen_helper_check_ieee_exceptions();
2522 gen_op_store_DT0_fpr(DFPREG(rd));
2524 case 0x43: /* faddq */
2525 CHECK_FPU_FEATURE(dc, FLOAT128);
2526 gen_op_load_fpr_QT0(QFPREG(rs1));
2527 gen_op_load_fpr_QT1(QFPREG(rs2));
2528 gen_clear_float_exceptions();
2530 gen_helper_check_ieee_exceptions();
2531 gen_op_store_QT0_fpr(QFPREG(rd));
2533 case 0x45: /* fsubs */
2534 gen_clear_float_exceptions();
2535 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2536 gen_helper_check_ieee_exceptions();
2537 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2539 case 0x46: /* fsubd */
2540 gen_op_load_fpr_DT0(DFPREG(rs1));
2541 gen_op_load_fpr_DT1(DFPREG(rs2));
2542 gen_clear_float_exceptions();
2544 gen_helper_check_ieee_exceptions();
2545 gen_op_store_DT0_fpr(DFPREG(rd));
2547 case 0x47: /* fsubq */
2548 CHECK_FPU_FEATURE(dc, FLOAT128);
2549 gen_op_load_fpr_QT0(QFPREG(rs1));
2550 gen_op_load_fpr_QT1(QFPREG(rs2));
2551 gen_clear_float_exceptions();
2553 gen_helper_check_ieee_exceptions();
2554 gen_op_store_QT0_fpr(QFPREG(rd));
2556 case 0x49: /* fmuls */
2557 CHECK_FPU_FEATURE(dc, FMUL);
2558 gen_clear_float_exceptions();
2559 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2560 gen_helper_check_ieee_exceptions();
2561 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2563 case 0x4a: /* fmuld */
2564 CHECK_FPU_FEATURE(dc, FMUL);
2565 gen_op_load_fpr_DT0(DFPREG(rs1));
2566 gen_op_load_fpr_DT1(DFPREG(rs2));
2567 gen_clear_float_exceptions();
2569 gen_helper_check_ieee_exceptions();
2570 gen_op_store_DT0_fpr(DFPREG(rd));
2572 case 0x4b: /* fmulq */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 CHECK_FPU_FEATURE(dc, FMUL);
2575 gen_op_load_fpr_QT0(QFPREG(rs1));
2576 gen_op_load_fpr_QT1(QFPREG(rs2));
2577 gen_clear_float_exceptions();
2579 gen_helper_check_ieee_exceptions();
2580 gen_op_store_QT0_fpr(QFPREG(rd));
2582 case 0x4d: /* fdivs */
2583 gen_clear_float_exceptions();
2584 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2585 gen_helper_check_ieee_exceptions();
2586 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2588 case 0x4e: /* fdivd */
2589 gen_op_load_fpr_DT0(DFPREG(rs1));
2590 gen_op_load_fpr_DT1(DFPREG(rs2));
2591 gen_clear_float_exceptions();
2593 gen_helper_check_ieee_exceptions();
2594 gen_op_store_DT0_fpr(DFPREG(rd));
2596 case 0x4f: /* fdivq */
2597 CHECK_FPU_FEATURE(dc, FLOAT128);
2598 gen_op_load_fpr_QT0(QFPREG(rs1));
2599 gen_op_load_fpr_QT1(QFPREG(rs2));
2600 gen_clear_float_exceptions();
2602 gen_helper_check_ieee_exceptions();
2603 gen_op_store_QT0_fpr(QFPREG(rd));
2605 case 0x69: /* fsmuld */
2606 CHECK_FPU_FEATURE(dc, FSMULD);
2607 gen_clear_float_exceptions();
2608 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2609 gen_helper_check_ieee_exceptions();
2610 gen_op_store_DT0_fpr(DFPREG(rd));
2612 case 0x6e: /* fdmulq */
2613 CHECK_FPU_FEATURE(dc, FLOAT128);
2614 gen_op_load_fpr_DT0(DFPREG(rs1));
2615 gen_op_load_fpr_DT1(DFPREG(rs2));
2616 gen_clear_float_exceptions();
2617 gen_helper_fdmulq();
2618 gen_helper_check_ieee_exceptions();
2619 gen_op_store_QT0_fpr(QFPREG(rd));
2621 case 0xc4: /* fitos */
2622 gen_clear_float_exceptions();
2623 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2624 gen_helper_check_ieee_exceptions();
2625 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2627 case 0xc6: /* fdtos */
2628 gen_op_load_fpr_DT1(DFPREG(rs2));
2629 gen_clear_float_exceptions();
2630 gen_helper_fdtos(cpu_tmp32);
2631 gen_helper_check_ieee_exceptions();
2632 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2634 case 0xc7: /* fqtos */
2635 CHECK_FPU_FEATURE(dc, FLOAT128);
2636 gen_op_load_fpr_QT1(QFPREG(rs2));
2637 gen_clear_float_exceptions();
2638 gen_helper_fqtos(cpu_tmp32);
2639 gen_helper_check_ieee_exceptions();
2640 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2642 case 0xc8: /* fitod */
2643 gen_helper_fitod(cpu_fpr[rs2]);
2644 gen_op_store_DT0_fpr(DFPREG(rd));
2646 case 0xc9: /* fstod */
2647 gen_helper_fstod(cpu_fpr[rs2]);
2648 gen_op_store_DT0_fpr(DFPREG(rd));
2650 case 0xcb: /* fqtod */
2651 CHECK_FPU_FEATURE(dc, FLOAT128);
2652 gen_op_load_fpr_QT1(QFPREG(rs2));
2653 gen_clear_float_exceptions();
2655 gen_helper_check_ieee_exceptions();
2656 gen_op_store_DT0_fpr(DFPREG(rd));
2658 case 0xcc: /* fitoq */
2659 CHECK_FPU_FEATURE(dc, FLOAT128);
2660 gen_helper_fitoq(cpu_fpr[rs2]);
2661 gen_op_store_QT0_fpr(QFPREG(rd));
2663 case 0xcd: /* fstoq */
2664 CHECK_FPU_FEATURE(dc, FLOAT128);
2665 gen_helper_fstoq(cpu_fpr[rs2]);
2666 gen_op_store_QT0_fpr(QFPREG(rd));
2668 case 0xce: /* fdtoq */
2669 CHECK_FPU_FEATURE(dc, FLOAT128);
2670 gen_op_load_fpr_DT1(DFPREG(rs2));
2672 gen_op_store_QT0_fpr(QFPREG(rd));
2674 case 0xd1: /* fstoi */
2675 gen_clear_float_exceptions();
2676 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2677 gen_helper_check_ieee_exceptions();
2678 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2680 case 0xd2: /* fdtoi */
2681 gen_op_load_fpr_DT1(DFPREG(rs2));
2682 gen_clear_float_exceptions();
2683 gen_helper_fdtoi(cpu_tmp32);
2684 gen_helper_check_ieee_exceptions();
2685 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2687 case 0xd3: /* fqtoi */
2688 CHECK_FPU_FEATURE(dc, FLOAT128);
2689 gen_op_load_fpr_QT1(QFPREG(rs2));
2690 gen_clear_float_exceptions();
2691 gen_helper_fqtoi(cpu_tmp32);
2692 gen_helper_check_ieee_exceptions();
2693 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2695 #ifdef TARGET_SPARC64
2696 case 0x2: /* V9 fmovd */
2697 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2698 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2699 cpu_fpr[DFPREG(rs2) + 1]);
2701 case 0x3: /* V9 fmovq */
2702 CHECK_FPU_FEATURE(dc, FLOAT128);
2703 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2704 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2705 cpu_fpr[QFPREG(rs2) + 1]);
2706 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2707 cpu_fpr[QFPREG(rs2) + 2]);
2708 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2709 cpu_fpr[QFPREG(rs2) + 3]);
2711 case 0x6: /* V9 fnegd */
2712 gen_op_load_fpr_DT1(DFPREG(rs2));
2714 gen_op_store_DT0_fpr(DFPREG(rd));
2716 case 0x7: /* V9 fnegq */
2717 CHECK_FPU_FEATURE(dc, FLOAT128);
2718 gen_op_load_fpr_QT1(QFPREG(rs2));
2720 gen_op_store_QT0_fpr(QFPREG(rd));
2722 case 0xa: /* V9 fabsd */
2723 gen_op_load_fpr_DT1(DFPREG(rs2));
2725 gen_op_store_DT0_fpr(DFPREG(rd));
2727 case 0xb: /* V9 fabsq */
2728 CHECK_FPU_FEATURE(dc, FLOAT128);
2729 gen_op_load_fpr_QT1(QFPREG(rs2));
2731 gen_op_store_QT0_fpr(QFPREG(rd));
2733 case 0x81: /* V9 fstox */
2734 gen_clear_float_exceptions();
2735 gen_helper_fstox(cpu_fpr[rs2]);
2736 gen_helper_check_ieee_exceptions();
2737 gen_op_store_DT0_fpr(DFPREG(rd));
2739 case 0x82: /* V9 fdtox */
2740 gen_op_load_fpr_DT1(DFPREG(rs2));
2741 gen_clear_float_exceptions();
2743 gen_helper_check_ieee_exceptions();
2744 gen_op_store_DT0_fpr(DFPREG(rd));
2746 case 0x83: /* V9 fqtox */
2747 CHECK_FPU_FEATURE(dc, FLOAT128);
2748 gen_op_load_fpr_QT1(QFPREG(rs2));
2749 gen_clear_float_exceptions();
2751 gen_helper_check_ieee_exceptions();
2752 gen_op_store_DT0_fpr(DFPREG(rd));
2754 case 0x84: /* V9 fxtos */
2755 gen_op_load_fpr_DT1(DFPREG(rs2));
2756 gen_clear_float_exceptions();
2757 gen_helper_fxtos(cpu_tmp32);
2758 gen_helper_check_ieee_exceptions();
2759 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2761 case 0x88: /* V9 fxtod */
2762 gen_op_load_fpr_DT1(DFPREG(rs2));
2763 gen_clear_float_exceptions();
2765 gen_helper_check_ieee_exceptions();
2766 gen_op_store_DT0_fpr(DFPREG(rd));
2768 case 0x8c: /* V9 fxtoq */
2769 CHECK_FPU_FEATURE(dc, FLOAT128);
2770 gen_op_load_fpr_DT1(DFPREG(rs2));
2771 gen_clear_float_exceptions();
2773 gen_helper_check_ieee_exceptions();
2774 gen_op_store_QT0_fpr(QFPREG(rd));
2780 } else if (xop == 0x35) { /* FPU Operations */
2781 #ifdef TARGET_SPARC64
2784 if (gen_trap_ifnofpu(dc, cpu_cond))
2786 gen_op_clear_ieee_excp_and_FTT();
2787 rs1 = GET_FIELD(insn, 13, 17);
2788 rs2 = GET_FIELD(insn, 27, 31);
2789 xop = GET_FIELD(insn, 18, 26);
2790 #ifdef TARGET_SPARC64
2791 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2794 l1 = gen_new_label();
2795 cond = GET_FIELD_SP(insn, 14, 17);
2796 cpu_src1 = get_src1(insn, cpu_src1);
2797 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2799 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2802 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2805 l1 = gen_new_label();
2806 cond = GET_FIELD_SP(insn, 14, 17);
2807 cpu_src1 = get_src1(insn, cpu_src1);
2808 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2810 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2811 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2814 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2817 CHECK_FPU_FEATURE(dc, FLOAT128);
2818 l1 = gen_new_label();
2819 cond = GET_FIELD_SP(insn, 14, 17);
2820 cpu_src1 = get_src1(insn, cpu_src1);
2821 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2823 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2824 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2825 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2832 #ifdef TARGET_SPARC64
2833 #define FMOVSCC(fcc) \
2838 l1 = gen_new_label(); \
2839 r_cond = tcg_temp_new(); \
2840 cond = GET_FIELD_SP(insn, 14, 17); \
2841 gen_fcond(r_cond, fcc, cond); \
2842 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2844 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2845 gen_set_label(l1); \
2846 tcg_temp_free(r_cond); \
2848 #define FMOVDCC(fcc) \
2853 l1 = gen_new_label(); \
2854 r_cond = tcg_temp_new(); \
2855 cond = GET_FIELD_SP(insn, 14, 17); \
2856 gen_fcond(r_cond, fcc, cond); \
2857 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2859 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2860 cpu_fpr[DFPREG(rs2)]); \
2861 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2862 cpu_fpr[DFPREG(rs2) + 1]); \
2863 gen_set_label(l1); \
2864 tcg_temp_free(r_cond); \
2866 #define FMOVQCC(fcc) \
2871 l1 = gen_new_label(); \
2872 r_cond = tcg_temp_new(); \
2873 cond = GET_FIELD_SP(insn, 14, 17); \
2874 gen_fcond(r_cond, fcc, cond); \
2875 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2877 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2878 cpu_fpr[QFPREG(rs2)]); \
2879 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2880 cpu_fpr[QFPREG(rs2) + 1]); \
2881 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2882 cpu_fpr[QFPREG(rs2) + 2]); \
2883 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2884 cpu_fpr[QFPREG(rs2) + 3]); \
2885 gen_set_label(l1); \
2886 tcg_temp_free(r_cond); \
2888 case 0x001: /* V9 fmovscc %fcc0 */
2891 case 0x002: /* V9 fmovdcc %fcc0 */
2894 case 0x003: /* V9 fmovqcc %fcc0 */
2895 CHECK_FPU_FEATURE(dc, FLOAT128);
2898 case 0x041: /* V9 fmovscc %fcc1 */
2901 case 0x042: /* V9 fmovdcc %fcc1 */
2904 case 0x043: /* V9 fmovqcc %fcc1 */
2905 CHECK_FPU_FEATURE(dc, FLOAT128);
2908 case 0x081: /* V9 fmovscc %fcc2 */
2911 case 0x082: /* V9 fmovdcc %fcc2 */
2914 case 0x083: /* V9 fmovqcc %fcc2 */
2915 CHECK_FPU_FEATURE(dc, FLOAT128);
2918 case 0x0c1: /* V9 fmovscc %fcc3 */
2921 case 0x0c2: /* V9 fmovdcc %fcc3 */
2924 case 0x0c3: /* V9 fmovqcc %fcc3 */
2925 CHECK_FPU_FEATURE(dc, FLOAT128);
2931 #define FMOVSCC(icc) \
2936 l1 = gen_new_label(); \
2937 r_cond = tcg_temp_new(); \
2938 cond = GET_FIELD_SP(insn, 14, 17); \
2939 gen_cond(r_cond, icc, cond, dc); \
2940 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2942 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2943 gen_set_label(l1); \
2944 tcg_temp_free(r_cond); \
2946 #define FMOVDCC(icc) \
2951 l1 = gen_new_label(); \
2952 r_cond = tcg_temp_new(); \
2953 cond = GET_FIELD_SP(insn, 14, 17); \
2954 gen_cond(r_cond, icc, cond, dc); \
2955 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2957 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2958 cpu_fpr[DFPREG(rs2)]); \
2959 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2960 cpu_fpr[DFPREG(rs2) + 1]); \
2961 gen_set_label(l1); \
2962 tcg_temp_free(r_cond); \
2964 #define FMOVQCC(icc) \
2969 l1 = gen_new_label(); \
2970 r_cond = tcg_temp_new(); \
2971 cond = GET_FIELD_SP(insn, 14, 17); \
2972 gen_cond(r_cond, icc, cond, dc); \
2973 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2975 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2976 cpu_fpr[QFPREG(rs2)]); \
2977 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2978 cpu_fpr[QFPREG(rs2) + 1]); \
2979 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2980 cpu_fpr[QFPREG(rs2) + 2]); \
2981 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2982 cpu_fpr[QFPREG(rs2) + 3]); \
2983 gen_set_label(l1); \
2984 tcg_temp_free(r_cond); \
2987 case 0x101: /* V9 fmovscc %icc */
2990 case 0x102: /* V9 fmovdcc %icc */
2992 case 0x103: /* V9 fmovqcc %icc */
2993 CHECK_FPU_FEATURE(dc, FLOAT128);
2996 case 0x181: /* V9 fmovscc %xcc */
2999 case 0x182: /* V9 fmovdcc %xcc */
3002 case 0x183: /* V9 fmovqcc %xcc */
3003 CHECK_FPU_FEATURE(dc, FLOAT128);
3010 case 0x51: /* fcmps, V9 %fcc */
3011 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3013 case 0x52: /* fcmpd, V9 %fcc */
3014 gen_op_load_fpr_DT0(DFPREG(rs1));
3015 gen_op_load_fpr_DT1(DFPREG(rs2));
3016 gen_op_fcmpd(rd & 3);
3018 case 0x53: /* fcmpq, V9 %fcc */
3019 CHECK_FPU_FEATURE(dc, FLOAT128);
3020 gen_op_load_fpr_QT0(QFPREG(rs1));
3021 gen_op_load_fpr_QT1(QFPREG(rs2));
3022 gen_op_fcmpq(rd & 3);
3024 case 0x55: /* fcmpes, V9 %fcc */
3025 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3027 case 0x56: /* fcmped, V9 %fcc */
3028 gen_op_load_fpr_DT0(DFPREG(rs1));
3029 gen_op_load_fpr_DT1(DFPREG(rs2));
3030 gen_op_fcmped(rd & 3);
3032 case 0x57: /* fcmpeq, V9 %fcc */
3033 CHECK_FPU_FEATURE(dc, FLOAT128);
3034 gen_op_load_fpr_QT0(QFPREG(rs1));
3035 gen_op_load_fpr_QT1(QFPREG(rs2));
3036 gen_op_fcmpeq(rd & 3);
3041 } else if (xop == 0x2) {
3044 rs1 = GET_FIELD(insn, 13, 17);
3046 // or %g0, x, y -> mov T0, x; mov y, T0
3047 if (IS_IMM) { /* immediate */
3050 simm = GET_FIELDs(insn, 19, 31);
3051 r_const = tcg_const_tl(simm);
3052 gen_movl_TN_reg(rd, r_const);
3053 tcg_temp_free(r_const);
3054 } else { /* register */
3055 rs2 = GET_FIELD(insn, 27, 31);
3056 gen_movl_reg_TN(rs2, cpu_dst);
3057 gen_movl_TN_reg(rd, cpu_dst);
3060 cpu_src1 = get_src1(insn, cpu_src1);
3061 if (IS_IMM) { /* immediate */
3062 simm = GET_FIELDs(insn, 19, 31);
3063 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3064 gen_movl_TN_reg(rd, cpu_dst);
3065 } else { /* register */
3066 // or x, %g0, y -> mov T1, x; mov y, T1
3067 rs2 = GET_FIELD(insn, 27, 31);
3069 gen_movl_reg_TN(rs2, cpu_src2);
3070 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3071 gen_movl_TN_reg(rd, cpu_dst);
3073 gen_movl_TN_reg(rd, cpu_src1);
3076 #ifdef TARGET_SPARC64
3077 } else if (xop == 0x25) { /* sll, V9 sllx */
3078 cpu_src1 = get_src1(insn, cpu_src1);
3079 if (IS_IMM) { /* immediate */
3080 simm = GET_FIELDs(insn, 20, 31);
3081 if (insn & (1 << 12)) {
3082 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3084 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3086 } else { /* register */
3087 rs2 = GET_FIELD(insn, 27, 31);
3088 gen_movl_reg_TN(rs2, cpu_src2);
3089 if (insn & (1 << 12)) {
3090 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3092 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3094 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3096 gen_movl_TN_reg(rd, cpu_dst);
3097 } else if (xop == 0x26) { /* srl, V9 srlx */
3098 cpu_src1 = get_src1(insn, cpu_src1);
3099 if (IS_IMM) { /* immediate */
3100 simm = GET_FIELDs(insn, 20, 31);
3101 if (insn & (1 << 12)) {
3102 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3104 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3105 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3107 } else { /* register */
3108 rs2 = GET_FIELD(insn, 27, 31);
3109 gen_movl_reg_TN(rs2, cpu_src2);
3110 if (insn & (1 << 12)) {
3111 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3112 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3114 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3115 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3116 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3119 gen_movl_TN_reg(rd, cpu_dst);
3120 } else if (xop == 0x27) { /* sra, V9 srax */
3121 cpu_src1 = get_src1(insn, cpu_src1);
3122 if (IS_IMM) { /* immediate */
3123 simm = GET_FIELDs(insn, 20, 31);
3124 if (insn & (1 << 12)) {
3125 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3127 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3128 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3129 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3131 } else { /* register */
3132 rs2 = GET_FIELD(insn, 27, 31);
3133 gen_movl_reg_TN(rs2, cpu_src2);
3134 if (insn & (1 << 12)) {
3135 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3136 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3138 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3139 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3140 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3141 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3144 gen_movl_TN_reg(rd, cpu_dst);
3146 } else if (xop < 0x36) {
3148 cpu_src1 = get_src1(insn, cpu_src1);
3149 cpu_src2 = get_src2(insn, cpu_src2);
3150 switch (xop & ~0x10) {
3153 simm = GET_FIELDs(insn, 19, 31);
3155 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3156 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3157 dc->cc_op = CC_OP_FLAGS;
3159 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3163 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3164 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3165 dc->cc_op = CC_OP_FLAGS;
3167 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3173 simm = GET_FIELDs(insn, 19, 31);
3174 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3176 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3179 gen_op_logic_cc(cpu_dst);
3180 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3181 dc->cc_op = CC_OP_FLAGS;
3186 simm = GET_FIELDs(insn, 19, 31);
3187 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3189 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3192 gen_op_logic_cc(cpu_dst);
3193 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3194 dc->cc_op = CC_OP_FLAGS;
3199 simm = GET_FIELDs(insn, 19, 31);
3200 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3202 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3205 gen_op_logic_cc(cpu_dst);
3206 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3207 dc->cc_op = CC_OP_FLAGS;
3212 simm = GET_FIELDs(insn, 19, 31);
3214 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3215 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3216 dc->cc_op = CC_OP_FLAGS;
3218 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3222 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3223 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3224 dc->cc_op = CC_OP_FLAGS;
3226 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3230 case 0x5: /* andn */
3232 simm = GET_FIELDs(insn, 19, 31);
3233 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3235 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3238 gen_op_logic_cc(cpu_dst);
3239 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3240 dc->cc_op = CC_OP_FLAGS;
3245 simm = GET_FIELDs(insn, 19, 31);
3246 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3248 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3251 gen_op_logic_cc(cpu_dst);
3252 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3253 dc->cc_op = CC_OP_FLAGS;
3256 case 0x7: /* xorn */
3258 simm = GET_FIELDs(insn, 19, 31);
3259 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3261 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3262 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3265 gen_op_logic_cc(cpu_dst);
3266 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3267 dc->cc_op = CC_OP_FLAGS;
3270 case 0x8: /* addx, V9 addc */
3272 simm = GET_FIELDs(insn, 19, 31);
3274 gen_helper_compute_psr();
3275 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3276 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3277 dc->cc_op = CC_OP_FLAGS;
3279 gen_helper_compute_psr();
3280 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3281 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3282 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3286 gen_helper_compute_psr();
3287 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3288 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3289 dc->cc_op = CC_OP_FLAGS;
3291 gen_helper_compute_psr();
3292 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3293 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3294 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3298 #ifdef TARGET_SPARC64
3299 case 0x9: /* V9 mulx */
3301 simm = GET_FIELDs(insn, 19, 31);
3302 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3304 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3308 case 0xa: /* umul */
3309 CHECK_IU_FEATURE(dc, MUL);
3310 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3312 gen_op_logic_cc(cpu_dst);
3313 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3314 dc->cc_op = CC_OP_FLAGS;
3317 case 0xb: /* smul */
3318 CHECK_IU_FEATURE(dc, MUL);
3319 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3321 gen_op_logic_cc(cpu_dst);
3322 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3323 dc->cc_op = CC_OP_FLAGS;
3326 case 0xc: /* subx, V9 subc */
3328 simm = GET_FIELDs(insn, 19, 31);
3330 gen_helper_compute_psr();
3331 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3332 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3333 dc->cc_op = CC_OP_FLAGS;
3335 gen_helper_compute_psr();
3336 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3337 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3338 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3342 gen_helper_compute_psr();
3343 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3344 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3345 dc->cc_op = CC_OP_FLAGS;
3347 gen_helper_compute_psr();
3348 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3349 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3350 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3354 #ifdef TARGET_SPARC64
3355 case 0xd: /* V9 udivx */
3356 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3357 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3358 gen_trap_ifdivzero_tl(cpu_cc_src2);
3359 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3362 case 0xe: /* udiv */
3363 CHECK_IU_FEATURE(dc, DIV);
3364 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3366 gen_op_div_cc(cpu_dst);
3367 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3368 dc->cc_op = CC_OP_FLAGS;
3371 case 0xf: /* sdiv */
3372 CHECK_IU_FEATURE(dc, DIV);
3373 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3375 gen_op_div_cc(cpu_dst);
3376 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3377 dc->cc_op = CC_OP_FLAGS;
3383 gen_movl_TN_reg(rd, cpu_dst);
3385 cpu_src1 = get_src1(insn, cpu_src1);
3386 cpu_src2 = get_src2(insn, cpu_src2);
3388 case 0x20: /* taddcc */
3389 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3390 gen_movl_TN_reg(rd, cpu_dst);
3391 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3392 dc->cc_op = CC_OP_FLAGS;
3394 case 0x21: /* tsubcc */
3395 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3396 gen_movl_TN_reg(rd, cpu_dst);
3397 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3398 dc->cc_op = CC_OP_FLAGS;
3400 case 0x22: /* taddcctv */
3401 save_state(dc, cpu_cond);
3402 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3403 gen_movl_TN_reg(rd, cpu_dst);
3404 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3405 dc->cc_op = CC_OP_FLAGS;
3407 case 0x23: /* tsubcctv */
3408 save_state(dc, cpu_cond);
3409 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3410 gen_movl_TN_reg(rd, cpu_dst);
3411 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3412 dc->cc_op = CC_OP_FLAGS;
3414 case 0x24: /* mulscc */
3415 gen_helper_compute_psr();
3416 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3417 gen_movl_TN_reg(rd, cpu_dst);
3418 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3419 dc->cc_op = CC_OP_FLAGS;
3421 #ifndef TARGET_SPARC64
3422 case 0x25: /* sll */
3423 if (IS_IMM) { /* immediate */
3424 simm = GET_FIELDs(insn, 20, 31);
3425 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3426 } else { /* register */
3427 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3428 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3430 gen_movl_TN_reg(rd, cpu_dst);
3432 case 0x26: /* srl */
3433 if (IS_IMM) { /* immediate */
3434 simm = GET_FIELDs(insn, 20, 31);
3435 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3436 } else { /* register */
3437 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3438 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3440 gen_movl_TN_reg(rd, cpu_dst);
3442 case 0x27: /* sra */
3443 if (IS_IMM) { /* immediate */
3444 simm = GET_FIELDs(insn, 20, 31);
3445 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3446 } else { /* register */
3447 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3448 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3450 gen_movl_TN_reg(rd, cpu_dst);
3457 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3458 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3460 #ifndef TARGET_SPARC64
3461 case 0x01 ... 0x0f: /* undefined in the
3465 case 0x10 ... 0x1f: /* implementation-dependent
3471 case 0x2: /* V9 wrccr */
3472 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3473 gen_helper_wrccr(cpu_dst);
3474 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3475 dc->cc_op = CC_OP_FLAGS;
3477 case 0x3: /* V9 wrasi */
3478 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3479 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3481 case 0x6: /* V9 wrfprs */
3482 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3483 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3484 save_state(dc, cpu_cond);
3489 case 0xf: /* V9 sir, nop if user */
3490 #if !defined(CONFIG_USER_ONLY)
3495 case 0x13: /* Graphics Status */
3496 if (gen_trap_ifnofpu(dc, cpu_cond))
3498 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3500 case 0x14: /* Softint set */
3501 if (!supervisor(dc))
3503 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3504 gen_helper_set_softint(cpu_tmp64);
3506 case 0x15: /* Softint clear */
3507 if (!supervisor(dc))
3509 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3510 gen_helper_clear_softint(cpu_tmp64);
3512 case 0x16: /* Softint write */
3513 if (!supervisor(dc))
3515 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3516 gen_helper_write_softint(cpu_tmp64);
3518 case 0x17: /* Tick compare */
3519 #if !defined(CONFIG_USER_ONLY)
3520 if (!supervisor(dc))
3526 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3528 r_tickptr = tcg_temp_new_ptr();
3529 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3530 offsetof(CPUState, tick));
3531 gen_helper_tick_set_limit(r_tickptr,
3533 tcg_temp_free_ptr(r_tickptr);
3536 case 0x18: /* System tick */
3537 #if !defined(CONFIG_USER_ONLY)
3538 if (!supervisor(dc))
3544 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3546 r_tickptr = tcg_temp_new_ptr();
3547 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3548 offsetof(CPUState, stick));
3549 gen_helper_tick_set_count(r_tickptr,
3551 tcg_temp_free_ptr(r_tickptr);
3554 case 0x19: /* System tick compare */
3555 #if !defined(CONFIG_USER_ONLY)
3556 if (!supervisor(dc))
3562 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3564 r_tickptr = tcg_temp_new_ptr();
3565 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3566 offsetof(CPUState, stick));
3567 gen_helper_tick_set_limit(r_tickptr,
3569 tcg_temp_free_ptr(r_tickptr);
3573 case 0x10: /* Performance Control */
3574 case 0x11: /* Performance Instrumentation
3576 case 0x12: /* Dispatch Control */
3583 #if !defined(CONFIG_USER_ONLY)
3584 case 0x31: /* wrpsr, V9 saved, restored */
3586 if (!supervisor(dc))
3588 #ifdef TARGET_SPARC64
3594 gen_helper_restored();
3596 case 2: /* UA2005 allclean */
3597 case 3: /* UA2005 otherw */
3598 case 4: /* UA2005 normalw */
3599 case 5: /* UA2005 invalw */
3605 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3606 gen_helper_wrpsr(cpu_dst);
3607 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3608 dc->cc_op = CC_OP_FLAGS;
3609 save_state(dc, cpu_cond);
3616 case 0x32: /* wrwim, V9 wrpr */
3618 if (!supervisor(dc))
3620 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3621 #ifdef TARGET_SPARC64
3627 r_tsptr = tcg_temp_new_ptr();
3628 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3629 offsetof(CPUState, tsptr));
3630 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3631 offsetof(trap_state, tpc));
3632 tcg_temp_free_ptr(r_tsptr);
3639 r_tsptr = tcg_temp_new_ptr();
3640 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3641 offsetof(CPUState, tsptr));
3642 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3643 offsetof(trap_state, tnpc));
3644 tcg_temp_free_ptr(r_tsptr);
3651 r_tsptr = tcg_temp_new_ptr();
3652 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3653 offsetof(CPUState, tsptr));
3654 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3655 offsetof(trap_state,
3657 tcg_temp_free_ptr(r_tsptr);
3664 r_tsptr = tcg_temp_new_ptr();
3665 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3666 offsetof(CPUState, tsptr));
3667 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3668 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3669 offsetof(trap_state, tt));
3670 tcg_temp_free_ptr(r_tsptr);
3677 r_tickptr = tcg_temp_new_ptr();
3678 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3679 offsetof(CPUState, tick));
3680 gen_helper_tick_set_count(r_tickptr,
3682 tcg_temp_free_ptr(r_tickptr);
3686 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3689 save_state(dc, cpu_cond);
3690 gen_helper_wrpstate(cpu_tmp0);
3696 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3697 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3698 offsetof(CPUSPARCState, tl));
3701 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3702 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3703 offsetof(CPUSPARCState,
3707 gen_helper_wrcwp(cpu_tmp0);
3710 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3711 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3712 offsetof(CPUSPARCState,
3715 case 11: // canrestore
3716 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3717 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3718 offsetof(CPUSPARCState,
3721 case 12: // cleanwin
3722 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3723 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3724 offsetof(CPUSPARCState,
3727 case 13: // otherwin
3728 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3729 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3730 offsetof(CPUSPARCState,
3734 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3735 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3736 offsetof(CPUSPARCState,
3739 case 16: // UA2005 gl
3740 CHECK_IU_FEATURE(dc, GL);
3741 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3742 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3743 offsetof(CPUSPARCState, gl));
3745 case 26: // UA2005 strand status
3746 CHECK_IU_FEATURE(dc, HYPV);
3747 if (!hypervisor(dc))
3749 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3755 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3756 if (dc->def->nwindows != 32)
3757 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3758 (1 << dc->def->nwindows) - 1);
3759 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3763 case 0x33: /* wrtbr, UA2005 wrhpr */
3765 #ifndef TARGET_SPARC64
3766 if (!supervisor(dc))
3768 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3770 CHECK_IU_FEATURE(dc, HYPV);
3771 if (!hypervisor(dc))
3773 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3776 // XXX gen_op_wrhpstate();
3777 save_state(dc, cpu_cond);
3783 // XXX gen_op_wrhtstate();
3786 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3789 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3791 case 31: // hstick_cmpr
3795 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3796 r_tickptr = tcg_temp_new_ptr();
3797 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3798 offsetof(CPUState, hstick));
3799 gen_helper_tick_set_limit(r_tickptr,
3801 tcg_temp_free_ptr(r_tickptr);
3804 case 6: // hver readonly
3812 #ifdef TARGET_SPARC64
3813 case 0x2c: /* V9 movcc */
3815 int cc = GET_FIELD_SP(insn, 11, 12);
3816 int cond = GET_FIELD_SP(insn, 14, 17);
3820 r_cond = tcg_temp_new();
3821 if (insn & (1 << 18)) {
3823 gen_cond(r_cond, 0, cond, dc);
3825 gen_cond(r_cond, 1, cond, dc);
3829 gen_fcond(r_cond, cc, cond);
3832 l1 = gen_new_label();
3834 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3835 if (IS_IMM) { /* immediate */
3838 simm = GET_FIELD_SPs(insn, 0, 10);
3839 r_const = tcg_const_tl(simm);
3840 gen_movl_TN_reg(rd, r_const);
3841 tcg_temp_free(r_const);
3843 rs2 = GET_FIELD_SP(insn, 0, 4);
3844 gen_movl_reg_TN(rs2, cpu_tmp0);
3845 gen_movl_TN_reg(rd, cpu_tmp0);
3848 tcg_temp_free(r_cond);
3851 case 0x2d: /* V9 sdivx */
3852 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3853 gen_movl_TN_reg(rd, cpu_dst);
3855 case 0x2e: /* V9 popc */
3857 cpu_src2 = get_src2(insn, cpu_src2);
3858 gen_helper_popc(cpu_dst, cpu_src2);
3859 gen_movl_TN_reg(rd, cpu_dst);
3861 case 0x2f: /* V9 movr */
3863 int cond = GET_FIELD_SP(insn, 10, 12);
3866 cpu_src1 = get_src1(insn, cpu_src1);
3868 l1 = gen_new_label();
3870 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3872 if (IS_IMM) { /* immediate */
3875 simm = GET_FIELD_SPs(insn, 0, 9);
3876 r_const = tcg_const_tl(simm);
3877 gen_movl_TN_reg(rd, r_const);
3878 tcg_temp_free(r_const);
3880 rs2 = GET_FIELD_SP(insn, 0, 4);
3881 gen_movl_reg_TN(rs2, cpu_tmp0);
3882 gen_movl_TN_reg(rd, cpu_tmp0);
3892 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3893 #ifdef TARGET_SPARC64
3894 int opf = GET_FIELD_SP(insn, 5, 13);
3895 rs1 = GET_FIELD(insn, 13, 17);
3896 rs2 = GET_FIELD(insn, 27, 31);
3897 if (gen_trap_ifnofpu(dc, cpu_cond))
3901 case 0x000: /* VIS I edge8cc */
3902 case 0x001: /* VIS II edge8n */
3903 case 0x002: /* VIS I edge8lcc */
3904 case 0x003: /* VIS II edge8ln */
3905 case 0x004: /* VIS I edge16cc */
3906 case 0x005: /* VIS II edge16n */
3907 case 0x006: /* VIS I edge16lcc */
3908 case 0x007: /* VIS II edge16ln */
3909 case 0x008: /* VIS I edge32cc */
3910 case 0x009: /* VIS II edge32n */
3911 case 0x00a: /* VIS I edge32lcc */
3912 case 0x00b: /* VIS II edge32ln */
3915 case 0x010: /* VIS I array8 */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 cpu_src1 = get_src1(insn, cpu_src1);
3918 gen_movl_reg_TN(rs2, cpu_src2);
3919 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3920 gen_movl_TN_reg(rd, cpu_dst);
3922 case 0x012: /* VIS I array16 */
3923 CHECK_FPU_FEATURE(dc, VIS1);
3924 cpu_src1 = get_src1(insn, cpu_src1);
3925 gen_movl_reg_TN(rs2, cpu_src2);
3926 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3927 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3928 gen_movl_TN_reg(rd, cpu_dst);
3930 case 0x014: /* VIS I array32 */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 cpu_src1 = get_src1(insn, cpu_src1);
3933 gen_movl_reg_TN(rs2, cpu_src2);
3934 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3935 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3936 gen_movl_TN_reg(rd, cpu_dst);
3938 case 0x018: /* VIS I alignaddr */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 cpu_src1 = get_src1(insn, cpu_src1);
3941 gen_movl_reg_TN(rs2, cpu_src2);
3942 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3943 gen_movl_TN_reg(rd, cpu_dst);
3945 case 0x019: /* VIS II bmask */
3946 case 0x01a: /* VIS I alignaddrl */
3949 case 0x020: /* VIS I fcmple16 */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 gen_op_load_fpr_DT0(DFPREG(rs1));
3952 gen_op_load_fpr_DT1(DFPREG(rs2));
3953 gen_helper_fcmple16();
3954 gen_op_store_DT0_fpr(DFPREG(rd));
3956 case 0x022: /* VIS I fcmpne16 */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 gen_op_load_fpr_DT0(DFPREG(rs1));
3959 gen_op_load_fpr_DT1(DFPREG(rs2));
3960 gen_helper_fcmpne16();
3961 gen_op_store_DT0_fpr(DFPREG(rd));
3963 case 0x024: /* VIS I fcmple32 */
3964 CHECK_FPU_FEATURE(dc, VIS1);
3965 gen_op_load_fpr_DT0(DFPREG(rs1));
3966 gen_op_load_fpr_DT1(DFPREG(rs2));
3967 gen_helper_fcmple32();
3968 gen_op_store_DT0_fpr(DFPREG(rd));
3970 case 0x026: /* VIS I fcmpne32 */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 gen_op_load_fpr_DT0(DFPREG(rs1));
3973 gen_op_load_fpr_DT1(DFPREG(rs2));
3974 gen_helper_fcmpne32();
3975 gen_op_store_DT0_fpr(DFPREG(rd));
3977 case 0x028: /* VIS I fcmpgt16 */
3978 CHECK_FPU_FEATURE(dc, VIS1);
3979 gen_op_load_fpr_DT0(DFPREG(rs1));
3980 gen_op_load_fpr_DT1(DFPREG(rs2));
3981 gen_helper_fcmpgt16();
3982 gen_op_store_DT0_fpr(DFPREG(rd));
3984 case 0x02a: /* VIS I fcmpeq16 */
3985 CHECK_FPU_FEATURE(dc, VIS1);
3986 gen_op_load_fpr_DT0(DFPREG(rs1));
3987 gen_op_load_fpr_DT1(DFPREG(rs2));
3988 gen_helper_fcmpeq16();
3989 gen_op_store_DT0_fpr(DFPREG(rd));
3991 case 0x02c: /* VIS I fcmpgt32 */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 gen_op_load_fpr_DT0(DFPREG(rs1));
3994 gen_op_load_fpr_DT1(DFPREG(rs2));
3995 gen_helper_fcmpgt32();
3996 gen_op_store_DT0_fpr(DFPREG(rd));
3998 case 0x02e: /* VIS I fcmpeq32 */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 gen_op_load_fpr_DT0(DFPREG(rs1));
4001 gen_op_load_fpr_DT1(DFPREG(rs2));
4002 gen_helper_fcmpeq32();
4003 gen_op_store_DT0_fpr(DFPREG(rd));
4005 case 0x031: /* VIS I fmul8x16 */
4006 CHECK_FPU_FEATURE(dc, VIS1);
4007 gen_op_load_fpr_DT0(DFPREG(rs1));
4008 gen_op_load_fpr_DT1(DFPREG(rs2));
4009 gen_helper_fmul8x16();
4010 gen_op_store_DT0_fpr(DFPREG(rd));
4012 case 0x033: /* VIS I fmul8x16au */
4013 CHECK_FPU_FEATURE(dc, VIS1);
4014 gen_op_load_fpr_DT0(DFPREG(rs1));
4015 gen_op_load_fpr_DT1(DFPREG(rs2));
4016 gen_helper_fmul8x16au();
4017 gen_op_store_DT0_fpr(DFPREG(rd));
4019 case 0x035: /* VIS I fmul8x16al */
4020 CHECK_FPU_FEATURE(dc, VIS1);
4021 gen_op_load_fpr_DT0(DFPREG(rs1));
4022 gen_op_load_fpr_DT1(DFPREG(rs2));
4023 gen_helper_fmul8x16al();
4024 gen_op_store_DT0_fpr(DFPREG(rd));
4026 case 0x036: /* VIS I fmul8sux16 */
4027 CHECK_FPU_FEATURE(dc, VIS1);
4028 gen_op_load_fpr_DT0(DFPREG(rs1));
4029 gen_op_load_fpr_DT1(DFPREG(rs2));
4030 gen_helper_fmul8sux16();
4031 gen_op_store_DT0_fpr(DFPREG(rd));
4033 case 0x037: /* VIS I fmul8ulx16 */
4034 CHECK_FPU_FEATURE(dc, VIS1);
4035 gen_op_load_fpr_DT0(DFPREG(rs1));
4036 gen_op_load_fpr_DT1(DFPREG(rs2));
4037 gen_helper_fmul8ulx16();
4038 gen_op_store_DT0_fpr(DFPREG(rd));
4040 case 0x038: /* VIS I fmuld8sux16 */
4041 CHECK_FPU_FEATURE(dc, VIS1);
4042 gen_op_load_fpr_DT0(DFPREG(rs1));
4043 gen_op_load_fpr_DT1(DFPREG(rs2));
4044 gen_helper_fmuld8sux16();
4045 gen_op_store_DT0_fpr(DFPREG(rd));
4047 case 0x039: /* VIS I fmuld8ulx16 */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 gen_op_load_fpr_DT0(DFPREG(rs1));
4050 gen_op_load_fpr_DT1(DFPREG(rs2));
4051 gen_helper_fmuld8ulx16();
4052 gen_op_store_DT0_fpr(DFPREG(rd));
4054 case 0x03a: /* VIS I fpack32 */
4055 case 0x03b: /* VIS I fpack16 */
4056 case 0x03d: /* VIS I fpackfix */
4057 case 0x03e: /* VIS I pdist */
4060 case 0x048: /* VIS I faligndata */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 gen_op_load_fpr_DT0(DFPREG(rs1));
4063 gen_op_load_fpr_DT1(DFPREG(rs2));
4064 gen_helper_faligndata();
4065 gen_op_store_DT0_fpr(DFPREG(rd));
4067 case 0x04b: /* VIS I fpmerge */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 gen_op_load_fpr_DT0(DFPREG(rs1));
4070 gen_op_load_fpr_DT1(DFPREG(rs2));
4071 gen_helper_fpmerge();
4072 gen_op_store_DT0_fpr(DFPREG(rd));
4074 case 0x04c: /* VIS II bshuffle */
4077 case 0x04d: /* VIS I fexpand */
4078 CHECK_FPU_FEATURE(dc, VIS1);
4079 gen_op_load_fpr_DT0(DFPREG(rs1));
4080 gen_op_load_fpr_DT1(DFPREG(rs2));
4081 gen_helper_fexpand();
4082 gen_op_store_DT0_fpr(DFPREG(rd));
4084 case 0x050: /* VIS I fpadd16 */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 gen_op_load_fpr_DT0(DFPREG(rs1));
4087 gen_op_load_fpr_DT1(DFPREG(rs2));
4088 gen_helper_fpadd16();
4089 gen_op_store_DT0_fpr(DFPREG(rd));
4091 case 0x051: /* VIS I fpadd16s */
4092 CHECK_FPU_FEATURE(dc, VIS1);
4093 gen_helper_fpadd16s(cpu_fpr[rd],
4094 cpu_fpr[rs1], cpu_fpr[rs2]);
4096 case 0x052: /* VIS I fpadd32 */
4097 CHECK_FPU_FEATURE(dc, VIS1);
4098 gen_op_load_fpr_DT0(DFPREG(rs1));
4099 gen_op_load_fpr_DT1(DFPREG(rs2));
4100 gen_helper_fpadd32();
4101 gen_op_store_DT0_fpr(DFPREG(rd));
4103 case 0x053: /* VIS I fpadd32s */
4104 CHECK_FPU_FEATURE(dc, VIS1);
4105 gen_helper_fpadd32s(cpu_fpr[rd],
4106 cpu_fpr[rs1], cpu_fpr[rs2]);
4108 case 0x054: /* VIS I fpsub16 */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 gen_op_load_fpr_DT0(DFPREG(rs1));
4111 gen_op_load_fpr_DT1(DFPREG(rs2));
4112 gen_helper_fpsub16();
4113 gen_op_store_DT0_fpr(DFPREG(rd));
4115 case 0x055: /* VIS I fpsub16s */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 gen_helper_fpsub16s(cpu_fpr[rd],
4118 cpu_fpr[rs1], cpu_fpr[rs2]);
4120 case 0x056: /* VIS I fpsub32 */
4121 CHECK_FPU_FEATURE(dc, VIS1);
4122 gen_op_load_fpr_DT0(DFPREG(rs1));
4123 gen_op_load_fpr_DT1(DFPREG(rs2));
4124 gen_helper_fpsub32();
4125 gen_op_store_DT0_fpr(DFPREG(rd));
4127 case 0x057: /* VIS I fpsub32s */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 gen_helper_fpsub32s(cpu_fpr[rd],
4130 cpu_fpr[rs1], cpu_fpr[rs2]);
4132 case 0x060: /* VIS I fzero */
4133 CHECK_FPU_FEATURE(dc, VIS1);
4134 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4135 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4137 case 0x061: /* VIS I fzeros */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4141 case 0x062: /* VIS I fnor */
4142 CHECK_FPU_FEATURE(dc, VIS1);
4143 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4144 cpu_fpr[DFPREG(rs2)]);
4145 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4146 cpu_fpr[DFPREG(rs2) + 1]);
4148 case 0x063: /* VIS I fnors */
4149 CHECK_FPU_FEATURE(dc, VIS1);
4150 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4152 case 0x064: /* VIS I fandnot2 */
4153 CHECK_FPU_FEATURE(dc, VIS1);
4154 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4155 cpu_fpr[DFPREG(rs2)]);
4156 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4157 cpu_fpr[DFPREG(rs1) + 1],
4158 cpu_fpr[DFPREG(rs2) + 1]);
4160 case 0x065: /* VIS I fandnot2s */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4164 case 0x066: /* VIS I fnot2 */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4167 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4168 cpu_fpr[DFPREG(rs2) + 1]);
4170 case 0x067: /* VIS I fnot2s */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4174 case 0x068: /* VIS I fandnot1 */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4177 cpu_fpr[DFPREG(rs1)]);
4178 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4179 cpu_fpr[DFPREG(rs2) + 1],
4180 cpu_fpr[DFPREG(rs1) + 1]);
4182 case 0x069: /* VIS I fandnot1s */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4186 case 0x06a: /* VIS I fnot1 */
4187 CHECK_FPU_FEATURE(dc, VIS1);
4188 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4189 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4190 cpu_fpr[DFPREG(rs1) + 1]);
4192 case 0x06b: /* VIS I fnot1s */
4193 CHECK_FPU_FEATURE(dc, VIS1);
4194 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4196 case 0x06c: /* VIS I fxor */
4197 CHECK_FPU_FEATURE(dc, VIS1);
4198 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4199 cpu_fpr[DFPREG(rs2)]);
4200 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4201 cpu_fpr[DFPREG(rs1) + 1],
4202 cpu_fpr[DFPREG(rs2) + 1]);
4204 case 0x06d: /* VIS I fxors */
4205 CHECK_FPU_FEATURE(dc, VIS1);
4206 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4208 case 0x06e: /* VIS I fnand */
4209 CHECK_FPU_FEATURE(dc, VIS1);
4210 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4211 cpu_fpr[DFPREG(rs2)]);
4212 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4213 cpu_fpr[DFPREG(rs2) + 1]);
4215 case 0x06f: /* VIS I fnands */
4216 CHECK_FPU_FEATURE(dc, VIS1);
4217 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4219 case 0x070: /* VIS I fand */
4220 CHECK_FPU_FEATURE(dc, VIS1);
4221 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4222 cpu_fpr[DFPREG(rs2)]);
4223 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4224 cpu_fpr[DFPREG(rs1) + 1],
4225 cpu_fpr[DFPREG(rs2) + 1]);
4227 case 0x071: /* VIS I fands */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4231 case 0x072: /* VIS I fxnor */
4232 CHECK_FPU_FEATURE(dc, VIS1);
4233 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4234 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4235 cpu_fpr[DFPREG(rs1)]);
4236 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4237 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4238 cpu_fpr[DFPREG(rs1) + 1]);
4240 case 0x073: /* VIS I fxnors */
4241 CHECK_FPU_FEATURE(dc, VIS1);
4242 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4243 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4245 case 0x074: /* VIS I fsrc1 */
4246 CHECK_FPU_FEATURE(dc, VIS1);
4247 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4248 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4249 cpu_fpr[DFPREG(rs1) + 1]);
4251 case 0x075: /* VIS I fsrc1s */
4252 CHECK_FPU_FEATURE(dc, VIS1);
4253 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4255 case 0x076: /* VIS I fornot2 */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4258 cpu_fpr[DFPREG(rs2)]);
4259 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4260 cpu_fpr[DFPREG(rs1) + 1],
4261 cpu_fpr[DFPREG(rs2) + 1]);
4263 case 0x077: /* VIS I fornot2s */
4264 CHECK_FPU_FEATURE(dc, VIS1);
4265 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4267 case 0x078: /* VIS I fsrc2 */
4268 CHECK_FPU_FEATURE(dc, VIS1);
4269 gen_op_load_fpr_DT0(DFPREG(rs2));
4270 gen_op_store_DT0_fpr(DFPREG(rd));
4272 case 0x079: /* VIS I fsrc2s */
4273 CHECK_FPU_FEATURE(dc, VIS1);
4274 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4276 case 0x07a: /* VIS I fornot1 */
4277 CHECK_FPU_FEATURE(dc, VIS1);
4278 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4279 cpu_fpr[DFPREG(rs1)]);
4280 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4281 cpu_fpr[DFPREG(rs2) + 1],
4282 cpu_fpr[DFPREG(rs1) + 1]);
4284 case 0x07b: /* VIS I fornot1s */
4285 CHECK_FPU_FEATURE(dc, VIS1);
4286 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4288 case 0x07c: /* VIS I for */
4289 CHECK_FPU_FEATURE(dc, VIS1);
4290 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4291 cpu_fpr[DFPREG(rs2)]);
4292 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4293 cpu_fpr[DFPREG(rs1) + 1],
4294 cpu_fpr[DFPREG(rs2) + 1]);
4296 case 0x07d: /* VIS I fors */
4297 CHECK_FPU_FEATURE(dc, VIS1);
4298 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4300 case 0x07e: /* VIS I fone */
4301 CHECK_FPU_FEATURE(dc, VIS1);
4302 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4303 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4305 case 0x07f: /* VIS I fones */
4306 CHECK_FPU_FEATURE(dc, VIS1);
4307 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4309 case 0x080: /* VIS I shutdown */
4310 case 0x081: /* VIS II siam */
4319 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4320 #ifdef TARGET_SPARC64
4325 #ifdef TARGET_SPARC64
4326 } else if (xop == 0x39) { /* V9 return */
4329 save_state(dc, cpu_cond);
4330 cpu_src1 = get_src1(insn, cpu_src1);
4331 if (IS_IMM) { /* immediate */
4332 simm = GET_FIELDs(insn, 19, 31);
4333 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4334 } else { /* register */
4335 rs2 = GET_FIELD(insn, 27, 31);
4337 gen_movl_reg_TN(rs2, cpu_src2);
4338 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4340 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4342 gen_helper_restore();
4343 gen_mov_pc_npc(dc, cpu_cond);
4344 r_const = tcg_const_i32(3);
4345 gen_helper_check_align(cpu_dst, r_const);
4346 tcg_temp_free_i32(r_const);
4347 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4348 dc->npc = DYNAMIC_PC;
4352 cpu_src1 = get_src1(insn, cpu_src1);
4353 if (IS_IMM) { /* immediate */
4354 simm = GET_FIELDs(insn, 19, 31);
4355 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4356 } else { /* register */
4357 rs2 = GET_FIELD(insn, 27, 31);
4359 gen_movl_reg_TN(rs2, cpu_src2);
4360 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4362 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4365 case 0x38: /* jmpl */
4370 r_pc = tcg_const_tl(dc->pc);
4371 gen_movl_TN_reg(rd, r_pc);
4372 tcg_temp_free(r_pc);
4373 gen_mov_pc_npc(dc, cpu_cond);
4374 r_const = tcg_const_i32(3);
4375 gen_helper_check_align(cpu_dst, r_const);
4376 tcg_temp_free_i32(r_const);
4377 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4378 dc->npc = DYNAMIC_PC;
4381 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4382 case 0x39: /* rett, V9 return */
4386 if (!supervisor(dc))
4388 gen_mov_pc_npc(dc, cpu_cond);
4389 r_const = tcg_const_i32(3);
4390 gen_helper_check_align(cpu_dst, r_const);
4391 tcg_temp_free_i32(r_const);
4392 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4393 dc->npc = DYNAMIC_PC;
4398 case 0x3b: /* flush */
4399 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4401 gen_helper_flush(cpu_dst);
4403 case 0x3c: /* save */
4404 save_state(dc, cpu_cond);
4406 gen_movl_TN_reg(rd, cpu_dst);
4408 case 0x3d: /* restore */
4409 save_state(dc, cpu_cond);
4410 gen_helper_restore();
4411 gen_movl_TN_reg(rd, cpu_dst);
4413 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4414 case 0x3e: /* V9 done/retry */
4418 if (!supervisor(dc))
4420 dc->npc = DYNAMIC_PC;
4421 dc->pc = DYNAMIC_PC;
4425 if (!supervisor(dc))
4427 dc->npc = DYNAMIC_PC;
4428 dc->pc = DYNAMIC_PC;
4444 case 3: /* load/store instructions */
4446 unsigned int xop = GET_FIELD(insn, 7, 12);
4448 cpu_src1 = get_src1(insn, cpu_src1);
4449 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4450 rs2 = GET_FIELD(insn, 27, 31);
4451 gen_movl_reg_TN(rs2, cpu_src2);
4452 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4453 } else if (IS_IMM) { /* immediate */
4454 simm = GET_FIELDs(insn, 19, 31);
4455 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4456 } else { /* register */
4457 rs2 = GET_FIELD(insn, 27, 31);
4459 gen_movl_reg_TN(rs2, cpu_src2);
4460 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4462 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4464 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4465 (xop > 0x17 && xop <= 0x1d ) ||
4466 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4468 case 0x0: /* ld, V9 lduw, load unsigned word */
4469 gen_address_mask(dc, cpu_addr);
4470 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4472 case 0x1: /* ldub, load unsigned byte */
4473 gen_address_mask(dc, cpu_addr);
4474 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4476 case 0x2: /* lduh, load unsigned halfword */
4477 gen_address_mask(dc, cpu_addr);
4478 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4480 case 0x3: /* ldd, load double word */
4486 save_state(dc, cpu_cond);
4487 r_const = tcg_const_i32(7);
4488 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4489 tcg_temp_free_i32(r_const);
4490 gen_address_mask(dc, cpu_addr);
4491 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4492 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4493 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4494 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4495 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4496 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4497 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4500 case 0x9: /* ldsb, load signed byte */
4501 gen_address_mask(dc, cpu_addr);
4502 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4504 case 0xa: /* ldsh, load signed halfword */
4505 gen_address_mask(dc, cpu_addr);
4506 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4508 case 0xd: /* ldstub -- XXX: should be atomically */
4512 gen_address_mask(dc, cpu_addr);
4513 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4514 r_const = tcg_const_tl(0xff);
4515 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4516 tcg_temp_free(r_const);
4519 case 0x0f: /* swap, swap register with memory. Also
4521 CHECK_IU_FEATURE(dc, SWAP);
4522 gen_movl_reg_TN(rd, cpu_val);
4523 gen_address_mask(dc, cpu_addr);
4524 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4525 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4526 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4528 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4529 case 0x10: /* lda, V9 lduwa, load word alternate */
4530 #ifndef TARGET_SPARC64
4533 if (!supervisor(dc))
4536 save_state(dc, cpu_cond);
4537 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4539 case 0x11: /* lduba, load unsigned byte alternate */
4540 #ifndef TARGET_SPARC64
4543 if (!supervisor(dc))
4546 save_state(dc, cpu_cond);
4547 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4549 case 0x12: /* lduha, load unsigned halfword alternate */
4550 #ifndef TARGET_SPARC64
4553 if (!supervisor(dc))
4556 save_state(dc, cpu_cond);
4557 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4559 case 0x13: /* ldda, load double word alternate */
4560 #ifndef TARGET_SPARC64
4563 if (!supervisor(dc))
4568 save_state(dc, cpu_cond);
4569 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4571 case 0x19: /* ldsba, load signed byte alternate */
4572 #ifndef TARGET_SPARC64
4575 if (!supervisor(dc))
4578 save_state(dc, cpu_cond);
4579 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4581 case 0x1a: /* ldsha, load signed halfword alternate */
4582 #ifndef TARGET_SPARC64
4585 if (!supervisor(dc))
4588 save_state(dc, cpu_cond);
4589 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4591 case 0x1d: /* ldstuba -- XXX: should be atomically */
4592 #ifndef TARGET_SPARC64
4595 if (!supervisor(dc))
4598 save_state(dc, cpu_cond);
4599 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4601 case 0x1f: /* swapa, swap reg with alt. memory. Also
4603 CHECK_IU_FEATURE(dc, SWAP);
4604 #ifndef TARGET_SPARC64
4607 if (!supervisor(dc))
4610 save_state(dc, cpu_cond);
4611 gen_movl_reg_TN(rd, cpu_val);
4612 gen_swap_asi(cpu_val, cpu_addr, insn);
4615 #ifndef TARGET_SPARC64
4616 case 0x30: /* ldc */
4617 case 0x31: /* ldcsr */
4618 case 0x33: /* lddc */
4622 #ifdef TARGET_SPARC64
4623 case 0x08: /* V9 ldsw */
4624 gen_address_mask(dc, cpu_addr);
4625 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4627 case 0x0b: /* V9 ldx */
4628 gen_address_mask(dc, cpu_addr);
4629 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4631 case 0x18: /* V9 ldswa */
4632 save_state(dc, cpu_cond);
4633 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4635 case 0x1b: /* V9 ldxa */
4636 save_state(dc, cpu_cond);
4637 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4639 case 0x2d: /* V9 prefetch, no effect */
4641 case 0x30: /* V9 ldfa */
4642 save_state(dc, cpu_cond);
4643 gen_ldf_asi(cpu_addr, insn, 4, rd);
4645 case 0x33: /* V9 lddfa */
4646 save_state(dc, cpu_cond);
4647 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4649 case 0x3d: /* V9 prefetcha, no effect */
4651 case 0x32: /* V9 ldqfa */
4652 CHECK_FPU_FEATURE(dc, FLOAT128);
4653 save_state(dc, cpu_cond);
4654 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4660 gen_movl_TN_reg(rd, cpu_val);
4661 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4664 } else if (xop >= 0x20 && xop < 0x24) {
4665 if (gen_trap_ifnofpu(dc, cpu_cond))
4667 save_state(dc, cpu_cond);
4669 case 0x20: /* ldf, load fpreg */
4670 gen_address_mask(dc, cpu_addr);
4671 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4672 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4674 case 0x21: /* ldfsr, V9 ldxfsr */
4675 #ifdef TARGET_SPARC64
4676 gen_address_mask(dc, cpu_addr);
4678 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4679 gen_helper_ldxfsr(cpu_tmp64);
4683 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4684 gen_helper_ldfsr(cpu_tmp32);
4688 case 0x22: /* ldqf, load quad fpreg */
4692 CHECK_FPU_FEATURE(dc, FLOAT128);
4693 r_const = tcg_const_i32(dc->mem_idx);
4694 gen_helper_ldqf(cpu_addr, r_const);
4695 tcg_temp_free_i32(r_const);
4696 gen_op_store_QT0_fpr(QFPREG(rd));
4699 case 0x23: /* lddf, load double fpreg */
4703 r_const = tcg_const_i32(dc->mem_idx);
4704 gen_helper_lddf(cpu_addr, r_const);
4705 tcg_temp_free_i32(r_const);
4706 gen_op_store_DT0_fpr(DFPREG(rd));
4712 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4713 xop == 0xe || xop == 0x1e) {
4714 gen_movl_reg_TN(rd, cpu_val);
4716 case 0x4: /* st, store word */
4717 gen_address_mask(dc, cpu_addr);
4718 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4720 case 0x5: /* stb, store byte */
4721 gen_address_mask(dc, cpu_addr);
4722 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4724 case 0x6: /* sth, store halfword */
4725 gen_address_mask(dc, cpu_addr);
4726 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4728 case 0x7: /* std, store double word */
4734 save_state(dc, cpu_cond);
4735 gen_address_mask(dc, cpu_addr);
4736 r_const = tcg_const_i32(7);
4737 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4738 tcg_temp_free_i32(r_const);
4739 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4740 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4741 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4744 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4745 case 0x14: /* sta, V9 stwa, store word alternate */
4746 #ifndef TARGET_SPARC64
4749 if (!supervisor(dc))
4752 save_state(dc, cpu_cond);
4753 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4755 case 0x15: /* stba, store byte alternate */
4756 #ifndef TARGET_SPARC64
4759 if (!supervisor(dc))
4762 save_state(dc, cpu_cond);
4763 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4765 case 0x16: /* stha, store halfword alternate */
4766 #ifndef TARGET_SPARC64
4769 if (!supervisor(dc))
4772 save_state(dc, cpu_cond);
4773 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4775 case 0x17: /* stda, store double word alternate */
4776 #ifndef TARGET_SPARC64
4779 if (!supervisor(dc))
4785 save_state(dc, cpu_cond);
4786 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4790 #ifdef TARGET_SPARC64
4791 case 0x0e: /* V9 stx */
4792 gen_address_mask(dc, cpu_addr);
4793 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4795 case 0x1e: /* V9 stxa */
4796 save_state(dc, cpu_cond);
4797 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4803 } else if (xop > 0x23 && xop < 0x28) {
4804 if (gen_trap_ifnofpu(dc, cpu_cond))
4806 save_state(dc, cpu_cond);
4808 case 0x24: /* stf, store fpreg */
4809 gen_address_mask(dc, cpu_addr);
4810 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4811 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4813 case 0x25: /* stfsr, V9 stxfsr */
4814 #ifdef TARGET_SPARC64
4815 gen_address_mask(dc, cpu_addr);
4816 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4818 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4820 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4822 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4823 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4827 #ifdef TARGET_SPARC64
4828 /* V9 stqf, store quad fpreg */
4832 CHECK_FPU_FEATURE(dc, FLOAT128);
4833 gen_op_load_fpr_QT0(QFPREG(rd));
4834 r_const = tcg_const_i32(dc->mem_idx);
4835 gen_helper_stqf(cpu_addr, r_const);
4836 tcg_temp_free_i32(r_const);
4839 #else /* !TARGET_SPARC64 */
4840 /* stdfq, store floating point queue */
4841 #if defined(CONFIG_USER_ONLY)
4844 if (!supervisor(dc))
4846 if (gen_trap_ifnofpu(dc, cpu_cond))
4851 case 0x27: /* stdf, store double fpreg */
4855 gen_op_load_fpr_DT0(DFPREG(rd));
4856 r_const = tcg_const_i32(dc->mem_idx);
4857 gen_helper_stdf(cpu_addr, r_const);
4858 tcg_temp_free_i32(r_const);
4864 } else if (xop > 0x33 && xop < 0x3f) {
4865 save_state(dc, cpu_cond);
4867 #ifdef TARGET_SPARC64
4868 case 0x34: /* V9 stfa */
4869 gen_stf_asi(cpu_addr, insn, 4, rd);
4871 case 0x36: /* V9 stqfa */
4875 CHECK_FPU_FEATURE(dc, FLOAT128);
4876 r_const = tcg_const_i32(7);
4877 gen_helper_check_align(cpu_addr, r_const);
4878 tcg_temp_free_i32(r_const);
4879 gen_op_load_fpr_QT0(QFPREG(rd));
4880 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4883 case 0x37: /* V9 stdfa */
4884 gen_op_load_fpr_DT0(DFPREG(rd));
4885 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4887 case 0x3c: /* V9 casa */
4888 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4889 gen_movl_TN_reg(rd, cpu_val);
4891 case 0x3e: /* V9 casxa */
4892 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4893 gen_movl_TN_reg(rd, cpu_val);
4896 case 0x34: /* stc */
4897 case 0x35: /* stcsr */
4898 case 0x36: /* stdcq */
4899 case 0x37: /* stdc */
4910 /* default case for non jump instructions */
4911 if (dc->npc == DYNAMIC_PC) {
4912 dc->pc = DYNAMIC_PC;
4914 } else if (dc->npc == JUMP_PC) {
4915 /* we can do a static jump */
4916 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4920 dc->npc = dc->npc + 4;
4928 save_state(dc, cpu_cond);
4929 r_const = tcg_const_i32(TT_ILL_INSN);
4930 gen_helper_raise_exception(r_const);
4931 tcg_temp_free_i32(r_const);
4939 save_state(dc, cpu_cond);
4940 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4941 gen_helper_raise_exception(r_const);
4942 tcg_temp_free_i32(r_const);
4946 #if !defined(CONFIG_USER_ONLY)
4951 save_state(dc, cpu_cond);
4952 r_const = tcg_const_i32(TT_PRIV_INSN);
4953 gen_helper_raise_exception(r_const);
4954 tcg_temp_free_i32(r_const);
4960 save_state(dc, cpu_cond);
4961 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4964 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4966 save_state(dc, cpu_cond);
4967 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4971 #ifndef TARGET_SPARC64
4976 save_state(dc, cpu_cond);
4977 r_const = tcg_const_i32(TT_NCP_INSN);
4978 gen_helper_raise_exception(r_const);
4979 tcg_temp_free(r_const);
4986 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4987 int spc, CPUSPARCState *env)
4989 target_ulong pc_start, last_pc;
4990 uint16_t *gen_opc_end;
4991 DisasContext dc1, *dc = &dc1;
4997 memset(dc, 0, sizeof(DisasContext));
5002 dc->npc = (target_ulong) tb->cs_base;
5003 dc->cc_op = CC_OP_DYNAMIC;
5004 dc->mem_idx = cpu_mmu_index(env);
5006 if ((dc->def->features & CPU_FEATURE_FLOAT))
5007 dc->fpu_enabled = cpu_fpu_enabled(env);
5009 dc->fpu_enabled = 0;
5010 #ifdef TARGET_SPARC64
5011 dc->address_mask_32bit = env->pstate & PS_AM;
5013 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5015 cpu_tmp0 = tcg_temp_new();
5016 cpu_tmp32 = tcg_temp_new_i32();
5017 cpu_tmp64 = tcg_temp_new_i64();
5019 cpu_dst = tcg_temp_local_new();
5022 cpu_val = tcg_temp_local_new();
5023 cpu_addr = tcg_temp_local_new();
5026 max_insns = tb->cflags & CF_COUNT_MASK;
5028 max_insns = CF_COUNT_MASK;
5031 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
5032 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
5033 if (bp->pc == dc->pc) {
5034 if (dc->pc != pc_start)
5035 save_state(dc, cpu_cond);
5044 qemu_log("Search PC...\n");
5045 j = gen_opc_ptr - gen_opc_buf;
5049 gen_opc_instr_start[lj++] = 0;
5050 gen_opc_pc[lj] = dc->pc;
5051 gen_opc_npc[lj] = dc->npc;
5052 gen_opc_instr_start[lj] = 1;
5053 gen_opc_icount[lj] = num_insns;
5056 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5059 disas_sparc_insn(dc);
5064 /* if the next PC is different, we abort now */
5065 if (dc->pc != (last_pc + 4))
5067 /* if we reach a page boundary, we stop generation so that the
5068 PC of a TT_TFAULT exception is always in the right page */
5069 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5071 /* if single step mode, we generate only one instruction and
5072 generate an exception */
5073 if (env->singlestep_enabled || singlestep) {
5074 tcg_gen_movi_tl(cpu_pc, dc->pc);
5078 } while ((gen_opc_ptr < gen_opc_end) &&
5079 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5080 num_insns < max_insns);
5083 tcg_temp_free(cpu_addr);
5084 tcg_temp_free(cpu_val);
5085 tcg_temp_free(cpu_dst);
5086 tcg_temp_free_i64(cpu_tmp64);
5087 tcg_temp_free_i32(cpu_tmp32);
5088 tcg_temp_free(cpu_tmp0);
5089 if (tb->cflags & CF_LAST_IO)
5092 if (dc->pc != DYNAMIC_PC &&
5093 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5094 /* static PC and NPC: we can use direct chaining */
5095 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5097 if (dc->pc != DYNAMIC_PC)
5098 tcg_gen_movi_tl(cpu_pc, dc->pc);
5099 save_npc(dc, cpu_cond);
5103 gen_icount_end(tb, num_insns);
5104 *gen_opc_ptr = INDEX_op_end;
5106 j = gen_opc_ptr - gen_opc_buf;
5109 gen_opc_instr_start[lj++] = 0;
5113 gen_opc_jump_pc[0] = dc->jump_pc[0];
5114 gen_opc_jump_pc[1] = dc->jump_pc[1];
5116 tb->size = last_pc + 4 - pc_start;
5117 tb->icount = num_insns;
5120 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5121 qemu_log("--------------\n");
5122 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5123 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5129 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5131 gen_intermediate_code_internal(tb, 0, env);
5134 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5136 gen_intermediate_code_internal(tb, 1, env);
5139 void gen_intermediate_code_init(CPUSPARCState *env)
5143 static const char * const gregnames[8] = {
5144 NULL, // g0 not used
5153 static const char * const fregnames[64] = {
5154 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5155 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5156 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5157 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5158 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5159 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5160 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5161 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5164 /* init various static tables */
5168 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5169 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5170 offsetof(CPUState, regwptr),
5172 #ifdef TARGET_SPARC64
5173 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5175 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5177 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5179 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5181 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5182 offsetof(CPUState, tick_cmpr),
5184 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5185 offsetof(CPUState, stick_cmpr),
5187 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5188 offsetof(CPUState, hstick_cmpr),
5190 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5192 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5194 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5196 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5197 offsetof(CPUState, ssr), "ssr");
5198 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5199 offsetof(CPUState, version), "ver");
5200 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5201 offsetof(CPUState, softint),
5204 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5207 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5209 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5211 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5212 offsetof(CPUState, cc_src2),
5214 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5216 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5218 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5220 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5222 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5224 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5226 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5227 #ifndef CONFIG_USER_ONLY
5228 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5231 for (i = 1; i < 8; i++)
5232 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5233 offsetof(CPUState, gregs[i]),
5235 for (i = 0; i < TARGET_FPREGS; i++)
5236 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5237 offsetof(CPUState, fpr[i]),
5240 /* register helpers */
5242 #define GEN_HELPER 2
5247 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5248 unsigned long searched_pc, int pc_pos, void *puc)
5251 env->pc = gen_opc_pc[pc_pos];
5252 npc = gen_opc_npc[pc_pos];
5254 /* dynamic NPC: already stored */
5255 } else if (npc == 2) {
5256 target_ulong t2 = (target_ulong)(unsigned long)puc;
5257 /* jump PC: use T2 and the jump targets of the translation */
5259 env->npc = gen_opc_jump_pc[0];
5261 env->npc = gen_opc_jump_pc[1];