/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

/* Sign-extend the low 'len' bits of x to a full int. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
189 #ifdef TARGET_SPARC64
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
193 #define AM_CHECK(dc) (1)
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 tcg_gen_movi_tl(tn, 0);
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr, 0);
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc, 0);
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
295 static inline void gen_cc_NZ_icc(TCGv dst)
300 l1 = gen_new_label();
301 l2 = gen_new_label();
302 r_temp = tcg_temp_new();
303 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
307 tcg_gen_ext32s_tl(r_temp, dst);
308 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
311 tcg_temp_free(r_temp);
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
324 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
332 env->psr |= PSR_CARRY;
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
336 TCGv r_temp1, r_temp2;
339 l1 = gen_new_label();
340 r_temp1 = tcg_temp_new();
341 r_temp2 = tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
347 tcg_temp_free(r_temp1);
348 tcg_temp_free(r_temp2);
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
356 l1 = gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
371 r_temp = tcg_temp_new();
372 tcg_gen_xor_tl(r_temp, src1, src2);
373 tcg_gen_not_tl(r_temp, r_temp);
374 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379 tcg_temp_free(r_temp);
380 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
388 r_temp = tcg_temp_new();
389 tcg_gen_xor_tl(r_temp, src1, src2);
390 tcg_gen_not_tl(r_temp, r_temp);
391 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396 tcg_temp_free(r_temp);
397 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
407 l1 = gen_new_label();
409 r_temp = tcg_temp_new();
410 tcg_gen_xor_tl(r_temp, src1, src2);
411 tcg_gen_not_tl(r_temp, r_temp);
412 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416 r_const = tcg_const_i32(TT_TOVF);
417 gen_helper_raise_exception(r_const);
418 tcg_temp_free_i32(r_const);
420 tcg_temp_free(r_temp);
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
427 l1 = gen_new_label();
428 tcg_gen_or_tl(cpu_tmp0, src1, src2);
429 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
435 static inline void gen_op_logic_cc(TCGv dst)
437 tcg_gen_mov_tl(cpu_cc_dst, dst);
440 gen_cc_NZ_icc(cpu_cc_dst);
441 #ifdef TARGET_SPARC64
443 gen_cc_NZ_xcc(cpu_cc_dst);
447 static inline void gen_tag_tv(TCGv src1, TCGv src2)
452 l1 = gen_new_label();
453 tcg_gen_or_tl(cpu_tmp0, src1, src2);
454 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
455 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
456 r_const = tcg_const_i32(TT_TOVF);
457 gen_helper_raise_exception(r_const);
458 tcg_temp_free_i32(r_const);
462 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
464 tcg_gen_mov_tl(cpu_cc_src, src1);
465 tcg_gen_movi_tl(cpu_cc_src2, src2);
466 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
467 tcg_gen_mov_tl(dst, cpu_cc_dst);
470 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
472 tcg_gen_mov_tl(cpu_cc_src, src1);
473 tcg_gen_mov_tl(cpu_cc_src2, src2);
474 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
475 tcg_gen_mov_tl(dst, cpu_cc_dst);
478 static inline void gen_op_addx_cc2(TCGv dst)
480 gen_cc_NZ_icc(cpu_cc_dst);
481 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
482 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
483 #ifdef TARGET_SPARC64
484 gen_cc_NZ_xcc(cpu_cc_dst);
485 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
486 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
488 tcg_gen_mov_tl(dst, cpu_cc_dst);
491 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
493 tcg_gen_mov_tl(cpu_cc_src, src1);
494 tcg_gen_movi_tl(cpu_cc_src2, src2);
495 gen_mov_reg_C(cpu_tmp0, cpu_psr);
496 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
498 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
499 #ifdef TARGET_SPARC64
501 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
503 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
504 gen_op_addx_cc2(dst);
507 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
509 tcg_gen_mov_tl(cpu_cc_src, src1);
510 tcg_gen_mov_tl(cpu_cc_src2, src2);
511 gen_mov_reg_C(cpu_tmp0, cpu_psr);
512 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
514 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515 #ifdef TARGET_SPARC64
517 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
519 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
520 gen_op_addx_cc2(dst);
523 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
525 tcg_gen_mov_tl(cpu_cc_src, src1);
526 tcg_gen_mov_tl(cpu_cc_src2, src2);
527 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
529 gen_cc_NZ_icc(cpu_cc_dst);
530 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
531 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
532 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
533 #ifdef TARGET_SPARC64
535 gen_cc_NZ_xcc(cpu_cc_dst);
536 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
537 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
539 tcg_gen_mov_tl(dst, cpu_cc_dst);
542 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
544 tcg_gen_mov_tl(cpu_cc_src, src1);
545 tcg_gen_mov_tl(cpu_cc_src2, src2);
546 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
547 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
548 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
550 gen_cc_NZ_icc(cpu_cc_dst);
551 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
552 #ifdef TARGET_SPARC64
554 gen_cc_NZ_xcc(cpu_cc_dst);
555 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
556 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
558 tcg_gen_mov_tl(dst, cpu_cc_dst);
563 env->psr |= PSR_CARRY;
565 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
567 TCGv r_temp1, r_temp2;
570 l1 = gen_new_label();
571 r_temp1 = tcg_temp_new();
572 r_temp2 = tcg_temp_new();
573 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
574 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
575 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
576 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
578 tcg_temp_free(r_temp1);
579 tcg_temp_free(r_temp2);
582 #ifdef TARGET_SPARC64
583 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
587 l1 = gen_new_label();
588 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
589 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
595 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
598 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
602 r_temp = tcg_temp_new();
603 tcg_gen_xor_tl(r_temp, src1, src2);
604 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
605 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
606 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
607 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
608 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
609 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
610 tcg_temp_free(r_temp);
613 #ifdef TARGET_SPARC64
614 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
618 r_temp = tcg_temp_new();
619 tcg_gen_xor_tl(r_temp, src1, src2);
620 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
621 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
622 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
623 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
624 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
625 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
626 tcg_temp_free(r_temp);
630 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
636 l1 = gen_new_label();
638 r_temp = tcg_temp_new();
639 tcg_gen_xor_tl(r_temp, src1, src2);
640 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
641 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
642 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
643 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
644 r_const = tcg_const_i32(TT_TOVF);
645 gen_helper_raise_exception(r_const);
646 tcg_temp_free_i32(r_const);
648 tcg_temp_free(r_temp);
651 static inline void gen_op_sub_cc2(TCGv dst)
654 gen_cc_NZ_icc(cpu_cc_dst);
655 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
656 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 #ifdef TARGET_SPARC64
659 gen_cc_NZ_xcc(cpu_cc_dst);
660 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
661 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
663 tcg_gen_mov_tl(dst, cpu_cc_dst);
666 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
668 tcg_gen_mov_tl(cpu_cc_src, src1);
669 tcg_gen_movi_tl(cpu_cc_src2, src2);
671 tcg_gen_mov_tl(dst, src1);
672 gen_op_logic_cc(dst);
674 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
679 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
681 tcg_gen_mov_tl(cpu_cc_src, src1);
682 tcg_gen_mov_tl(cpu_cc_src2, src2);
683 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
687 static inline void gen_op_subx_cc2(TCGv dst)
689 gen_cc_NZ_icc(cpu_cc_dst);
690 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
691 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
692 #ifdef TARGET_SPARC64
693 gen_cc_NZ_xcc(cpu_cc_dst);
694 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
695 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
697 tcg_gen_mov_tl(dst, cpu_cc_dst);
700 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
702 tcg_gen_mov_tl(cpu_cc_src, src1);
703 tcg_gen_movi_tl(cpu_cc_src2, src2);
704 gen_mov_reg_C(cpu_tmp0, cpu_psr);
705 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
707 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
708 #ifdef TARGET_SPARC64
710 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
712 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
713 gen_op_subx_cc2(dst);
716 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
718 tcg_gen_mov_tl(cpu_cc_src, src1);
719 tcg_gen_mov_tl(cpu_cc_src2, src2);
720 gen_mov_reg_C(cpu_tmp0, cpu_psr);
721 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
723 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
724 #ifdef TARGET_SPARC64
726 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
728 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
729 gen_op_subx_cc2(dst);
732 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
734 tcg_gen_mov_tl(cpu_cc_src, src1);
735 tcg_gen_mov_tl(cpu_cc_src2, src2);
736 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
738 gen_cc_NZ_icc(cpu_cc_dst);
739 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
740 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
741 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
742 #ifdef TARGET_SPARC64
744 gen_cc_NZ_xcc(cpu_cc_dst);
745 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
746 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
748 tcg_gen_mov_tl(dst, cpu_cc_dst);
751 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
753 tcg_gen_mov_tl(cpu_cc_src, src1);
754 tcg_gen_mov_tl(cpu_cc_src2, src2);
755 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
756 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
757 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
759 gen_cc_NZ_icc(cpu_cc_dst);
760 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
761 #ifdef TARGET_SPARC64
763 gen_cc_NZ_xcc(cpu_cc_dst);
764 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
765 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
767 tcg_gen_mov_tl(dst, cpu_cc_dst);
770 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
775 l1 = gen_new_label();
776 r_temp = tcg_temp_new();
782 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
783 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
784 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
785 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
786 tcg_gen_movi_tl(cpu_cc_src2, 0);
790 // env->y = (b2 << 31) | (env->y >> 1);
791 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
792 tcg_gen_shli_tl(r_temp, r_temp, 31);
793 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
794 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
795 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
796 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
799 gen_mov_reg_N(cpu_tmp0, cpu_psr);
800 gen_mov_reg_V(r_temp, cpu_psr);
801 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
802 tcg_temp_free(r_temp);
804 // T0 = (b1 << 31) | (T0 >> 1);
806 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
807 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
808 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
810 /* do addition and update flags */
811 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
814 gen_cc_NZ_icc(cpu_cc_dst);
815 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
816 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
817 tcg_gen_mov_tl(dst, cpu_cc_dst);
820 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
822 TCGv_i64 r_temp, r_temp2;
824 r_temp = tcg_temp_new_i64();
825 r_temp2 = tcg_temp_new_i64();
827 tcg_gen_extu_tl_i64(r_temp, src2);
828 tcg_gen_extu_tl_i64(r_temp2, src1);
829 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
831 tcg_gen_shri_i64(r_temp, r_temp2, 32);
832 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
833 tcg_temp_free_i64(r_temp);
834 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
835 #ifdef TARGET_SPARC64
836 tcg_gen_mov_i64(dst, r_temp2);
838 tcg_gen_trunc_i64_tl(dst, r_temp2);
840 tcg_temp_free_i64(r_temp2);
843 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
845 TCGv_i64 r_temp, r_temp2;
847 r_temp = tcg_temp_new_i64();
848 r_temp2 = tcg_temp_new_i64();
850 tcg_gen_ext_tl_i64(r_temp, src2);
851 tcg_gen_ext_tl_i64(r_temp2, src1);
852 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
854 tcg_gen_shri_i64(r_temp, r_temp2, 32);
855 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
856 tcg_temp_free_i64(r_temp);
857 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
858 #ifdef TARGET_SPARC64
859 tcg_gen_mov_i64(dst, r_temp2);
861 tcg_gen_trunc_i64_tl(dst, r_temp2);
863 tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Trap with TT_DIV_ZERO if the divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* 64-bit signed divide; INT64_MIN / -1 saturates to INT64_MIN instead of
   overflowing (host div would trap). */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
899 static inline void gen_op_div_cc(TCGv dst)
903 tcg_gen_mov_tl(cpu_cc_dst, dst);
905 gen_cc_NZ_icc(cpu_cc_dst);
906 l1 = gen_new_label();
907 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
908 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
913 static inline void gen_op_eval_ba(TCGv dst)
915 tcg_gen_movi_tl(dst, 1);
919 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
921 gen_mov_reg_Z(dst, src);
925 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
927 gen_mov_reg_N(cpu_tmp0, src);
928 gen_mov_reg_V(dst, src);
929 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
930 gen_mov_reg_Z(cpu_tmp0, src);
931 tcg_gen_or_tl(dst, dst, cpu_tmp0);
935 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
937 gen_mov_reg_V(cpu_tmp0, src);
938 gen_mov_reg_N(dst, src);
939 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
943 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
945 gen_mov_reg_Z(cpu_tmp0, src);
946 gen_mov_reg_C(dst, src);
947 tcg_gen_or_tl(dst, dst, cpu_tmp0);
951 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
953 gen_mov_reg_C(dst, src);
957 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
959 gen_mov_reg_V(dst, src);
963 static inline void gen_op_eval_bn(TCGv dst)
965 tcg_gen_movi_tl(dst, 0);
969 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
971 gen_mov_reg_N(dst, src);
975 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
977 gen_mov_reg_Z(dst, src);
978 tcg_gen_xori_tl(dst, dst, 0x1);
982 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
984 gen_mov_reg_N(cpu_tmp0, src);
985 gen_mov_reg_V(dst, src);
986 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
987 gen_mov_reg_Z(cpu_tmp0, src);
988 tcg_gen_or_tl(dst, dst, cpu_tmp0);
989 tcg_gen_xori_tl(dst, dst, 0x1);
993 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
995 gen_mov_reg_V(cpu_tmp0, src);
996 gen_mov_reg_N(dst, src);
997 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
998 tcg_gen_xori_tl(dst, dst, 0x1);
1002 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
1004 gen_mov_reg_Z(cpu_tmp0, src);
1005 gen_mov_reg_C(dst, src);
1006 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1007 tcg_gen_xori_tl(dst, dst, 0x1);
1011 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
1013 gen_mov_reg_C(dst, src);
1014 tcg_gen_xori_tl(dst, dst, 0x1);
1018 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
1020 gen_mov_reg_N(dst, src);
1021 tcg_gen_xori_tl(dst, dst, 0x1);
1025 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
1027 gen_mov_reg_V(dst, src);
1028 tcg_gen_xori_tl(dst, dst, 0x1);
1032 FPSR bit field FCC1 | FCC0:
1038 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1039 unsigned int fcc_offset)
1041 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1042 tcg_gen_andi_tl(reg, reg, 0x1);
1045 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1046 unsigned int fcc_offset)
1048 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1049 tcg_gen_andi_tl(reg, reg, 0x1);
1053 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1054 unsigned int fcc_offset)
1056 gen_mov_reg_FCC0(dst, src, fcc_offset);
1057 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1058 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1061 // 1 or 2: FCC0 ^ FCC1
1062 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1063 unsigned int fcc_offset)
1065 gen_mov_reg_FCC0(dst, src, fcc_offset);
1066 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1071 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1072 unsigned int fcc_offset)
1074 gen_mov_reg_FCC0(dst, src, fcc_offset);
1078 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1079 unsigned int fcc_offset)
1081 gen_mov_reg_FCC0(dst, src, fcc_offset);
1082 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1083 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1084 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1088 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1089 unsigned int fcc_offset)
1091 gen_mov_reg_FCC1(dst, src, fcc_offset);
1095 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1096 unsigned int fcc_offset)
1098 gen_mov_reg_FCC0(dst, src, fcc_offset);
1099 tcg_gen_xori_tl(dst, dst, 0x1);
1100 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1101 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1105 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1106 unsigned int fcc_offset)
1108 gen_mov_reg_FCC0(dst, src, fcc_offset);
1109 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1113 // 0: !(FCC0 | FCC1)
1114 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1115 unsigned int fcc_offset)
1117 gen_mov_reg_FCC0(dst, src, fcc_offset);
1118 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1119 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1120 tcg_gen_xori_tl(dst, dst, 0x1);
1123 // 0 or 3: !(FCC0 ^ FCC1)
1124 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1125 unsigned int fcc_offset)
1127 gen_mov_reg_FCC0(dst, src, fcc_offset);
1128 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1129 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1130 tcg_gen_xori_tl(dst, dst, 0x1);
1134 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1135 unsigned int fcc_offset)
1137 gen_mov_reg_FCC0(dst, src, fcc_offset);
1138 tcg_gen_xori_tl(dst, dst, 0x1);
1141 // !1: !(FCC0 & !FCC1)
1142 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1143 unsigned int fcc_offset)
1145 gen_mov_reg_FCC0(dst, src, fcc_offset);
1146 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1147 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1148 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1149 tcg_gen_xori_tl(dst, dst, 0x1);
1153 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1154 unsigned int fcc_offset)
1156 gen_mov_reg_FCC1(dst, src, fcc_offset);
1157 tcg_gen_xori_tl(dst, dst, 0x1);
1160 // !2: !(!FCC0 & FCC1)
1161 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1162 unsigned int fcc_offset)
1164 gen_mov_reg_FCC0(dst, src, fcc_offset);
1165 tcg_gen_xori_tl(dst, dst, 0x1);
1166 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1167 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1168 tcg_gen_xori_tl(dst, dst, 0x1);
1171 // !3: !(FCC0 & FCC1)
1172 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1173 unsigned int fcc_offset)
1175 gen_mov_reg_FCC0(dst, src, fcc_offset);
1176 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1177 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1178 tcg_gen_xori_tl(dst, dst, 0x1);
1181 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1182 target_ulong pc2, TCGv r_cond)
1186 l1 = gen_new_label();
1188 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1190 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1193 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1196 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1197 target_ulong pc2, TCGv r_cond)
1201 l1 = gen_new_label();
1203 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1205 gen_goto_tb(dc, 0, pc2, pc1);
1208 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1211 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1216 l1 = gen_new_label();
1217 l2 = gen_new_label();
1219 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1221 tcg_gen_movi_tl(cpu_npc, npc1);
1225 tcg_gen_movi_tl(cpu_npc, npc2);
1229 /* call this function before using the condition register as it may
1230 have been set for a jump */
1231 static inline void flush_cond(DisasContext *dc, TCGv cond)
1233 if (dc->npc == JUMP_PC) {
1234 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1235 dc->npc = DYNAMIC_PC;
1239 static inline void save_npc(DisasContext *dc, TCGv cond)
1241 if (dc->npc == JUMP_PC) {
1242 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1243 dc->npc = DYNAMIC_PC;
1244 } else if (dc->npc != DYNAMIC_PC) {
1245 tcg_gen_movi_tl(cpu_npc, dc->npc);
1249 static inline void save_state(DisasContext *dc, TCGv cond)
1251 tcg_gen_movi_tl(cpu_pc, dc->pc);
1255 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1257 if (dc->npc == JUMP_PC) {
1258 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1259 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1260 dc->pc = DYNAMIC_PC;
1261 } else if (dc->npc == DYNAMIC_PC) {
1262 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1263 dc->pc = DYNAMIC_PC;
1269 static inline void gen_op_next_insn(void)
1271 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1272 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1275 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1280 #ifdef TARGET_SPARC64
1288 switch (dc->cc_op) {
1292 gen_helper_compute_psr();
1293 dc->cc_op = CC_OP_FLAGS;
1298 gen_op_eval_bn(r_dst);
1301 gen_op_eval_be(r_dst, r_src);
1304 gen_op_eval_ble(r_dst, r_src);
1307 gen_op_eval_bl(r_dst, r_src);
1310 gen_op_eval_bleu(r_dst, r_src);
1313 gen_op_eval_bcs(r_dst, r_src);
1316 gen_op_eval_bneg(r_dst, r_src);
1319 gen_op_eval_bvs(r_dst, r_src);
1322 gen_op_eval_ba(r_dst);
1325 gen_op_eval_bne(r_dst, r_src);
1328 gen_op_eval_bg(r_dst, r_src);
1331 gen_op_eval_bge(r_dst, r_src);
1334 gen_op_eval_bgu(r_dst, r_src);
1337 gen_op_eval_bcc(r_dst, r_src);
1340 gen_op_eval_bpos(r_dst, r_src);
1343 gen_op_eval_bvc(r_dst, r_src);
/* Evaluate floating-point condition `cond` of condition-code set fcc[cc]
   (read from cpu_fsr at bit offset `offset`) into r_dst (0 or 1),
   dispatching to the matching gen_op_eval_fb* helper. */
1348 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1350 unsigned int offset;
1370 gen_op_eval_bn(r_dst);
1373 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1376 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1379 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1382 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1385 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1388 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1391 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1394 gen_op_eval_ba(r_dst);
1397 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1400 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1403 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1406 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1409 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1412 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1415 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1420 #ifdef TARGET_SPARC64
/* Map the 3-bit BPr register-condition encoding to TCG comparison
   conditions (tested against zero). */
1422 static const int gen_tcg_cond_reg[8] = {
/* Evaluate a V9 register condition: r_dst = (r_src <cond> 0) ? 1 : 0,
   implemented with a conditional branch over the "set to 1" move. */
1433 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1437 l1 = gen_new_label();
1438 tcg_gen_movi_tl(r_dst, 0);
1439 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1440 tcg_gen_movi_tl(r_dst, 1);
1445 /* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  Handles the
   annul bit `a`, the unconditional never (cond 0) and always (cond 8)
   cases, and the general conditional case, where the taken/not-taken
   targets are recorded in jump_pc[] and npc becomes JUMP_PC. */
1446 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1449 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1450 target_ulong target = dc->pc + offset;
1453 /* unconditional not taken */
/* annulled: skip the delay slot entirely */
1455 dc->pc = dc->npc + 4;
1456 dc->npc = dc->pc + 4;
1459 dc->npc = dc->pc + 4;
1461 } else if (cond == 0x8) {
1462 /* unconditional taken */
1465 dc->npc = dc->pc + 4;
/* conditional: evaluate the condition into r_cond */
1471 flush_cond(dc, r_cond);
1472 gen_cond(r_cond, cc, cond, dc);
/* annulling branch: decide now, delay slot only executed if taken */
1474 gen_branch_a(dc, target, dc->npc, r_cond);
/* non-annulling: defer the decision via JUMP_PC */
1478 dc->jump_pc[0] = target;
1479 dc->jump_pc[1] = dc->npc + 4;
1485 /* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch but the condition comes from gen_fcond
   (FSR condition-code set `cc`). */
1486 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1489 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1490 target_ulong target = dc->pc + offset;
1493 /* unconditional not taken */
/* annulled: skip the delay slot entirely */
1495 dc->pc = dc->npc + 4;
1496 dc->npc = dc->pc + 4;
1499 dc->npc = dc->pc + 4;
1501 } else if (cond == 0x8) {
1502 /* unconditional taken */
1505 dc->npc = dc->pc + 4;
/* conditional: evaluate the FP condition into r_cond */
1511 flush_cond(dc, r_cond);
1512 gen_fcond(r_cond, cc, cond);
1514 gen_branch_a(dc, target, dc->npc, r_cond);
/* non-annulling: defer the decision via JUMP_PC */
1518 dc->jump_pc[0] = target;
1519 dc->jump_pc[1] = dc->npc + 4;
1525 #ifdef TARGET_SPARC64
1526 /* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register (BPr): condition is a comparison of
   r_reg against zero (gen_cond_reg); annul handling mirrors do_branch. */
1527 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1528 TCGv r_cond, TCGv r_reg)
1530 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1531 target_ulong target = dc->pc + offset;
1533 flush_cond(dc, r_cond);
1534 gen_cond_reg(r_cond, cond, r_reg);
1536 gen_branch_a(dc, target, dc->npc, r_cond);
/* non-annulling: defer the decision via JUMP_PC */
1540 dc->jump_pc[0] = target;
1541 dc->jump_pc[1] = dc->npc + 4;
/* Single-precision FP compare: dispatch to the helper for the selected
   V9 condition-code set (fcc0..fcc3). */
1546 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1550 gen_helper_fcmps(r_rs1, r_rs2);
1553 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1556 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1559 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
/* Double-precision FP compare; operands are taken from the DT0/DT1
   staging registers loaded by the caller. */
1564 static inline void gen_op_fcmpd(int fccno)
1571 gen_helper_fcmpd_fcc1();
1574 gen_helper_fcmpd_fcc2();
1577 gen_helper_fcmpd_fcc3();
/* Quad-precision FP compare; operands come from the QT0/QT1 staging
   registers loaded by the caller. */
1582 static inline void gen_op_fcmpq(int fccno)
1589 gen_helper_fcmpq_fcc1();
1592 gen_helper_fcmpq_fcc2();
1595 gen_helper_fcmpq_fcc3();
/* Single-precision FP compare-and-signal (fcmpes: signals on unordered),
   dispatched by condition-code set. */
1600 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1604 gen_helper_fcmpes(r_rs1, r_rs2);
1607 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1610 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1613 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
/* Double-precision FP compare-and-signal, dispatched by cc set. */
1618 static inline void gen_op_fcmped(int fccno)
1622 gen_helper_fcmped();
1625 gen_helper_fcmped_fcc1();
1628 gen_helper_fcmped_fcc2();
1631 gen_helper_fcmped_fcc3();
/* Quad-precision FP compare-and-signal, dispatched by cc set. */
1636 static inline void gen_op_fcmpeq(int fccno)
1640 gen_helper_fcmpeq();
1643 gen_helper_fcmpeq_fcc1();
1646 gen_helper_fcmpeq_fcc2();
1649 gen_helper_fcmpeq_fcc3();
/* Non-SPARC64 (V8) variants: only one condition-code set exists, so the
   fccno argument is ignored and each wrapper calls the base helper. */
1656 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1658 gen_helper_fcmps(r_rs1, r_rs2);
1661 static inline void gen_op_fcmpd(int fccno)
1666 static inline void gen_op_fcmpq(int fccno)
1671 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1673 gen_helper_fcmpes(r_rs1, r_rs2);
1676 static inline void gen_op_fcmped(int fccno)
1678 gen_helper_fcmped();
1681 static inline void gen_op_fcmpeq(int fccno)
1683 gen_helper_fcmpeq();
/* Raise an FP exception immediately: clear the FSR FTT field, OR in the
   given fsr_flags, then raise TT_FP_EXCP through the helper. */
1687 static inline void gen_op_fpexception_im(int fsr_flags)
1691 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1692 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1693 r_const = tcg_const_i32(TT_FP_EXCP);
1694 gen_helper_raise_exception(r_const);
1695 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), save translator state
   and raise TT_NFPU_INSN; the return value tells the caller whether a
   trap was generated so it can stop translating the insn. */
1698 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1700 #if !defined(CONFIG_USER_ONLY)
1701 if (!dc->fpu_enabled) {
/* state must be in sync before raising the exception */
1704 save_state(dc, r_cond);
1705 r_const = tcg_const_i32(TT_NFPU_INSN);
1706 gen_helper_raise_exception(r_const);
1707 tcg_temp_free_i32(r_const);
/* Clear the FSR FTT and current-exception (cexc) fields before an FP op. */
1715 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1717 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset accumulated softfloat exception flags via the helper. */
1720 static inline void gen_clear_float_exceptions(void)
1722 gen_helper_clear_float_exceptions();
1726 #ifdef TARGET_SPARC64
/* Return the ASI for a V9 memory access as a TCGv_i32: either a copy of
   the %asi register (implicit-ASI form) or the immediate ASI field of
   the instruction.  Caller frees the returned temp. */
1727 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1733 r_asi = tcg_temp_new_i32();
1734 tcg_gen_mov_i32(r_asi, cpu_asi);
1736 asi = GET_FIELD(insn, 19, 26);
1737 r_asi = tcg_const_i32(asi);
/* Emit an ASI load of `size` bytes (sign-extended if `sign`) from addr
   into dst, via the ld_asi helper. */
1742 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1745 TCGv_i32 r_asi, r_size, r_sign;
1747 r_asi = gen_get_asi(insn, addr);
1748 r_size = tcg_const_i32(size);
1749 r_sign = tcg_const_i32(sign);
1750 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1751 tcg_temp_free_i32(r_sign);
1752 tcg_temp_free_i32(r_size);
1753 tcg_temp_free_i32(r_asi);
/* Emit an ASI store of `size` bytes from src to addr via the helper. */
1756 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1758 TCGv_i32 r_asi, r_size;
1760 r_asi = gen_get_asi(insn, addr);
1761 r_size = tcg_const_i32(size);
1762 gen_helper_st_asi(addr, src, r_asi, r_size);
1763 tcg_temp_free_i32(r_size);
1764 tcg_temp_free_i32(r_asi);
/* ASI load into FP register `rd` (size selects single/double/quad),
   handled entirely by the ldf_asi helper. */
1767 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1769 TCGv_i32 r_asi, r_size, r_rd;
1771 r_asi = gen_get_asi(insn, addr);
1772 r_size = tcg_const_i32(size);
1773 r_rd = tcg_const_i32(rd);
1774 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1775 tcg_temp_free_i32(r_rd);
1776 tcg_temp_free_i32(r_size);
1777 tcg_temp_free_i32(r_asi);
/* ASI store from FP register `rd`, mirror of gen_ldf_asi. */
1780 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1782 TCGv_i32 r_asi, r_size, r_rd;
1784 r_asi = gen_get_asi(insn, addr);
1785 r_size = tcg_const_i32(size);
1786 r_rd = tcg_const_i32(rd);
1787 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1788 tcg_temp_free_i32(r_rd);
1789 tcg_temp_free_i32(r_size);
1790 tcg_temp_free_i32(r_asi);
/* SWAP with ASI: load the old 32-bit value, store dst to the same
   address, then move the old value into dst. */
1793 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1795 TCGv_i32 r_asi, r_size, r_sign;
1797 r_asi = gen_get_asi(insn, addr);
1798 r_size = tcg_const_i32(4);
1799 r_sign = tcg_const_i32(0);
1800 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1801 tcg_temp_free_i32(r_sign);
1802 gen_helper_st_asi(addr, dst, r_asi, r_size);
1803 tcg_temp_free_i32(r_size);
1804 tcg_temp_free_i32(r_asi);
1805 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* LDDA with ASI: the helper loads the doubleword and writes the register
   pair directly (rd encodes the destination pair). */
1808 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1810 TCGv_i32 r_asi, r_rd;
1812 r_asi = gen_get_asi(insn, addr);
1813 r_rd = tcg_const_i32(rd);
1814 gen_helper_ldda_asi(addr, r_asi, r_rd);
1815 tcg_temp_free_i32(r_rd);
1816 tcg_temp_free_i32(r_asi);
/* STDA with ASI: concatenate reg[rd+1] (low half) with hi into a 64-bit
   value and store 8 bytes. */
1819 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1821 TCGv_i32 r_asi, r_size;
1823 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1824 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1825 r_asi = gen_get_asi(insn, addr);
1826 r_size = tcg_const_i32(8);
1827 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1828 tcg_temp_free_i32(r_size);
1829 tcg_temp_free_i32(r_asi);
/* CASA (32-bit compare-and-swap with ASI): compare value comes from
   reg[rd]; the helper performs the atomic exchange. */
1832 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1838 r_val1 = tcg_temp_new();
1839 gen_movl_reg_TN(rd, r_val1);
1840 r_asi = gen_get_asi(insn, addr);
1841 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1842 tcg_temp_free_i32(r_asi);
1843 tcg_temp_free(r_val1);
/* CASXA (64-bit compare-and-swap with ASI), mirror of gen_cas_asi. */
1846 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1851 gen_movl_reg_TN(rd, cpu_tmp64);
1852 r_asi = gen_get_asi(insn, addr);
1853 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1854 tcg_temp_free_i32(r_asi);
1857 #elif !defined(CONFIG_USER_ONLY)
/* V8 system-mode ASI load: the ASI is always the instruction's immediate
   field; the helper returns a 64-bit value which is truncated to dst.
   NOTE(review): r_asi/r_size/r_sign are TCGv_i32 but are released with
   tcg_temp_free (target-long variant); the SPARC64 path above uses
   tcg_temp_free_i32 — confirm and make consistent. */
1859 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1862 TCGv_i32 r_asi, r_size, r_sign;
1864 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1865 r_size = tcg_const_i32(size);
1866 r_sign = tcg_const_i32(sign);
1867 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1868 tcg_temp_free(r_sign);
1869 tcg_temp_free(r_size);
1870 tcg_temp_free(r_asi);
1871 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* V8 system-mode ASI store: src is zero-extended to 64 bits for the
   helper.  NOTE(review): i32 temps freed with tcg_temp_free instead of
   tcg_temp_free_i32 (cf. the SPARC64 variant) — confirm. */
1874 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1876 TCGv_i32 r_asi, r_size;
1878 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1879 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1880 r_size = tcg_const_i32(size);
1881 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1882 tcg_temp_free(r_size);
1883 tcg_temp_free(r_asi);
/* V8 SWAP with ASI: load old value, store dst, return old value in dst.
   NOTE(review): i32 temps freed with tcg_temp_free instead of
   tcg_temp_free_i32 (cf. the SPARC64 variant) — confirm. */
1886 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1888 TCGv_i32 r_asi, r_size, r_sign;
1891 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1892 r_size = tcg_const_i32(4);
1893 r_sign = tcg_const_i32(0);
1894 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1895 tcg_temp_free(r_sign);
1896 r_val = tcg_temp_new_i64();
1897 tcg_gen_extu_tl_i64(r_val, dst);
1898 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1899 tcg_temp_free_i64(r_val);
1900 tcg_temp_free(r_size);
1901 tcg_temp_free(r_asi);
1902 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* V8 LDDA with ASI: load a 64-bit value, then split it — low word goes
   to reg[rd+1], high word to reg[rd] (via `hi`).
   NOTE(review): i32 temps freed with tcg_temp_free instead of
   tcg_temp_free_i32 (cf. the SPARC64 variant) — confirm. */
1905 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1907 TCGv_i32 r_asi, r_size, r_sign;
1909 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1910 r_size = tcg_const_i32(8);
1911 r_sign = tcg_const_i32(0);
1912 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1913 tcg_temp_free(r_sign);
1914 tcg_temp_free(r_size);
1915 tcg_temp_free(r_asi);
1916 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1917 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1918 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1919 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1920 gen_movl_TN_reg(rd, hi);
/* V8 STDA with ASI: concatenate reg[rd+1] (low) with hi and store 8 bytes.
   NOTE(review): i32 temps freed with tcg_temp_free instead of
   tcg_temp_free_i32 (cf. the SPARC64 variant) — confirm. */
1923 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1925 TCGv_i32 r_asi, r_size;
1927 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1928 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1929 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1930 r_size = tcg_const_i32(8);
1931 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1932 tcg_temp_free(r_size);
1933 tcg_temp_free(r_asi);
1937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUB with ASI: load the byte at addr into dst, then store 0xff to
   the same location (the classic SPARC test-and-set primitive). */
1938 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1941 TCGv_i32 r_asi, r_size;
1943 gen_ld_asi(dst, addr, insn, 1, 0);
1945 r_val = tcg_const_i64(0xffULL);
1946 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1947 r_size = tcg_const_i32(1);
1948 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1949 tcg_temp_free_i32(r_size);
1950 tcg_temp_free_i32(r_asi);
1951 tcg_temp_free_i64(r_val);
/* Resolve the rs1 operand of an instruction to a TCGv: %g0 becomes a
   constant 0, %g1-%g7 alias the global register TCGvs, and windowed
   registers (>= 8) are loaded through cpu_regwptr into `def`. */
1955 static inline TCGv get_src1(unsigned int insn, TCGv def)
1960 rs1 = GET_FIELD(insn, 13, 17);
1962 r_rs1 = tcg_const_tl(0); // XXX how to free?
1964 r_rs1 = cpu_gregs[rs1];
1966 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Resolve the second operand: either a sign-extended 13-bit immediate
   (IS_IMM form) or register rs2, with the same %g0/global/windowed
   handling as get_src1. */
1970 static inline TCGv get_src2(unsigned int insn, TCGv def)
1974 if (IS_IMM) { /* immediate */
1977 simm = GET_FIELDs(insn, 19, 31);
1978 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1979 } else { /* register */
1982 rs2 = GET_FIELD(insn, 27, 31);
1984 r_rs2 = tcg_const_tl(0); // XXX how to free?
1986 r_rs2 = cpu_gregs[rs2];
1988 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* Bail out of the current instruction (goto the illegal/nfpu label) when
   the CPU model lacks the named integer-unit / FPU feature. */
1993 #define CHECK_IU_FEATURE(dc, FEATURE) \
1994 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1996 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1997 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2000 /* before an instruction, dc->pc must be static */
2001 static void disas_sparc_insn(DisasContext * dc)
2003 unsigned int insn, opc, rs1, rs2, rd;
2006 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2007 tcg_gen_debug_insn_start(dc->pc);
2008 insn = ldl_code(dc->pc);
2009 opc = GET_FIELD(insn, 0, 1);
2011 rd = GET_FIELD(insn, 2, 6);
2013 cpu_src1 = tcg_temp_new(); // const
2014 cpu_src2 = tcg_temp_new(); // const
2017 case 0: /* branches/sethi */
2019 unsigned int xop = GET_FIELD(insn, 7, 9);
2022 #ifdef TARGET_SPARC64
2023 case 0x1: /* V9 BPcc */
2027 target = GET_FIELD_SP(insn, 0, 18);
2028 target = sign_extend(target, 18);
2030 cc = GET_FIELD_SP(insn, 20, 21);
2032 do_branch(dc, target, insn, 0, cpu_cond);
2034 do_branch(dc, target, insn, 1, cpu_cond);
2039 case 0x3: /* V9 BPr */
2041 target = GET_FIELD_SP(insn, 0, 13) |
2042 (GET_FIELD_SP(insn, 20, 21) << 14);
2043 target = sign_extend(target, 16);
2045 cpu_src1 = get_src1(insn, cpu_src1);
2046 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2049 case 0x5: /* V9 FBPcc */
2051 int cc = GET_FIELD_SP(insn, 20, 21);
2052 if (gen_trap_ifnofpu(dc, cpu_cond))
2054 target = GET_FIELD_SP(insn, 0, 18);
2055 target = sign_extend(target, 19);
2057 do_fbranch(dc, target, insn, cc, cpu_cond);
2061 case 0x7: /* CBN+x */
2066 case 0x2: /* BN+x */
2068 target = GET_FIELD(insn, 10, 31);
2069 target = sign_extend(target, 22);
2071 do_branch(dc, target, insn, 0, cpu_cond);
2074 case 0x6: /* FBN+x */
2076 if (gen_trap_ifnofpu(dc, cpu_cond))
2078 target = GET_FIELD(insn, 10, 31);
2079 target = sign_extend(target, 22);
2081 do_fbranch(dc, target, insn, 0, cpu_cond);
2084 case 0x4: /* SETHI */
2086 uint32_t value = GET_FIELD(insn, 10, 31);
2089 r_const = tcg_const_tl(value << 10);
2090 gen_movl_TN_reg(rd, r_const);
2091 tcg_temp_free(r_const);
2094 case 0x0: /* UNIMPL */
2103 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2106 r_const = tcg_const_tl(dc->pc);
2107 gen_movl_TN_reg(15, r_const);
2108 tcg_temp_free(r_const);
2110 gen_mov_pc_npc(dc, cpu_cond);
2114 case 2: /* FPU & Logical Operations */
2116 unsigned int xop = GET_FIELD(insn, 7, 12);
2117 if (xop == 0x3a) { /* generate trap */
2120 cpu_src1 = get_src1(insn, cpu_src1);
2122 rs2 = GET_FIELD(insn, 25, 31);
2123 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2125 rs2 = GET_FIELD(insn, 27, 31);
2127 gen_movl_reg_TN(rs2, cpu_src2);
2128 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2130 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2132 cond = GET_FIELD(insn, 3, 6);
2134 save_state(dc, cpu_cond);
2135 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2137 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2139 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2140 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2141 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2142 gen_helper_raise_exception(cpu_tmp32);
2143 } else if (cond != 0) {
2144 TCGv r_cond = tcg_temp_new();
2146 #ifdef TARGET_SPARC64
2148 int cc = GET_FIELD_SP(insn, 11, 12);
2150 save_state(dc, cpu_cond);
2152 gen_cond(r_cond, 0, cond, dc);
2154 gen_cond(r_cond, 1, cond, dc);
2158 save_state(dc, cpu_cond);
2159 gen_cond(r_cond, 0, cond, dc);
2161 l1 = gen_new_label();
2162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2164 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2166 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2168 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2169 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2170 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2171 gen_helper_raise_exception(cpu_tmp32);
2174 tcg_temp_free(r_cond);
2180 } else if (xop == 0x28) {
2181 rs1 = GET_FIELD(insn, 13, 17);
2184 #ifndef TARGET_SPARC64
2185 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2186 manual, rdy on the microSPARC
2188 case 0x0f: /* stbar in the SPARCv8 manual,
2189 rdy on the microSPARC II */
2190 case 0x10 ... 0x1f: /* implementation-dependent in the
2191 SPARCv8 manual, rdy on the
2194 gen_movl_TN_reg(rd, cpu_y);
2196 #ifdef TARGET_SPARC64
2197 case 0x2: /* V9 rdccr */
2198 gen_helper_compute_psr();
2199 gen_helper_rdccr(cpu_dst);
2200 gen_movl_TN_reg(rd, cpu_dst);
2202 case 0x3: /* V9 rdasi */
2203 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2204 gen_movl_TN_reg(rd, cpu_dst);
2206 case 0x4: /* V9 rdtick */
2210 r_tickptr = tcg_temp_new_ptr();
2211 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2212 offsetof(CPUState, tick));
2213 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2214 tcg_temp_free_ptr(r_tickptr);
2215 gen_movl_TN_reg(rd, cpu_dst);
2218 case 0x5: /* V9 rdpc */
2222 r_const = tcg_const_tl(dc->pc);
2223 gen_movl_TN_reg(rd, r_const);
2224 tcg_temp_free(r_const);
2227 case 0x6: /* V9 rdfprs */
2228 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2229 gen_movl_TN_reg(rd, cpu_dst);
2231 case 0xf: /* V9 membar */
2232 break; /* no effect */
2233 case 0x13: /* Graphics Status */
2234 if (gen_trap_ifnofpu(dc, cpu_cond))
2236 gen_movl_TN_reg(rd, cpu_gsr);
2238 case 0x16: /* Softint */
2239 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2240 gen_movl_TN_reg(rd, cpu_dst);
2242 case 0x17: /* Tick compare */
2243 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2245 case 0x18: /* System tick */
2249 r_tickptr = tcg_temp_new_ptr();
2250 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2251 offsetof(CPUState, stick));
2252 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2253 tcg_temp_free_ptr(r_tickptr);
2254 gen_movl_TN_reg(rd, cpu_dst);
2257 case 0x19: /* System tick compare */
2258 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2260 case 0x10: /* Performance Control */
2261 case 0x11: /* Performance Instrumentation Counter */
2262 case 0x12: /* Dispatch Control */
2263 case 0x14: /* Softint set, WO */
2264 case 0x15: /* Softint clear, WO */
2269 #if !defined(CONFIG_USER_ONLY)
2270 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2271 #ifndef TARGET_SPARC64
2272 if (!supervisor(dc))
2274 gen_helper_compute_psr();
2275 dc->cc_op = CC_OP_FLAGS;
2276 gen_helper_rdpsr(cpu_dst);
2278 CHECK_IU_FEATURE(dc, HYPV);
2279 if (!hypervisor(dc))
2281 rs1 = GET_FIELD(insn, 13, 17);
2284 // gen_op_rdhpstate();
2287 // gen_op_rdhtstate();
2290 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2293 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2296 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2298 case 31: // hstick_cmpr
2299 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2305 gen_movl_TN_reg(rd, cpu_dst);
2307 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2308 if (!supervisor(dc))
2310 #ifdef TARGET_SPARC64
2311 rs1 = GET_FIELD(insn, 13, 17);
2317 r_tsptr = tcg_temp_new_ptr();
2318 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2319 offsetof(CPUState, tsptr));
2320 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2321 offsetof(trap_state, tpc));
2322 tcg_temp_free_ptr(r_tsptr);
2329 r_tsptr = tcg_temp_new_ptr();
2330 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2331 offsetof(CPUState, tsptr));
2332 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2333 offsetof(trap_state, tnpc));
2334 tcg_temp_free_ptr(r_tsptr);
2341 r_tsptr = tcg_temp_new_ptr();
2342 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2343 offsetof(CPUState, tsptr));
2344 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2345 offsetof(trap_state, tstate));
2346 tcg_temp_free_ptr(r_tsptr);
2353 r_tsptr = tcg_temp_new_ptr();
2354 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2355 offsetof(CPUState, tsptr));
2356 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2357 offsetof(trap_state, tt));
2358 tcg_temp_free_ptr(r_tsptr);
2359 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2366 r_tickptr = tcg_temp_new_ptr();
2367 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2368 offsetof(CPUState, tick));
2369 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2370 gen_movl_TN_reg(rd, cpu_tmp0);
2371 tcg_temp_free_ptr(r_tickptr);
2375 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2378 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2379 offsetof(CPUSPARCState, pstate));
2380 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2383 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2384 offsetof(CPUSPARCState, tl));
2385 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2388 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2389 offsetof(CPUSPARCState, psrpil));
2390 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2393 gen_helper_rdcwp(cpu_tmp0);
2396 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2397 offsetof(CPUSPARCState, cansave));
2398 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2400 case 11: // canrestore
2401 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2402 offsetof(CPUSPARCState, canrestore));
2403 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2405 case 12: // cleanwin
2406 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2407 offsetof(CPUSPARCState, cleanwin));
2408 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2410 case 13: // otherwin
2411 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2412 offsetof(CPUSPARCState, otherwin));
2413 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2416 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2417 offsetof(CPUSPARCState, wstate));
2418 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2420 case 16: // UA2005 gl
2421 CHECK_IU_FEATURE(dc, GL);
2422 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2423 offsetof(CPUSPARCState, gl));
2424 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2426 case 26: // UA2005 strand status
2427 CHECK_IU_FEATURE(dc, HYPV);
2428 if (!hypervisor(dc))
2430 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2433 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2440 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2442 gen_movl_TN_reg(rd, cpu_tmp0);
2444 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2445 #ifdef TARGET_SPARC64
2446 save_state(dc, cpu_cond);
2447 gen_helper_flushw();
2449 if (!supervisor(dc))
2451 gen_movl_TN_reg(rd, cpu_tbr);
2455 } else if (xop == 0x34) { /* FPU Operations */
2456 if (gen_trap_ifnofpu(dc, cpu_cond))
2458 gen_op_clear_ieee_excp_and_FTT();
2459 rs1 = GET_FIELD(insn, 13, 17);
2460 rs2 = GET_FIELD(insn, 27, 31);
2461 xop = GET_FIELD(insn, 18, 26);
2463 case 0x1: /* fmovs */
2464 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2466 case 0x5: /* fnegs */
2467 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2469 case 0x9: /* fabss */
2470 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2472 case 0x29: /* fsqrts */
2473 CHECK_FPU_FEATURE(dc, FSQRT);
2474 gen_clear_float_exceptions();
2475 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2476 gen_helper_check_ieee_exceptions();
2477 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2479 case 0x2a: /* fsqrtd */
2480 CHECK_FPU_FEATURE(dc, FSQRT);
2481 gen_op_load_fpr_DT1(DFPREG(rs2));
2482 gen_clear_float_exceptions();
2483 gen_helper_fsqrtd();
2484 gen_helper_check_ieee_exceptions();
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2487 case 0x2b: /* fsqrtq */
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 gen_op_load_fpr_QT1(QFPREG(rs2));
2490 gen_clear_float_exceptions();
2491 gen_helper_fsqrtq();
2492 gen_helper_check_ieee_exceptions();
2493 gen_op_store_QT0_fpr(QFPREG(rd));
2495 case 0x41: /* fadds */
2496 gen_clear_float_exceptions();
2497 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2498 gen_helper_check_ieee_exceptions();
2499 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2501 case 0x42: /* faddd */
2502 gen_op_load_fpr_DT0(DFPREG(rs1));
2503 gen_op_load_fpr_DT1(DFPREG(rs2));
2504 gen_clear_float_exceptions();
2506 gen_helper_check_ieee_exceptions();
2507 gen_op_store_DT0_fpr(DFPREG(rd));
2509 case 0x43: /* faddq */
2510 CHECK_FPU_FEATURE(dc, FLOAT128);
2511 gen_op_load_fpr_QT0(QFPREG(rs1));
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2515 gen_helper_check_ieee_exceptions();
2516 gen_op_store_QT0_fpr(QFPREG(rd));
2518 case 0x45: /* fsubs */
2519 gen_clear_float_exceptions();
2520 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2521 gen_helper_check_ieee_exceptions();
2522 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2524 case 0x46: /* fsubd */
2525 gen_op_load_fpr_DT0(DFPREG(rs1));
2526 gen_op_load_fpr_DT1(DFPREG(rs2));
2527 gen_clear_float_exceptions();
2529 gen_helper_check_ieee_exceptions();
2530 gen_op_store_DT0_fpr(DFPREG(rd));
2532 case 0x47: /* fsubq */
2533 CHECK_FPU_FEATURE(dc, FLOAT128);
2534 gen_op_load_fpr_QT0(QFPREG(rs1));
2535 gen_op_load_fpr_QT1(QFPREG(rs2));
2536 gen_clear_float_exceptions();
2538 gen_helper_check_ieee_exceptions();
2539 gen_op_store_QT0_fpr(QFPREG(rd));
2541 case 0x49: /* fmuls */
2542 CHECK_FPU_FEATURE(dc, FMUL);
2543 gen_clear_float_exceptions();
2544 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2548 case 0x4a: /* fmuld */
2549 CHECK_FPU_FEATURE(dc, FMUL);
2550 gen_op_load_fpr_DT0(DFPREG(rs1));
2551 gen_op_load_fpr_DT1(DFPREG(rs2));
2552 gen_clear_float_exceptions();
2554 gen_helper_check_ieee_exceptions();
2555 gen_op_store_DT0_fpr(DFPREG(rd));
2557 case 0x4b: /* fmulq */
2558 CHECK_FPU_FEATURE(dc, FLOAT128);
2559 CHECK_FPU_FEATURE(dc, FMUL);
2560 gen_op_load_fpr_QT0(QFPREG(rs1));
2561 gen_op_load_fpr_QT1(QFPREG(rs2));
2562 gen_clear_float_exceptions();
2564 gen_helper_check_ieee_exceptions();
2565 gen_op_store_QT0_fpr(QFPREG(rd));
2567 case 0x4d: /* fdivs */
2568 gen_clear_float_exceptions();
2569 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2570 gen_helper_check_ieee_exceptions();
2571 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2573 case 0x4e: /* fdivd */
2574 gen_op_load_fpr_DT0(DFPREG(rs1));
2575 gen_op_load_fpr_DT1(DFPREG(rs2));
2576 gen_clear_float_exceptions();
2578 gen_helper_check_ieee_exceptions();
2579 gen_op_store_DT0_fpr(DFPREG(rd));
2581 case 0x4f: /* fdivq */
2582 CHECK_FPU_FEATURE(dc, FLOAT128);
2583 gen_op_load_fpr_QT0(QFPREG(rs1));
2584 gen_op_load_fpr_QT1(QFPREG(rs2));
2585 gen_clear_float_exceptions();
2587 gen_helper_check_ieee_exceptions();
2588 gen_op_store_QT0_fpr(QFPREG(rd));
2590 case 0x69: /* fsmuld */
2591 CHECK_FPU_FEATURE(dc, FSMULD);
2592 gen_clear_float_exceptions();
2593 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2594 gen_helper_check_ieee_exceptions();
2595 gen_op_store_DT0_fpr(DFPREG(rd));
2597 case 0x6e: /* fdmulq */
2598 CHECK_FPU_FEATURE(dc, FLOAT128);
2599 gen_op_load_fpr_DT0(DFPREG(rs1));
2600 gen_op_load_fpr_DT1(DFPREG(rs2));
2601 gen_clear_float_exceptions();
2602 gen_helper_fdmulq();
2603 gen_helper_check_ieee_exceptions();
2604 gen_op_store_QT0_fpr(QFPREG(rd));
2606 case 0xc4: /* fitos */
2607 gen_clear_float_exceptions();
2608 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2609 gen_helper_check_ieee_exceptions();
2610 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2612 case 0xc6: /* fdtos */
2613 gen_op_load_fpr_DT1(DFPREG(rs2));
2614 gen_clear_float_exceptions();
2615 gen_helper_fdtos(cpu_tmp32);
2616 gen_helper_check_ieee_exceptions();
2617 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2619 case 0xc7: /* fqtos */
2620 CHECK_FPU_FEATURE(dc, FLOAT128);
2621 gen_op_load_fpr_QT1(QFPREG(rs2));
2622 gen_clear_float_exceptions();
2623 gen_helper_fqtos(cpu_tmp32);
2624 gen_helper_check_ieee_exceptions();
2625 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2627 case 0xc8: /* fitod */
2628 gen_helper_fitod(cpu_fpr[rs2]);
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2631 case 0xc9: /* fstod */
2632 gen_helper_fstod(cpu_fpr[rs2]);
2633 gen_op_store_DT0_fpr(DFPREG(rd));
2635 case 0xcb: /* fqtod */
2636 CHECK_FPU_FEATURE(dc, FLOAT128);
2637 gen_op_load_fpr_QT1(QFPREG(rs2));
2638 gen_clear_float_exceptions();
2640 gen_helper_check_ieee_exceptions();
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2643 case 0xcc: /* fitoq */
2644 CHECK_FPU_FEATURE(dc, FLOAT128);
2645 gen_helper_fitoq(cpu_fpr[rs2]);
2646 gen_op_store_QT0_fpr(QFPREG(rd));
2648 case 0xcd: /* fstoq */
2649 CHECK_FPU_FEATURE(dc, FLOAT128);
2650 gen_helper_fstoq(cpu_fpr[rs2]);
2651 gen_op_store_QT0_fpr(QFPREG(rd));
2653 case 0xce: /* fdtoq */
2654 CHECK_FPU_FEATURE(dc, FLOAT128);
2655 gen_op_load_fpr_DT1(DFPREG(rs2));
2657 gen_op_store_QT0_fpr(QFPREG(rd));
2659 case 0xd1: /* fstoi */
2660 gen_clear_float_exceptions();
2661 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2662 gen_helper_check_ieee_exceptions();
2663 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2665 case 0xd2: /* fdtoi */
2666 gen_op_load_fpr_DT1(DFPREG(rs2));
2667 gen_clear_float_exceptions();
2668 gen_helper_fdtoi(cpu_tmp32);
2669 gen_helper_check_ieee_exceptions();
2670 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2672 case 0xd3: /* fqtoi */
2673 CHECK_FPU_FEATURE(dc, FLOAT128);
2674 gen_op_load_fpr_QT1(QFPREG(rs2));
2675 gen_clear_float_exceptions();
2676 gen_helper_fqtoi(cpu_tmp32);
2677 gen_helper_check_ieee_exceptions();
2678 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2680 #ifdef TARGET_SPARC64
2681 case 0x2: /* V9 fmovd */
2682 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2683 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2684 cpu_fpr[DFPREG(rs2) + 1]);
2686 case 0x3: /* V9 fmovq */
2687 CHECK_FPU_FEATURE(dc, FLOAT128);
2688 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2689 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2690 cpu_fpr[QFPREG(rs2) + 1]);
2691 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2692 cpu_fpr[QFPREG(rs2) + 2]);
2693 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2694 cpu_fpr[QFPREG(rs2) + 3]);
2696 case 0x6: /* V9 fnegd */
2697 gen_op_load_fpr_DT1(DFPREG(rs2));
2699 gen_op_store_DT0_fpr(DFPREG(rd));
2701 case 0x7: /* V9 fnegq */
2702 CHECK_FPU_FEATURE(dc, FLOAT128);
2703 gen_op_load_fpr_QT1(QFPREG(rs2));
2705 gen_op_store_QT0_fpr(QFPREG(rd));
2707 case 0xa: /* V9 fabsd */
2708 gen_op_load_fpr_DT1(DFPREG(rs2));
2710 gen_op_store_DT0_fpr(DFPREG(rd));
2712 case 0xb: /* V9 fabsq */
2713 CHECK_FPU_FEATURE(dc, FLOAT128);
2714 gen_op_load_fpr_QT1(QFPREG(rs2));
2716 gen_op_store_QT0_fpr(QFPREG(rd));
2718 case 0x81: /* V9 fstox */
2719 gen_clear_float_exceptions();
2720 gen_helper_fstox(cpu_fpr[rs2]);
2721 gen_helper_check_ieee_exceptions();
2722 gen_op_store_DT0_fpr(DFPREG(rd));
2724 case 0x82: /* V9 fdtox */
2725 gen_op_load_fpr_DT1(DFPREG(rs2));
2726 gen_clear_float_exceptions();
2728 gen_helper_check_ieee_exceptions();
2729 gen_op_store_DT0_fpr(DFPREG(rd));
2731 case 0x83: /* V9 fqtox */
2732 CHECK_FPU_FEATURE(dc, FLOAT128);
2733 gen_op_load_fpr_QT1(QFPREG(rs2));
2734 gen_clear_float_exceptions();
2736 gen_helper_check_ieee_exceptions();
2737 gen_op_store_DT0_fpr(DFPREG(rd));
2739 case 0x84: /* V9 fxtos */
2740 gen_op_load_fpr_DT1(DFPREG(rs2));
2741 gen_clear_float_exceptions();
2742 gen_helper_fxtos(cpu_tmp32);
2743 gen_helper_check_ieee_exceptions();
2744 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2746 case 0x88: /* V9 fxtod */
2747 gen_op_load_fpr_DT1(DFPREG(rs2));
2748 gen_clear_float_exceptions();
2750 gen_helper_check_ieee_exceptions();
2751 gen_op_store_DT0_fpr(DFPREG(rd));
2753 case 0x8c: /* V9 fxtoq */
2754 CHECK_FPU_FEATURE(dc, FLOAT128);
2755 gen_op_load_fpr_DT1(DFPREG(rs2));
2756 gen_clear_float_exceptions();
2758 gen_helper_check_ieee_exceptions();
2759 gen_op_store_QT0_fpr(QFPREG(rd));
2765 } else if (xop == 0x35) { /* FPU Operations */
2766 #ifdef TARGET_SPARC64
2769 if (gen_trap_ifnofpu(dc, cpu_cond))
2771 gen_op_clear_ieee_excp_and_FTT();
2772 rs1 = GET_FIELD(insn, 13, 17);
2773 rs2 = GET_FIELD(insn, 27, 31);
2774 xop = GET_FIELD(insn, 18, 26);
2775 #ifdef TARGET_SPARC64
2776 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2779 l1 = gen_new_label();
2780 cond = GET_FIELD_SP(insn, 14, 17);
2781 cpu_src1 = get_src1(insn, cpu_src1);
2782 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2784 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2787 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2790 l1 = gen_new_label();
2791 cond = GET_FIELD_SP(insn, 14, 17);
2792 cpu_src1 = get_src1(insn, cpu_src1);
2793 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2795 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2796 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2799 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2802 CHECK_FPU_FEATURE(dc, FLOAT128);
2803 l1 = gen_new_label();
2804 cond = GET_FIELD_SP(insn, 14, 17);
2805 cpu_src1 = get_src1(insn, cpu_src1);
2806 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2808 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2809 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2810 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2811 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2817 #ifdef TARGET_SPARC64
2818 #define FMOVSCC(fcc) \
2823 l1 = gen_new_label(); \
2824 r_cond = tcg_temp_new(); \
2825 cond = GET_FIELD_SP(insn, 14, 17); \
2826 gen_fcond(r_cond, fcc, cond); \
2827 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2829 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2830 gen_set_label(l1); \
2831 tcg_temp_free(r_cond); \
2833 #define FMOVDCC(fcc) \
2838 l1 = gen_new_label(); \
2839 r_cond = tcg_temp_new(); \
2840 cond = GET_FIELD_SP(insn, 14, 17); \
2841 gen_fcond(r_cond, fcc, cond); \
2842 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2844 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2845 cpu_fpr[DFPREG(rs2)]); \
2846 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2847 cpu_fpr[DFPREG(rs2) + 1]); \
2848 gen_set_label(l1); \
2849 tcg_temp_free(r_cond); \
2851 #define FMOVQCC(fcc) \
2856 l1 = gen_new_label(); \
2857 r_cond = tcg_temp_new(); \
2858 cond = GET_FIELD_SP(insn, 14, 17); \
2859 gen_fcond(r_cond, fcc, cond); \
2860 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2862 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2863 cpu_fpr[QFPREG(rs2)]); \
2864 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2865 cpu_fpr[QFPREG(rs2) + 1]); \
2866 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2867 cpu_fpr[QFPREG(rs2) + 2]); \
2868 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2869 cpu_fpr[QFPREG(rs2) + 3]); \
2870 gen_set_label(l1); \
2871 tcg_temp_free(r_cond); \
2873 case 0x001: /* V9 fmovscc %fcc0 */
2876 case 0x002: /* V9 fmovdcc %fcc0 */
2879 case 0x003: /* V9 fmovqcc %fcc0 */
2880 CHECK_FPU_FEATURE(dc, FLOAT128);
2883 case 0x041: /* V9 fmovscc %fcc1 */
2886 case 0x042: /* V9 fmovdcc %fcc1 */
2889 case 0x043: /* V9 fmovqcc %fcc1 */
2890 CHECK_FPU_FEATURE(dc, FLOAT128);
2893 case 0x081: /* V9 fmovscc %fcc2 */
2896 case 0x082: /* V9 fmovdcc %fcc2 */
2899 case 0x083: /* V9 fmovqcc %fcc2 */
2900 CHECK_FPU_FEATURE(dc, FLOAT128);
2903 case 0x0c1: /* V9 fmovscc %fcc3 */
2906 case 0x0c2: /* V9 fmovdcc %fcc3 */
2909 case 0x0c3: /* V9 fmovqcc %fcc3 */
2910 CHECK_FPU_FEATURE(dc, FLOAT128);
2916 #define FMOVSCC(icc) \
2921 l1 = gen_new_label(); \
2922 r_cond = tcg_temp_new(); \
2923 cond = GET_FIELD_SP(insn, 14, 17); \
2924 gen_cond(r_cond, icc, cond, dc); \
2925 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2927 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2928 gen_set_label(l1); \
2929 tcg_temp_free(r_cond); \
2931 #define FMOVDCC(icc) \
2936 l1 = gen_new_label(); \
2937 r_cond = tcg_temp_new(); \
2938 cond = GET_FIELD_SP(insn, 14, 17); \
2939 gen_cond(r_cond, icc, cond, dc); \
2940 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2942 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2943 cpu_fpr[DFPREG(rs2)]); \
2944 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2945 cpu_fpr[DFPREG(rs2) + 1]); \
2946 gen_set_label(l1); \
2947 tcg_temp_free(r_cond); \
2949 #define FMOVQCC(icc) \
2954 l1 = gen_new_label(); \
2955 r_cond = tcg_temp_new(); \
2956 cond = GET_FIELD_SP(insn, 14, 17); \
2957 gen_cond(r_cond, icc, cond, dc); \
2958 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2960 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2961 cpu_fpr[QFPREG(rs2)]); \
2962 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2963 cpu_fpr[QFPREG(rs2) + 1]); \
2964 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2965 cpu_fpr[QFPREG(rs2) + 2]); \
2966 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2967 cpu_fpr[QFPREG(rs2) + 3]); \
2968 gen_set_label(l1); \
2969 tcg_temp_free(r_cond); \
2972 case 0x101: /* V9 fmovscc %icc */
2975 case 0x102: /* V9 fmovdcc %icc */
2977 case 0x103: /* V9 fmovqcc %icc */
2978 CHECK_FPU_FEATURE(dc, FLOAT128);
2981 case 0x181: /* V9 fmovscc %xcc */
2984 case 0x182: /* V9 fmovdcc %xcc */
2987 case 0x183: /* V9 fmovqcc %xcc */
2988 CHECK_FPU_FEATURE(dc, FLOAT128);
2995 case 0x51: /* fcmps, V9 %fcc */
2996 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2998 case 0x52: /* fcmpd, V9 %fcc */
2999 gen_op_load_fpr_DT0(DFPREG(rs1));
3000 gen_op_load_fpr_DT1(DFPREG(rs2));
3001 gen_op_fcmpd(rd & 3);
3003 case 0x53: /* fcmpq, V9 %fcc */
3004 CHECK_FPU_FEATURE(dc, FLOAT128);
3005 gen_op_load_fpr_QT0(QFPREG(rs1));
3006 gen_op_load_fpr_QT1(QFPREG(rs2));
3007 gen_op_fcmpq(rd & 3);
3009 case 0x55: /* fcmpes, V9 %fcc */
3010 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3012 case 0x56: /* fcmped, V9 %fcc */
3013 gen_op_load_fpr_DT0(DFPREG(rs1));
3014 gen_op_load_fpr_DT1(DFPREG(rs2));
3015 gen_op_fcmped(rd & 3);
3017 case 0x57: /* fcmpeq, V9 %fcc */
3018 CHECK_FPU_FEATURE(dc, FLOAT128);
3019 gen_op_load_fpr_QT0(QFPREG(rs1));
3020 gen_op_load_fpr_QT1(QFPREG(rs2));
3021 gen_op_fcmpeq(rd & 3);
3026 } else if (xop == 0x2) {
3029 rs1 = GET_FIELD(insn, 13, 17);
3031 // or %g0, x, y -> mov T0, x; mov y, T0
3032 if (IS_IMM) { /* immediate */
3035 simm = GET_FIELDs(insn, 19, 31);
3036 r_const = tcg_const_tl(simm);
3037 gen_movl_TN_reg(rd, r_const);
3038 tcg_temp_free(r_const);
3039 } else { /* register */
3040 rs2 = GET_FIELD(insn, 27, 31);
3041 gen_movl_reg_TN(rs2, cpu_dst);
3042 gen_movl_TN_reg(rd, cpu_dst);
3045 cpu_src1 = get_src1(insn, cpu_src1);
3046 if (IS_IMM) { /* immediate */
3047 simm = GET_FIELDs(insn, 19, 31);
3048 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3049 gen_movl_TN_reg(rd, cpu_dst);
3050 } else { /* register */
3051 // or x, %g0, y -> mov T1, x; mov y, T1
3052 rs2 = GET_FIELD(insn, 27, 31);
3054 gen_movl_reg_TN(rs2, cpu_src2);
3055 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3056 gen_movl_TN_reg(rd, cpu_dst);
3058 gen_movl_TN_reg(rd, cpu_src1);
3061 #ifdef TARGET_SPARC64
3062 } else if (xop == 0x25) { /* sll, V9 sllx */
3063 cpu_src1 = get_src1(insn, cpu_src1);
3064 if (IS_IMM) { /* immediate */
3065 simm = GET_FIELDs(insn, 20, 31);
3066 if (insn & (1 << 12)) {
3067 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3069 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3071 } else { /* register */
3072 rs2 = GET_FIELD(insn, 27, 31);
3073 gen_movl_reg_TN(rs2, cpu_src2);
3074 if (insn & (1 << 12)) {
3075 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3077 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3079 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3081 gen_movl_TN_reg(rd, cpu_dst);
3082 } else if (xop == 0x26) { /* srl, V9 srlx */
3083 cpu_src1 = get_src1(insn, cpu_src1);
3084 if (IS_IMM) { /* immediate */
3085 simm = GET_FIELDs(insn, 20, 31);
3086 if (insn & (1 << 12)) {
3087 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3089 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3090 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3092 } else { /* register */
3093 rs2 = GET_FIELD(insn, 27, 31);
3094 gen_movl_reg_TN(rs2, cpu_src2);
3095 if (insn & (1 << 12)) {
3096 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3097 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3099 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3100 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3101 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3104 gen_movl_TN_reg(rd, cpu_dst);
3105 } else if (xop == 0x27) { /* sra, V9 srax */
3106 cpu_src1 = get_src1(insn, cpu_src1);
3107 if (IS_IMM) { /* immediate */
3108 simm = GET_FIELDs(insn, 20, 31);
3109 if (insn & (1 << 12)) {
3110 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3112 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3113 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3114 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3116 } else { /* register */
3117 rs2 = GET_FIELD(insn, 27, 31);
3118 gen_movl_reg_TN(rs2, cpu_src2);
3119 if (insn & (1 << 12)) {
3120 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3121 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3123 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3124 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3125 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3126 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3129 gen_movl_TN_reg(rd, cpu_dst);
3131 } else if (xop < 0x36) {
3133 cpu_src1 = get_src1(insn, cpu_src1);
3134 cpu_src2 = get_src2(insn, cpu_src2);
3135 switch (xop & ~0x10) {
3138 simm = GET_FIELDs(insn, 19, 31);
3140 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3141 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3142 dc->cc_op = CC_OP_ADD;
3144 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3148 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3149 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3150 dc->cc_op = CC_OP_ADD;
3152 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3158 simm = GET_FIELDs(insn, 19, 31);
3159 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3161 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3164 gen_op_logic_cc(cpu_dst);
3165 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3166 dc->cc_op = CC_OP_FLAGS;
3171 simm = GET_FIELDs(insn, 19, 31);
3172 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3174 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3177 gen_op_logic_cc(cpu_dst);
3178 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3179 dc->cc_op = CC_OP_FLAGS;
3184 simm = GET_FIELDs(insn, 19, 31);
3185 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3187 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3190 gen_op_logic_cc(cpu_dst);
3191 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3192 dc->cc_op = CC_OP_FLAGS;
3197 simm = GET_FIELDs(insn, 19, 31);
3199 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3200 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3201 dc->cc_op = CC_OP_FLAGS;
3203 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3207 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3208 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3209 dc->cc_op = CC_OP_FLAGS;
3211 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3215 case 0x5: /* andn */
3217 simm = GET_FIELDs(insn, 19, 31);
3218 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3220 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3223 gen_op_logic_cc(cpu_dst);
3224 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3225 dc->cc_op = CC_OP_FLAGS;
3230 simm = GET_FIELDs(insn, 19, 31);
3231 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3233 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3236 gen_op_logic_cc(cpu_dst);
3237 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3238 dc->cc_op = CC_OP_FLAGS;
3241 case 0x7: /* xorn */
3243 simm = GET_FIELDs(insn, 19, 31);
3244 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3246 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3247 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3250 gen_op_logic_cc(cpu_dst);
3251 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3252 dc->cc_op = CC_OP_FLAGS;
3255 case 0x8: /* addx, V9 addc */
3257 simm = GET_FIELDs(insn, 19, 31);
3259 gen_helper_compute_psr();
3260 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3261 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3262 dc->cc_op = CC_OP_FLAGS;
3264 gen_helper_compute_psr();
3265 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3266 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3267 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3271 gen_helper_compute_psr();
3272 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3273 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3274 dc->cc_op = CC_OP_FLAGS;
3276 gen_helper_compute_psr();
3277 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3278 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3279 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3283 #ifdef TARGET_SPARC64
3284 case 0x9: /* V9 mulx */
3286 simm = GET_FIELDs(insn, 19, 31);
3287 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3289 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3293 case 0xa: /* umul */
3294 CHECK_IU_FEATURE(dc, MUL);
3295 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3297 gen_op_logic_cc(cpu_dst);
3298 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3299 dc->cc_op = CC_OP_FLAGS;
3302 case 0xb: /* smul */
3303 CHECK_IU_FEATURE(dc, MUL);
3304 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3306 gen_op_logic_cc(cpu_dst);
3307 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3308 dc->cc_op = CC_OP_FLAGS;
3311 case 0xc: /* subx, V9 subc */
3313 simm = GET_FIELDs(insn, 19, 31);
3315 gen_helper_compute_psr();
3316 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3317 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3318 dc->cc_op = CC_OP_FLAGS;
3320 gen_helper_compute_psr();
3321 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3322 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3323 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3327 gen_helper_compute_psr();
3328 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3329 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3330 dc->cc_op = CC_OP_FLAGS;
3332 gen_helper_compute_psr();
3333 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3334 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3335 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3339 #ifdef TARGET_SPARC64
3340 case 0xd: /* V9 udivx */
3341 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3342 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3343 gen_trap_ifdivzero_tl(cpu_cc_src2);
3344 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3347 case 0xe: /* udiv */
3348 CHECK_IU_FEATURE(dc, DIV);
3349 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3351 gen_op_div_cc(cpu_dst);
3352 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3353 dc->cc_op = CC_OP_FLAGS;
3356 case 0xf: /* sdiv */
3357 CHECK_IU_FEATURE(dc, DIV);
3358 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3360 gen_op_div_cc(cpu_dst);
3361 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3362 dc->cc_op = CC_OP_FLAGS;
3368 gen_movl_TN_reg(rd, cpu_dst);
3370 cpu_src1 = get_src1(insn, cpu_src1);
3371 cpu_src2 = get_src2(insn, cpu_src2);
3373 case 0x20: /* taddcc */
3374 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3375 gen_movl_TN_reg(rd, cpu_dst);
3376 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3377 dc->cc_op = CC_OP_FLAGS;
3379 case 0x21: /* tsubcc */
3380 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3381 gen_movl_TN_reg(rd, cpu_dst);
3382 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3383 dc->cc_op = CC_OP_FLAGS;
3385 case 0x22: /* taddcctv */
3386 save_state(dc, cpu_cond);
3387 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3388 gen_movl_TN_reg(rd, cpu_dst);
3389 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3390 dc->cc_op = CC_OP_FLAGS;
3392 case 0x23: /* tsubcctv */
3393 save_state(dc, cpu_cond);
3394 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3395 gen_movl_TN_reg(rd, cpu_dst);
3396 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3397 dc->cc_op = CC_OP_FLAGS;
3399 case 0x24: /* mulscc */
3400 gen_helper_compute_psr();
3401 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3402 gen_movl_TN_reg(rd, cpu_dst);
3403 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3404 dc->cc_op = CC_OP_FLAGS;
3406 #ifndef TARGET_SPARC64
3407 case 0x25: /* sll */
3408 if (IS_IMM) { /* immediate */
3409 simm = GET_FIELDs(insn, 20, 31);
3410 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3411 } else { /* register */
3412 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3413 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3415 gen_movl_TN_reg(rd, cpu_dst);
3417 case 0x26: /* srl */
3418 if (IS_IMM) { /* immediate */
3419 simm = GET_FIELDs(insn, 20, 31);
3420 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3421 } else { /* register */
3422 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3423 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3425 gen_movl_TN_reg(rd, cpu_dst);
3427 case 0x27: /* sra */
3428 if (IS_IMM) { /* immediate */
3429 simm = GET_FIELDs(insn, 20, 31);
3430 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3431 } else { /* register */
3432 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3433 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3435 gen_movl_TN_reg(rd, cpu_dst);
3442 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3443 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3445 #ifndef TARGET_SPARC64
3446 case 0x01 ... 0x0f: /* undefined in the
3450 case 0x10 ... 0x1f: /* implementation-dependent
3456 case 0x2: /* V9 wrccr */
3457 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3458 gen_helper_wrccr(cpu_dst);
3459 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3460 dc->cc_op = CC_OP_FLAGS;
3462 case 0x3: /* V9 wrasi */
3463 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3464 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3466 case 0x6: /* V9 wrfprs */
3467 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3468 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3469 save_state(dc, cpu_cond);
3474 case 0xf: /* V9 sir, nop if user */
3475 #if !defined(CONFIG_USER_ONLY)
3480 case 0x13: /* Graphics Status */
3481 if (gen_trap_ifnofpu(dc, cpu_cond))
3483 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3485 case 0x14: /* Softint set */
3486 if (!supervisor(dc))
3488 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3489 gen_helper_set_softint(cpu_tmp64);
3491 case 0x15: /* Softint clear */
3492 if (!supervisor(dc))
3494 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3495 gen_helper_clear_softint(cpu_tmp64);
3497 case 0x16: /* Softint write */
3498 if (!supervisor(dc))
3500 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3501 gen_helper_write_softint(cpu_tmp64);
3503 case 0x17: /* Tick compare */
3504 #if !defined(CONFIG_USER_ONLY)
3505 if (!supervisor(dc))
3511 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3513 r_tickptr = tcg_temp_new_ptr();
3514 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3515 offsetof(CPUState, tick));
3516 gen_helper_tick_set_limit(r_tickptr,
3518 tcg_temp_free_ptr(r_tickptr);
3521 case 0x18: /* System tick */
3522 #if !defined(CONFIG_USER_ONLY)
3523 if (!supervisor(dc))
3529 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3531 r_tickptr = tcg_temp_new_ptr();
3532 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3533 offsetof(CPUState, stick));
3534 gen_helper_tick_set_count(r_tickptr,
3536 tcg_temp_free_ptr(r_tickptr);
3539 case 0x19: /* System tick compare */
3540 #if !defined(CONFIG_USER_ONLY)
3541 if (!supervisor(dc))
3547 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3549 r_tickptr = tcg_temp_new_ptr();
3550 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3551 offsetof(CPUState, stick));
3552 gen_helper_tick_set_limit(r_tickptr,
3554 tcg_temp_free_ptr(r_tickptr);
3558 case 0x10: /* Performance Control */
3559 case 0x11: /* Performance Instrumentation
3561 case 0x12: /* Dispatch Control */
3568 #if !defined(CONFIG_USER_ONLY)
3569 case 0x31: /* wrpsr, V9 saved, restored */
3571 if (!supervisor(dc))
3573 #ifdef TARGET_SPARC64
3579 gen_helper_restored();
3581 case 2: /* UA2005 allclean */
3582 case 3: /* UA2005 otherw */
3583 case 4: /* UA2005 normalw */
3584 case 5: /* UA2005 invalw */
3590 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3591 gen_helper_wrpsr(cpu_dst);
3592 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3593 dc->cc_op = CC_OP_FLAGS;
3594 save_state(dc, cpu_cond);
3601 case 0x32: /* wrwim, V9 wrpr */
3603 if (!supervisor(dc))
3605 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3606 #ifdef TARGET_SPARC64
3612 r_tsptr = tcg_temp_new_ptr();
3613 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3614 offsetof(CPUState, tsptr));
3615 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3616 offsetof(trap_state, tpc));
3617 tcg_temp_free_ptr(r_tsptr);
3624 r_tsptr = tcg_temp_new_ptr();
3625 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3626 offsetof(CPUState, tsptr));
3627 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3628 offsetof(trap_state, tnpc));
3629 tcg_temp_free_ptr(r_tsptr);
3636 r_tsptr = tcg_temp_new_ptr();
3637 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3638 offsetof(CPUState, tsptr));
3639 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3640 offsetof(trap_state,
3642 tcg_temp_free_ptr(r_tsptr);
3649 r_tsptr = tcg_temp_new_ptr();
3650 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3651 offsetof(CPUState, tsptr));
3652 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3653 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3654 offsetof(trap_state, tt));
3655 tcg_temp_free_ptr(r_tsptr);
3662 r_tickptr = tcg_temp_new_ptr();
3663 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3664 offsetof(CPUState, tick));
3665 gen_helper_tick_set_count(r_tickptr,
3667 tcg_temp_free_ptr(r_tickptr);
3671 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3674 save_state(dc, cpu_cond);
3675 gen_helper_wrpstate(cpu_tmp0);
3681 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3682 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3683 offsetof(CPUSPARCState, tl));
3686 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3687 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3688 offsetof(CPUSPARCState,
3692 gen_helper_wrcwp(cpu_tmp0);
3695 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3696 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3697 offsetof(CPUSPARCState,
3700 case 11: // canrestore
3701 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3702 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3703 offsetof(CPUSPARCState,
3706 case 12: // cleanwin
3707 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3708 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3709 offsetof(CPUSPARCState,
3712 case 13: // otherwin
3713 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3714 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3715 offsetof(CPUSPARCState,
3719 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3720 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3721 offsetof(CPUSPARCState,
3724 case 16: // UA2005 gl
3725 CHECK_IU_FEATURE(dc, GL);
3726 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3727 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3728 offsetof(CPUSPARCState, gl));
3730 case 26: // UA2005 strand status
3731 CHECK_IU_FEATURE(dc, HYPV);
3732 if (!hypervisor(dc))
3734 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3740 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3741 if (dc->def->nwindows != 32)
3742 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3743 (1 << dc->def->nwindows) - 1);
3744 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3748 case 0x33: /* wrtbr, UA2005 wrhpr */
3750 #ifndef TARGET_SPARC64
3751 if (!supervisor(dc))
3753 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3755 CHECK_IU_FEATURE(dc, HYPV);
3756 if (!hypervisor(dc))
3758 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3761 // XXX gen_op_wrhpstate();
3762 save_state(dc, cpu_cond);
3768 // XXX gen_op_wrhtstate();
3771 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3774 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3776 case 31: // hstick_cmpr
3780 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3781 r_tickptr = tcg_temp_new_ptr();
3782 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3783 offsetof(CPUState, hstick));
3784 gen_helper_tick_set_limit(r_tickptr,
3786 tcg_temp_free_ptr(r_tickptr);
3789 case 6: // hver readonly
3797 #ifdef TARGET_SPARC64
3798 case 0x2c: /* V9 movcc */
3800 int cc = GET_FIELD_SP(insn, 11, 12);
3801 int cond = GET_FIELD_SP(insn, 14, 17);
3805 r_cond = tcg_temp_new();
3806 if (insn & (1 << 18)) {
3808 gen_cond(r_cond, 0, cond, dc);
3810 gen_cond(r_cond, 1, cond, dc);
3814 gen_fcond(r_cond, cc, cond);
3817 l1 = gen_new_label();
3819 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3820 if (IS_IMM) { /* immediate */
3823 simm = GET_FIELD_SPs(insn, 0, 10);
3824 r_const = tcg_const_tl(simm);
3825 gen_movl_TN_reg(rd, r_const);
3826 tcg_temp_free(r_const);
3828 rs2 = GET_FIELD_SP(insn, 0, 4);
3829 gen_movl_reg_TN(rs2, cpu_tmp0);
3830 gen_movl_TN_reg(rd, cpu_tmp0);
3833 tcg_temp_free(r_cond);
3836 case 0x2d: /* V9 sdivx */
3837 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3838 gen_movl_TN_reg(rd, cpu_dst);
3840 case 0x2e: /* V9 popc */
3842 cpu_src2 = get_src2(insn, cpu_src2);
3843 gen_helper_popc(cpu_dst, cpu_src2);
3844 gen_movl_TN_reg(rd, cpu_dst);
3846 case 0x2f: /* V9 movr */
3848 int cond = GET_FIELD_SP(insn, 10, 12);
3851 cpu_src1 = get_src1(insn, cpu_src1);
3853 l1 = gen_new_label();
3855 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3857 if (IS_IMM) { /* immediate */
3860 simm = GET_FIELD_SPs(insn, 0, 9);
3861 r_const = tcg_const_tl(simm);
3862 gen_movl_TN_reg(rd, r_const);
3863 tcg_temp_free(r_const);
3865 rs2 = GET_FIELD_SP(insn, 0, 4);
3866 gen_movl_reg_TN(rs2, cpu_tmp0);
3867 gen_movl_TN_reg(rd, cpu_tmp0);
3877 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3878 #ifdef TARGET_SPARC64
3879 int opf = GET_FIELD_SP(insn, 5, 13);
3880 rs1 = GET_FIELD(insn, 13, 17);
3881 rs2 = GET_FIELD(insn, 27, 31);
3882 if (gen_trap_ifnofpu(dc, cpu_cond))
3886 case 0x000: /* VIS I edge8cc */
3887 case 0x001: /* VIS II edge8n */
3888 case 0x002: /* VIS I edge8lcc */
3889 case 0x003: /* VIS II edge8ln */
3890 case 0x004: /* VIS I edge16cc */
3891 case 0x005: /* VIS II edge16n */
3892 case 0x006: /* VIS I edge16lcc */
3893 case 0x007: /* VIS II edge16ln */
3894 case 0x008: /* VIS I edge32cc */
3895 case 0x009: /* VIS II edge32n */
3896 case 0x00a: /* VIS I edge32lcc */
3897 case 0x00b: /* VIS II edge32ln */
3900 case 0x010: /* VIS I array8 */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 cpu_src1 = get_src1(insn, cpu_src1);
3903 gen_movl_reg_TN(rs2, cpu_src2);
3904 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3905 gen_movl_TN_reg(rd, cpu_dst);
3907 case 0x012: /* VIS I array16 */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 cpu_src1 = get_src1(insn, cpu_src1);
3910 gen_movl_reg_TN(rs2, cpu_src2);
3911 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3912 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3913 gen_movl_TN_reg(rd, cpu_dst);
3915 case 0x014: /* VIS I array32 */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 cpu_src1 = get_src1(insn, cpu_src1);
3918 gen_movl_reg_TN(rs2, cpu_src2);
3919 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3920 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3921 gen_movl_TN_reg(rd, cpu_dst);
3923 case 0x018: /* VIS I alignaddr */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 cpu_src1 = get_src1(insn, cpu_src1);
3926 gen_movl_reg_TN(rs2, cpu_src2);
3927 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3928 gen_movl_TN_reg(rd, cpu_dst);
3930 case 0x019: /* VIS II bmask */
3931 case 0x01a: /* VIS I alignaddrl */
3934 case 0x020: /* VIS I fcmple16 */
3935 CHECK_FPU_FEATURE(dc, VIS1);
3936 gen_op_load_fpr_DT0(DFPREG(rs1));
3937 gen_op_load_fpr_DT1(DFPREG(rs2));
3938 gen_helper_fcmple16();
3939 gen_op_store_DT0_fpr(DFPREG(rd));
3941 case 0x022: /* VIS I fcmpne16 */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 gen_op_load_fpr_DT0(DFPREG(rs1));
3944 gen_op_load_fpr_DT1(DFPREG(rs2));
3945 gen_helper_fcmpne16();
3946 gen_op_store_DT0_fpr(DFPREG(rd));
3948 case 0x024: /* VIS I fcmple32 */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 gen_op_load_fpr_DT0(DFPREG(rs1));
3951 gen_op_load_fpr_DT1(DFPREG(rs2));
3952 gen_helper_fcmple32();
3953 gen_op_store_DT0_fpr(DFPREG(rd));
3955 case 0x026: /* VIS I fcmpne32 */
3956 CHECK_FPU_FEATURE(dc, VIS1);
3957 gen_op_load_fpr_DT0(DFPREG(rs1));
3958 gen_op_load_fpr_DT1(DFPREG(rs2));
3959 gen_helper_fcmpne32();
3960 gen_op_store_DT0_fpr(DFPREG(rd));
3962 case 0x028: /* VIS I fcmpgt16 */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 gen_op_load_fpr_DT0(DFPREG(rs1));
3965 gen_op_load_fpr_DT1(DFPREG(rs2));
3966 gen_helper_fcmpgt16();
3967 gen_op_store_DT0_fpr(DFPREG(rd));
3969 case 0x02a: /* VIS I fcmpeq16 */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 gen_op_load_fpr_DT0(DFPREG(rs1));
3972 gen_op_load_fpr_DT1(DFPREG(rs2));
3973 gen_helper_fcmpeq16();
3974 gen_op_store_DT0_fpr(DFPREG(rd));
3976 case 0x02c: /* VIS I fcmpgt32 */
3977 CHECK_FPU_FEATURE(dc, VIS1);
3978 gen_op_load_fpr_DT0(DFPREG(rs1));
3979 gen_op_load_fpr_DT1(DFPREG(rs2));
3980 gen_helper_fcmpgt32();
3981 gen_op_store_DT0_fpr(DFPREG(rd));
3983 case 0x02e: /* VIS I fcmpeq32 */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 gen_op_load_fpr_DT0(DFPREG(rs1));
3986 gen_op_load_fpr_DT1(DFPREG(rs2));
3987 gen_helper_fcmpeq32();
3988 gen_op_store_DT0_fpr(DFPREG(rd));
3990 case 0x031: /* VIS I fmul8x16 */
3991 CHECK_FPU_FEATURE(dc, VIS1);
3992 gen_op_load_fpr_DT0(DFPREG(rs1));
3993 gen_op_load_fpr_DT1(DFPREG(rs2));
3994 gen_helper_fmul8x16();
3995 gen_op_store_DT0_fpr(DFPREG(rd));
3997 case 0x033: /* VIS I fmul8x16au */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 gen_op_load_fpr_DT0(DFPREG(rs1));
4000 gen_op_load_fpr_DT1(DFPREG(rs2));
4001 gen_helper_fmul8x16au();
4002 gen_op_store_DT0_fpr(DFPREG(rd));
4004 case 0x035: /* VIS I fmul8x16al */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 gen_op_load_fpr_DT0(DFPREG(rs1));
4007 gen_op_load_fpr_DT1(DFPREG(rs2));
4008 gen_helper_fmul8x16al();
4009 gen_op_store_DT0_fpr(DFPREG(rd));
4011 case 0x036: /* VIS I fmul8sux16 */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 gen_op_load_fpr_DT0(DFPREG(rs1));
4014 gen_op_load_fpr_DT1(DFPREG(rs2));
4015 gen_helper_fmul8sux16();
4016 gen_op_store_DT0_fpr(DFPREG(rd));
4018 case 0x037: /* VIS I fmul8ulx16 */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 gen_op_load_fpr_DT0(DFPREG(rs1));
4021 gen_op_load_fpr_DT1(DFPREG(rs2));
4022 gen_helper_fmul8ulx16();
4023 gen_op_store_DT0_fpr(DFPREG(rd));
4025 case 0x038: /* VIS I fmuld8sux16 */
4026 CHECK_FPU_FEATURE(dc, VIS1);
4027 gen_op_load_fpr_DT0(DFPREG(rs1));
4028 gen_op_load_fpr_DT1(DFPREG(rs2));
4029 gen_helper_fmuld8sux16();
4030 gen_op_store_DT0_fpr(DFPREG(rd));
4032 case 0x039: /* VIS I fmuld8ulx16 */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 gen_op_load_fpr_DT0(DFPREG(rs1));
4035 gen_op_load_fpr_DT1(DFPREG(rs2));
4036 gen_helper_fmuld8ulx16();
4037 gen_op_store_DT0_fpr(DFPREG(rd));
4039 case 0x03a: /* VIS I fpack32 */
4040 case 0x03b: /* VIS I fpack16 */
4041 case 0x03d: /* VIS I fpackfix */
4042 case 0x03e: /* VIS I pdist */
4045 case 0x048: /* VIS I faligndata */
4046 CHECK_FPU_FEATURE(dc, VIS1);
4047 gen_op_load_fpr_DT0(DFPREG(rs1));
4048 gen_op_load_fpr_DT1(DFPREG(rs2));
4049 gen_helper_faligndata();
4050 gen_op_store_DT0_fpr(DFPREG(rd));
4052 case 0x04b: /* VIS I fpmerge */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 gen_op_load_fpr_DT0(DFPREG(rs1));
4055 gen_op_load_fpr_DT1(DFPREG(rs2));
4056 gen_helper_fpmerge();
4057 gen_op_store_DT0_fpr(DFPREG(rd));
4059 case 0x04c: /* VIS II bshuffle */
4062 case 0x04d: /* VIS I fexpand */
4063 CHECK_FPU_FEATURE(dc, VIS1);
4064 gen_op_load_fpr_DT0(DFPREG(rs1));
4065 gen_op_load_fpr_DT1(DFPREG(rs2));
4066 gen_helper_fexpand();
4067 gen_op_store_DT0_fpr(DFPREG(rd));
4069 case 0x050: /* VIS I fpadd16 */
4070 CHECK_FPU_FEATURE(dc, VIS1);
4071 gen_op_load_fpr_DT0(DFPREG(rs1));
4072 gen_op_load_fpr_DT1(DFPREG(rs2));
4073 gen_helper_fpadd16();
4074 gen_op_store_DT0_fpr(DFPREG(rd));
4076 case 0x051: /* VIS I fpadd16s */
4077 CHECK_FPU_FEATURE(dc, VIS1);
4078 gen_helper_fpadd16s(cpu_fpr[rd],
4079 cpu_fpr[rs1], cpu_fpr[rs2]);
4081 case 0x052: /* VIS I fpadd32 */
4082 CHECK_FPU_FEATURE(dc, VIS1);
4083 gen_op_load_fpr_DT0(DFPREG(rs1));
4084 gen_op_load_fpr_DT1(DFPREG(rs2));
4085 gen_helper_fpadd32();
4086 gen_op_store_DT0_fpr(DFPREG(rd));
4088 case 0x053: /* VIS I fpadd32s */
4089 CHECK_FPU_FEATURE(dc, VIS1);
4090 gen_helper_fpadd32s(cpu_fpr[rd],
4091 cpu_fpr[rs1], cpu_fpr[rs2]);
4093 case 0x054: /* VIS I fpsub16 */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 gen_op_load_fpr_DT0(DFPREG(rs1));
4096 gen_op_load_fpr_DT1(DFPREG(rs2));
4097 gen_helper_fpsub16();
4098 gen_op_store_DT0_fpr(DFPREG(rd));
4100 case 0x055: /* VIS I fpsub16s */
4101 CHECK_FPU_FEATURE(dc, VIS1);
4102 gen_helper_fpsub16s(cpu_fpr[rd],
4103 cpu_fpr[rs1], cpu_fpr[rs2]);
4105 case 0x056: /* VIS I fpsub32 */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 gen_op_load_fpr_DT0(DFPREG(rs1));
4108 gen_op_load_fpr_DT1(DFPREG(rs2));
4109 gen_helper_fpsub32();
4110 gen_op_store_DT0_fpr(DFPREG(rd));
4112 case 0x057: /* VIS I fpsub32s */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 gen_helper_fpsub32s(cpu_fpr[rd],
4115 cpu_fpr[rs1], cpu_fpr[rs2]);
4117 case 0x060: /* VIS I fzero */
4118 CHECK_FPU_FEATURE(dc, VIS1);
4119 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4120 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4122 case 0x061: /* VIS I fzeros */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4126 case 0x062: /* VIS I fnor */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4129 cpu_fpr[DFPREG(rs2)]);
4130 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4131 cpu_fpr[DFPREG(rs2) + 1]);
4133 case 0x063: /* VIS I fnors */
4134 CHECK_FPU_FEATURE(dc, VIS1);
4135 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4137 case 0x064: /* VIS I fandnot2 */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4140 cpu_fpr[DFPREG(rs2)]);
4141 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4142 cpu_fpr[DFPREG(rs1) + 1],
4143 cpu_fpr[DFPREG(rs2) + 1]);
4145 case 0x065: /* VIS I fandnot2s */
4146 CHECK_FPU_FEATURE(dc, VIS1);
4147 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4149 case 0x066: /* VIS I fnot2 */
4150 CHECK_FPU_FEATURE(dc, VIS1);
4151 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4152 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4153 cpu_fpr[DFPREG(rs2) + 1]);
4155 case 0x067: /* VIS I fnot2s */
4156 CHECK_FPU_FEATURE(dc, VIS1);
4157 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4159 case 0x068: /* VIS I fandnot1 */
4160 CHECK_FPU_FEATURE(dc, VIS1);
4161 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4162 cpu_fpr[DFPREG(rs1)]);
4163 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4164 cpu_fpr[DFPREG(rs2) + 1],
4165 cpu_fpr[DFPREG(rs1) + 1]);
4167 case 0x069: /* VIS I fandnot1s */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4171 case 0x06a: /* VIS I fnot1 */
4172 CHECK_FPU_FEATURE(dc, VIS1);
4173 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4174 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4175 cpu_fpr[DFPREG(rs1) + 1]);
4177 case 0x06b: /* VIS I fnot1s */
4178 CHECK_FPU_FEATURE(dc, VIS1);
4179 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4181 case 0x06c: /* VIS I fxor */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4184 cpu_fpr[DFPREG(rs2)]);
4185 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4186 cpu_fpr[DFPREG(rs1) + 1],
4187 cpu_fpr[DFPREG(rs2) + 1]);
4189 case 0x06d: /* VIS I fxors */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4193 case 0x06e: /* VIS I fnand */
4194 CHECK_FPU_FEATURE(dc, VIS1);
4195 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4196 cpu_fpr[DFPREG(rs2)]);
4197 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4198 cpu_fpr[DFPREG(rs2) + 1]);
4200 case 0x06f: /* VIS I fnands */
4201 CHECK_FPU_FEATURE(dc, VIS1);
4202 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4204 case 0x070: /* VIS I fand */
4205 CHECK_FPU_FEATURE(dc, VIS1);
4206 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4207 cpu_fpr[DFPREG(rs2)]);
4208 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4209 cpu_fpr[DFPREG(rs1) + 1],
4210 cpu_fpr[DFPREG(rs2) + 1]);
4212 case 0x071: /* VIS I fands */
4213 CHECK_FPU_FEATURE(dc, VIS1);
4214 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4216 case 0x072: /* VIS I fxnor */
4217 CHECK_FPU_FEATURE(dc, VIS1);
4218 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4219 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4220 cpu_fpr[DFPREG(rs1)]);
4221 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4222 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4223 cpu_fpr[DFPREG(rs1) + 1]);
4225 case 0x073: /* VIS I fxnors */
4226 CHECK_FPU_FEATURE(dc, VIS1);
4227 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4228 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4230 case 0x074: /* VIS I fsrc1 */
4231 CHECK_FPU_FEATURE(dc, VIS1);
4232 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4233 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4234 cpu_fpr[DFPREG(rs1) + 1]);
4236 case 0x075: /* VIS I fsrc1s */
4237 CHECK_FPU_FEATURE(dc, VIS1);
4238 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4240 case 0x076: /* VIS I fornot2 */
4241 CHECK_FPU_FEATURE(dc, VIS1);
4242 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4243 cpu_fpr[DFPREG(rs2)]);
4244 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4245 cpu_fpr[DFPREG(rs1) + 1],
4246 cpu_fpr[DFPREG(rs2) + 1]);
4248 case 0x077: /* VIS I fornot2s */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4252 case 0x078: /* VIS I fsrc2 */
4253 CHECK_FPU_FEATURE(dc, VIS1);
4254 gen_op_load_fpr_DT0(DFPREG(rs2));
4255 gen_op_store_DT0_fpr(DFPREG(rd));
4257 case 0x079: /* VIS I fsrc2s */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4261 case 0x07a: /* VIS I fornot1 */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4264 cpu_fpr[DFPREG(rs1)]);
4265 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4266 cpu_fpr[DFPREG(rs2) + 1],
4267 cpu_fpr[DFPREG(rs1) + 1]);
4269 case 0x07b: /* VIS I fornot1s */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4273 case 0x07c: /* VIS I for */
4274 CHECK_FPU_FEATURE(dc, VIS1);
4275 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4276 cpu_fpr[DFPREG(rs2)]);
4277 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4278 cpu_fpr[DFPREG(rs1) + 1],
4279 cpu_fpr[DFPREG(rs2) + 1]);
4281 case 0x07d: /* VIS I fors */
4282 CHECK_FPU_FEATURE(dc, VIS1);
4283 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4285 case 0x07e: /* VIS I fone */
4286 CHECK_FPU_FEATURE(dc, VIS1);
4287 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4288 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4290 case 0x07f: /* VIS I fones */
4291 CHECK_FPU_FEATURE(dc, VIS1);
4292 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4294 case 0x080: /* VIS I shutdown */
4295 case 0x081: /* VIS II siam */
4304 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4305 #ifdef TARGET_SPARC64
4310 #ifdef TARGET_SPARC64
4311 } else if (xop == 0x39) { /* V9 return */
4314 save_state(dc, cpu_cond);
4315 cpu_src1 = get_src1(insn, cpu_src1);
4316 if (IS_IMM) { /* immediate */
4317 simm = GET_FIELDs(insn, 19, 31);
4318 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4319 } else { /* register */
4320 rs2 = GET_FIELD(insn, 27, 31);
4322 gen_movl_reg_TN(rs2, cpu_src2);
4323 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4325 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4327 gen_helper_restore();
4328 gen_mov_pc_npc(dc, cpu_cond);
4329 r_const = tcg_const_i32(3);
4330 gen_helper_check_align(cpu_dst, r_const);
4331 tcg_temp_free_i32(r_const);
4332 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4333 dc->npc = DYNAMIC_PC;
4337 cpu_src1 = get_src1(insn, cpu_src1);
4338 if (IS_IMM) { /* immediate */
4339 simm = GET_FIELDs(insn, 19, 31);
4340 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4341 } else { /* register */
4342 rs2 = GET_FIELD(insn, 27, 31);
4344 gen_movl_reg_TN(rs2, cpu_src2);
4345 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4347 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4350 case 0x38: /* jmpl */
4355 r_pc = tcg_const_tl(dc->pc);
4356 gen_movl_TN_reg(rd, r_pc);
4357 tcg_temp_free(r_pc);
4358 gen_mov_pc_npc(dc, cpu_cond);
4359 r_const = tcg_const_i32(3);
4360 gen_helper_check_align(cpu_dst, r_const);
4361 tcg_temp_free_i32(r_const);
4362 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4363 dc->npc = DYNAMIC_PC;
4366 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4367 case 0x39: /* rett, V9 return */
4371 if (!supervisor(dc))
4373 gen_mov_pc_npc(dc, cpu_cond);
4374 r_const = tcg_const_i32(3);
4375 gen_helper_check_align(cpu_dst, r_const);
4376 tcg_temp_free_i32(r_const);
4377 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4378 dc->npc = DYNAMIC_PC;
4383 case 0x3b: /* flush */
4384 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4386 gen_helper_flush(cpu_dst);
4388 case 0x3c: /* save */
4389 save_state(dc, cpu_cond);
4391 gen_movl_TN_reg(rd, cpu_dst);
4393 case 0x3d: /* restore */
4394 save_state(dc, cpu_cond);
4395 gen_helper_restore();
4396 gen_movl_TN_reg(rd, cpu_dst);
4398 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4399 case 0x3e: /* V9 done/retry */
4403 if (!supervisor(dc))
4405 dc->npc = DYNAMIC_PC;
4406 dc->pc = DYNAMIC_PC;
4410 if (!supervisor(dc))
4412 dc->npc = DYNAMIC_PC;
4413 dc->pc = DYNAMIC_PC;
4429 case 3: /* load/store instructions */
4431 unsigned int xop = GET_FIELD(insn, 7, 12);
4433 cpu_src1 = get_src1(insn, cpu_src1);
4434 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4435 rs2 = GET_FIELD(insn, 27, 31);
4436 gen_movl_reg_TN(rs2, cpu_src2);
4437 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4438 } else if (IS_IMM) { /* immediate */
4439 simm = GET_FIELDs(insn, 19, 31);
4440 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4441 } else { /* register */
4442 rs2 = GET_FIELD(insn, 27, 31);
4444 gen_movl_reg_TN(rs2, cpu_src2);
4445 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4447 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4449 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4450 (xop > 0x17 && xop <= 0x1d ) ||
4451 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4453 case 0x0: /* ld, V9 lduw, load unsigned word */
4454 gen_address_mask(dc, cpu_addr);
4455 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4457 case 0x1: /* ldub, load unsigned byte */
4458 gen_address_mask(dc, cpu_addr);
4459 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4461 case 0x2: /* lduh, load unsigned halfword */
4462 gen_address_mask(dc, cpu_addr);
4463 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4465 case 0x3: /* ldd, load double word */
4471 save_state(dc, cpu_cond);
4472 r_const = tcg_const_i32(7);
4473 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4474 tcg_temp_free_i32(r_const);
4475 gen_address_mask(dc, cpu_addr);
4476 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4477 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4478 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4479 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4480 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4481 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4482 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4485 case 0x9: /* ldsb, load signed byte */
4486 gen_address_mask(dc, cpu_addr);
4487 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4489 case 0xa: /* ldsh, load signed halfword */
4490 gen_address_mask(dc, cpu_addr);
4491 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4493 case 0xd: /* ldstub -- XXX: should be atomically */
4497 gen_address_mask(dc, cpu_addr);
4498 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4499 r_const = tcg_const_tl(0xff);
4500 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4501 tcg_temp_free(r_const);
4504 case 0x0f: /* swap, swap register with memory. Also
4506 CHECK_IU_FEATURE(dc, SWAP);
4507 gen_movl_reg_TN(rd, cpu_val);
4508 gen_address_mask(dc, cpu_addr);
4509 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4510 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4511 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4513 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4514 case 0x10: /* lda, V9 lduwa, load word alternate */
4515 #ifndef TARGET_SPARC64
4518 if (!supervisor(dc))
4521 save_state(dc, cpu_cond);
4522 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4524 case 0x11: /* lduba, load unsigned byte alternate */
4525 #ifndef TARGET_SPARC64
4528 if (!supervisor(dc))
4531 save_state(dc, cpu_cond);
4532 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4534 case 0x12: /* lduha, load unsigned halfword alternate */
4535 #ifndef TARGET_SPARC64
4538 if (!supervisor(dc))
4541 save_state(dc, cpu_cond);
4542 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4544 case 0x13: /* ldda, load double word alternate */
4545 #ifndef TARGET_SPARC64
4548 if (!supervisor(dc))
4553 save_state(dc, cpu_cond);
4554 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4556 case 0x19: /* ldsba, load signed byte alternate */
4557 #ifndef TARGET_SPARC64
4560 if (!supervisor(dc))
4563 save_state(dc, cpu_cond);
4564 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4566 case 0x1a: /* ldsha, load signed halfword alternate */
4567 #ifndef TARGET_SPARC64
4570 if (!supervisor(dc))
4573 save_state(dc, cpu_cond);
4574 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4576 case 0x1d: /* ldstuba -- XXX: should be atomically */
4577 #ifndef TARGET_SPARC64
4580 if (!supervisor(dc))
4583 save_state(dc, cpu_cond);
4584 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4586 case 0x1f: /* swapa, swap reg with alt. memory. Also
4588 CHECK_IU_FEATURE(dc, SWAP);
4589 #ifndef TARGET_SPARC64
4592 if (!supervisor(dc))
4595 save_state(dc, cpu_cond);
4596 gen_movl_reg_TN(rd, cpu_val);
4597 gen_swap_asi(cpu_val, cpu_addr, insn);
4600 #ifndef TARGET_SPARC64
4601 case 0x30: /* ldc */
4602 case 0x31: /* ldcsr */
4603 case 0x33: /* lddc */
4607 #ifdef TARGET_SPARC64
4608 case 0x08: /* V9 ldsw */
4609 gen_address_mask(dc, cpu_addr);
4610 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4612 case 0x0b: /* V9 ldx */
4613 gen_address_mask(dc, cpu_addr);
4614 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4616 case 0x18: /* V9 ldswa */
4617 save_state(dc, cpu_cond);
4618 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4620 case 0x1b: /* V9 ldxa */
4621 save_state(dc, cpu_cond);
4622 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4624 case 0x2d: /* V9 prefetch, no effect */
4626 case 0x30: /* V9 ldfa */
4627 save_state(dc, cpu_cond);
4628 gen_ldf_asi(cpu_addr, insn, 4, rd);
4630 case 0x33: /* V9 lddfa */
4631 save_state(dc, cpu_cond);
4632 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4634 case 0x3d: /* V9 prefetcha, no effect */
4636 case 0x32: /* V9 ldqfa */
4637 CHECK_FPU_FEATURE(dc, FLOAT128);
4638 save_state(dc, cpu_cond);
4639 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4645 gen_movl_TN_reg(rd, cpu_val);
4646 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4649 } else if (xop >= 0x20 && xop < 0x24) {
4650 if (gen_trap_ifnofpu(dc, cpu_cond))
4652 save_state(dc, cpu_cond);
4654 case 0x20: /* ldf, load fpreg */
4655 gen_address_mask(dc, cpu_addr);
4656 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4657 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4659 case 0x21: /* ldfsr, V9 ldxfsr */
4660 #ifdef TARGET_SPARC64
4661 gen_address_mask(dc, cpu_addr);
4663 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4664 gen_helper_ldxfsr(cpu_tmp64);
4668 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4669 gen_helper_ldfsr(cpu_tmp32);
4673 case 0x22: /* ldqf, load quad fpreg */
4677 CHECK_FPU_FEATURE(dc, FLOAT128);
4678 r_const = tcg_const_i32(dc->mem_idx);
4679 gen_helper_ldqf(cpu_addr, r_const);
4680 tcg_temp_free_i32(r_const);
4681 gen_op_store_QT0_fpr(QFPREG(rd));
4684 case 0x23: /* lddf, load double fpreg */
4688 r_const = tcg_const_i32(dc->mem_idx);
4689 gen_helper_lddf(cpu_addr, r_const);
4690 tcg_temp_free_i32(r_const);
4691 gen_op_store_DT0_fpr(DFPREG(rd));
4697 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4698 xop == 0xe || xop == 0x1e) {
4699 gen_movl_reg_TN(rd, cpu_val);
4701 case 0x4: /* st, store word */
4702 gen_address_mask(dc, cpu_addr);
4703 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4705 case 0x5: /* stb, store byte */
4706 gen_address_mask(dc, cpu_addr);
4707 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4709 case 0x6: /* sth, store halfword */
4710 gen_address_mask(dc, cpu_addr);
4711 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4713 case 0x7: /* std, store double word */
4719 save_state(dc, cpu_cond);
4720 gen_address_mask(dc, cpu_addr);
4721 r_const = tcg_const_i32(7);
4722 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4723 tcg_temp_free_i32(r_const);
4724 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4725 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4726 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4729 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4730 case 0x14: /* sta, V9 stwa, store word alternate */
4731 #ifndef TARGET_SPARC64
4734 if (!supervisor(dc))
4737 save_state(dc, cpu_cond);
4738 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4740 case 0x15: /* stba, store byte alternate */
4741 #ifndef TARGET_SPARC64
4744 if (!supervisor(dc))
4747 save_state(dc, cpu_cond);
4748 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4750 case 0x16: /* stha, store halfword alternate */
4751 #ifndef TARGET_SPARC64
4754 if (!supervisor(dc))
4757 save_state(dc, cpu_cond);
4758 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4760 case 0x17: /* stda, store double word alternate */
4761 #ifndef TARGET_SPARC64
4764 if (!supervisor(dc))
4770 save_state(dc, cpu_cond);
4771 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4775 #ifdef TARGET_SPARC64
4776 case 0x0e: /* V9 stx */
4777 gen_address_mask(dc, cpu_addr);
4778 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4780 case 0x1e: /* V9 stxa */
4781 save_state(dc, cpu_cond);
4782 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4788 } else if (xop > 0x23 && xop < 0x28) {
4789 if (gen_trap_ifnofpu(dc, cpu_cond))
4791 save_state(dc, cpu_cond);
4793 case 0x24: /* stf, store fpreg */
4794 gen_address_mask(dc, cpu_addr);
4795 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4796 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4798 case 0x25: /* stfsr, V9 stxfsr */
4799 #ifdef TARGET_SPARC64
4800 gen_address_mask(dc, cpu_addr);
4801 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4803 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4805 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4807 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4808 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4812 #ifdef TARGET_SPARC64
4813 /* V9 stqf, store quad fpreg */
4817 CHECK_FPU_FEATURE(dc, FLOAT128);
4818 gen_op_load_fpr_QT0(QFPREG(rd));
4819 r_const = tcg_const_i32(dc->mem_idx);
4820 gen_helper_stqf(cpu_addr, r_const);
4821 tcg_temp_free_i32(r_const);
4824 #else /* !TARGET_SPARC64 */
4825 /* stdfq, store floating point queue */
4826 #if defined(CONFIG_USER_ONLY)
4829 if (!supervisor(dc))
4831 if (gen_trap_ifnofpu(dc, cpu_cond))
4836 case 0x27: /* stdf, store double fpreg */
4840 gen_op_load_fpr_DT0(DFPREG(rd));
4841 r_const = tcg_const_i32(dc->mem_idx);
4842 gen_helper_stdf(cpu_addr, r_const);
4843 tcg_temp_free_i32(r_const);
4849 } else if (xop > 0x33 && xop < 0x3f) {
4850 save_state(dc, cpu_cond);
4852 #ifdef TARGET_SPARC64
4853 case 0x34: /* V9 stfa */
4854 gen_stf_asi(cpu_addr, insn, 4, rd);
4856 case 0x36: /* V9 stqfa */
4860 CHECK_FPU_FEATURE(dc, FLOAT128);
4861 r_const = tcg_const_i32(7);
4862 gen_helper_check_align(cpu_addr, r_const);
4863 tcg_temp_free_i32(r_const);
4864 gen_op_load_fpr_QT0(QFPREG(rd));
4865 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4868 case 0x37: /* V9 stdfa */
4869 gen_op_load_fpr_DT0(DFPREG(rd));
4870 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4872 case 0x3c: /* V9 casa */
4873 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4874 gen_movl_TN_reg(rd, cpu_val);
4876 case 0x3e: /* V9 casxa */
4877 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4878 gen_movl_TN_reg(rd, cpu_val);
4881 case 0x34: /* stc */
4882 case 0x35: /* stcsr */
4883 case 0x36: /* stdcq */
4884 case 0x37: /* stdc */
4895 /* default case for non jump instructions */
4896 if (dc->npc == DYNAMIC_PC) {
4897 dc->pc = DYNAMIC_PC;
4899 } else if (dc->npc == JUMP_PC) {
4900 /* we can do a static jump */
4901 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4905 dc->npc = dc->npc + 4;
4913 save_state(dc, cpu_cond);
4914 r_const = tcg_const_i32(TT_ILL_INSN);
4915 gen_helper_raise_exception(r_const);
4916 tcg_temp_free_i32(r_const);
4924 save_state(dc, cpu_cond);
4925 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4926 gen_helper_raise_exception(r_const);
4927 tcg_temp_free_i32(r_const);
4931 #if !defined(CONFIG_USER_ONLY)
4936 save_state(dc, cpu_cond);
4937 r_const = tcg_const_i32(TT_PRIV_INSN);
4938 gen_helper_raise_exception(r_const);
4939 tcg_temp_free_i32(r_const);
4945 save_state(dc, cpu_cond);
4946 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4949 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4951 save_state(dc, cpu_cond);
4952 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4956 #ifndef TARGET_SPARC64
4961 save_state(dc, cpu_cond);
4962 r_const = tcg_const_i32(TT_NCP_INSN);
4963 gen_helper_raise_exception(r_const);
4964 tcg_temp_free(r_const);
/* Translate one TranslationBlock of guest SPARC code into TCG ops.
 * tb:  the translation block being filled in (size/icount set on exit).
 * spc: when non-zero, also record per-instruction pc/npc/icount into the
 *      gen_opc_* arrays so the CPU state can be restored for an exception
 *      raised in the middle of the block (see gen_pc_load); presumably
 *      "search pc" mode — confirm against the two public entry points below.
 * env: the CPU whose mmu index / FPU state / pstate parameterize decoding.
 * NOTE(review): many lines of this function are elided in this view
 * (variable declarations, loop header, several braces); comments below
 * describe only what the visible lines establish. */
4971 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4972 int spc, CPUSPARCState *env)
4974 target_ulong pc_start, last_pc;
4975 uint16_t *gen_opc_end;
4976 DisasContext dc1, *dc = &dc1;
/* Start from a zeroed context, then seed it from the TB and CPU. */
4982 memset(dc, 0, sizeof(DisasContext));
4987 dc->npc = (target_ulong) tb->cs_base;
4988 dc->cc_op = CC_OP_DYNAMIC;
4989 dc->mem_idx = cpu_mmu_index(env);
/* FPU availability is both a CPU-model feature and a runtime enable bit. */
4991 if ((dc->def->features & CPU_FEATURE_FLOAT))
4992 dc->fpu_enabled = cpu_fpu_enabled(env);
4994 dc->fpu_enabled = 0;
4995 #ifdef TARGET_SPARC64
/* V9: honor the PSTATE.AM 32-bit address-masking mode for this block. */
4996 dc->address_mask_32bit = env->pstate & PS_AM;
4998 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Allocate the file-scope scratch temporaries shared with
   disas_sparc_insn; dst/val/addr are "local" temps because they must
   survive across branches emitted inside one instruction. */
5000 cpu_tmp0 = tcg_temp_new();
5001 cpu_tmp32 = tcg_temp_new_i32();
5002 cpu_tmp64 = tcg_temp_new_i64();
5004 cpu_dst = tcg_temp_local_new();
5007 cpu_val = tcg_temp_local_new();
5008 cpu_addr = tcg_temp_local_new();
/* Instruction budget for icount: 0 in cflags means "no limit". */
5011 max_insns = tb->cflags & CF_COUNT_MASK;
5013 max_insns = CF_COUNT_MASK;
/* Stop translation at a guest breakpoint so it traps at the right pc;
   state is flushed first unless the breakpoint is the very first insn. */
5016 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
5017 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
5018 if (bp->pc == dc->pc) {
5019 if (dc->pc != pc_start)
5020 save_state(dc, cpu_cond);
/* spc mode: record pc/npc/icount for every generated op position so
   gen_pc_load can map a host pc back to guest pc/npc. */
5029 qemu_log("Search PC...\n");
5030 j = gen_opc_ptr - gen_opc_buf;
5034 gen_opc_instr_start[lj++] = 0;
5035 gen_opc_pc[lj] = dc->pc;
5036 gen_opc_npc[lj] = dc->npc;
5037 gen_opc_instr_start[lj] = 1;
5038 gen_opc_icount[lj] = num_insns;
/* icount: the final insn of an I/O-ending block needs gen_io_start. */
5041 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
/* Decode and emit TCG ops for exactly one guest instruction. */
5044 disas_sparc_insn(dc);
5049 /* if the next PC is different, we abort now */
5050 if (dc->pc != (last_pc + 4))
5052 /* if we reach a page boundary, we stop generation so that the
5053 PC of a TT_TFAULT exception is always in the right page */
5054 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5056 /* if single step mode, we generate only one instruction and
5057 generate an exception */
5058 if (env->singlestep_enabled || singlestep) {
5059 tcg_gen_movi_tl(cpu_pc, dc->pc);
/* Main loop bounds: op buffer space, page-crossing margin, icount. */
5063 } while ((gen_opc_ptr < gen_opc_end) &&
5064 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5065 num_insns < max_insns);
/* Release the scratch temps in reverse order of allocation. */
5068 tcg_temp_free(cpu_addr);
5069 tcg_temp_free(cpu_val);
5070 tcg_temp_free(cpu_dst);
5071 tcg_temp_free_i64(cpu_tmp64);
5072 tcg_temp_free_i32(cpu_tmp32);
5073 tcg_temp_free(cpu_tmp0);
5074 if (tb->cflags & CF_LAST_IO)
/* Epilogue: if both pc and npc are statically known we can chain
   directly to the next TB; otherwise store what we know and exit. */
5077 if (dc->pc != DYNAMIC_PC &&
5078 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5079 /* static PC and NPC: we can use direct chaining */
5080 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5082 if (dc->pc != DYNAMIC_PC)
5083 tcg_gen_movi_tl(cpu_pc, dc->pc);
5084 save_npc(dc, cpu_cond);
5088 gen_icount_end(tb, num_insns);
5089 *gen_opc_ptr = INDEX_op_end;
/* spc mode: pad the instr_start map and stash the two possible JUMP_PC
   targets for gen_pc_load to choose between. */
5091 j = gen_opc_ptr - gen_opc_buf;
5094 gen_opc_instr_start[lj++] = 0;
5098 gen_opc_jump_pc[0] = dc->jump_pc[0];
5099 gen_opc_jump_pc[1] = dc->jump_pc[1];
/* Normal mode: report how much guest code this TB covers. */
5101 tb->size = last_pc + 4 - pc_start;
5102 tb->icount = num_insns;
/* Optional disassembly dump of the guest code just translated. */
5105 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5106 qemu_log("--------------\n");
5107 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5108 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point: normal translation of a TB (spc = 0, no per-op
 * pc bookkeeping). */
5114 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5116 gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: re-translation with spc = 1 so per-instruction
 * pc/npc/icount are recorded in the gen_opc_* arrays, enabling
 * gen_pc_load to recover guest state for a mid-block exception. */
5119 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5121 gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialization: register every TCG global that
 * maps a CPUState field (condition codes, pc/npc, fsr, windowed g1-g7,
 * all FP registers, and the SPARC64-only control registers) so that
 * generated code can reference them by handle.
 * NOTE(review): the "run once" guard and most register-name string
 * arguments are elided in this view — confirm against the full file. */
5124 void gen_intermediate_code_init(CPUSPARCState *env)
/* Debug names for the integer and FP register globals. */
5128 static const char * const gregnames[8] = {
5129 NULL, // g0 not used
5138 static const char * const fregnames[64] = {
5139 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5140 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5141 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5142 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5143 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5144 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5145 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5146 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5149 /* init various static tables */
/* env lives in a fixed host register (TCG_AREG0); everything else is a
   memory-backed global at a CPUState offset. */
5153 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5154 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5155 offsetof(CPUState, regwptr),
5157 #ifdef TARGET_SPARC64
/* SPARC64-only state: extended condition codes, ASI, FP enable bits,
   GSR, tick comparators, and hypervisor/control registers. */
5158 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5160 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5162 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5164 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5166 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5167 offsetof(CPUState, tick_cmpr),
5169 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5170 offsetof(CPUState, stick_cmpr),
5172 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5173 offsetof(CPUState, hstick_cmpr),
5175 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5177 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5179 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5181 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5182 offsetof(CPUState, ssr), "ssr");
5183 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5184 offsetof(CPUState, version), "ver");
5185 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5186 offsetof(CPUState, softint),
/* SPARC32: window invalid mask. */
5189 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
/* State shared by both variants: condition-code machinery, PSR, FSR,
   program counters, Y register. */
5192 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5194 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5196 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5197 offsetof(CPUState, cc_src2),
5199 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5201 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5203 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5205 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5207 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5209 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5211 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5212 #ifndef CONFIG_USER_ONLY
/* Trap base register only exists for system emulation. */
5213 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
/* g1..g7 as globals; g0 is hardwired zero and handled specially. */
5216 for (i = 1; i < 8; i++)
5217 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5218 offsetof(CPUState, gregs[i]),
/* Each 32-bit FP register half gets its own global. */
5220 for (i = 0; i < TARGET_FPREGS; i++)
5221 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5222 offsetof(CPUState, fpr[i]),
5225 /* register helpers */
5227 #define GEN_HELPER 2
5232 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5233 unsigned long searched_pc, int pc_pos, void *puc)
5236 env->pc = gen_opc_pc[pc_pos];
5237 npc = gen_opc_npc[pc_pos];
5239 /* dynamic NPC: already stored */
5240 } else if (npc == 2) {
5241 target_ulong t2 = (target_ulong)(unsigned long)puc;
5242 /* jump PC: use T2 and the jump targets of the translation */
5244 env->npc = gen_opc_jump_pc[0];
5246 env->npc = gen_opc_jump_pc[1];