4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
/* Sentinel values for DisasContext.pc / DisasContext.npc when the PC is
   not a compile-time constant.  Real instruction addresses are at least
   4-byte aligned, so these small integers can never collide with one.  */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
// This function uses non-native bit order: bit 0 is the MSB (2^31), as in
// the big-endian SPARC instruction encoding diagrams.
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)                               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
#ifdef TARGET_SPARC64
/* On sparc64, bit 0 of a double/quad register number selects the upper
   bank of 32 single registers.  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full int.  Relies on the usual
   arithmetic right shift of negative values.  */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

/* Bit 13 of an instruction word selects the immediate operand form.  */
#define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
#ifdef CONFIG_USER_ONLY
/* User emulation never runs privileged code.  */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 tcg_gen_movi_tl(tn, 0);
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr, 0);
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc, 0);
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
295 static inline void gen_cc_NZ_icc(TCGv dst)
300 l1 = gen_new_label();
301 l2 = gen_new_label();
302 r_temp = tcg_temp_new();
303 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
307 tcg_gen_ext32s_tl(r_temp, dst);
308 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
311 tcg_temp_free(r_temp);
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
324 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
332 env->psr |= PSR_CARRY;
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
336 TCGv r_temp1, r_temp2;
339 l1 = gen_new_label();
340 r_temp1 = tcg_temp_new();
341 r_temp2 = tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
347 tcg_temp_free(r_temp1);
348 tcg_temp_free(r_temp2);
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
356 l1 = gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
371 r_temp = tcg_temp_new();
372 tcg_gen_xor_tl(r_temp, src1, src2);
373 tcg_gen_not_tl(r_temp, r_temp);
374 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379 tcg_temp_free(r_temp);
380 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
388 r_temp = tcg_temp_new();
389 tcg_gen_xor_tl(r_temp, src1, src2);
390 tcg_gen_not_tl(r_temp, r_temp);
391 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396 tcg_temp_free(r_temp);
397 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
407 l1 = gen_new_label();
409 r_temp = tcg_temp_new();
410 tcg_gen_xor_tl(r_temp, src1, src2);
411 tcg_gen_not_tl(r_temp, r_temp);
412 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416 r_const = tcg_const_i32(TT_TOVF);
417 gen_helper_raise_exception(r_const);
418 tcg_temp_free_i32(r_const);
420 tcg_temp_free(r_temp);
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
427 l1 = gen_new_label();
428 tcg_gen_or_tl(cpu_tmp0, src1, src2);
429 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
435 static inline void gen_tag_tv(TCGv src1, TCGv src2)
440 l1 = gen_new_label();
441 tcg_gen_or_tl(cpu_tmp0, src1, src2);
442 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
443 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
444 r_const = tcg_const_i32(TT_TOVF);
445 gen_helper_raise_exception(r_const);
446 tcg_temp_free_i32(r_const);
450 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
452 tcg_gen_mov_tl(cpu_cc_src, src1);
453 tcg_gen_movi_tl(cpu_cc_src2, src2);
454 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
455 tcg_gen_mov_tl(dst, cpu_cc_dst);
458 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
460 tcg_gen_mov_tl(cpu_cc_src, src1);
461 tcg_gen_mov_tl(cpu_cc_src2, src2);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463 tcg_gen_mov_tl(dst, cpu_cc_dst);
466 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
468 tcg_gen_mov_tl(cpu_cc_src, src1);
469 tcg_gen_movi_tl(cpu_cc_src2, src2);
470 gen_mov_reg_C(cpu_tmp0, cpu_psr);
471 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
472 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
473 tcg_gen_mov_tl(dst, cpu_cc_dst);
476 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
478 tcg_gen_mov_tl(cpu_cc_src, src1);
479 tcg_gen_mov_tl(cpu_cc_src2, src2);
480 gen_mov_reg_C(cpu_tmp0, cpu_psr);
481 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
482 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
483 tcg_gen_mov_tl(dst, cpu_cc_dst);
486 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
488 tcg_gen_mov_tl(cpu_cc_src, src1);
489 tcg_gen_mov_tl(cpu_cc_src2, src2);
490 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
492 gen_cc_NZ_icc(cpu_cc_dst);
493 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
494 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
496 #ifdef TARGET_SPARC64
498 gen_cc_NZ_xcc(cpu_cc_dst);
499 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
500 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502 tcg_gen_mov_tl(dst, cpu_cc_dst);
505 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
507 tcg_gen_mov_tl(cpu_cc_src, src1);
508 tcg_gen_mov_tl(cpu_cc_src2, src2);
509 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
510 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
513 gen_cc_NZ_icc(cpu_cc_dst);
514 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515 #ifdef TARGET_SPARC64
517 gen_cc_NZ_xcc(cpu_cc_dst);
518 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
519 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
521 tcg_gen_mov_tl(dst, cpu_cc_dst);
526 env->psr |= PSR_CARRY;
528 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
530 TCGv r_temp1, r_temp2;
533 l1 = gen_new_label();
534 r_temp1 = tcg_temp_new();
535 r_temp2 = tcg_temp_new();
536 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
537 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
538 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
539 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
541 tcg_temp_free(r_temp1);
542 tcg_temp_free(r_temp2);
545 #ifdef TARGET_SPARC64
546 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
550 l1 = gen_new_label();
551 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
552 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
558 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
561 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
565 r_temp = tcg_temp_new();
566 tcg_gen_xor_tl(r_temp, src1, src2);
567 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
568 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
569 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
570 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
571 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
572 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
573 tcg_temp_free(r_temp);
576 #ifdef TARGET_SPARC64
577 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
581 r_temp = tcg_temp_new();
582 tcg_gen_xor_tl(r_temp, src1, src2);
583 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
584 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
585 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
586 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
587 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
588 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
589 tcg_temp_free(r_temp);
593 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
599 l1 = gen_new_label();
601 r_temp = tcg_temp_new();
602 tcg_gen_xor_tl(r_temp, src1, src2);
603 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
604 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
605 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
606 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
607 r_const = tcg_const_i32(TT_TOVF);
608 gen_helper_raise_exception(r_const);
609 tcg_temp_free_i32(r_const);
611 tcg_temp_free(r_temp);
614 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
616 tcg_gen_mov_tl(cpu_cc_src, src1);
617 tcg_gen_movi_tl(cpu_cc_src2, src2);
619 tcg_gen_mov_tl(cpu_cc_dst, src1);
620 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
621 dc->cc_op = CC_OP_LOGIC;
623 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
624 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
625 dc->cc_op = CC_OP_SUB;
627 tcg_gen_mov_tl(dst, cpu_cc_dst);
630 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
632 tcg_gen_mov_tl(cpu_cc_src, src1);
633 tcg_gen_mov_tl(cpu_cc_src2, src2);
634 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
635 tcg_gen_mov_tl(dst, cpu_cc_dst);
638 static inline void gen_op_subx_cc2(TCGv dst)
640 gen_cc_NZ_icc(cpu_cc_dst);
641 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
642 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
643 #ifdef TARGET_SPARC64
644 gen_cc_NZ_xcc(cpu_cc_dst);
645 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
646 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
648 tcg_gen_mov_tl(dst, cpu_cc_dst);
651 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
653 tcg_gen_mov_tl(cpu_cc_src, src1);
654 tcg_gen_movi_tl(cpu_cc_src2, src2);
655 gen_mov_reg_C(cpu_tmp0, cpu_psr);
656 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
658 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
659 #ifdef TARGET_SPARC64
661 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
663 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
664 gen_op_subx_cc2(dst);
667 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
669 tcg_gen_mov_tl(cpu_cc_src, src1);
670 tcg_gen_mov_tl(cpu_cc_src2, src2);
671 gen_mov_reg_C(cpu_tmp0, cpu_psr);
672 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
674 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
675 #ifdef TARGET_SPARC64
677 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
679 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
680 gen_op_subx_cc2(dst);
683 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
685 tcg_gen_mov_tl(cpu_cc_src, src1);
686 tcg_gen_mov_tl(cpu_cc_src2, src2);
687 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
689 gen_cc_NZ_icc(cpu_cc_dst);
690 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
691 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
692 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
693 #ifdef TARGET_SPARC64
695 gen_cc_NZ_xcc(cpu_cc_dst);
696 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
697 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
699 tcg_gen_mov_tl(dst, cpu_cc_dst);
702 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
704 tcg_gen_mov_tl(cpu_cc_src, src1);
705 tcg_gen_mov_tl(cpu_cc_src2, src2);
706 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
707 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
708 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
710 gen_cc_NZ_icc(cpu_cc_dst);
711 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
712 #ifdef TARGET_SPARC64
714 gen_cc_NZ_xcc(cpu_cc_dst);
715 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
716 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
718 tcg_gen_mov_tl(dst, cpu_cc_dst);
721 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
726 l1 = gen_new_label();
727 r_temp = tcg_temp_new();
733 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
734 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
735 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
736 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
737 tcg_gen_movi_tl(cpu_cc_src2, 0);
741 // env->y = (b2 << 31) | (env->y >> 1);
742 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
743 tcg_gen_shli_tl(r_temp, r_temp, 31);
744 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
745 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
746 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
747 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
750 gen_mov_reg_N(cpu_tmp0, cpu_psr);
751 gen_mov_reg_V(r_temp, cpu_psr);
752 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
753 tcg_temp_free(r_temp);
755 // T0 = (b1 << 31) | (T0 >> 1);
757 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
758 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
759 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
761 /* do addition and update flags */
762 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
765 gen_cc_NZ_icc(cpu_cc_dst);
766 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
767 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
768 tcg_gen_mov_tl(dst, cpu_cc_dst);
771 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
773 TCGv_i64 r_temp, r_temp2;
775 r_temp = tcg_temp_new_i64();
776 r_temp2 = tcg_temp_new_i64();
778 tcg_gen_extu_tl_i64(r_temp, src2);
779 tcg_gen_extu_tl_i64(r_temp2, src1);
780 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
782 tcg_gen_shri_i64(r_temp, r_temp2, 32);
783 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
784 tcg_temp_free_i64(r_temp);
785 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
786 #ifdef TARGET_SPARC64
787 tcg_gen_mov_i64(dst, r_temp2);
789 tcg_gen_trunc_i64_tl(dst, r_temp2);
791 tcg_temp_free_i64(r_temp2);
794 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
796 TCGv_i64 r_temp, r_temp2;
798 r_temp = tcg_temp_new_i64();
799 r_temp2 = tcg_temp_new_i64();
801 tcg_gen_ext_tl_i64(r_temp, src2);
802 tcg_gen_ext_tl_i64(r_temp2, src1);
803 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
805 tcg_gen_shri_i64(r_temp, r_temp2, 32);
806 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
807 tcg_temp_free_i64(r_temp);
808 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
809 #ifdef TARGET_SPARC64
810 tcg_gen_mov_i64(dst, r_temp2);
812 tcg_gen_trunc_i64_tl(dst, r_temp2);
814 tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Emit a TT_DIV_ZERO trap if DIVISOR is zero.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* 64-bit signed divide; INT64_MIN / -1 must not trap, it yields INT64_MIN.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
850 static inline void gen_op_div_cc(TCGv dst)
854 tcg_gen_mov_tl(cpu_cc_dst, dst);
856 gen_cc_NZ_icc(cpu_cc_dst);
857 l1 = gen_new_label();
858 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
859 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
864 static inline void gen_op_eval_ba(TCGv dst)
866 tcg_gen_movi_tl(dst, 1);
870 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
872 gen_mov_reg_Z(dst, src);
876 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
878 gen_mov_reg_N(cpu_tmp0, src);
879 gen_mov_reg_V(dst, src);
880 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
881 gen_mov_reg_Z(cpu_tmp0, src);
882 tcg_gen_or_tl(dst, dst, cpu_tmp0);
886 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
888 gen_mov_reg_V(cpu_tmp0, src);
889 gen_mov_reg_N(dst, src);
890 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
894 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
896 gen_mov_reg_Z(cpu_tmp0, src);
897 gen_mov_reg_C(dst, src);
898 tcg_gen_or_tl(dst, dst, cpu_tmp0);
902 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
904 gen_mov_reg_C(dst, src);
908 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
910 gen_mov_reg_V(dst, src);
914 static inline void gen_op_eval_bn(TCGv dst)
916 tcg_gen_movi_tl(dst, 0);
920 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
922 gen_mov_reg_N(dst, src);
926 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
928 gen_mov_reg_Z(dst, src);
929 tcg_gen_xori_tl(dst, dst, 0x1);
933 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
935 gen_mov_reg_N(cpu_tmp0, src);
936 gen_mov_reg_V(dst, src);
937 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
938 gen_mov_reg_Z(cpu_tmp0, src);
939 tcg_gen_or_tl(dst, dst, cpu_tmp0);
940 tcg_gen_xori_tl(dst, dst, 0x1);
944 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
946 gen_mov_reg_V(cpu_tmp0, src);
947 gen_mov_reg_N(dst, src);
948 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
949 tcg_gen_xori_tl(dst, dst, 0x1);
953 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
955 gen_mov_reg_Z(cpu_tmp0, src);
956 gen_mov_reg_C(dst, src);
957 tcg_gen_or_tl(dst, dst, cpu_tmp0);
958 tcg_gen_xori_tl(dst, dst, 0x1);
962 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
964 gen_mov_reg_C(dst, src);
965 tcg_gen_xori_tl(dst, dst, 0x1);
969 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
971 gen_mov_reg_N(dst, src);
972 tcg_gen_xori_tl(dst, dst, 0x1);
976 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
978 gen_mov_reg_V(dst, src);
979 tcg_gen_xori_tl(dst, dst, 0x1);
983 FPSR bit field FCC1 | FCC0:
989 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
990 unsigned int fcc_offset)
992 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
993 tcg_gen_andi_tl(reg, reg, 0x1);
996 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
997 unsigned int fcc_offset)
999 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1000 tcg_gen_andi_tl(reg, reg, 0x1);
1004 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1005 unsigned int fcc_offset)
1007 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1012 // 1 or 2: FCC0 ^ FCC1
1013 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1014 unsigned int fcc_offset)
1016 gen_mov_reg_FCC0(dst, src, fcc_offset);
1017 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1018 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1022 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1023 unsigned int fcc_offset)
1025 gen_mov_reg_FCC0(dst, src, fcc_offset);
1029 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1030 unsigned int fcc_offset)
1032 gen_mov_reg_FCC0(dst, src, fcc_offset);
1033 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1034 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1035 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1040 unsigned int fcc_offset)
1042 gen_mov_reg_FCC1(dst, src, fcc_offset);
1046 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1047 unsigned int fcc_offset)
1049 gen_mov_reg_FCC0(dst, src, fcc_offset);
1050 tcg_gen_xori_tl(dst, dst, 0x1);
1051 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1052 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1056 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1057 unsigned int fcc_offset)
1059 gen_mov_reg_FCC0(dst, src, fcc_offset);
1060 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1061 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1064 // 0: !(FCC0 | FCC1)
1065 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1066 unsigned int fcc_offset)
1068 gen_mov_reg_FCC0(dst, src, fcc_offset);
1069 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1070 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1071 tcg_gen_xori_tl(dst, dst, 0x1);
1074 // 0 or 3: !(FCC0 ^ FCC1)
1075 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1076 unsigned int fcc_offset)
1078 gen_mov_reg_FCC0(dst, src, fcc_offset);
1079 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1080 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1085 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1086 unsigned int fcc_offset)
1088 gen_mov_reg_FCC0(dst, src, fcc_offset);
1089 tcg_gen_xori_tl(dst, dst, 0x1);
1092 // !1: !(FCC0 & !FCC1)
1093 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1094 unsigned int fcc_offset)
1096 gen_mov_reg_FCC0(dst, src, fcc_offset);
1097 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1098 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1099 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1100 tcg_gen_xori_tl(dst, dst, 0x1);
1104 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1107 gen_mov_reg_FCC1(dst, src, fcc_offset);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1111 // !2: !(!FCC0 & FCC1)
1112 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1113 unsigned int fcc_offset)
1115 gen_mov_reg_FCC0(dst, src, fcc_offset);
1116 tcg_gen_xori_tl(dst, dst, 0x1);
1117 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1118 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1119 tcg_gen_xori_tl(dst, dst, 0x1);
1122 // !3: !(FCC0 & FCC1)
1123 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1124 unsigned int fcc_offset)
1126 gen_mov_reg_FCC0(dst, src, fcc_offset);
1127 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1128 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1129 tcg_gen_xori_tl(dst, dst, 0x1);
1132 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1133 target_ulong pc2, TCGv r_cond)
1137 l1 = gen_new_label();
1139 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1141 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1144 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1147 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1148 target_ulong pc2, TCGv r_cond)
1152 l1 = gen_new_label();
1154 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1156 gen_goto_tb(dc, 0, pc2, pc1);
1159 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1162 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1167 l1 = gen_new_label();
1168 l2 = gen_new_label();
1170 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1172 tcg_gen_movi_tl(cpu_npc, npc1);
1176 tcg_gen_movi_tl(cpu_npc, npc2);
1180 /* call this function before using the condition register as it may
1181 have been set for a jump */
1182 static inline void flush_cond(DisasContext *dc, TCGv cond)
1184 if (dc->npc == JUMP_PC) {
1185 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1186 dc->npc = DYNAMIC_PC;
1190 static inline void save_npc(DisasContext *dc, TCGv cond)
1192 if (dc->npc == JUMP_PC) {
1193 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1194 dc->npc = DYNAMIC_PC;
1195 } else if (dc->npc != DYNAMIC_PC) {
1196 tcg_gen_movi_tl(cpu_npc, dc->npc);
1200 static inline void save_state(DisasContext *dc, TCGv cond)
1202 tcg_gen_movi_tl(cpu_pc, dc->pc);
1206 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1208 if (dc->npc == JUMP_PC) {
1209 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1210 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1211 dc->pc = DYNAMIC_PC;
1212 } else if (dc->npc == DYNAMIC_PC) {
1213 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1214 dc->pc = DYNAMIC_PC;
1220 static inline void gen_op_next_insn(void)
1222 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1223 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1226 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1231 #ifdef TARGET_SPARC64
1239 switch (dc->cc_op) {
1243 gen_helper_compute_psr();
1244 dc->cc_op = CC_OP_FLAGS;
1249 gen_op_eval_bn(r_dst);
1252 gen_op_eval_be(r_dst, r_src);
1255 gen_op_eval_ble(r_dst, r_src);
1258 gen_op_eval_bl(r_dst, r_src);
1261 gen_op_eval_bleu(r_dst, r_src);
1264 gen_op_eval_bcs(r_dst, r_src);
1267 gen_op_eval_bneg(r_dst, r_src);
1270 gen_op_eval_bvs(r_dst, r_src);
1273 gen_op_eval_ba(r_dst);
1276 gen_op_eval_bne(r_dst, r_src);
1279 gen_op_eval_bg(r_dst, r_src);
1282 gen_op_eval_bge(r_dst, r_src);
1285 gen_op_eval_bgu(r_dst, r_src);
1288 gen_op_eval_bcc(r_dst, r_src);
1291 gen_op_eval_bpos(r_dst, r_src);
1294 gen_op_eval_bvc(r_dst, r_src);
1299 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1301 unsigned int offset;
1321 gen_op_eval_bn(r_dst);
1324 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1327 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1330 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1333 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1336 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1339 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1342 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1345 gen_op_eval_ba(r_dst);
1348 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1351 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1354 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1357 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1360 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1363 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1366 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
#ifdef TARGET_SPARC64
// Inverted logic
/* NOTE(review): the initializer list was lost in the garbled extraction;
   values reconstructed from upstream QEMU — confirm.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* R_DST = 1 when R_SRC satisfies register condition COND, else 0.  The
   table above holds the *inverse* condition, used to skip the set-to-1.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1396 /* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  <offset> is the
   sign-extended displacement, <cc> selects icc/xcc.  cond==0 is "branch
   never", cond==8 is "branch always"; the annul bit `a` controls whether
   the delay slot is skipped.  For a true conditional branch the condition
   value is computed into r_cond and either an annulled branch is emitted
   (gen_branch_a) or the two possible next PCs are recorded in jump_pc[]
   with npc = JUMP_PC for the delay-slot instruction to resolve. */
1397 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1400 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1401 target_ulong target = dc->pc + offset;
1404 /* unconditional not taken */
/* Annulled: skip the delay slot entirely. */
1406 dc->pc = dc->npc + 4;
1407 dc->npc = dc->pc + 4;
1410 dc->npc = dc->pc + 4;
1412 } else if (cond == 0x8) {
1413 /* unconditional taken */
1416 dc->npc = dc->pc + 4;
1422 flush_cond(dc, r_cond);
1423 gen_cond(r_cond, cc, cond, dc);
1425 gen_branch_a(dc, target, dc->npc, r_cond);
/* Non-annulled conditional: defer the decision past the delay slot. */
1429 dc->jump_pc[0] = target;
1430 dc->jump_pc[1] = dc->npc + 4;
1436 /* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Mirrors
   do_branch exactly, except the condition is computed from the FP
   condition codes via gen_fcond (cc selects fcc0..fcc3). */
1437 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1440 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1441 target_ulong target = dc->pc + offset;
1444 /* unconditional not taken */
1446 dc->pc = dc->npc + 4;
1447 dc->npc = dc->pc + 4;
1450 dc->npc = dc->pc + 4;
1452 } else if (cond == 0x8) {
1453 /* unconditional taken */
1456 dc->npc = dc->pc + 4;
1462 flush_cond(dc, r_cond);
1463 gen_fcond(r_cond, cc, cond);
1465 gen_branch_a(dc, target, dc->npc, r_cond);
/* Non-annulled conditional: defer the decision past the delay slot. */
1469 dc->jump_pc[0] = target;
1470 dc->jump_pc[1] = dc->npc + 4;
1476 #ifdef TARGET_SPARC64
1477 /* XXX: potentially incorrect if dynamic npc */
/* Translate a SPARC64 branch-on-register-condition (BPr): the condition
   is evaluated from register value r_reg via gen_cond_reg; there is no
   "always"/"never" short-cut since the rcond encodings are all data
   dependent.  Annul handling matches do_branch. */
1478 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1479 TCGv r_cond, TCGv r_reg)
1481 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1482 target_ulong target = dc->pc + offset;
1484 flush_cond(dc, r_cond);
1485 gen_cond_reg(r_cond, cond, r_reg);
1487 gen_branch_a(dc, target, dc->npc, r_cond);
1491 dc->jump_pc[0] = target;
1492 dc->jump_pc[1] = dc->npc + 4;
/* Single-precision FP compare: dispatch to the helper for the selected
   FP condition-code set fccno (fcc0..fcc3).  Case labels elided here. */
1497 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1501 gen_helper_fcmps(r_rs1, r_rs2);
1504 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1507 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1510 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
/* Double-precision FP compare, operands taken from the DT0/DT1 staging
   slots; dispatch on fccno.  The fcc0 case is elided from this excerpt. */
1515 static inline void gen_op_fcmpd(int fccno)
1522 gen_helper_fcmpd_fcc1();
1525 gen_helper_fcmpd_fcc2();
1528 gen_helper_fcmpd_fcc3();
/* Quad-precision FP compare, operands in the QT0/QT1 staging slots;
   dispatch on fccno.  The fcc0 case is elided from this excerpt. */
1533 static inline void gen_op_fcmpq(int fccno)
1540 gen_helper_fcmpq_fcc1();
1543 gen_helper_fcmpq_fcc2();
1546 gen_helper_fcmpq_fcc3();
/* Single-precision FP compare-with-exception (FCMPEs: signals on
   unordered operands); dispatch on fccno.  Case labels elided here. */
1551 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1555 gen_helper_fcmpes(r_rs1, r_rs2);
1558 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1561 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1564 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
/* Double-precision FP compare-with-exception; dispatch on fccno. */
1569 static inline void gen_op_fcmped(int fccno)
1573 gen_helper_fcmped();
1576 gen_helper_fcmped_fcc1();
1579 gen_helper_fcmped_fcc2();
1582 gen_helper_fcmped_fcc3();
/* Quad-precision FP compare-with-exception; dispatch on fccno. */
1587 static inline void gen_op_fcmpeq(int fccno)
1591 gen_helper_fcmpeq();
1594 gen_helper_fcmpeq_fcc1();
1597 gen_helper_fcmpeq_fcc2();
1600 gen_helper_fcmpeq_fcc3();
/* Non-SPARC64 build: only one FP condition-code set exists, so fccno is
   ignored and the helper is called directly. */
1607 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1609 gen_helper_fcmps(r_rs1, r_rs2);
1612 static inline void gen_op_fcmpd(int fccno)
1617 static inline void gen_op_fcmpq(int fccno)
/* Non-SPARC64 single compare-with-exception; fccno unused. */
1622 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1624 gen_helper_fcmpes(r_rs1, r_rs2);
/* Non-SPARC64 double compare-with-exception; fccno unused. */
1627 static inline void gen_op_fcmped(int fccno)
1629 gen_helper_fcmped();
/* Non-SPARC64 quad compare-with-exception; fccno unused. */
1632 static inline void gen_op_fcmpeq(int fccno)
1634 gen_helper_fcmpeq();
/* Raise an FP exception with trap type <fsr_flags> in FSR.ftt: clear the
   current ftt field, OR in the new flags, then raise TT_FP_EXCP.
   NOTE(review): the r_const declaration line is elided from this excerpt. */
1638 static inline void gen_op_fpexception_im(int fsr_flags)
1642 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1643 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1644 r_const = tcg_const_i32(TT_FP_EXCP);
1645 gen_helper_raise_exception(r_const);
1646 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), save translator state
   and raise TT_NFPU_INSN.  Returns non-zero when the trap was emitted so
   the caller can abandon decoding; the return statements are elided from
   this excerpt.  User-mode builds always treat the FPU as enabled. */
1649 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1651 #if !defined(CONFIG_USER_ONLY)
1652 if (!dc->fpu_enabled) {
1655 save_state(dc, r_cond);
1656 r_const = tcg_const_i32(TT_NFPU_INSN);
1657 gen_helper_raise_exception(r_const);
1658 tcg_temp_free_i32(r_const);
/* Clear the FSR ftt and current-exception (cexc) fields before an FP op. */
1666 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1668 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat accrued-exception state via a helper call. */
1671 static inline void gen_clear_float_exceptions(void)
1673 gen_helper_clear_float_exceptions();
1677 #ifdef TARGET_SPARC64
/* Resolve the ASI for a SPARC64 alternate-space access: either a copy of
   the %asi register (register-form access) or an i32 constant built from
   insn bits 19..26 (immediate form).  Caller frees the returned temp.
   NOTE(review): the if/else structure and return are elided here. */
1678 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1684 r_asi = tcg_temp_new_i32();
1685 tcg_gen_mov_i32(r_asi, cpu_asi);
1687 asi = GET_FIELD(insn, 19, 26);
1688 r_asi = tcg_const_i32(asi);
/* SPARC64 alternate-space load: fetch <size> bytes from <addr> (sign
   extension controlled by the elided `sign` parameter) into <dst> via the
   ld_asi helper; the ASI comes from gen_get_asi. */
1693 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1696 TCGv_i32 r_asi, r_size, r_sign;
1698 r_asi = gen_get_asi(insn, addr);
1699 r_size = tcg_const_i32(size);
1700 r_sign = tcg_const_i32(sign);
1701 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1702 tcg_temp_free_i32(r_sign);
1703 tcg_temp_free_i32(r_size);
1704 tcg_temp_free_i32(r_asi);
1707 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1709 TCGv_i32 r_asi, r_size;
1711 r_asi = gen_get_asi(insn, addr);
1712 r_size = tcg_const_i32(size);
1713 gen_helper_st_asi(addr, src, r_asi, r_size);
1714 tcg_temp_free_i32(r_size);
1715 tcg_temp_free_i32(r_asi);
1718 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1720 TCGv_i32 r_asi, r_size, r_rd;
1722 r_asi = gen_get_asi(insn, addr);
1723 r_size = tcg_const_i32(size);
1724 r_rd = tcg_const_i32(rd);
1725 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1726 tcg_temp_free_i32(r_rd);
1727 tcg_temp_free_i32(r_size);
1728 tcg_temp_free_i32(r_asi);
/* Floating-point store through an alternate address space; mirrors
   gen_ldf_asi — the helper reads FP register <rd> itself. */
1731 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1733 TCGv_i32 r_asi, r_size, r_rd;
1735 r_asi = gen_get_asi(insn, addr);
1736 r_size = tcg_const_i32(size);
1737 r_rd = tcg_const_i32(rd);
1738 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1739 tcg_temp_free_i32(r_rd);
1740 tcg_temp_free_i32(r_size);
1741 tcg_temp_free_i32(r_asi);
/* SWAPA: exchange the 32-bit word at <addr> (alternate space) with <dst>.
   The old memory value is loaded into cpu_tmp64, the register value is
   stored, then dst receives the old value.  r_asi/r_size are shared by
   both helper calls.  NOTE(review): not atomic — the load and store are
   separate helper calls. */
1744 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1746 TCGv_i32 r_asi, r_size, r_sign;
1748 r_asi = gen_get_asi(insn, addr);
1749 r_size = tcg_const_i32(4);
1750 r_sign = tcg_const_i32(0);
1751 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1752 tcg_temp_free_i32(r_sign);
1753 gen_helper_st_asi(addr, dst, r_asi, r_size);
1754 tcg_temp_free_i32(r_size);
1755 tcg_temp_free_i32(r_asi);
1756 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* SPARC64 LDDA: the helper performs the doubleword load and writes the
   destination register pair <rd>/<rd+1> itself. */
1759 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1761 TCGv_i32 r_asi, r_rd;
1763 r_asi = gen_get_asi(insn, addr);
1764 r_rd = tcg_const_i32(rd);
1765 gen_helper_ldda_asi(addr, r_asi, r_rd);
1766 tcg_temp_free_i32(r_rd);
1767 tcg_temp_free_i32(r_asi);
/* SPARC64 STDA: concatenate the low register (rd+1, fetched into
   cpu_tmp0) with <hi> into a 64-bit value and store it in one access. */
1770 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1772 TCGv_i32 r_asi, r_size;
1774 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1775 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1776 r_asi = gen_get_asi(insn, addr);
1777 r_size = tcg_const_i32(8);
1778 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1779 tcg_temp_free_i32(r_size);
1780 tcg_temp_free_i32(r_asi);
/* CASA (32-bit compare-and-swap in an alternate space): compare the word
   at <addr> with register <rd> (r_val1) and conditionally exchange with
   val2; the helper returns the old memory value in <dst>. */
1783 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1789 r_val1 = tcg_temp_new();
1790 gen_movl_reg_TN(rd, r_val1);
1791 r_asi = gen_get_asi(insn, addr);
1792 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1793 tcg_temp_free_i32(r_asi);
1794 tcg_temp_free(r_val1);
/* CASXA (64-bit compare-and-swap): as gen_cas_asi but the comparison
   value is staged in cpu_tmp64 instead of a fresh temporary. */
1797 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1802 gen_movl_reg_TN(rd, cpu_tmp64)
1803 r_asi = gen_get_asi(insn, addr);
1804 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1805 tcg_temp_free_i32(r_asi);
1808 #elif !defined(CONFIG_USER_ONLY)
1810 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1813 TCGv_i32 r_asi, r_size, r_sign;
1815 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1816 r_size = tcg_const_i32(size);
1817 r_sign = tcg_const_i32(sign);
1818 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1819 tcg_temp_free(r_sign);
1820 tcg_temp_free(r_size);
1821 tcg_temp_free(r_asi);
1822 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1825 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1827 TCGv_i32 r_asi, r_size;
1829 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1830 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1831 r_size = tcg_const_i32(size);
1832 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1833 tcg_temp_free(r_size);
1834 tcg_temp_free(r_asi);
1837 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1839 TCGv_i32 r_asi, r_size, r_sign;
1842 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1843 r_size = tcg_const_i32(4);
1844 r_sign = tcg_const_i32(0);
1845 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1846 tcg_temp_free(r_sign);
1847 r_val = tcg_temp_new_i64();
1848 tcg_gen_extu_tl_i64(r_val, dst);
1849 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1850 tcg_temp_free_i64(r_val);
1851 tcg_temp_free(r_size);
1852 tcg_temp_free(r_asi);
1853 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1856 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1858 TCGv_i32 r_asi, r_size, r_sign;
1860 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1861 r_size = tcg_const_i32(8);
1862 r_sign = tcg_const_i32(0);
1863 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1864 tcg_temp_free(r_sign);
1865 tcg_temp_free(r_size);
1866 tcg_temp_free(r_asi);
1867 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1868 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1869 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1870 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1871 gen_movl_TN_reg(rd, hi);
1874 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1876 TCGv_i32 r_asi, r_size;
1878 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1879 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1880 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1881 r_size = tcg_const_i32(8);
1882 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1883 tcg_temp_free(r_size);
1884 tcg_temp_free(r_asi);
1888 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at <addr> into <dst>, then store 0xff back —
   the SPARC atomic test-and-set primitive.  Implemented as a separate
   load and store helper call (not atomic as emitted). */
1889 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1892 TCGv_i32 r_asi, r_size;
1894 gen_ld_asi(dst, addr, insn, 1, 0);
1896 r_val = tcg_const_i64(0xffULL);
1897 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1898 r_size = tcg_const_i32(1);
1899 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1900 tcg_temp_free_i32(r_size);
1901 tcg_temp_free_i32(r_asi);
1902 tcg_temp_free_i64(r_val);
/* Return a TCGv holding the rs1 operand (insn bits 13..17): %g0 reads as
   a constant 0, globals come straight from cpu_gregs[], and windowed
   registers (rs1 >= 8) are loaded from the register-window pointer into
   <def>.  NOTE(review): the if/else chain and return are elided here. */
1906 static inline TCGv get_src1(unsigned int insn, TCGv def)
1911 rs1 = GET_FIELD(insn, 13, 17);
1913 r_rs1 = tcg_const_tl(0); // XXX how to free?
1915 r_rs1 = cpu_gregs[rs1];
1917 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Return a TCGv holding the second operand: a sign-extended 13-bit
   immediate when the i bit is set, otherwise register rs2 with the same
   %g0 / global / windowed-register handling as get_src1.
   NOTE(review): the enclosing if/else structure and return are elided. */
1921 static inline TCGv get_src2(unsigned int insn, TCGv def)
1925 if (IS_IMM) { /* immediate */
1928 simm = GET_FIELDs(insn, 19, 31);
1929 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1930 } else { /* register */
1933 rs2 = GET_FIELD(insn, 27, 31);
1935 r_rs2 = tcg_const_tl(0); // XXX how to free?
1937 r_rs2 = cpu_gregs[rs2];
1939 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* Guard macros used inside disas_sparc_insn: bail out of decoding (the
   goto targets are elided from this excerpt) when the emulated CPU model
   lacks the named integer-unit or FPU feature flag. */
1944 #define CHECK_IU_FEATURE(dc, FEATURE) \
1945 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1947 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1948 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1951 /* before an instruction, dc->pc must be static */
1952 static void disas_sparc_insn(DisasContext * dc)
1954 unsigned int insn, opc, rs1, rs2, rd;
1957 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1958 tcg_gen_debug_insn_start(dc->pc);
1959 insn = ldl_code(dc->pc);
1960 opc = GET_FIELD(insn, 0, 1);
1962 rd = GET_FIELD(insn, 2, 6);
1964 cpu_src1 = tcg_temp_new(); // const
1965 cpu_src2 = tcg_temp_new(); // const
1968 case 0: /* branches/sethi */
1970 unsigned int xop = GET_FIELD(insn, 7, 9);
1973 #ifdef TARGET_SPARC64
1974 case 0x1: /* V9 BPcc */
1978 target = GET_FIELD_SP(insn, 0, 18);
1979 target = sign_extend(target, 18);
1981 cc = GET_FIELD_SP(insn, 20, 21);
1983 do_branch(dc, target, insn, 0, cpu_cond);
1985 do_branch(dc, target, insn, 1, cpu_cond);
1990 case 0x3: /* V9 BPr */
1992 target = GET_FIELD_SP(insn, 0, 13) |
1993 (GET_FIELD_SP(insn, 20, 21) << 14);
1994 target = sign_extend(target, 16);
1996 cpu_src1 = get_src1(insn, cpu_src1);
1997 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2000 case 0x5: /* V9 FBPcc */
2002 int cc = GET_FIELD_SP(insn, 20, 21);
2003 if (gen_trap_ifnofpu(dc, cpu_cond))
2005 target = GET_FIELD_SP(insn, 0, 18);
2006 target = sign_extend(target, 19);
2008 do_fbranch(dc, target, insn, cc, cpu_cond);
2012 case 0x7: /* CBN+x */
2017 case 0x2: /* BN+x */
2019 target = GET_FIELD(insn, 10, 31);
2020 target = sign_extend(target, 22);
2022 do_branch(dc, target, insn, 0, cpu_cond);
2025 case 0x6: /* FBN+x */
2027 if (gen_trap_ifnofpu(dc, cpu_cond))
2029 target = GET_FIELD(insn, 10, 31);
2030 target = sign_extend(target, 22);
2032 do_fbranch(dc, target, insn, 0, cpu_cond);
2035 case 0x4: /* SETHI */
2037 uint32_t value = GET_FIELD(insn, 10, 31);
2040 r_const = tcg_const_tl(value << 10);
2041 gen_movl_TN_reg(rd, r_const);
2042 tcg_temp_free(r_const);
2045 case 0x0: /* UNIMPL */
2054 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2057 r_const = tcg_const_tl(dc->pc);
2058 gen_movl_TN_reg(15, r_const);
2059 tcg_temp_free(r_const);
2061 gen_mov_pc_npc(dc, cpu_cond);
2065 case 2: /* FPU & Logical Operations */
2067 unsigned int xop = GET_FIELD(insn, 7, 12);
2068 if (xop == 0x3a) { /* generate trap */
2071 cpu_src1 = get_src1(insn, cpu_src1);
2073 rs2 = GET_FIELD(insn, 25, 31);
2074 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2076 rs2 = GET_FIELD(insn, 27, 31);
2078 gen_movl_reg_TN(rs2, cpu_src2);
2079 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2081 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2083 cond = GET_FIELD(insn, 3, 6);
2085 save_state(dc, cpu_cond);
2086 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2088 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2090 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2091 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2092 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2093 gen_helper_raise_exception(cpu_tmp32);
2094 } else if (cond != 0) {
2095 TCGv r_cond = tcg_temp_new();
2097 #ifdef TARGET_SPARC64
2099 int cc = GET_FIELD_SP(insn, 11, 12);
2101 save_state(dc, cpu_cond);
2103 gen_cond(r_cond, 0, cond, dc);
2105 gen_cond(r_cond, 1, cond, dc);
2109 save_state(dc, cpu_cond);
2110 gen_cond(r_cond, 0, cond, dc);
2112 l1 = gen_new_label();
2113 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2115 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2117 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2119 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2120 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2121 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2122 gen_helper_raise_exception(cpu_tmp32);
2125 tcg_temp_free(r_cond);
2131 } else if (xop == 0x28) {
2132 rs1 = GET_FIELD(insn, 13, 17);
2135 #ifndef TARGET_SPARC64
2136 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2137 manual, rdy on the microSPARC
2139 case 0x0f: /* stbar in the SPARCv8 manual,
2140 rdy on the microSPARC II */
2141 case 0x10 ... 0x1f: /* implementation-dependent in the
2142 SPARCv8 manual, rdy on the
2145 gen_movl_TN_reg(rd, cpu_y);
2147 #ifdef TARGET_SPARC64
2148 case 0x2: /* V9 rdccr */
2149 gen_helper_compute_psr();
2150 gen_helper_rdccr(cpu_dst);
2151 gen_movl_TN_reg(rd, cpu_dst);
2153 case 0x3: /* V9 rdasi */
2154 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2155 gen_movl_TN_reg(rd, cpu_dst);
2157 case 0x4: /* V9 rdtick */
2161 r_tickptr = tcg_temp_new_ptr();
2162 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2163 offsetof(CPUState, tick));
2164 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2165 tcg_temp_free_ptr(r_tickptr);
2166 gen_movl_TN_reg(rd, cpu_dst);
2169 case 0x5: /* V9 rdpc */
2173 r_const = tcg_const_tl(dc->pc);
2174 gen_movl_TN_reg(rd, r_const);
2175 tcg_temp_free(r_const);
2178 case 0x6: /* V9 rdfprs */
2179 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2180 gen_movl_TN_reg(rd, cpu_dst);
2182 case 0xf: /* V9 membar */
2183 break; /* no effect */
2184 case 0x13: /* Graphics Status */
2185 if (gen_trap_ifnofpu(dc, cpu_cond))
2187 gen_movl_TN_reg(rd, cpu_gsr);
2189 case 0x16: /* Softint */
2190 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2191 gen_movl_TN_reg(rd, cpu_dst);
2193 case 0x17: /* Tick compare */
2194 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2196 case 0x18: /* System tick */
2200 r_tickptr = tcg_temp_new_ptr();
2201 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2202 offsetof(CPUState, stick));
2203 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2204 tcg_temp_free_ptr(r_tickptr);
2205 gen_movl_TN_reg(rd, cpu_dst);
2208 case 0x19: /* System tick compare */
2209 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2211 case 0x10: /* Performance Control */
2212 case 0x11: /* Performance Instrumentation Counter */
2213 case 0x12: /* Dispatch Control */
2214 case 0x14: /* Softint set, WO */
2215 case 0x15: /* Softint clear, WO */
2220 #if !defined(CONFIG_USER_ONLY)
2221 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2222 #ifndef TARGET_SPARC64
2223 if (!supervisor(dc))
2225 gen_helper_compute_psr();
2226 dc->cc_op = CC_OP_FLAGS;
2227 gen_helper_rdpsr(cpu_dst);
2229 CHECK_IU_FEATURE(dc, HYPV);
2230 if (!hypervisor(dc))
2232 rs1 = GET_FIELD(insn, 13, 17);
2235 // gen_op_rdhpstate();
2238 // gen_op_rdhtstate();
2241 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2244 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2247 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2249 case 31: // hstick_cmpr
2250 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2256 gen_movl_TN_reg(rd, cpu_dst);
2258 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2259 if (!supervisor(dc))
2261 #ifdef TARGET_SPARC64
2262 rs1 = GET_FIELD(insn, 13, 17);
2268 r_tsptr = tcg_temp_new_ptr();
2269 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2270 offsetof(CPUState, tsptr));
2271 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2272 offsetof(trap_state, tpc));
2273 tcg_temp_free_ptr(r_tsptr);
2280 r_tsptr = tcg_temp_new_ptr();
2281 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2282 offsetof(CPUState, tsptr));
2283 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2284 offsetof(trap_state, tnpc));
2285 tcg_temp_free_ptr(r_tsptr);
2292 r_tsptr = tcg_temp_new_ptr();
2293 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2294 offsetof(CPUState, tsptr));
2295 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2296 offsetof(trap_state, tstate));
2297 tcg_temp_free_ptr(r_tsptr);
2304 r_tsptr = tcg_temp_new_ptr();
2305 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2306 offsetof(CPUState, tsptr));
2307 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2308 offsetof(trap_state, tt));
2309 tcg_temp_free_ptr(r_tsptr);
2310 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2317 r_tickptr = tcg_temp_new_ptr();
2318 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2319 offsetof(CPUState, tick));
2320 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2321 gen_movl_TN_reg(rd, cpu_tmp0);
2322 tcg_temp_free_ptr(r_tickptr);
2326 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2329 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2330 offsetof(CPUSPARCState, pstate));
2331 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2334 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2335 offsetof(CPUSPARCState, tl));
2336 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2339 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2340 offsetof(CPUSPARCState, psrpil));
2341 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2344 gen_helper_rdcwp(cpu_tmp0);
2347 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2348 offsetof(CPUSPARCState, cansave));
2349 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2351 case 11: // canrestore
2352 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2353 offsetof(CPUSPARCState, canrestore));
2354 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2356 case 12: // cleanwin
2357 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2358 offsetof(CPUSPARCState, cleanwin));
2359 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2361 case 13: // otherwin
2362 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2363 offsetof(CPUSPARCState, otherwin));
2364 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2367 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2368 offsetof(CPUSPARCState, wstate));
2369 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2371 case 16: // UA2005 gl
2372 CHECK_IU_FEATURE(dc, GL);
2373 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2374 offsetof(CPUSPARCState, gl));
2375 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2377 case 26: // UA2005 strand status
2378 CHECK_IU_FEATURE(dc, HYPV);
2379 if (!hypervisor(dc))
2381 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2384 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2391 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2393 gen_movl_TN_reg(rd, cpu_tmp0);
2395 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2396 #ifdef TARGET_SPARC64
2397 save_state(dc, cpu_cond);
2398 gen_helper_flushw();
2400 if (!supervisor(dc))
2402 gen_movl_TN_reg(rd, cpu_tbr);
2406 } else if (xop == 0x34) { /* FPU Operations */
2407 if (gen_trap_ifnofpu(dc, cpu_cond))
2409 gen_op_clear_ieee_excp_and_FTT();
2410 rs1 = GET_FIELD(insn, 13, 17);
2411 rs2 = GET_FIELD(insn, 27, 31);
2412 xop = GET_FIELD(insn, 18, 26);
2414 case 0x1: /* fmovs */
2415 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2417 case 0x5: /* fnegs */
2418 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2420 case 0x9: /* fabss */
2421 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2423 case 0x29: /* fsqrts */
2424 CHECK_FPU_FEATURE(dc, FSQRT);
2425 gen_clear_float_exceptions();
2426 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2427 gen_helper_check_ieee_exceptions();
2428 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2430 case 0x2a: /* fsqrtd */
2431 CHECK_FPU_FEATURE(dc, FSQRT);
2432 gen_op_load_fpr_DT1(DFPREG(rs2));
2433 gen_clear_float_exceptions();
2434 gen_helper_fsqrtd();
2435 gen_helper_check_ieee_exceptions();
2436 gen_op_store_DT0_fpr(DFPREG(rd));
2438 case 0x2b: /* fsqrtq */
2439 CHECK_FPU_FEATURE(dc, FLOAT128);
2440 gen_op_load_fpr_QT1(QFPREG(rs2));
2441 gen_clear_float_exceptions();
2442 gen_helper_fsqrtq();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_QT0_fpr(QFPREG(rd));
2446 case 0x41: /* fadds */
2447 gen_clear_float_exceptions();
2448 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2449 gen_helper_check_ieee_exceptions();
2450 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2452 case 0x42: /* faddd */
2453 gen_op_load_fpr_DT0(DFPREG(rs1));
2454 gen_op_load_fpr_DT1(DFPREG(rs2));
2455 gen_clear_float_exceptions();
2457 gen_helper_check_ieee_exceptions();
2458 gen_op_store_DT0_fpr(DFPREG(rd));
2460 case 0x43: /* faddq */
2461 CHECK_FPU_FEATURE(dc, FLOAT128);
2462 gen_op_load_fpr_QT0(QFPREG(rs1));
2463 gen_op_load_fpr_QT1(QFPREG(rs2));
2464 gen_clear_float_exceptions();
2466 gen_helper_check_ieee_exceptions();
2467 gen_op_store_QT0_fpr(QFPREG(rd));
2469 case 0x45: /* fsubs */
2470 gen_clear_float_exceptions();
2471 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2472 gen_helper_check_ieee_exceptions();
2473 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2475 case 0x46: /* fsubd */
2476 gen_op_load_fpr_DT0(DFPREG(rs1));
2477 gen_op_load_fpr_DT1(DFPREG(rs2));
2478 gen_clear_float_exceptions();
2480 gen_helper_check_ieee_exceptions();
2481 gen_op_store_DT0_fpr(DFPREG(rd));
2483 case 0x47: /* fsubq */
2484 CHECK_FPU_FEATURE(dc, FLOAT128);
2485 gen_op_load_fpr_QT0(QFPREG(rs1));
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2489 gen_helper_check_ieee_exceptions();
2490 gen_op_store_QT0_fpr(QFPREG(rd));
2492 case 0x49: /* fmuls */
2493 CHECK_FPU_FEATURE(dc, FMUL);
2494 gen_clear_float_exceptions();
2495 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2496 gen_helper_check_ieee_exceptions();
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2499 case 0x4a: /* fmuld */
2500 CHECK_FPU_FEATURE(dc, FMUL);
2501 gen_op_load_fpr_DT0(DFPREG(rs1));
2502 gen_op_load_fpr_DT1(DFPREG(rs2));
2503 gen_clear_float_exceptions();
2505 gen_helper_check_ieee_exceptions();
2506 gen_op_store_DT0_fpr(DFPREG(rd));
2508 case 0x4b: /* fmulq */
2509 CHECK_FPU_FEATURE(dc, FLOAT128);
2510 CHECK_FPU_FEATURE(dc, FMUL);
2511 gen_op_load_fpr_QT0(QFPREG(rs1));
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2515 gen_helper_check_ieee_exceptions();
2516 gen_op_store_QT0_fpr(QFPREG(rd));
2518 case 0x4d: /* fdivs */
2519 gen_clear_float_exceptions();
2520 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2521 gen_helper_check_ieee_exceptions();
2522 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2524 case 0x4e: /* fdivd */
2525 gen_op_load_fpr_DT0(DFPREG(rs1));
2526 gen_op_load_fpr_DT1(DFPREG(rs2));
2527 gen_clear_float_exceptions();
2529 gen_helper_check_ieee_exceptions();
2530 gen_op_store_DT0_fpr(DFPREG(rd));
2532 case 0x4f: /* fdivq */
2533 CHECK_FPU_FEATURE(dc, FLOAT128);
2534 gen_op_load_fpr_QT0(QFPREG(rs1));
2535 gen_op_load_fpr_QT1(QFPREG(rs2));
2536 gen_clear_float_exceptions();
2538 gen_helper_check_ieee_exceptions();
2539 gen_op_store_QT0_fpr(QFPREG(rd));
2541 case 0x69: /* fsmuld */
2542 CHECK_FPU_FEATURE(dc, FSMULD);
2543 gen_clear_float_exceptions();
2544 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2545 gen_helper_check_ieee_exceptions();
2546 gen_op_store_DT0_fpr(DFPREG(rd));
2548 case 0x6e: /* fdmulq */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_DT0(DFPREG(rs1));
2551 gen_op_load_fpr_DT1(DFPREG(rs2));
2552 gen_clear_float_exceptions();
2553 gen_helper_fdmulq();
2554 gen_helper_check_ieee_exceptions();
2555 gen_op_store_QT0_fpr(QFPREG(rd));
2557 case 0xc4: /* fitos */
2558 gen_clear_float_exceptions();
2559 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2560 gen_helper_check_ieee_exceptions();
2561 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2563 case 0xc6: /* fdtos */
2564 gen_op_load_fpr_DT1(DFPREG(rs2));
2565 gen_clear_float_exceptions();
2566 gen_helper_fdtos(cpu_tmp32);
2567 gen_helper_check_ieee_exceptions();
2568 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2570 case 0xc7: /* fqtos */
2571 CHECK_FPU_FEATURE(dc, FLOAT128);
2572 gen_op_load_fpr_QT1(QFPREG(rs2));
2573 gen_clear_float_exceptions();
2574 gen_helper_fqtos(cpu_tmp32);
2575 gen_helper_check_ieee_exceptions();
2576 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2578 case 0xc8: /* fitod */
2579 gen_helper_fitod(cpu_fpr[rs2]);
2580 gen_op_store_DT0_fpr(DFPREG(rd));
2582 case 0xc9: /* fstod */
2583 gen_helper_fstod(cpu_fpr[rs2]);
2584 gen_op_store_DT0_fpr(DFPREG(rd));
2586 case 0xcb: /* fqtod */
2587 CHECK_FPU_FEATURE(dc, FLOAT128);
2588 gen_op_load_fpr_QT1(QFPREG(rs2));
2589 gen_clear_float_exceptions();
2591 gen_helper_check_ieee_exceptions();
2592 gen_op_store_DT0_fpr(DFPREG(rd));
2594 case 0xcc: /* fitoq */
2595 CHECK_FPU_FEATURE(dc, FLOAT128);
2596 gen_helper_fitoq(cpu_fpr[rs2]);
2597 gen_op_store_QT0_fpr(QFPREG(rd));
2599 case 0xcd: /* fstoq */
2600 CHECK_FPU_FEATURE(dc, FLOAT128);
2601 gen_helper_fstoq(cpu_fpr[rs2]);
2602 gen_op_store_QT0_fpr(QFPREG(rd));
2604 case 0xce: /* fdtoq */
2605 CHECK_FPU_FEATURE(dc, FLOAT128);
2606 gen_op_load_fpr_DT1(DFPREG(rs2));
2608 gen_op_store_QT0_fpr(QFPREG(rd));
2610 case 0xd1: /* fstoi */
2611 gen_clear_float_exceptions();
2612 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2613 gen_helper_check_ieee_exceptions();
2614 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2616 case 0xd2: /* fdtoi */
2617 gen_op_load_fpr_DT1(DFPREG(rs2));
2618 gen_clear_float_exceptions();
2619 gen_helper_fdtoi(cpu_tmp32);
2620 gen_helper_check_ieee_exceptions();
2621 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2623 case 0xd3: /* fqtoi */
2624 CHECK_FPU_FEATURE(dc, FLOAT128);
2625 gen_op_load_fpr_QT1(QFPREG(rs2));
2626 gen_clear_float_exceptions();
2627 gen_helper_fqtoi(cpu_tmp32);
2628 gen_helper_check_ieee_exceptions();
2629 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2631 #ifdef TARGET_SPARC64
2632 case 0x2: /* V9 fmovd */
2633 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2634 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2635 cpu_fpr[DFPREG(rs2) + 1]);
2637 case 0x3: /* V9 fmovq */
2638 CHECK_FPU_FEATURE(dc, FLOAT128);
2639 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2640 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2641 cpu_fpr[QFPREG(rs2) + 1]);
2642 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2643 cpu_fpr[QFPREG(rs2) + 2]);
2644 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2645 cpu_fpr[QFPREG(rs2) + 3]);
2647 case 0x6: /* V9 fnegd */
2648 gen_op_load_fpr_DT1(DFPREG(rs2));
2650 gen_op_store_DT0_fpr(DFPREG(rd));
2652 case 0x7: /* V9 fnegq */
2653 CHECK_FPU_FEATURE(dc, FLOAT128);
2654 gen_op_load_fpr_QT1(QFPREG(rs2));
2656 gen_op_store_QT0_fpr(QFPREG(rd));
2658 case 0xa: /* V9 fabsd */
2659 gen_op_load_fpr_DT1(DFPREG(rs2));
2661 gen_op_store_DT0_fpr(DFPREG(rd));
2663 case 0xb: /* V9 fabsq */
2664 CHECK_FPU_FEATURE(dc, FLOAT128);
2665 gen_op_load_fpr_QT1(QFPREG(rs2));
2667 gen_op_store_QT0_fpr(QFPREG(rd));
2669 case 0x81: /* V9 fstox */
2670 gen_clear_float_exceptions();
2671 gen_helper_fstox(cpu_fpr[rs2]);
2672 gen_helper_check_ieee_exceptions();
2673 gen_op_store_DT0_fpr(DFPREG(rd));
2675 case 0x82: /* V9 fdtox */
2676 gen_op_load_fpr_DT1(DFPREG(rs2));
2677 gen_clear_float_exceptions();
2679 gen_helper_check_ieee_exceptions();
2680 gen_op_store_DT0_fpr(DFPREG(rd));
2682 case 0x83: /* V9 fqtox */
2683 CHECK_FPU_FEATURE(dc, FLOAT128);
2684 gen_op_load_fpr_QT1(QFPREG(rs2));
2685 gen_clear_float_exceptions();
2687 gen_helper_check_ieee_exceptions();
2688 gen_op_store_DT0_fpr(DFPREG(rd));
2690 case 0x84: /* V9 fxtos */
2691 gen_op_load_fpr_DT1(DFPREG(rs2));
2692 gen_clear_float_exceptions();
2693 gen_helper_fxtos(cpu_tmp32);
2694 gen_helper_check_ieee_exceptions();
2695 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2697 case 0x88: /* V9 fxtod */
2698 gen_op_load_fpr_DT1(DFPREG(rs2));
2699 gen_clear_float_exceptions();
2701 gen_helper_check_ieee_exceptions();
2702 gen_op_store_DT0_fpr(DFPREG(rd));
2704 case 0x8c: /* V9 fxtoq */
2705 CHECK_FPU_FEATURE(dc, FLOAT128);
2706 gen_op_load_fpr_DT1(DFPREG(rs2));
2707 gen_clear_float_exceptions();
2709 gen_helper_check_ieee_exceptions();
2710 gen_op_store_QT0_fpr(QFPREG(rd));
2716 } else if (xop == 0x35) { /* FPU Operations */
2717 #ifdef TARGET_SPARC64
2720 if (gen_trap_ifnofpu(dc, cpu_cond))
2722 gen_op_clear_ieee_excp_and_FTT();
2723 rs1 = GET_FIELD(insn, 13, 17);
2724 rs2 = GET_FIELD(insn, 27, 31);
2725 xop = GET_FIELD(insn, 18, 26);
2726 #ifdef TARGET_SPARC64
2727 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2730 l1 = gen_new_label();
2731 cond = GET_FIELD_SP(insn, 14, 17);
2732 cpu_src1 = get_src1(insn, cpu_src1);
2733 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2735 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2738 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2741 l1 = gen_new_label();
2742 cond = GET_FIELD_SP(insn, 14, 17);
2743 cpu_src1 = get_src1(insn, cpu_src1);
2744 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2746 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2747 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2750 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2753 CHECK_FPU_FEATURE(dc, FLOAT128);
2754 l1 = gen_new_label();
2755 cond = GET_FIELD_SP(insn, 14, 17);
2756 cpu_src1 = get_src1(insn, cpu_src1);
2757 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2759 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2760 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2761 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2762 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2768 #ifdef TARGET_SPARC64
2769 #define FMOVSCC(fcc) \
2774 l1 = gen_new_label(); \
2775 r_cond = tcg_temp_new(); \
2776 cond = GET_FIELD_SP(insn, 14, 17); \
2777 gen_fcond(r_cond, fcc, cond); \
2778 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2780 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2781 gen_set_label(l1); \
2782 tcg_temp_free(r_cond); \
2784 #define FMOVDCC(fcc) \
2789 l1 = gen_new_label(); \
2790 r_cond = tcg_temp_new(); \
2791 cond = GET_FIELD_SP(insn, 14, 17); \
2792 gen_fcond(r_cond, fcc, cond); \
2793 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2795 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2796 cpu_fpr[DFPREG(rs2)]); \
2797 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2798 cpu_fpr[DFPREG(rs2) + 1]); \
2799 gen_set_label(l1); \
2800 tcg_temp_free(r_cond); \
2802 #define FMOVQCC(fcc) \
2807 l1 = gen_new_label(); \
2808 r_cond = tcg_temp_new(); \
2809 cond = GET_FIELD_SP(insn, 14, 17); \
2810 gen_fcond(r_cond, fcc, cond); \
2811 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2813 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2814 cpu_fpr[QFPREG(rs2)]); \
2815 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2816 cpu_fpr[QFPREG(rs2) + 1]); \
2817 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2818 cpu_fpr[QFPREG(rs2) + 2]); \
2819 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2820 cpu_fpr[QFPREG(rs2) + 3]); \
2821 gen_set_label(l1); \
2822 tcg_temp_free(r_cond); \
2824 case 0x001: /* V9 fmovscc %fcc0 */
2827 case 0x002: /* V9 fmovdcc %fcc0 */
2830 case 0x003: /* V9 fmovqcc %fcc0 */
2831 CHECK_FPU_FEATURE(dc, FLOAT128);
2834 case 0x041: /* V9 fmovscc %fcc1 */
2837 case 0x042: /* V9 fmovdcc %fcc1 */
2840 case 0x043: /* V9 fmovqcc %fcc1 */
2841 CHECK_FPU_FEATURE(dc, FLOAT128);
2844 case 0x081: /* V9 fmovscc %fcc2 */
2847 case 0x082: /* V9 fmovdcc %fcc2 */
2850 case 0x083: /* V9 fmovqcc %fcc2 */
2851 CHECK_FPU_FEATURE(dc, FLOAT128);
2854 case 0x0c1: /* V9 fmovscc %fcc3 */
2857 case 0x0c2: /* V9 fmovdcc %fcc3 */
2860 case 0x0c3: /* V9 fmovqcc %fcc3 */
2861 CHECK_FPU_FEATURE(dc, FLOAT128);
2867 #define FMOVSCC(icc) \
2872 l1 = gen_new_label(); \
2873 r_cond = tcg_temp_new(); \
2874 cond = GET_FIELD_SP(insn, 14, 17); \
2875 gen_cond(r_cond, icc, cond, dc); \
2876 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2878 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2879 gen_set_label(l1); \
2880 tcg_temp_free(r_cond); \
2882 #define FMOVDCC(icc) \
2887 l1 = gen_new_label(); \
2888 r_cond = tcg_temp_new(); \
2889 cond = GET_FIELD_SP(insn, 14, 17); \
2890 gen_cond(r_cond, icc, cond, dc); \
2891 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2893 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2894 cpu_fpr[DFPREG(rs2)]); \
2895 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2896 cpu_fpr[DFPREG(rs2) + 1]); \
2897 gen_set_label(l1); \
2898 tcg_temp_free(r_cond); \
2900 #define FMOVQCC(icc) \
2905 l1 = gen_new_label(); \
2906 r_cond = tcg_temp_new(); \
2907 cond = GET_FIELD_SP(insn, 14, 17); \
2908 gen_cond(r_cond, icc, cond, dc); \
2909 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2911 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2912 cpu_fpr[QFPREG(rs2)]); \
2913 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2914 cpu_fpr[QFPREG(rs2) + 1]); \
2915 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2916 cpu_fpr[QFPREG(rs2) + 2]); \
2917 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2918 cpu_fpr[QFPREG(rs2) + 3]); \
2919 gen_set_label(l1); \
2920 tcg_temp_free(r_cond); \
2923 case 0x101: /* V9 fmovscc %icc */
2926 case 0x102: /* V9 fmovdcc %icc */
2928 case 0x103: /* V9 fmovqcc %icc */
2929 CHECK_FPU_FEATURE(dc, FLOAT128);
2932 case 0x181: /* V9 fmovscc %xcc */
2935 case 0x182: /* V9 fmovdcc %xcc */
2938 case 0x183: /* V9 fmovqcc %xcc */
2939 CHECK_FPU_FEATURE(dc, FLOAT128);
2946 case 0x51: /* fcmps, V9 %fcc */
2947 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2949 case 0x52: /* fcmpd, V9 %fcc */
2950 gen_op_load_fpr_DT0(DFPREG(rs1));
2951 gen_op_load_fpr_DT1(DFPREG(rs2));
2952 gen_op_fcmpd(rd & 3);
2954 case 0x53: /* fcmpq, V9 %fcc */
2955 CHECK_FPU_FEATURE(dc, FLOAT128);
2956 gen_op_load_fpr_QT0(QFPREG(rs1));
2957 gen_op_load_fpr_QT1(QFPREG(rs2));
2958 gen_op_fcmpq(rd & 3);
2960 case 0x55: /* fcmpes, V9 %fcc */
2961 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2963 case 0x56: /* fcmped, V9 %fcc */
2964 gen_op_load_fpr_DT0(DFPREG(rs1));
2965 gen_op_load_fpr_DT1(DFPREG(rs2));
2966 gen_op_fcmped(rd & 3);
2968 case 0x57: /* fcmpeq, V9 %fcc */
2969 CHECK_FPU_FEATURE(dc, FLOAT128);
2970 gen_op_load_fpr_QT0(QFPREG(rs1));
2971 gen_op_load_fpr_QT1(QFPREG(rs2));
2972 gen_op_fcmpeq(rd & 3);
2977 } else if (xop == 0x2) {
2980 rs1 = GET_FIELD(insn, 13, 17);
2982 // or %g0, x, y -> mov T0, x; mov y, T0
2983 if (IS_IMM) { /* immediate */
2986 simm = GET_FIELDs(insn, 19, 31);
2987 r_const = tcg_const_tl(simm);
2988 gen_movl_TN_reg(rd, r_const);
2989 tcg_temp_free(r_const);
2990 } else { /* register */
2991 rs2 = GET_FIELD(insn, 27, 31);
2992 gen_movl_reg_TN(rs2, cpu_dst);
2993 gen_movl_TN_reg(rd, cpu_dst);
2996 cpu_src1 = get_src1(insn, cpu_src1);
2997 if (IS_IMM) { /* immediate */
2998 simm = GET_FIELDs(insn, 19, 31);
2999 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3000 gen_movl_TN_reg(rd, cpu_dst);
3001 } else { /* register */
3002 // or x, %g0, y -> mov T1, x; mov y, T1
3003 rs2 = GET_FIELD(insn, 27, 31);
3005 gen_movl_reg_TN(rs2, cpu_src2);
3006 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3007 gen_movl_TN_reg(rd, cpu_dst);
3009 gen_movl_TN_reg(rd, cpu_src1);
3012 #ifdef TARGET_SPARC64
3013 } else if (xop == 0x25) { /* sll, V9 sllx */
3014 cpu_src1 = get_src1(insn, cpu_src1);
3015 if (IS_IMM) { /* immediate */
3016 simm = GET_FIELDs(insn, 20, 31);
3017 if (insn & (1 << 12)) {
3018 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3020 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3022 } else { /* register */
3023 rs2 = GET_FIELD(insn, 27, 31);
3024 gen_movl_reg_TN(rs2, cpu_src2);
3025 if (insn & (1 << 12)) {
3026 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3028 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3030 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3032 gen_movl_TN_reg(rd, cpu_dst);
3033 } else if (xop == 0x26) { /* srl, V9 srlx */
3034 cpu_src1 = get_src1(insn, cpu_src1);
3035 if (IS_IMM) { /* immediate */
3036 simm = GET_FIELDs(insn, 20, 31);
3037 if (insn & (1 << 12)) {
3038 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3040 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3041 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3043 } else { /* register */
3044 rs2 = GET_FIELD(insn, 27, 31);
3045 gen_movl_reg_TN(rs2, cpu_src2);
3046 if (insn & (1 << 12)) {
3047 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3048 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3050 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3051 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3052 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3055 gen_movl_TN_reg(rd, cpu_dst);
3056 } else if (xop == 0x27) { /* sra, V9 srax */
3057 cpu_src1 = get_src1(insn, cpu_src1);
3058 if (IS_IMM) { /* immediate */
3059 simm = GET_FIELDs(insn, 20, 31);
3060 if (insn & (1 << 12)) {
3061 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3063 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3065 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3067 } else { /* register */
3068 rs2 = GET_FIELD(insn, 27, 31);
3069 gen_movl_reg_TN(rs2, cpu_src2);
3070 if (insn & (1 << 12)) {
3071 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3074 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3080 gen_movl_TN_reg(rd, cpu_dst);
3082 } else if (xop < 0x36) {
3084 cpu_src1 = get_src1(insn, cpu_src1);
3085 cpu_src2 = get_src2(insn, cpu_src2);
3086 switch (xop & ~0x10) {
3089 simm = GET_FIELDs(insn, 19, 31);
3091 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3092 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3093 dc->cc_op = CC_OP_ADD;
3095 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3099 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3100 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3101 dc->cc_op = CC_OP_ADD;
3103 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3109 simm = GET_FIELDs(insn, 19, 31);
3110 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3112 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3115 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3116 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3117 dc->cc_op = CC_OP_LOGIC;
3122 simm = GET_FIELDs(insn, 19, 31);
3123 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3125 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3128 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3129 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3130 dc->cc_op = CC_OP_LOGIC;
3135 simm = GET_FIELDs(insn, 19, 31);
3136 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3138 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3141 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3142 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3143 dc->cc_op = CC_OP_LOGIC;
3148 simm = GET_FIELDs(insn, 19, 31);
3150 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3152 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3156 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3157 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3158 dc->cc_op = CC_OP_SUB;
3160 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3164 case 0x5: /* andn */
3166 simm = GET_FIELDs(insn, 19, 31);
3167 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3169 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3172 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3173 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3174 dc->cc_op = CC_OP_LOGIC;
3179 simm = GET_FIELDs(insn, 19, 31);
3180 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3182 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3185 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3186 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3187 dc->cc_op = CC_OP_LOGIC;
3190 case 0x7: /* xorn */
3192 simm = GET_FIELDs(insn, 19, 31);
3193 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3195 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3196 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3199 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3200 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3201 dc->cc_op = CC_OP_LOGIC;
3204 case 0x8: /* addx, V9 addc */
3206 simm = GET_FIELDs(insn, 19, 31);
3208 gen_helper_compute_psr();
3209 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3210 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3211 dc->cc_op = CC_OP_ADDX;
3213 gen_helper_compute_psr();
3214 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3215 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3216 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3220 gen_helper_compute_psr();
3221 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3222 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3223 dc->cc_op = CC_OP_ADDX;
3225 gen_helper_compute_psr();
3226 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3227 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3228 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3232 #ifdef TARGET_SPARC64
3233 case 0x9: /* V9 mulx */
3235 simm = GET_FIELDs(insn, 19, 31);
3236 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3238 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3242 case 0xa: /* umul */
3243 CHECK_IU_FEATURE(dc, MUL);
3244 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3246 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3247 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3248 dc->cc_op = CC_OP_LOGIC;
3251 case 0xb: /* smul */
3252 CHECK_IU_FEATURE(dc, MUL);
3253 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3255 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3256 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3257 dc->cc_op = CC_OP_LOGIC;
3260 case 0xc: /* subx, V9 subc */
3262 simm = GET_FIELDs(insn, 19, 31);
3264 gen_helper_compute_psr();
3265 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3266 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3267 dc->cc_op = CC_OP_FLAGS;
3269 gen_helper_compute_psr();
3270 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3271 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3272 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3276 gen_helper_compute_psr();
3277 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3278 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3279 dc->cc_op = CC_OP_FLAGS;
3281 gen_helper_compute_psr();
3282 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3283 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3284 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3288 #ifdef TARGET_SPARC64
3289 case 0xd: /* V9 udivx */
3290 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3291 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3292 gen_trap_ifdivzero_tl(cpu_cc_src2);
3293 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3296 case 0xe: /* udiv */
3297 CHECK_IU_FEATURE(dc, DIV);
3298 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3300 gen_op_div_cc(cpu_dst);
3301 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3302 dc->cc_op = CC_OP_FLAGS;
3305 case 0xf: /* sdiv */
3306 CHECK_IU_FEATURE(dc, DIV);
3307 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3309 gen_op_div_cc(cpu_dst);
3310 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3311 dc->cc_op = CC_OP_FLAGS;
3317 gen_movl_TN_reg(rd, cpu_dst);
3319 cpu_src1 = get_src1(insn, cpu_src1);
3320 cpu_src2 = get_src2(insn, cpu_src2);
3322 case 0x20: /* taddcc */
3323 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3324 gen_movl_TN_reg(rd, cpu_dst);
3325 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3326 dc->cc_op = CC_OP_FLAGS;
3328 case 0x21: /* tsubcc */
3329 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3330 gen_movl_TN_reg(rd, cpu_dst);
3331 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3332 dc->cc_op = CC_OP_FLAGS;
3334 case 0x22: /* taddcctv */
3335 save_state(dc, cpu_cond);
3336 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3337 gen_movl_TN_reg(rd, cpu_dst);
3338 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3339 dc->cc_op = CC_OP_FLAGS;
3341 case 0x23: /* tsubcctv */
3342 save_state(dc, cpu_cond);
3343 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3344 gen_movl_TN_reg(rd, cpu_dst);
3345 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3346 dc->cc_op = CC_OP_FLAGS;
3348 case 0x24: /* mulscc */
3349 gen_helper_compute_psr();
3350 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3351 gen_movl_TN_reg(rd, cpu_dst);
3352 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3353 dc->cc_op = CC_OP_FLAGS;
3355 #ifndef TARGET_SPARC64
3356 case 0x25: /* sll */
3357 if (IS_IMM) { /* immediate */
3358 simm = GET_FIELDs(insn, 20, 31);
3359 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3360 } else { /* register */
3361 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3362 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3364 gen_movl_TN_reg(rd, cpu_dst);
3366 case 0x26: /* srl */
3367 if (IS_IMM) { /* immediate */
3368 simm = GET_FIELDs(insn, 20, 31);
3369 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3370 } else { /* register */
3371 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3372 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3374 gen_movl_TN_reg(rd, cpu_dst);
3376 case 0x27: /* sra */
3377 if (IS_IMM) { /* immediate */
3378 simm = GET_FIELDs(insn, 20, 31);
3379 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3380 } else { /* register */
3381 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3382 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3384 gen_movl_TN_reg(rd, cpu_dst);
3391 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3392 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3394 #ifndef TARGET_SPARC64
3395 case 0x01 ... 0x0f: /* undefined in the
3399 case 0x10 ... 0x1f: /* implementation-dependent
3405 case 0x2: /* V9 wrccr */
3406 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3407 gen_helper_wrccr(cpu_dst);
3408 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3409 dc->cc_op = CC_OP_FLAGS;
3411 case 0x3: /* V9 wrasi */
3412 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3413 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3415 case 0x6: /* V9 wrfprs */
3416 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3417 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3418 save_state(dc, cpu_cond);
3423 case 0xf: /* V9 sir, nop if user */
3424 #if !defined(CONFIG_USER_ONLY)
3429 case 0x13: /* Graphics Status */
3430 if (gen_trap_ifnofpu(dc, cpu_cond))
3432 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3434 case 0x14: /* Softint set */
3435 if (!supervisor(dc))
3437 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3438 gen_helper_set_softint(cpu_tmp64);
3440 case 0x15: /* Softint clear */
3441 if (!supervisor(dc))
3443 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3444 gen_helper_clear_softint(cpu_tmp64);
3446 case 0x16: /* Softint write */
3447 if (!supervisor(dc))
3449 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3450 gen_helper_write_softint(cpu_tmp64);
3452 case 0x17: /* Tick compare */
3453 #if !defined(CONFIG_USER_ONLY)
3454 if (!supervisor(dc))
3460 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3462 r_tickptr = tcg_temp_new_ptr();
3463 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3464 offsetof(CPUState, tick));
3465 gen_helper_tick_set_limit(r_tickptr,
3467 tcg_temp_free_ptr(r_tickptr);
3470 case 0x18: /* System tick */
3471 #if !defined(CONFIG_USER_ONLY)
3472 if (!supervisor(dc))
3478 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3480 r_tickptr = tcg_temp_new_ptr();
3481 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3482 offsetof(CPUState, stick));
3483 gen_helper_tick_set_count(r_tickptr,
3485 tcg_temp_free_ptr(r_tickptr);
3488 case 0x19: /* System tick compare */
3489 #if !defined(CONFIG_USER_ONLY)
3490 if (!supervisor(dc))
3496 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3498 r_tickptr = tcg_temp_new_ptr();
3499 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3500 offsetof(CPUState, stick));
3501 gen_helper_tick_set_limit(r_tickptr,
3503 tcg_temp_free_ptr(r_tickptr);
3507 case 0x10: /* Performance Control */
3508 case 0x11: /* Performance Instrumentation
3510 case 0x12: /* Dispatch Control */
3517 #if !defined(CONFIG_USER_ONLY)
3518 case 0x31: /* wrpsr, V9 saved, restored */
3520 if (!supervisor(dc))
3522 #ifdef TARGET_SPARC64
3528 gen_helper_restored();
3530 case 2: /* UA2005 allclean */
3531 case 3: /* UA2005 otherw */
3532 case 4: /* UA2005 normalw */
3533 case 5: /* UA2005 invalw */
3539 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3540 gen_helper_wrpsr(cpu_dst);
3541 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3542 dc->cc_op = CC_OP_FLAGS;
3543 save_state(dc, cpu_cond);
3550 case 0x32: /* wrwim, V9 wrpr */
3552 if (!supervisor(dc))
3554 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3555 #ifdef TARGET_SPARC64
3561 r_tsptr = tcg_temp_new_ptr();
3562 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3563 offsetof(CPUState, tsptr));
3564 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3565 offsetof(trap_state, tpc));
3566 tcg_temp_free_ptr(r_tsptr);
3573 r_tsptr = tcg_temp_new_ptr();
3574 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3575 offsetof(CPUState, tsptr));
3576 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3577 offsetof(trap_state, tnpc));
3578 tcg_temp_free_ptr(r_tsptr);
3585 r_tsptr = tcg_temp_new_ptr();
3586 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3587 offsetof(CPUState, tsptr));
3588 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3589 offsetof(trap_state,
3591 tcg_temp_free_ptr(r_tsptr);
3598 r_tsptr = tcg_temp_new_ptr();
3599 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3600 offsetof(CPUState, tsptr));
3601 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3602 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3603 offsetof(trap_state, tt));
3604 tcg_temp_free_ptr(r_tsptr);
3611 r_tickptr = tcg_temp_new_ptr();
3612 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3613 offsetof(CPUState, tick));
3614 gen_helper_tick_set_count(r_tickptr,
3616 tcg_temp_free_ptr(r_tickptr);
3620 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3623 save_state(dc, cpu_cond);
3624 gen_helper_wrpstate(cpu_tmp0);
3630 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3631 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3632 offsetof(CPUSPARCState, tl));
3635 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3636 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3637 offsetof(CPUSPARCState,
3641 gen_helper_wrcwp(cpu_tmp0);
3644 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3645 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3646 offsetof(CPUSPARCState,
3649 case 11: // canrestore
3650 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3651 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3652 offsetof(CPUSPARCState,
3655 case 12: // cleanwin
3656 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3657 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3658 offsetof(CPUSPARCState,
3661 case 13: // otherwin
3662 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3663 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3664 offsetof(CPUSPARCState,
3668 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3669 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3670 offsetof(CPUSPARCState,
3673 case 16: // UA2005 gl
3674 CHECK_IU_FEATURE(dc, GL);
3675 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3676 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3677 offsetof(CPUSPARCState, gl));
3679 case 26: // UA2005 strand status
3680 CHECK_IU_FEATURE(dc, HYPV);
3681 if (!hypervisor(dc))
3683 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3689 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3690 if (dc->def->nwindows != 32)
3691 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3692 (1 << dc->def->nwindows) - 1);
3693 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3697 case 0x33: /* wrtbr, UA2005 wrhpr */
3699 #ifndef TARGET_SPARC64
3700 if (!supervisor(dc))
3702 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3704 CHECK_IU_FEATURE(dc, HYPV);
3705 if (!hypervisor(dc))
3707 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3710 // XXX gen_op_wrhpstate();
3711 save_state(dc, cpu_cond);
3717 // XXX gen_op_wrhtstate();
3720 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3723 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3725 case 31: // hstick_cmpr
3729 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3730 r_tickptr = tcg_temp_new_ptr();
3731 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3732 offsetof(CPUState, hstick));
3733 gen_helper_tick_set_limit(r_tickptr,
3735 tcg_temp_free_ptr(r_tickptr);
3738 case 6: // hver readonly
3746 #ifdef TARGET_SPARC64
3747 case 0x2c: /* V9 movcc */
3749 int cc = GET_FIELD_SP(insn, 11, 12);
3750 int cond = GET_FIELD_SP(insn, 14, 17);
3754 r_cond = tcg_temp_new();
3755 if (insn & (1 << 18)) {
3757 gen_cond(r_cond, 0, cond, dc);
3759 gen_cond(r_cond, 1, cond, dc);
3763 gen_fcond(r_cond, cc, cond);
3766 l1 = gen_new_label();
3768 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3769 if (IS_IMM) { /* immediate */
3772 simm = GET_FIELD_SPs(insn, 0, 10);
3773 r_const = tcg_const_tl(simm);
3774 gen_movl_TN_reg(rd, r_const);
3775 tcg_temp_free(r_const);
3777 rs2 = GET_FIELD_SP(insn, 0, 4);
3778 gen_movl_reg_TN(rs2, cpu_tmp0);
3779 gen_movl_TN_reg(rd, cpu_tmp0);
3782 tcg_temp_free(r_cond);
3785 case 0x2d: /* V9 sdivx */
3786 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3787 gen_movl_TN_reg(rd, cpu_dst);
3789 case 0x2e: /* V9 popc */
3791 cpu_src2 = get_src2(insn, cpu_src2);
3792 gen_helper_popc(cpu_dst, cpu_src2);
3793 gen_movl_TN_reg(rd, cpu_dst);
3795 case 0x2f: /* V9 movr */
3797 int cond = GET_FIELD_SP(insn, 10, 12);
3800 cpu_src1 = get_src1(insn, cpu_src1);
3802 l1 = gen_new_label();
3804 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3806 if (IS_IMM) { /* immediate */
3809 simm = GET_FIELD_SPs(insn, 0, 9);
3810 r_const = tcg_const_tl(simm);
3811 gen_movl_TN_reg(rd, r_const);
3812 tcg_temp_free(r_const);
3814 rs2 = GET_FIELD_SP(insn, 0, 4);
3815 gen_movl_reg_TN(rs2, cpu_tmp0);
3816 gen_movl_TN_reg(rd, cpu_tmp0);
3826 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3827 #ifdef TARGET_SPARC64
3828 int opf = GET_FIELD_SP(insn, 5, 13);
3829 rs1 = GET_FIELD(insn, 13, 17);
3830 rs2 = GET_FIELD(insn, 27, 31);
3831 if (gen_trap_ifnofpu(dc, cpu_cond))
3835 case 0x000: /* VIS I edge8cc */
3836 case 0x001: /* VIS II edge8n */
3837 case 0x002: /* VIS I edge8lcc */
3838 case 0x003: /* VIS II edge8ln */
3839 case 0x004: /* VIS I edge16cc */
3840 case 0x005: /* VIS II edge16n */
3841 case 0x006: /* VIS I edge16lcc */
3842 case 0x007: /* VIS II edge16ln */
3843 case 0x008: /* VIS I edge32cc */
3844 case 0x009: /* VIS II edge32n */
3845 case 0x00a: /* VIS I edge32lcc */
3846 case 0x00b: /* VIS II edge32ln */
3849 case 0x010: /* VIS I array8 */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 cpu_src1 = get_src1(insn, cpu_src1);
3852 gen_movl_reg_TN(rs2, cpu_src2);
3853 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3854 gen_movl_TN_reg(rd, cpu_dst);
3856 case 0x012: /* VIS I array16 */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 cpu_src1 = get_src1(insn, cpu_src1);
3859 gen_movl_reg_TN(rs2, cpu_src2);
3860 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3861 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3862 gen_movl_TN_reg(rd, cpu_dst);
3864 case 0x014: /* VIS I array32 */
3865 CHECK_FPU_FEATURE(dc, VIS1);
3866 cpu_src1 = get_src1(insn, cpu_src1);
3867 gen_movl_reg_TN(rs2, cpu_src2);
3868 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3869 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3870 gen_movl_TN_reg(rd, cpu_dst);
3872 case 0x018: /* VIS I alignaddr */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 cpu_src1 = get_src1(insn, cpu_src1);
3875 gen_movl_reg_TN(rs2, cpu_src2);
3876 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3877 gen_movl_TN_reg(rd, cpu_dst);
3879 case 0x019: /* VIS II bmask */
3880 case 0x01a: /* VIS I alignaddrl */
3883 case 0x020: /* VIS I fcmple16 */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 gen_op_load_fpr_DT0(DFPREG(rs1));
3886 gen_op_load_fpr_DT1(DFPREG(rs2));
3887 gen_helper_fcmple16();
3888 gen_op_store_DT0_fpr(DFPREG(rd));
3890 case 0x022: /* VIS I fcmpne16 */
3891 CHECK_FPU_FEATURE(dc, VIS1);
3892 gen_op_load_fpr_DT0(DFPREG(rs1));
3893 gen_op_load_fpr_DT1(DFPREG(rs2));
3894 gen_helper_fcmpne16();
3895 gen_op_store_DT0_fpr(DFPREG(rd));
3897 case 0x024: /* VIS I fcmple32 */
3898 CHECK_FPU_FEATURE(dc, VIS1);
3899 gen_op_load_fpr_DT0(DFPREG(rs1));
3900 gen_op_load_fpr_DT1(DFPREG(rs2));
3901 gen_helper_fcmple32();
3902 gen_op_store_DT0_fpr(DFPREG(rd));
3904 case 0x026: /* VIS I fcmpne32 */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 gen_op_load_fpr_DT0(DFPREG(rs1));
3907 gen_op_load_fpr_DT1(DFPREG(rs2));
3908 gen_helper_fcmpne32();
3909 gen_op_store_DT0_fpr(DFPREG(rd));
3911 case 0x028: /* VIS I fcmpgt16 */
3912 CHECK_FPU_FEATURE(dc, VIS1);
3913 gen_op_load_fpr_DT0(DFPREG(rs1));
3914 gen_op_load_fpr_DT1(DFPREG(rs2));
3915 gen_helper_fcmpgt16();
3916 gen_op_store_DT0_fpr(DFPREG(rd));
3918 case 0x02a: /* VIS I fcmpeq16 */
3919 CHECK_FPU_FEATURE(dc, VIS1);
3920 gen_op_load_fpr_DT0(DFPREG(rs1));
3921 gen_op_load_fpr_DT1(DFPREG(rs2));
3922 gen_helper_fcmpeq16();
3923 gen_op_store_DT0_fpr(DFPREG(rd));
3925 case 0x02c: /* VIS I fcmpgt32 */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 gen_op_load_fpr_DT0(DFPREG(rs1));
3928 gen_op_load_fpr_DT1(DFPREG(rs2));
3929 gen_helper_fcmpgt32();
3930 gen_op_store_DT0_fpr(DFPREG(rd));
3932 case 0x02e: /* VIS I fcmpeq32 */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 gen_op_load_fpr_DT0(DFPREG(rs1));
3935 gen_op_load_fpr_DT1(DFPREG(rs2));
3936 gen_helper_fcmpeq32();
3937 gen_op_store_DT0_fpr(DFPREG(rd));
3939 case 0x031: /* VIS I fmul8x16 */
3940 CHECK_FPU_FEATURE(dc, VIS1);
3941 gen_op_load_fpr_DT0(DFPREG(rs1));
3942 gen_op_load_fpr_DT1(DFPREG(rs2));
3943 gen_helper_fmul8x16();
3944 gen_op_store_DT0_fpr(DFPREG(rd));
3946 case 0x033: /* VIS I fmul8x16au */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 gen_op_load_fpr_DT0(DFPREG(rs1));
3949 gen_op_load_fpr_DT1(DFPREG(rs2));
3950 gen_helper_fmul8x16au();
3951 gen_op_store_DT0_fpr(DFPREG(rd));
3953 case 0x035: /* VIS I fmul8x16al */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 gen_op_load_fpr_DT0(DFPREG(rs1));
3956 gen_op_load_fpr_DT1(DFPREG(rs2));
3957 gen_helper_fmul8x16al();
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3960 case 0x036: /* VIS I fmul8sux16 */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 gen_op_load_fpr_DT0(DFPREG(rs1));
3963 gen_op_load_fpr_DT1(DFPREG(rs2));
3964 gen_helper_fmul8sux16();
3965 gen_op_store_DT0_fpr(DFPREG(rd));
3967 case 0x037: /* VIS I fmul8ulx16 */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 gen_op_load_fpr_DT0(DFPREG(rs1));
3970 gen_op_load_fpr_DT1(DFPREG(rs2));
3971 gen_helper_fmul8ulx16();
3972 gen_op_store_DT0_fpr(DFPREG(rd));
3974 case 0x038: /* VIS I fmuld8sux16 */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 gen_op_load_fpr_DT0(DFPREG(rs1));
3977 gen_op_load_fpr_DT1(DFPREG(rs2));
3978 gen_helper_fmuld8sux16();
3979 gen_op_store_DT0_fpr(DFPREG(rd));
3981 case 0x039: /* VIS I fmuld8ulx16 */
3982 CHECK_FPU_FEATURE(dc, VIS1);
3983 gen_op_load_fpr_DT0(DFPREG(rs1));
3984 gen_op_load_fpr_DT1(DFPREG(rs2));
3985 gen_helper_fmuld8ulx16();
3986 gen_op_store_DT0_fpr(DFPREG(rd));
3988 case 0x03a: /* VIS I fpack32 */
3989 case 0x03b: /* VIS I fpack16 */
3990 case 0x03d: /* VIS I fpackfix */
3991 case 0x03e: /* VIS I pdist */
3994 case 0x048: /* VIS I faligndata */
3995 CHECK_FPU_FEATURE(dc, VIS1);
3996 gen_op_load_fpr_DT0(DFPREG(rs1));
3997 gen_op_load_fpr_DT1(DFPREG(rs2));
3998 gen_helper_faligndata();
3999 gen_op_store_DT0_fpr(DFPREG(rd));
4001 case 0x04b: /* VIS I fpmerge */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 gen_op_load_fpr_DT0(DFPREG(rs1));
4004 gen_op_load_fpr_DT1(DFPREG(rs2));
4005 gen_helper_fpmerge();
4006 gen_op_store_DT0_fpr(DFPREG(rd));
4008 case 0x04c: /* VIS II bshuffle */
4011 case 0x04d: /* VIS I fexpand */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 gen_op_load_fpr_DT0(DFPREG(rs1));
4014 gen_op_load_fpr_DT1(DFPREG(rs2));
4015 gen_helper_fexpand();
4016 gen_op_store_DT0_fpr(DFPREG(rd));
4018 case 0x050: /* VIS I fpadd16 */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 gen_op_load_fpr_DT0(DFPREG(rs1));
4021 gen_op_load_fpr_DT1(DFPREG(rs2));
4022 gen_helper_fpadd16();
4023 gen_op_store_DT0_fpr(DFPREG(rd));
4025 case 0x051: /* VIS I fpadd16s */
4026 CHECK_FPU_FEATURE(dc, VIS1);
4027 gen_helper_fpadd16s(cpu_fpr[rd],
4028 cpu_fpr[rs1], cpu_fpr[rs2]);
4030 case 0x052: /* VIS I fpadd32 */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 gen_op_load_fpr_DT0(DFPREG(rs1));
4033 gen_op_load_fpr_DT1(DFPREG(rs2));
4034 gen_helper_fpadd32();
4035 gen_op_store_DT0_fpr(DFPREG(rd));
4037 case 0x053: /* VIS I fpadd32s */
4038 CHECK_FPU_FEATURE(dc, VIS1);
4039 gen_helper_fpadd32s(cpu_fpr[rd],
4040 cpu_fpr[rs1], cpu_fpr[rs2]);
4042 case 0x054: /* VIS I fpsub16 */
4043 CHECK_FPU_FEATURE(dc, VIS1);
4044 gen_op_load_fpr_DT0(DFPREG(rs1));
4045 gen_op_load_fpr_DT1(DFPREG(rs2));
4046 gen_helper_fpsub16();
4047 gen_op_store_DT0_fpr(DFPREG(rd));
4049 case 0x055: /* VIS I fpsub16s */
4050 CHECK_FPU_FEATURE(dc, VIS1);
4051 gen_helper_fpsub16s(cpu_fpr[rd],
4052 cpu_fpr[rs1], cpu_fpr[rs2]);
4054 case 0x056: /* VIS I fpsub32 */
4055 CHECK_FPU_FEATURE(dc, VIS1);
4056 gen_op_load_fpr_DT0(DFPREG(rs1));
4057 gen_op_load_fpr_DT1(DFPREG(rs2));
4058 gen_helper_fpsub32();
4059 gen_op_store_DT0_fpr(DFPREG(rd));
4061 case 0x057: /* VIS I fpsub32s */
4062 CHECK_FPU_FEATURE(dc, VIS1);
4063 gen_helper_fpsub32s(cpu_fpr[rd],
4064 cpu_fpr[rs1], cpu_fpr[rs2]);
4066 case 0x060: /* VIS I fzero */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4069 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4071 case 0x061: /* VIS I fzeros */
4072 CHECK_FPU_FEATURE(dc, VIS1);
4073 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4075 case 0x062: /* VIS I fnor */
4076 CHECK_FPU_FEATURE(dc, VIS1);
4077 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4078 cpu_fpr[DFPREG(rs2)]);
4079 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4080 cpu_fpr[DFPREG(rs2) + 1]);
4082 case 0x063: /* VIS I fnors */
4083 CHECK_FPU_FEATURE(dc, VIS1);
4084 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4086 case 0x064: /* VIS I fandnot2 */
4087 CHECK_FPU_FEATURE(dc, VIS1);
4088 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4089 cpu_fpr[DFPREG(rs2)]);
4090 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4091 cpu_fpr[DFPREG(rs1) + 1],
4092 cpu_fpr[DFPREG(rs2) + 1]);
4094 case 0x065: /* VIS I fandnot2s */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4098 case 0x066: /* VIS I fnot2 */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4101 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4102 cpu_fpr[DFPREG(rs2) + 1]);
4104 case 0x067: /* VIS I fnot2s */
4105 CHECK_FPU_FEATURE(dc, VIS1);
4106 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4108 case 0x068: /* VIS I fandnot1 */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4111 cpu_fpr[DFPREG(rs1)]);
4112 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4113 cpu_fpr[DFPREG(rs2) + 1],
4114 cpu_fpr[DFPREG(rs1) + 1]);
4116 case 0x069: /* VIS I fandnot1s */
4117 CHECK_FPU_FEATURE(dc, VIS1);
4118 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4120 case 0x06a: /* VIS I fnot1 */
4121 CHECK_FPU_FEATURE(dc, VIS1);
4122 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4123 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4124 cpu_fpr[DFPREG(rs1) + 1]);
4126 case 0x06b: /* VIS I fnot1s */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4130 case 0x06c: /* VIS I fxor */
4131 CHECK_FPU_FEATURE(dc, VIS1);
4132 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4133 cpu_fpr[DFPREG(rs2)]);
4134 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4135 cpu_fpr[DFPREG(rs1) + 1],
4136 cpu_fpr[DFPREG(rs2) + 1]);
4138 case 0x06d: /* VIS I fxors */
4139 CHECK_FPU_FEATURE(dc, VIS1);
4140 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4142 case 0x06e: /* VIS I fnand */
4143 CHECK_FPU_FEATURE(dc, VIS1);
4144 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4145 cpu_fpr[DFPREG(rs2)]);
4146 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4147 cpu_fpr[DFPREG(rs2) + 1]);
4149 case 0x06f: /* VIS I fnands */
4150 CHECK_FPU_FEATURE(dc, VIS1);
4151 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
            /* VIS I bitwise ops, part 2, plus the VIS I/II no-operand
               opcodes at the end of the range.  */
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                /* xnor composed as rd = (~rs2) ^ rs1, using cpu_tmp32 as
                   scratch for the inverted source */
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                /* rd = rs1: plain copy of both halves */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                /* NOTE(review): copies via the DT0 staging slot rather than
                   the two tcg_gen_mov_i32 used by fsrc1 — functionally a
                   64-bit register copy either way, but inconsistent style.  */
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                /* rd = all ones */
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
            /* bodies for the following two opcodes are elided here */
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            /* RETURN: compute target = rs1 + (simm13 | rs2), restore the
               register window, then branch there with a 4-byte alignment
               check.  */
            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) { /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else { /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                /* NOTE(review): an `if (rs2) ... else` around the two lines
                   above/below (the %g0 special case) appears elided from
                   this extraction — confirm against the full source.  */
                tcg_gen_mov_tl(cpu_dst, cpu_src1);
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            /* trap if the target is not 4-byte aligned */
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            /* shared operand fetch for the jmpl/rett cases below */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) { /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else { /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_mov_tl(cpu_dst, cpu_src1);
            case 0x38: /* jmpl */
                /* write the current PC to rd, then jump to cpu_dst */
                r_pc = tcg_const_tl(dc->pc);
                gen_movl_TN_reg(rd, r_pc);
                tcg_temp_free(r_pc);
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39: /* rett, V9 return */
                /* privileged: return from trap */
                if (!supervisor(dc))
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                gen_helper_flush(cpu_dst);
            case 0x3c: /* save */
                /* push a register window, then write the computed value */
                save_state(dc, cpu_cond);
                gen_movl_TN_reg(rd, cpu_dst);
            case 0x3d: /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e: /* V9 done/retry */
                /* both exits leave pc/npc dynamic: they are reloaded from
                   the trap state by the helper (elided here) */
                if (!supervisor(dc))
                dc->npc = DYNAMIC_PC;
                dc->pc = DYNAMIC_PC;
                if (!supervisor(dc))
                dc->npc = DYNAMIC_PC;
                dc->pc = DYNAMIC_PC;
    case 3: /* load/store instructions */
        unsigned int xop = GET_FIELD(insn, 7, 12);
        /* Compute the effective address into cpu_addr.  casa/casxa take the
           address directly from rs1 (no displacement); otherwise it is
           rs1 + simm13 or rs1 + rs2.  */
        cpu_src1 = get_src1(insn, cpu_src1);
        if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
            rs2 = GET_FIELD(insn, 27, 31);
            gen_movl_reg_TN(rs2, cpu_src2);
            tcg_gen_mov_tl(cpu_addr, cpu_src1);
        } else if (IS_IMM) { /* immediate */
            simm = GET_FIELDs(insn, 19, 31);
            tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
        } else { /* register */
            rs2 = GET_FIELD(insn, 27, 31);
            gen_movl_reg_TN(rs2, cpu_src2);
            tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
            /* NOTE(review): the rs2 == %g0 special case branch around the
               two adjacent lines appears elided from this extraction.  */
            tcg_gen_mov_tl(cpu_addr, cpu_src1);
        /* dispatch: this range of xop values covers the load instructions
           that deposit their result in cpu_val (written back below) */
        if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
            (xop > 0x17 && xop <= 0x1d ) ||
            (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
            /* Basic integer loads: each case leaves the loaded value in
               cpu_val; gen_address_mask() truncates the address to 32 bits
               when AM is set on sparc64.  */
            case 0x0: /* ld, V9 lduw, load unsigned word */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
            case 0x1: /* ldub, load unsigned byte */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
            case 0x2: /* lduh, load unsigned halfword */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
            case 0x3: /* ldd, load double word */
                /* 64-bit load split across rd (even, high word) and rd+1
                   (odd, low word); alignment-checked to 8 bytes */
                save_state(dc, cpu_cond);
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const); // XXX remove
                tcg_temp_free_i32(r_const);
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                gen_movl_TN_reg(rd + 1, cpu_tmp0);
                tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
            case 0x9: /* ldsb, load signed byte */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
            case 0xa: /* ldsh, load signed halfword */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
            case 0xd: /* ldstub -- XXX: should be atomically */
                /* NOTE(review): ldstub architecturally zero-extends the
                   loaded byte, but ld8s sign-extends — verify against the
                   SPARC manual / upstream history.  */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                r_const = tcg_const_tl(0xff);
                tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                tcg_temp_free(r_const);
            case 0x0f: /* swap, swap register with memory. Also
                CHECK_IU_FEATURE(dc, SWAP);
                /* load old memory word into a temp, store rd, then move the
                   old value into cpu_val for writeback */
                gen_movl_reg_TN(rd, cpu_val);
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                tcg_gen_mov_tl(cpu_val, cpu_tmp0);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            /* Alternate-space loads.  On pre-V9 these are privileged
               (supervisor check); on V9 the ASI comes from the insn or the
               %asi register inside gen_ld_asi().  The trailing arguments of
               gen_ld_asi are (size, sign-extend).  */
            case 0x10: /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
            case 0x11: /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
            case 0x12: /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
            case 0x13: /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
            case 0x19: /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
            case 0x1a: /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
            case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_ldstub_asi(cpu_val, cpu_addr, insn);
            case 0x1f: /* swapa, swap reg with alt. memory. Also
                CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_movl_reg_TN(rd, cpu_val);
                gen_swap_asi(cpu_val, cpu_addr, insn);
#ifndef TARGET_SPARC64
            /* coprocessor loads: bodies elided (raise an exception) */
            case 0x30: /* ldc */
            case 0x31: /* ldcsr */
            case 0x33: /* lddc */
#ifdef TARGET_SPARC64
            /* V9-only loads, including ASI variants for FP registers.  */
            case 0x08: /* V9 ldsw */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
            case 0x0b: /* V9 ldx */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
            case 0x18: /* V9 ldswa */
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
            case 0x1b: /* V9 ldxa */
                save_state(dc, cpu_cond);
                gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
            case 0x2d: /* V9 prefetch, no effect */
            case 0x30: /* V9 ldfa */
                save_state(dc, cpu_cond);
                gen_ldf_asi(cpu_addr, insn, 4, rd);
            case 0x33: /* V9 lddfa */
                save_state(dc, cpu_cond);
                gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
            case 0x3d: /* V9 prefetcha, no effect */
            case 0x32: /* V9 ldqfa */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                save_state(dc, cpu_cond);
                gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
            /* common writeback of the loaded value to the integer rd;
               the FP/prefetch cases skip this via a label elided here */
            gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
        } else if (xop >= 0x20 && xop < 0x24) {
            /* FP loads: trap if the FPU is disabled, then dispatch.  */
            if (gen_trap_ifnofpu(dc, cpu_cond))
            save_state(dc, cpu_cond);
            case 0x20: /* ldf, load fpreg */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
            case 0x21: /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                gen_address_mask(dc, cpu_addr);
                /* 64-bit FSR load when rd selects ldxfsr (condition elided) */
                tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                gen_helper_ldxfsr(cpu_tmp64);
                tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                gen_helper_ldfsr(cpu_tmp32);
            case 0x22: /* ldqf, load quad fpreg */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                /* helper performs the 128-bit load into QT0 */
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_ldqf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_op_store_QT0_fpr(QFPREG(rd));
            case 0x23: /* lddf, load double fpreg */
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_lddf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_op_store_DT0_fpr(DFPREG(rd));
        } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                   xop == 0xe || xop == 0x1e) {
            /* Integer stores: the value to store is fetched from rd into
               cpu_val up front; gen_st_asi's last argument is the size.  */
            gen_movl_reg_TN(rd, cpu_val);
            case 0x4: /* st, store word */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
            case 0x5: /* stb, store byte */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
            case 0x6: /* sth, store halfword */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
            case 0x7: /* std, store double word */
                /* rd holds the high word, rd+1 the low word; concatenated
                   into one 64-bit store after an 8-byte alignment check */
                save_state(dc, cpu_cond);
                gen_address_mask(dc, cpu_addr);
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const); // XXX remove
                tcg_temp_free_i32(r_const);
                gen_movl_reg_TN(rd + 1, cpu_tmp0);
                tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            /* alternate-space stores: privileged on pre-V9 */
            case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_st_asi(cpu_val, cpu_addr, insn, 4);
            case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_st_asi(cpu_val, cpu_addr, insn, 1);
            case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_st_asi(cpu_val, cpu_addr, insn, 2);
            case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                save_state(dc, cpu_cond);
                gen_stda_asi(cpu_val, cpu_addr, insn, rd);
#ifdef TARGET_SPARC64
            case 0x0e: /* V9 stx */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
            case 0x1e: /* V9 stxa */
                save_state(dc, cpu_cond);
                gen_st_asi(cpu_val, cpu_addr, insn, 8);
        } else if (xop > 0x23 && xop < 0x28) {
            /* FP stores: trap if the FPU is disabled.  */
            if (gen_trap_ifnofpu(dc, cpu_cond))
            save_state(dc, cpu_cond);
            case 0x24: /* stf, store fpreg */
                gen_address_mask(dc, cpu_addr);
                /* widen the 32-bit fpr to target_long for the store op */
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
            case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                gen_address_mask(dc, cpu_addr);
                tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                /* 64-bit store for stxfsr, 32-bit for stfsr (the selecting
                   condition is elided from this extraction) */
                tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                /* NOTE(review): st32 of the i64 temp below relies on the
                   st op truncating — confirm this matches upstream.  */
                tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#ifdef TARGET_SPARC64
            /* V9 stqf, store quad fpreg */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rd));
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_stqf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
#else /* !TARGET_SPARC64 */
            /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
            if (!supervisor(dc))
            if (gen_trap_ifnofpu(dc, cpu_cond))
            case 0x27: /* stdf, store double fpreg */
                gen_op_load_fpr_DT0(DFPREG(rd));
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_stdf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
        } else if (xop > 0x33 && xop < 0x3f) {
            /* ASI FP stores, compare-and-swap, and (pre-V9) coprocessor
               store stubs.  */
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            case 0x34: /* V9 stfa */
                gen_stf_asi(cpu_addr, insn, 4, rd);
            case 0x36: /* V9 stqfa */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                /* quad store requires 8-byte alignment */
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_op_load_fpr_QT0(QFPREG(rd));
                gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
            case 0x37: /* V9 stdfa */
                gen_op_load_fpr_DT0(DFPREG(rd));
                gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
            case 0x3c: /* V9 casa */
                /* compare rd with [addr]; cpu_src2 holds the swap value;
                   the old memory value lands in rd */
                gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
            case 0x3e: /* V9 casxa */
                gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
            /* coprocessor stores: bodies elided (raise an exception) */
            case 0x34: /* stc */
            case 0x35: /* stcsr */
            case 0x36: /* stdcq */
            case 0x37: /* stdc */
4844 /* default case for non jump instructions */
4845 if (dc->npc == DYNAMIC_PC) {
4846 dc->pc = DYNAMIC_PC;
4848 } else if (dc->npc == JUMP_PC) {
4849 /* we can do a static jump */
4850 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4854 dc->npc = dc->npc + 4;
4862 save_state(dc, cpu_cond);
4863 r_const = tcg_const_i32(TT_ILL_INSN);
4864 gen_helper_raise_exception(r_const);
4865 tcg_temp_free_i32(r_const);
4873 save_state(dc, cpu_cond);
4874 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4875 gen_helper_raise_exception(r_const);
4876 tcg_temp_free_i32(r_const);
4880 #if !defined(CONFIG_USER_ONLY)
4885 save_state(dc, cpu_cond);
4886 r_const = tcg_const_i32(TT_PRIV_INSN);
4887 gen_helper_raise_exception(r_const);
4888 tcg_temp_free_i32(r_const);
4894 save_state(dc, cpu_cond);
4895 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4898 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4900 save_state(dc, cpu_cond);
4901 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4905 #ifndef TARGET_SPARC64
4910 save_state(dc, cpu_cond);
4911 r_const = tcg_const_i32(TT_NCP_INSN);
4912 gen_helper_raise_exception(r_const);
4913 tcg_temp_free(r_const);
/* Translate a basic block of guest SPARC code into TCG ops.
   tb: the translation block being filled; spc: non-zero when called from
   gen_intermediate_code_pc (searched-PC mode, which also records per-insn
   pc/npc/icount bookkeeping); env: the CPU state.
   NOTE: several lines (declarations, braces, loop labels) are elided from
   this extraction; comments below annotate only what is visible.  */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    /* start from a clean per-block translator context */
    memset(dc, 0, sizeof(DisasContext));
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* honor the V9 address-mask (32-bit addressing) pstate bit */
    dc->address_mask_32bit = env->pstate & PS_AM;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    /* scratch temps shared by disas_sparc_insn; the last three are
       "local" temps because they must survive branches */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();
    cpu_dst = tcg_temp_local_new();
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
    /* icount budget for this TB */
    max_insns = tb->cflags & CF_COUNT_MASK;
        max_insns = CF_COUNT_MASK;
    /* stop translation at a breakpoint so the debug exception hits with
       consistent state */
    if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
        TAILQ_FOREACH(bp, &env->breakpoints, entry) {
            if (bp->pc == dc->pc) {
                if (dc->pc != pc_start)
                    save_state(dc, cpu_cond);
        /* searched-PC mode: record pc/npc/icount for each generated op */
        qemu_log("Search PC...\n");
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
        gen_opc_pc[lj] = dc->pc;
        gen_opc_npc[lj] = dc->npc;
        gen_opc_instr_start[lj] = 1;
        gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
        disas_sparc_insn(dc);
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
    /* release the shared temps in reverse allocation order */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
    if (dc->pc != DYNAMIC_PC &&
        (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
        /* static PC and NPC: we can use direct chaining */
        gen_goto_tb(dc, 0, dc->pc, dc->npc);
        if (dc->pc != DYNAMIC_PC)
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        save_npc(dc, cpu_cond);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* pad out the opc bookkeeping arrays and save the conditional-branch
       targets for gen_pc_load() */
    j = gen_opc_ptr - gen_opc_buf;
        gen_opc_instr_start[lj++] = 0;
    gen_opc_jump_pc[0] = dc->jump_pc[0];
    gen_opc_jump_pc[1] = dc->jump_pc[1];
    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point: translate one TB (normal mode, spc == 0).  */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
    gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: translate one TB in searched-PC mode (spc == 1),
   recording per-op pc/npc so a host PC can be mapped back to guest state.  */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
    gen_intermediate_code_internal(tb, 1, env);
/* One-time translator setup: register every CPUState field the translator
   accesses as a named TCG global so generated code can read/write it
   directly.  (The guard flag and several name tables are elided from this
   extraction.)  */
void gen_intermediate_code_init(CPUSPARCState *env)
    static const char * const gregnames[8] = {
        NULL, // g0 not used
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    /* init various static tables */
        /* env pointer lives in a fixed host register (TCG_AREG0) */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
#ifdef TARGET_SPARC64
        /* sparc64-only state: extended ccodes, asi, fprs, tick compare
           registers and hypervisor registers */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
        /* state common to sparc32 and sparc64 */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
        /* %g1..%g7 as named globals (%g0 is hardwired to zero, skipped) */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
        /* register helpers */
#define GEN_HELPER 2
/* Restore guest pc/npc from the opc bookkeeping arrays after an exception
   at host address searched_pc.  npc uses the encoding from the top of this
   file: 1 == DYNAMIC_PC (npc already stored in env), 2 == JUMP_PC (resolve
   via the saved branch condition in puc).  NOTE: the function's tail (the
   plain-npc else branch and closing brace) is elided from this extraction.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    /* dynamic NPC (npc == 1): already stored in env, nothing to do */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        env->npc = gen_opc_jump_pc[0];
        env->npc = gen_opc_jump_pc[1];