4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
/* Sentinel values stored in DisasContext.pc/npc when the PC is not a
   compile-time constant.  NOTE(review): this extraction is missing some
   lines in this region (e.g. additional globals and the #endif matching
   the CONFIG_USER_ONLY guard) -- confirm against the full file. */
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
/* Translation-time TCG handles onto fields of the CPU state structure. */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
/* SPARC64-only state: 64-bit condition codes, ASI, FP enable bits. */
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
/* Per-translation-block decoder state.  NOTE(review): the closing brace
   and some members appear to be missing from this view. */
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
/* non-zero when 64-bit addresses must be truncated to 32 bits (PSTATE.AM) */
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
/* Instruction-field extraction.  GET_FIELD numbers bits big-endian style
   (bit 0 is the MSB, as in the decoder tables); GET_FIELD_SP uses the
   SPARC manuals' convention (bit 0 is the LSB). */
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
/* Signed variants: extract the field, then sign-extend from its width. */
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Double/quad FP register number decoding.  NOTE(review): the two pairs
   below presumably sit in #ifdef TARGET_SPARC64 / #else branches whose
   directives are missing from this extraction -- confirm. */
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
/* Masks applied to trap numbers: UA2005 hypervisor traps use 8 bits,
   SPARC V8 software traps use 7. */
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full signed int.
 *
 * The LEN-bit input is interpreted as two's complement: bit LEN-1 is the
 * sign bit.  Used by GET_FIELDs/GET_FIELD_SPs on decoded immediates.
 *
 * Fix: the previous body computed (x << len) >> len, i.e. it shifted by
 * the field WIDTH instead of 32 - width, which discarded the low bits of
 * the field rather than extending its sign.  The mask/xor/subtract idiom
 * below is also free of the undefined behaviour of left-shifting a
 * signed value.
 */
static int sign_extend(int x, int len)
{
    unsigned int sign = 1u << (len - 1);
    /* Low LEN bits; wraps to all-ones when len == 32. */
    unsigned int mask = (sign << 1) - 1;

    return (int)((((unsigned int)x & mask) ^ sign) - sign);
}
/* Non-zero when bit 13 (the "i" field) of the instruction is set,
   selecting the register+immediate operand form. */
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
/* Privilege predicates.  NOTE(review): #else/#endif lines for these
   conditional blocks are missing from this extraction -- confirm. */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
/* AM_CHECK: non-zero when addresses must be masked to 32 bits. */
189 #ifdef TARGET_SPARC64
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
193 #define AM_CHECK(dc) (1)
/* Truncate addr to 32 bits when PSTATE.AM masking applies (sparc64). */
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL)
/* Load window register `reg` into tn: %g0 reads as 0, %g1-%g7 come from
   the cpu_gregs globals, windowed regs are loaded via cpu_regwptr.
   NOTE(review): the if/else lines are missing from this view. */
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 tcg_gen_movi_tl(tn, 0);
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Store tn into window register `reg`; writes to %g0 are discarded. */
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Emit a jump to (pc, npc).  When both targets lie on the same guest
   page as this TB, chain directly with goto_tb; otherwise fall back to
   setting pc/npc and exiting.  NOTE(review): the tb assignment, the else
   branch and the final exit_tb appear to be missing from this view. */
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
/* Reset all 32-bit (icc) condition codes before recomputing them. */
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr, 0);
282 #ifdef TARGET_SPARC64
/* Reset all 64-bit (xcc) condition codes (sparc64 only). */
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc, 0);
/* Reference pseudo-code preserved from the old micro-op comment: */
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
/* Set icc N and Z from the low 32 bits of dst.
   NOTE(review): the gen_set_label(l1)/gen_set_label(l2) lines and local
   declarations are missing from this extraction -- the branches below
   are only meaningful with those labels placed after each ori. */
295 static inline void gen_cc_NZ_icc(TCGv dst)
300 l1 = gen_new_label();
301 l2 = gen_new_label();
302 r_temp = tcg_temp_new();
303 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
307 tcg_gen_ext32s_tl(r_temp, dst);
308 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
311 tcg_temp_free(r_temp);
314 #ifdef TARGET_SPARC64
/* Set xcc N and Z from the full 64-bit dst (sparc64 only). */
315 static inline void gen_cc_NZ_xcc(TCGv dst)
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
324 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
/* Reference pseudo-code preserved from the old micro-op comment: */
332 env->psr |= PSR_CARRY;
/* Set icc C after an addition: carry out iff (uint32_t)dst < (uint32_t)src1.
   NOTE(review): the gen_set_label(l1) line is missing from this view. */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
336 TCGv r_temp1, r_temp2;
339 l1 = gen_new_label();
340 r_temp1 = tcg_temp_new();
341 r_temp2 = tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
347 tcg_temp_free(r_temp1);
348 tcg_temp_free(r_temp2);
351 #ifdef TARGET_SPARC64
/* Same carry test on the full 64-bit values for xcc (sparc64 only). */
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
356 l1 = gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
/* Reference pseudo-code for signed add overflow: */
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
/* Set icc V after an addition: overflow iff the operands agree in sign
   but the result's sign differs (bit 31). */
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
371 r_temp = tcg_temp_new();
372 tcg_gen_xor_tl(r_temp, src1, src2);
373 tcg_gen_not_tl(r_temp, r_temp);
374 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
/* move the overflow bit from position 31 into PSR_OVF's position */
377 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp)
379 tcg_temp_free(r_temp);
380 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
383 #ifdef TARGET_SPARC64
/* Same overflow test on bit 63 for xcc (sparc64 only). */
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
388 r_temp = tcg_temp_new();
389 tcg_gen_xor_tl(r_temp, src1, src2);
390 tcg_gen_not_tl(r_temp, r_temp);
391 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396 tcg_temp_free(r_temp);
397 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
/* Raise a TT_TOVF trap when a (tagged) addition overflowed in 32 bits.
   NOTE(review): the gen_set_label(l1) line after the helper call is
   missing from this extraction. */
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
407 l1 = gen_new_label();
409 r_temp = tcg_temp_new();
410 tcg_gen_xor_tl(r_temp, src1, src2);
411 tcg_gen_not_tl(r_temp, r_temp);
412 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416 r_const = tcg_const_i32(TT_TOVF);
417 gen_helper_raise_exception(r_const);
418 tcg_temp_free_i32(r_const);
420 tcg_temp_free(r_temp);
/* Set icc V when either tagged operand has its low 2 tag bits set. */
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
427 l1 = gen_new_label();
428 tcg_gen_or_tl(cpu_tmp0, src1, src2);
429 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
/* Trapping variant: raise TT_TOVF when either tag is non-zero. */
435 static inline void gen_tag_tv(TCGv src1, TCGv src2)
440 l1 = gen_new_label();
441 tcg_gen_or_tl(cpu_tmp0, src1, src2);
442 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
443 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
444 r_const = tcg_const_i32(TT_TOVF);
445 gen_helper_raise_exception(r_const);
446 tcg_temp_free_i32(r_const);
450 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
452 tcg_gen_mov_tl(cpu_cc_src, src1);
453 tcg_gen_movi_tl(cpu_cc_src2, src2);
454 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
455 tcg_gen_mov_tl(dst, cpu_cc_dst);
458 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
460 tcg_gen_mov_tl(cpu_cc_src, src1);
461 tcg_gen_mov_tl(cpu_cc_src2, src2);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463 tcg_gen_mov_tl(dst, cpu_cc_dst);
466 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
468 tcg_gen_mov_tl(cpu_cc_src, src1);
469 tcg_gen_movi_tl(cpu_cc_src2, src2);
470 gen_mov_reg_C(cpu_tmp0, cpu_psr);
471 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
472 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
473 tcg_gen_mov_tl(dst, cpu_cc_dst);
476 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
478 tcg_gen_mov_tl(cpu_cc_src, src1);
479 tcg_gen_mov_tl(cpu_cc_src2, src2);
480 gen_mov_reg_C(cpu_tmp0, cpu_psr);
481 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
482 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
483 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged add, setting all condition codes eagerly (icc, tag overflow,
   and on sparc64 also xcc).  NOTE(review): the gen_cc_clear_* calls and
   the #endif are missing from this extraction. */
486 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
488 tcg_gen_mov_tl(cpu_cc_src, src1);
489 tcg_gen_mov_tl(cpu_cc_src2, src2);
490 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
492 gen_cc_NZ_icc(cpu_cc_dst);
493 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
494 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
496 #ifdef TARGET_SPARC64
498 gen_cc_NZ_xcc(cpu_cc_dst);
499 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
500 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Trapping tagged add: traps on set tags or signed overflow instead of
   setting PSR_OVF. */
505 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
507 tcg_gen_mov_tl(cpu_cc_src, src1);
508 tcg_gen_mov_tl(cpu_cc_src2, src2);
509 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
510 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
513 gen_cc_NZ_icc(cpu_cc_dst);
514 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515 #ifdef TARGET_SPARC64
517 gen_cc_NZ_xcc(cpu_cc_dst);
518 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
519 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
521 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Reference pseudo-code preserved from the old micro-op comment: */
526 env->psr |= PSR_CARRY;
/* Set icc C after a subtraction: borrow iff (uint32_t)src1 < (uint32_t)src2.
   NOTE(review): gen_set_label(l1) lines are missing throughout this
   region in the extraction. */
528 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
530 TCGv r_temp1, r_temp2;
533 l1 = gen_new_label();
534 r_temp1 = tcg_temp_new();
535 r_temp2 = tcg_temp_new();
536 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
537 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
538 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
539 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
541 tcg_temp_free(r_temp1);
542 tcg_temp_free(r_temp2);
545 #ifdef TARGET_SPARC64
/* Same borrow test on the full 64-bit operands for xcc. */
546 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
550 l1 = gen_new_label();
551 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
552 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
/* Reference pseudo-code for signed subtract overflow: */
558 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
/* Set icc V after a subtraction: overflow iff the operands differ in
   sign and the result's sign differs from src1 (bit 31). */
561 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
565 r_temp = tcg_temp_new();
566 tcg_gen_xor_tl(r_temp, src1, src2);
567 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
568 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
569 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
570 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
571 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
572 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
573 tcg_temp_free(r_temp);
576 #ifdef TARGET_SPARC64
/* Same overflow test on bit 63 for xcc (sparc64 only). */
577 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
581 r_temp = tcg_temp_new();
582 tcg_gen_xor_tl(r_temp, src1, src2);
583 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
584 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
585 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
586 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
587 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
588 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
589 tcg_temp_free(r_temp);
/* Raise TT_TOVF when a tagged subtraction overflowed in 32 bits. */
593 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
599 l1 = gen_new_label();
601 r_temp = tcg_temp_new();
602 tcg_gen_xor_tl(r_temp, src1, src2);
603 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
604 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
605 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
606 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
607 r_const = tcg_const_i32(TT_TOVF);
608 gen_helper_raise_exception(r_const);
609 tcg_temp_free_i32(r_const);
611 tcg_temp_free(r_temp);
/* Subtract immediate with lazy condition codes.  A zero immediate
   degenerates to a move, recorded as CC_OP_LOGIC; otherwise the operands
   are staged and CC_OP_SUB recorded.  NOTE(review): the `if (src2 == 0)`
   / `else` lines of this function are missing from this extraction. */
614 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
616 tcg_gen_mov_tl(cpu_cc_src, src1);
617 tcg_gen_movi_tl(cpu_cc_src2, src2);
619 tcg_gen_mov_tl(cpu_cc_dst, src1);
620 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
621 dc->cc_op = CC_OP_LOGIC;
623 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
624 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
625 dc->cc_op = CC_OP_SUB;
627 tcg_gen_mov_tl(dst, cpu_cc_dst);
630 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
632 tcg_gen_mov_tl(cpu_cc_src, src1);
633 tcg_gen_mov_tl(cpu_cc_src2, src2);
634 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
635 tcg_gen_mov_tl(dst, cpu_cc_dst);
638 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
640 tcg_gen_mov_tl(cpu_cc_src, src1);
641 tcg_gen_movi_tl(cpu_cc_src2, src2);
642 gen_mov_reg_C(cpu_tmp0, cpu_psr);
643 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
644 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
645 tcg_gen_mov_tl(dst, cpu_cc_dst);
648 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
650 tcg_gen_mov_tl(cpu_cc_src, src1);
651 tcg_gen_mov_tl(cpu_cc_src2, src2);
652 gen_mov_reg_C(cpu_tmp0, cpu_psr);
653 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
654 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
655 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged subtract, setting all condition codes eagerly.  NOTE(review):
   the gen_cc_clear_* calls and #endif are missing from this view. */
658 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
660 tcg_gen_mov_tl(cpu_cc_src, src1);
661 tcg_gen_mov_tl(cpu_cc_src2, src2);
662 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
664 gen_cc_NZ_icc(cpu_cc_dst);
665 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
666 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
668 #ifdef TARGET_SPARC64
670 gen_cc_NZ_xcc(cpu_cc_dst);
671 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
672 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
674 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Trapping tagged subtract: traps on set tags or signed overflow. */
677 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
679 tcg_gen_mov_tl(cpu_cc_src, src1);
680 tcg_gen_mov_tl(cpu_cc_src2, src2);
681 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
682 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
683 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
685 gen_cc_NZ_icc(cpu_cc_dst);
686 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
687 #ifdef TARGET_SPARC64
689 gen_cc_NZ_xcc(cpu_cc_dst);
690 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
691 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
693 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* One step of the V8 MULScc multiply-step instruction: conditionally
   zero the addend depending on Y bit 0, shift the N^V flag into the
   partial product, update Y, then add and set icc.  NOTE(review): the
   gen_set_label(l1) line and the cc-clear call are missing from this
   extraction. */
696 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
701 l1 = gen_new_label();
702 r_temp = tcg_temp_new();
/* addend is src2 when Y<0> is set, else 0 */
708 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
709 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
710 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
711 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
712 tcg_gen_movi_tl(cpu_cc_src2, 0);
716 // env->y = (b2 << 31) | (env->y >> 1);
717 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
718 tcg_gen_shli_tl(r_temp, r_temp, 31);
719 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
720 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
721 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
722 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
/* b1 = N ^ V (the sign of the signed partial product) */
725 gen_mov_reg_N(cpu_tmp0, cpu_psr);
726 gen_mov_reg_V(r_temp, cpu_psr);
727 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
728 tcg_temp_free(r_temp);
730 // T0 = (b1 << 31) | (T0 >> 1);
732 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
733 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
734 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
736 /* do addition and update flags */
737 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
740 gen_cc_NZ_icc(cpu_cc_dst);
741 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
742 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
743 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* 32x32 -> 64 unsigned multiply: result (low 64 bits) in dst, high 32
   bits also written to Y.  NOTE(review): the #else line between the two
   dst writes is missing from this extraction. */
746 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
748 TCGv_i64 r_temp, r_temp2;
750 r_temp = tcg_temp_new_i64();
751 r_temp2 = tcg_temp_new_i64();
/* zero-extend both operands to 64 bits and multiply */
753 tcg_gen_extu_tl_i64(r_temp, src2);
754 tcg_gen_extu_tl_i64(r_temp2, src1);
755 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
/* Y <- high 32 bits of the product */
757 tcg_gen_shri_i64(r_temp, r_temp2, 32);
758 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
759 tcg_temp_free_i64(r_temp);
760 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
761 #ifdef TARGET_SPARC64
762 tcg_gen_mov_i64(dst, r_temp2);
764 tcg_gen_trunc_i64_tl(dst, r_temp2);
766 tcg_temp_free_i64(r_temp2);
/* 32x32 -> 64 signed multiply: same shape but with sign extension. */
769 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
771 TCGv_i64 r_temp, r_temp2;
773 r_temp = tcg_temp_new_i64();
774 r_temp2 = tcg_temp_new_i64();
776 tcg_gen_ext_tl_i64(r_temp, src2);
777 tcg_gen_ext_tl_i64(r_temp2, src1);
778 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
780 tcg_gen_shri_i64(r_temp, r_temp2, 32);
781 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
782 tcg_temp_free_i64(r_temp);
783 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
784 #ifdef TARGET_SPARC64
785 tcg_gen_mov_i64(dst, r_temp2);
787 tcg_gen_trunc_i64_tl(dst, r_temp2);
789 tcg_temp_free_i64(r_temp2);
792 #ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO when the divisor is zero (sparc64 only).
   NOTE(review): gen_set_label(l1) lines are missing in this region. */
793 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
798 l1 = gen_new_label();
799 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
800 r_const = tcg_const_i32(TT_DIV_ZERO);
801 gen_helper_raise_exception(r_const);
802 tcg_temp_free_i32(r_const);
/* 64-bit signed divide: traps on divide-by-zero and pins the one
   overflowing case INT64_MIN / -1 to INT64_MIN instead of trapping. */
806 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
810 l1 = gen_new_label();
811 l2 = gen_new_label();
812 tcg_gen_mov_tl(cpu_cc_src, src1);
813 tcg_gen_mov_tl(cpu_cc_src2, src2);
814 gen_trap_ifdivzero_tl(cpu_cc_src2);
815 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
816 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
817 tcg_gen_movi_i64(dst, INT64_MIN);
820 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
/* Set icc after a divide: N/Z from the result, V when the quotient was
   flagged as overflowed (cpu_cc_src2 non-zero by convention here). */
825 static inline void gen_op_div_cc(TCGv dst)
829 tcg_gen_mov_tl(cpu_cc_dst, dst);
831 gen_cc_NZ_icc(cpu_cc_dst);
832 l1 = gen_new_label();
833 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
834 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
839 static inline void gen_op_eval_ba(TCGv dst)
841 tcg_gen_movi_tl(dst, 1);
845 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
847 gen_mov_reg_Z(dst, src);
851 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
853 gen_mov_reg_N(cpu_tmp0, src);
854 gen_mov_reg_V(dst, src);
855 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
856 gen_mov_reg_Z(cpu_tmp0, src);
857 tcg_gen_or_tl(dst, dst, cpu_tmp0);
861 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
863 gen_mov_reg_V(cpu_tmp0, src);
864 gen_mov_reg_N(dst, src);
865 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
869 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
871 gen_mov_reg_Z(cpu_tmp0, src);
872 gen_mov_reg_C(dst, src);
873 tcg_gen_or_tl(dst, dst, cpu_tmp0);
877 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
879 gen_mov_reg_C(dst, src);
883 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
885 gen_mov_reg_V(dst, src);
889 static inline void gen_op_eval_bn(TCGv dst)
891 tcg_gen_movi_tl(dst, 0);
895 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
897 gen_mov_reg_N(dst, src);
901 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
903 gen_mov_reg_Z(dst, src);
904 tcg_gen_xori_tl(dst, dst, 0x1);
908 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
910 gen_mov_reg_N(cpu_tmp0, src);
911 gen_mov_reg_V(dst, src);
912 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
913 gen_mov_reg_Z(cpu_tmp0, src);
914 tcg_gen_or_tl(dst, dst, cpu_tmp0);
915 tcg_gen_xori_tl(dst, dst, 0x1);
919 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
921 gen_mov_reg_V(cpu_tmp0, src);
922 gen_mov_reg_N(dst, src);
923 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
924 tcg_gen_xori_tl(dst, dst, 0x1);
928 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
930 gen_mov_reg_Z(cpu_tmp0, src);
931 gen_mov_reg_C(dst, src);
932 tcg_gen_or_tl(dst, dst, cpu_tmp0);
933 tcg_gen_xori_tl(dst, dst, 0x1);
937 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
939 gen_mov_reg_C(dst, src);
940 tcg_gen_xori_tl(dst, dst, 0x1);
944 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
946 gen_mov_reg_N(dst, src);
947 tcg_gen_xori_tl(dst, dst, 0x1);
951 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
953 gen_mov_reg_V(dst, src);
954 tcg_gen_xori_tl(dst, dst, 0x1);
958 FPSR bit field FCC1 | FCC0:
964 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
965 unsigned int fcc_offset)
967 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
968 tcg_gen_andi_tl(reg, reg, 0x1);
971 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
972 unsigned int fcc_offset)
974 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
975 tcg_gen_andi_tl(reg, reg, 0x1);
979 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
980 unsigned int fcc_offset)
982 gen_mov_reg_FCC0(dst, src, fcc_offset);
983 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
984 tcg_gen_or_tl(dst, dst, cpu_tmp0);
987 // 1 or 2: FCC0 ^ FCC1
988 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
989 unsigned int fcc_offset)
991 gen_mov_reg_FCC0(dst, src, fcc_offset);
992 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
993 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
997 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
998 unsigned int fcc_offset)
1000 gen_mov_reg_FCC0(dst, src, fcc_offset);
1004 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1005 unsigned int fcc_offset)
1007 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1010 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1014 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC1(dst, src, fcc_offset);
1021 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1022 unsigned int fcc_offset)
1024 gen_mov_reg_FCC0(dst, src, fcc_offset);
1025 tcg_gen_xori_tl(dst, dst, 0x1);
1026 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1027 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1031 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1032 unsigned int fcc_offset)
1034 gen_mov_reg_FCC0(dst, src, fcc_offset);
1035 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1036 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039 // 0: !(FCC0 | FCC1)
1040 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1041 unsigned int fcc_offset)
1043 gen_mov_reg_FCC0(dst, src, fcc_offset);
1044 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1045 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1046 tcg_gen_xori_tl(dst, dst, 0x1);
1049 // 0 or 3: !(FCC0 ^ FCC1)
1050 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1051 unsigned int fcc_offset)
1053 gen_mov_reg_FCC0(dst, src, fcc_offset);
1054 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1055 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1056 tcg_gen_xori_tl(dst, dst, 0x1);
1060 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1061 unsigned int fcc_offset)
1063 gen_mov_reg_FCC0(dst, src, fcc_offset);
1064 tcg_gen_xori_tl(dst, dst, 0x1);
1067 // !1: !(FCC0 & !FCC1)
1068 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1069 unsigned int fcc_offset)
1071 gen_mov_reg_FCC0(dst, src, fcc_offset);
1072 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1073 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1074 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1075 tcg_gen_xori_tl(dst, dst, 0x1);
1079 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1080 unsigned int fcc_offset)
1082 gen_mov_reg_FCC1(dst, src, fcc_offset);
1083 tcg_gen_xori_tl(dst, dst, 0x1);
1086 // !2: !(!FCC0 & FCC1)
1087 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1088 unsigned int fcc_offset)
1090 gen_mov_reg_FCC0(dst, src, fcc_offset);
1091 tcg_gen_xori_tl(dst, dst, 0x1);
1092 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1093 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1094 tcg_gen_xori_tl(dst, dst, 0x1);
1097 // !3: !(FCC0 & FCC1)
1098 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1099 unsigned int fcc_offset)
1101 gen_mov_reg_FCC0(dst, src, fcc_offset);
1102 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1103 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1104 tcg_gen_xori_tl(dst, dst, 0x1);
/* Two-way branch: emit TB exits to pc1 (condition true) and pc2.
   NOTE(review): the gen_set_label(l1) line between the two gen_goto_tb
   calls is missing from this extraction. */
1107 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1108 target_ulong pc2, TCGv r_cond)
1112 l1 = gen_new_label();
1114 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1116 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1119 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Annulled branch: taken -> delay slot executed then pc1; not taken ->
   delay slot skipped (pc2 + 4). */
1122 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1123 target_ulong pc2, TCGv r_cond)
1127 l1 = gen_new_label();
1129 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1131 gen_goto_tb(dc, 0, pc2, pc1);
1134 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize the pending JUMP_PC choice into cpu_npc at runtime. */
1137 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1142 l1 = gen_new_label();
1143 l2 = gen_new_label();
1145 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1147 tcg_gen_movi_tl(cpu_npc, npc1);
1151 tcg_gen_movi_tl(cpu_npc, npc2);
1155 /* call this function before using the condition register as it may
1156 have been set for a jump */
1157 static inline void flush_cond(DisasContext *dc, TCGv cond)
1159 if (dc->npc == JUMP_PC) {
1160 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1161 dc->npc = DYNAMIC_PC;
/* Commit the symbolic npc into cpu_npc (resolving a pending JUMP_PC). */
1165 static inline void save_npc(DisasContext *dc, TCGv cond)
1167 if (dc->npc == JUMP_PC) {
1168 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1169 dc->npc = DYNAMIC_PC;
1170 } else if (dc->npc != DYNAMIC_PC) {
1171 tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Commit both pc and npc so a helper may raise an exception.
   NOTE(review): the save_npc() call inside this function is missing
   from this extraction. */
1175 static inline void save_state(DisasContext *dc, TCGv cond)
1177 tcg_gen_movi_tl(cpu_pc, dc->pc);
/* pc <- npc, resolving a dynamic or two-valued npc first. */
1181 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1183 if (dc->npc == JUMP_PC) {
1184 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1185 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1186 dc->pc = DYNAMIC_PC;
1187 } else if (dc->npc == DYNAMIC_PC) {
1188 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1189 dc->pc = DYNAMIC_PC;
1195 static inline void gen_op_next_insn(void)
1197 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1198 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer branch condition `cond` against icc or xcc into
   r_dst, forcing lazy flags to be materialized first.  NOTE(review):
   the switch's `case` labels and `break`s are missing from this
   extraction; each gen_op_eval_* below corresponds to one condition
   value (0x0 bn ... 0xf bvc). */
1201 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1206 #ifdef TARGET_SPARC64
1214 switch (dc->cc_op) {
/* flags are lazy: compute PSR from cc_src/cc_src2/cc_dst first */
1218 gen_helper_compute_psr();
1219 dc->cc_op = CC_OP_FLAGS;
1224 gen_op_eval_bn(r_dst);
1227 gen_op_eval_be(r_dst, r_src);
1230 gen_op_eval_ble(r_dst, r_src);
1233 gen_op_eval_bl(r_dst, r_src);
1236 gen_op_eval_bleu(r_dst, r_src);
1239 gen_op_eval_bcs(r_dst, r_src);
1242 gen_op_eval_bneg(r_dst, r_src);
1245 gen_op_eval_bvs(r_dst, r_src);
1248 gen_op_eval_ba(r_dst);
1251 gen_op_eval_bne(r_dst, r_src);
1254 gen_op_eval_bg(r_dst, r_src);
1257 gen_op_eval_bge(r_dst, r_src);
1260 gen_op_eval_bgu(r_dst, r_src);
1263 gen_op_eval_bcc(r_dst, r_src);
1266 gen_op_eval_bpos(r_dst, r_src);
1269 gen_op_eval_bvc(r_dst, r_src);
/* Evaluate FP branch condition `cond` against fcc[cc] into r_dst.
   NOTE(review): the offset computation and case labels are missing. */
1274 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1276 unsigned int offset;
1296 gen_op_eval_bn(r_dst);
1299 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1302 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1305 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1308 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1311 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1314 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1317 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1320 gen_op_eval_ba(r_dst);
1323 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1326 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1329 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1332 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1335 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1338 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1341 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1346 #ifdef TARGET_SPARC64
/* Map the 3-bit BPr register-condition field to TCG compare conditions.
   NOTE(review): the table's initializer entries are missing from this
   extraction. */
1348 static const int gen_tcg_cond_reg[8] = {
/* r_dst <- (r_src <cond> 0) ? 1 : 0, for register-conditional branches.
   NOTE(review): the gen_set_label(l1) line is missing here. */
1359 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1363 l1 = gen_new_label();
1364 tcg_gen_movi_tl(r_dst, 0);
1365 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1366 tcg_gen_movi_tl(r_dst, 1);
1371 /* XXX: potentially incorrect if dynamic npc */
/* Decode and emit an integer conditional branch (Bicc/BPcc).  Handles
   unconditional taken/not-taken with annul, and conditional branches by
   either emitting both exits (annulled) or deferring via JUMP_PC.
   NOTE(review): several structural lines (the `if (cond == 0x0)` test,
   annul handling, and the JUMP_PC assignment to dc->npc) are missing
   from this extraction. */
1372 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1375 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1376 target_ulong target = dc->pc + offset;
1379 /* unconditional not taken */
1381 dc->pc = dc->npc + 4;
1382 dc->npc = dc->pc + 4;
1385 dc->npc = dc->pc + 4;
1387 } else if (cond == 0x8) {
1388 /* unconditional taken */
1391 dc->npc = dc->pc + 4;
1397 flush_cond(dc, r_cond);
1398 gen_cond(r_cond, cc, cond, dc);
1400 gen_branch_a(dc, target, dc->npc, r_cond);
1404 dc->jump_pc[0] = target;
1405 dc->jump_pc[1] = dc->npc + 4;
1411 /* XXX: potentially incorrect if dynamic npc */
/* Same structure as do_branch but for FP conditional branches (FBfcc),
   using gen_fcond on the selected fcc field. */
1412 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1415 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1416 target_ulong target = dc->pc + offset;
1419 /* unconditional not taken */
1421 dc->pc = dc->npc + 4;
1422 dc->npc = dc->pc + 4;
1425 dc->npc = dc->pc + 4;
1427 } else if (cond == 0x8) {
1428 /* unconditional taken */
1431 dc->npc = dc->pc + 4;
1437 flush_cond(dc, r_cond);
1438 gen_fcond(r_cond, cc, cond);
1440 gen_branch_a(dc, target, dc->npc, r_cond);
1444 dc->jump_pc[0] = target;
1445 dc->jump_pc[1] = dc->npc + 4;
1451 #ifdef TARGET_SPARC64
1452 /* XXX: potentially incorrect if dynamic npc */
1453 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1454 TCGv r_cond, TCGv r_reg)
1456 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1457 target_ulong target = dc->pc + offset;
1459 flush_cond(dc, r_cond);
1460 gen_cond_reg(r_cond, cond, r_reg);
1462 gen_branch_a(dc, target, dc->npc, r_cond);
1466 dc->jump_pc[0] = target;
1467 dc->jump_pc[1] = dc->npc + 4;
1472 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1476 gen_helper_fcmps(r_rs1, r_rs2);
1479 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1482 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1485 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
/* FCMPd: double-precision FP compare on DT0/DT1, result to fcc[FCCNO]. */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
/* FCMPq: quad-precision FP compare on QT0/QT1, result to fcc[FCCNO]. */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
1526 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1530 gen_helper_fcmpes(r_rs1, r_rs2);
1533 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1536 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1539 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
/* FCMPEd: double-precision compare that signals on unordered operands. */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* FCMPEq: quad-precision compare that signals on unordered operands. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1582 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1584 gen_helper_fcmps(r_rs1, r_rs2);
1587 static inline void gen_op_fcmpd(int fccno)
1592 static inline void gen_op_fcmpq(int fccno)
1597 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1599 gen_helper_fcmpes(r_rs1, r_rs2);
/* SPARC32 signaling double compare; fccno is ignored (single fcc). */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* SPARC32 signaling quad compare; fccno is ignored (single fcc). */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1613 static inline void gen_op_fpexception_im(int fsr_flags)
1617 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1618 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1619 r_const = tcg_const_i32(TT_FP_EXCP);
1620 gen_helper_raise_exception(r_const);
1621 tcg_temp_free_i32(r_const);
1624 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1626 #if !defined(CONFIG_USER_ONLY)
1627 if (!dc->fpu_enabled) {
1630 save_state(dc, r_cond);
1631 r_const = tcg_const_i32(TT_NFPU_INSN);
1632 gen_helper_raise_exception(r_const);
1633 tcg_temp_free_i32(r_const);
1641 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1643 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat accrued exception flags before an FP operation so
   gen_helper_check_ieee_exceptions sees only the new ones. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1652 #ifdef TARGET_SPARC64
1653 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1659 r_asi = tcg_temp_new_i32();
1660 tcg_gen_mov_i32(r_asi, cpu_asi);
1662 asi = GET_FIELD(insn, 19, 26);
1663 r_asi = tcg_const_i32(asi);
1668 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1671 TCGv_i32 r_asi, r_size, r_sign;
1673 r_asi = gen_get_asi(insn, addr);
1674 r_size = tcg_const_i32(size);
1675 r_sign = tcg_const_i32(sign);
1676 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1677 tcg_temp_free_i32(r_sign);
1678 tcg_temp_free_i32(r_size);
1679 tcg_temp_free_i32(r_asi);
1682 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1684 TCGv_i32 r_asi, r_size;
1686 r_asi = gen_get_asi(insn, addr);
1687 r_size = tcg_const_i32(size);
1688 gen_helper_st_asi(addr, src, r_asi, r_size);
1689 tcg_temp_free_i32(r_size);
1690 tcg_temp_free_i32(r_asi);
1693 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1695 TCGv_i32 r_asi, r_size, r_rd;
1697 r_asi = gen_get_asi(insn, addr);
1698 r_size = tcg_const_i32(size);
1699 r_rd = tcg_const_i32(rd);
1700 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1701 tcg_temp_free_i32(r_rd);
1702 tcg_temp_free_i32(r_size);
1703 tcg_temp_free_i32(r_asi);
1706 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1708 TCGv_i32 r_asi, r_size, r_rd;
1710 r_asi = gen_get_asi(insn, addr);
1711 r_size = tcg_const_i32(size);
1712 r_rd = tcg_const_i32(rd);
1713 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1714 tcg_temp_free_i32(r_rd);
1715 tcg_temp_free_i32(r_size);
1716 tcg_temp_free_i32(r_asi);
1719 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1721 TCGv_i32 r_asi, r_size, r_sign;
1723 r_asi = gen_get_asi(insn, addr);
1724 r_size = tcg_const_i32(4);
1725 r_sign = tcg_const_i32(0);
1726 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1727 tcg_temp_free_i32(r_sign);
1728 gen_helper_st_asi(addr, dst, r_asi, r_size);
1729 tcg_temp_free_i32(r_size);
1730 tcg_temp_free_i32(r_asi);
1731 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1734 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1736 TCGv_i32 r_asi, r_rd;
1738 r_asi = gen_get_asi(insn, addr);
1739 r_rd = tcg_const_i32(rd);
1740 gen_helper_ldda_asi(addr, r_asi, r_rd);
1741 tcg_temp_free_i32(r_rd);
1742 tcg_temp_free_i32(r_asi);
1745 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1747 TCGv_i32 r_asi, r_size;
1749 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1750 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1751 r_asi = gen_get_asi(insn, addr);
1752 r_size = tcg_const_i32(8);
1753 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1754 tcg_temp_free_i32(r_size);
1755 tcg_temp_free_i32(r_asi);
1758 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1764 r_val1 = tcg_temp_new();
1765 gen_movl_reg_TN(rd, r_val1);
1766 r_asi = gen_get_asi(insn, addr);
1767 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1768 tcg_temp_free_i32(r_asi);
1769 tcg_temp_free(r_val1);
1772 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1777 gen_movl_reg_TN(rd, cpu_tmp64);
1778 r_asi = gen_get_asi(insn, addr);
1779 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1780 tcg_temp_free_i32(r_asi);
1783 #elif !defined(CONFIG_USER_ONLY)
1785 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1788 TCGv_i32 r_asi, r_size, r_sign;
1790 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1791 r_size = tcg_const_i32(size);
1792 r_sign = tcg_const_i32(sign);
1793 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1794 tcg_temp_free(r_sign);
1795 tcg_temp_free(r_size);
1796 tcg_temp_free(r_asi);
1797 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1800 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1802 TCGv_i32 r_asi, r_size;
1804 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1805 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1806 r_size = tcg_const_i32(size);
1807 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1808 tcg_temp_free(r_size);
1809 tcg_temp_free(r_asi);
1812 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1814 TCGv_i32 r_asi, r_size, r_sign;
1817 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1818 r_size = tcg_const_i32(4);
1819 r_sign = tcg_const_i32(0);
1820 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1821 tcg_temp_free(r_sign);
1822 r_val = tcg_temp_new_i64();
1823 tcg_gen_extu_tl_i64(r_val, dst);
1824 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1825 tcg_temp_free_i64(r_val);
1826 tcg_temp_free(r_size);
1827 tcg_temp_free(r_asi);
1828 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1831 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1833 TCGv_i32 r_asi, r_size, r_sign;
1835 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1836 r_size = tcg_const_i32(8);
1837 r_sign = tcg_const_i32(0);
1838 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1839 tcg_temp_free(r_sign);
1840 tcg_temp_free(r_size);
1841 tcg_temp_free(r_asi);
1842 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1843 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1844 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1845 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1846 gen_movl_TN_reg(rd, hi);
1849 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1851 TCGv_i32 r_asi, r_size;
1853 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1854 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1855 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1856 r_size = tcg_const_i32(8);
1857 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1858 tcg_temp_free(r_size);
1859 tcg_temp_free(r_asi);
1863 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1864 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1867 TCGv_i32 r_asi, r_size;
1869 gen_ld_asi(dst, addr, insn, 1, 0);
1871 r_val = tcg_const_i64(0xffULL);
1872 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1873 r_size = tcg_const_i32(1);
1874 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1875 tcg_temp_free_i32(r_size);
1876 tcg_temp_free_i32(r_asi);
1877 tcg_temp_free_i64(r_val);
1881 static inline TCGv get_src1(unsigned int insn, TCGv def)
1886 rs1 = GET_FIELD(insn, 13, 17);
1888 r_rs1 = tcg_const_tl(0); // XXX how to free?
1890 r_rs1 = cpu_gregs[rs1];
1892 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1896 static inline TCGv get_src2(unsigned int insn, TCGv def)
1900 if (IS_IMM) { /* immediate */
1903 simm = GET_FIELDs(insn, 19, 31);
1904 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1905 } else { /* register */
1908 rs2 = GET_FIELD(insn, 27, 31);
1910 r_rs2 = tcg_const_tl(0); // XXX how to free?
1912 r_rs2 = cpu_gregs[rs2];
1914 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* Bail out of instruction decoding when the modelled CPU lacks the given
   integer-unit or FPU feature; jumps to the decoder's error labels. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1926 /* before an instruction, dc->pc must be static */
1927 static void disas_sparc_insn(DisasContext * dc)
1929 unsigned int insn, opc, rs1, rs2, rd;
1932 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1933 tcg_gen_debug_insn_start(dc->pc);
1934 insn = ldl_code(dc->pc);
1935 opc = GET_FIELD(insn, 0, 1);
1937 rd = GET_FIELD(insn, 2, 6);
1939 cpu_src1 = tcg_temp_new(); // const
1940 cpu_src2 = tcg_temp_new(); // const
1943 case 0: /* branches/sethi */
1945 unsigned int xop = GET_FIELD(insn, 7, 9);
1948 #ifdef TARGET_SPARC64
1949 case 0x1: /* V9 BPcc */
1953 target = GET_FIELD_SP(insn, 0, 18);
1954 target = sign_extend(target, 18);
1956 cc = GET_FIELD_SP(insn, 20, 21);
1958 do_branch(dc, target, insn, 0, cpu_cond);
1960 do_branch(dc, target, insn, 1, cpu_cond);
1965 case 0x3: /* V9 BPr */
1967 target = GET_FIELD_SP(insn, 0, 13) |
1968 (GET_FIELD_SP(insn, 20, 21) << 14);
1969 target = sign_extend(target, 16);
1971 cpu_src1 = get_src1(insn, cpu_src1);
1972 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1975 case 0x5: /* V9 FBPcc */
1977 int cc = GET_FIELD_SP(insn, 20, 21);
1978 if (gen_trap_ifnofpu(dc, cpu_cond))
1980 target = GET_FIELD_SP(insn, 0, 18);
1981 target = sign_extend(target, 19);
1983 do_fbranch(dc, target, insn, cc, cpu_cond);
1987 case 0x7: /* CBN+x */
1992 case 0x2: /* BN+x */
1994 target = GET_FIELD(insn, 10, 31);
1995 target = sign_extend(target, 22);
1997 do_branch(dc, target, insn, 0, cpu_cond);
2000 case 0x6: /* FBN+x */
2002 if (gen_trap_ifnofpu(dc, cpu_cond))
2004 target = GET_FIELD(insn, 10, 31);
2005 target = sign_extend(target, 22);
2007 do_fbranch(dc, target, insn, 0, cpu_cond);
2010 case 0x4: /* SETHI */
2012 uint32_t value = GET_FIELD(insn, 10, 31);
2015 r_const = tcg_const_tl(value << 10);
2016 gen_movl_TN_reg(rd, r_const);
2017 tcg_temp_free(r_const);
2020 case 0x0: /* UNIMPL */
2029 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2032 r_const = tcg_const_tl(dc->pc);
2033 gen_movl_TN_reg(15, r_const);
2034 tcg_temp_free(r_const);
2036 gen_mov_pc_npc(dc, cpu_cond);
2040 case 2: /* FPU & Logical Operations */
2042 unsigned int xop = GET_FIELD(insn, 7, 12);
2043 if (xop == 0x3a) { /* generate trap */
2046 cpu_src1 = get_src1(insn, cpu_src1);
2048 rs2 = GET_FIELD(insn, 25, 31);
2049 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2051 rs2 = GET_FIELD(insn, 27, 31);
2053 gen_movl_reg_TN(rs2, cpu_src2);
2054 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2056 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2058 cond = GET_FIELD(insn, 3, 6);
2060 save_state(dc, cpu_cond);
2061 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2063 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2065 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2066 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2067 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2068 gen_helper_raise_exception(cpu_tmp32);
2069 } else if (cond != 0) {
2070 TCGv r_cond = tcg_temp_new();
2072 #ifdef TARGET_SPARC64
2074 int cc = GET_FIELD_SP(insn, 11, 12);
2076 save_state(dc, cpu_cond);
2078 gen_cond(r_cond, 0, cond, dc);
2080 gen_cond(r_cond, 1, cond, dc);
2084 save_state(dc, cpu_cond);
2085 gen_cond(r_cond, 0, cond, dc);
2087 l1 = gen_new_label();
2088 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2090 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2092 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2094 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2095 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2096 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2097 gen_helper_raise_exception(cpu_tmp32);
2100 tcg_temp_free(r_cond);
2106 } else if (xop == 0x28) {
2107 rs1 = GET_FIELD(insn, 13, 17);
2110 #ifndef TARGET_SPARC64
2111 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2112 manual, rdy on the microSPARC
2114 case 0x0f: /* stbar in the SPARCv8 manual,
2115 rdy on the microSPARC II */
2116 case 0x10 ... 0x1f: /* implementation-dependent in the
2117 SPARCv8 manual, rdy on the
2120 gen_movl_TN_reg(rd, cpu_y);
2122 #ifdef TARGET_SPARC64
2123 case 0x2: /* V9 rdccr */
2124 gen_helper_compute_psr();
2125 gen_helper_rdccr(cpu_dst);
2126 gen_movl_TN_reg(rd, cpu_dst);
2128 case 0x3: /* V9 rdasi */
2129 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2130 gen_movl_TN_reg(rd, cpu_dst);
2132 case 0x4: /* V9 rdtick */
2136 r_tickptr = tcg_temp_new_ptr();
2137 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2138 offsetof(CPUState, tick));
2139 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2140 tcg_temp_free_ptr(r_tickptr);
2141 gen_movl_TN_reg(rd, cpu_dst);
2144 case 0x5: /* V9 rdpc */
2148 r_const = tcg_const_tl(dc->pc);
2149 gen_movl_TN_reg(rd, r_const);
2150 tcg_temp_free(r_const);
2153 case 0x6: /* V9 rdfprs */
2154 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2155 gen_movl_TN_reg(rd, cpu_dst);
2157 case 0xf: /* V9 membar */
2158 break; /* no effect */
2159 case 0x13: /* Graphics Status */
2160 if (gen_trap_ifnofpu(dc, cpu_cond))
2162 gen_movl_TN_reg(rd, cpu_gsr);
2164 case 0x16: /* Softint */
2165 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2166 gen_movl_TN_reg(rd, cpu_dst);
2168 case 0x17: /* Tick compare */
2169 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2171 case 0x18: /* System tick */
2175 r_tickptr = tcg_temp_new_ptr();
2176 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2177 offsetof(CPUState, stick));
2178 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2179 tcg_temp_free_ptr(r_tickptr);
2180 gen_movl_TN_reg(rd, cpu_dst);
2183 case 0x19: /* System tick compare */
2184 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2186 case 0x10: /* Performance Control */
2187 case 0x11: /* Performance Instrumentation Counter */
2188 case 0x12: /* Dispatch Control */
2189 case 0x14: /* Softint set, WO */
2190 case 0x15: /* Softint clear, WO */
2195 #if !defined(CONFIG_USER_ONLY)
2196 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2197 #ifndef TARGET_SPARC64
2198 if (!supervisor(dc))
2200 gen_helper_compute_psr();
2201 dc->cc_op = CC_OP_FLAGS;
2202 gen_helper_rdpsr(cpu_dst);
2204 CHECK_IU_FEATURE(dc, HYPV);
2205 if (!hypervisor(dc))
2207 rs1 = GET_FIELD(insn, 13, 17);
2210 // gen_op_rdhpstate();
2213 // gen_op_rdhtstate();
2216 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2219 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2222 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2224 case 31: // hstick_cmpr
2225 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2231 gen_movl_TN_reg(rd, cpu_dst);
2233 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2234 if (!supervisor(dc))
2236 #ifdef TARGET_SPARC64
2237 rs1 = GET_FIELD(insn, 13, 17);
2243 r_tsptr = tcg_temp_new_ptr();
2244 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2245 offsetof(CPUState, tsptr));
2246 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2247 offsetof(trap_state, tpc));
2248 tcg_temp_free_ptr(r_tsptr);
2255 r_tsptr = tcg_temp_new_ptr();
2256 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2257 offsetof(CPUState, tsptr));
2258 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2259 offsetof(trap_state, tnpc));
2260 tcg_temp_free_ptr(r_tsptr);
2267 r_tsptr = tcg_temp_new_ptr();
2268 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2269 offsetof(CPUState, tsptr));
2270 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2271 offsetof(trap_state, tstate));
2272 tcg_temp_free_ptr(r_tsptr);
2279 r_tsptr = tcg_temp_new_ptr();
2280 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2281 offsetof(CPUState, tsptr));
2282 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2283 offsetof(trap_state, tt));
2284 tcg_temp_free_ptr(r_tsptr);
2285 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 r_tickptr = tcg_temp_new_ptr();
2293 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2294 offsetof(CPUState, tick));
2295 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2296 gen_movl_TN_reg(rd, cpu_tmp0);
2297 tcg_temp_free_ptr(r_tickptr);
2301 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2304 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2305 offsetof(CPUSPARCState, pstate));
2306 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2309 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2310 offsetof(CPUSPARCState, tl));
2311 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2314 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2315 offsetof(CPUSPARCState, psrpil));
2316 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2319 gen_helper_rdcwp(cpu_tmp0);
2322 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2323 offsetof(CPUSPARCState, cansave));
2324 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2326 case 11: // canrestore
2327 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2328 offsetof(CPUSPARCState, canrestore));
2329 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2331 case 12: // cleanwin
2332 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2333 offsetof(CPUSPARCState, cleanwin));
2334 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2336 case 13: // otherwin
2337 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2338 offsetof(CPUSPARCState, otherwin));
2339 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2342 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2343 offsetof(CPUSPARCState, wstate));
2344 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2346 case 16: // UA2005 gl
2347 CHECK_IU_FEATURE(dc, GL);
2348 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2349 offsetof(CPUSPARCState, gl));
2350 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2352 case 26: // UA2005 strand status
2353 CHECK_IU_FEATURE(dc, HYPV);
2354 if (!hypervisor(dc))
2356 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2359 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2366 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2368 gen_movl_TN_reg(rd, cpu_tmp0);
2370 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2371 #ifdef TARGET_SPARC64
2372 save_state(dc, cpu_cond);
2373 gen_helper_flushw();
2375 if (!supervisor(dc))
2377 gen_movl_TN_reg(rd, cpu_tbr);
2381 } else if (xop == 0x34) { /* FPU Operations */
2382 if (gen_trap_ifnofpu(dc, cpu_cond))
2384 gen_op_clear_ieee_excp_and_FTT();
2385 rs1 = GET_FIELD(insn, 13, 17);
2386 rs2 = GET_FIELD(insn, 27, 31);
2387 xop = GET_FIELD(insn, 18, 26);
2389 case 0x1: /* fmovs */
2390 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2392 case 0x5: /* fnegs */
2393 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2395 case 0x9: /* fabss */
2396 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2398 case 0x29: /* fsqrts */
2399 CHECK_FPU_FEATURE(dc, FSQRT);
2400 gen_clear_float_exceptions();
2401 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2402 gen_helper_check_ieee_exceptions();
2403 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2405 case 0x2a: /* fsqrtd */
2406 CHECK_FPU_FEATURE(dc, FSQRT);
2407 gen_op_load_fpr_DT1(DFPREG(rs2));
2408 gen_clear_float_exceptions();
2409 gen_helper_fsqrtd();
2410 gen_helper_check_ieee_exceptions();
2411 gen_op_store_DT0_fpr(DFPREG(rd));
2413 case 0x2b: /* fsqrtq */
2414 CHECK_FPU_FEATURE(dc, FLOAT128);
2415 gen_op_load_fpr_QT1(QFPREG(rs2));
2416 gen_clear_float_exceptions();
2417 gen_helper_fsqrtq();
2418 gen_helper_check_ieee_exceptions();
2419 gen_op_store_QT0_fpr(QFPREG(rd));
2421 case 0x41: /* fadds */
2422 gen_clear_float_exceptions();
2423 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2424 gen_helper_check_ieee_exceptions();
2425 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2427 case 0x42: /* faddd */
2428 gen_op_load_fpr_DT0(DFPREG(rs1));
2429 gen_op_load_fpr_DT1(DFPREG(rs2));
2430 gen_clear_float_exceptions();
2432 gen_helper_check_ieee_exceptions();
2433 gen_op_store_DT0_fpr(DFPREG(rd));
2435 case 0x43: /* faddq */
2436 CHECK_FPU_FEATURE(dc, FLOAT128);
2437 gen_op_load_fpr_QT0(QFPREG(rs1));
2438 gen_op_load_fpr_QT1(QFPREG(rs2));
2439 gen_clear_float_exceptions();
2441 gen_helper_check_ieee_exceptions();
2442 gen_op_store_QT0_fpr(QFPREG(rd));
2444 case 0x45: /* fsubs */
2445 gen_clear_float_exceptions();
2446 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2447 gen_helper_check_ieee_exceptions();
2448 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2450 case 0x46: /* fsubd */
2451 gen_op_load_fpr_DT0(DFPREG(rs1));
2452 gen_op_load_fpr_DT1(DFPREG(rs2));
2453 gen_clear_float_exceptions();
2455 gen_helper_check_ieee_exceptions();
2456 gen_op_store_DT0_fpr(DFPREG(rd));
2458 case 0x47: /* fsubq */
2459 CHECK_FPU_FEATURE(dc, FLOAT128);
2460 gen_op_load_fpr_QT0(QFPREG(rs1));
2461 gen_op_load_fpr_QT1(QFPREG(rs2));
2462 gen_clear_float_exceptions();
2464 gen_helper_check_ieee_exceptions();
2465 gen_op_store_QT0_fpr(QFPREG(rd));
2467 case 0x49: /* fmuls */
2468 CHECK_FPU_FEATURE(dc, FMUL);
2469 gen_clear_float_exceptions();
2470 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2471 gen_helper_check_ieee_exceptions();
2472 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2474 case 0x4a: /* fmuld */
2475 CHECK_FPU_FEATURE(dc, FMUL);
2476 gen_op_load_fpr_DT0(DFPREG(rs1));
2477 gen_op_load_fpr_DT1(DFPREG(rs2));
2478 gen_clear_float_exceptions();
2480 gen_helper_check_ieee_exceptions();
2481 gen_op_store_DT0_fpr(DFPREG(rd));
2483 case 0x4b: /* fmulq */
2484 CHECK_FPU_FEATURE(dc, FLOAT128);
2485 CHECK_FPU_FEATURE(dc, FMUL);
2486 gen_op_load_fpr_QT0(QFPREG(rs1));
2487 gen_op_load_fpr_QT1(QFPREG(rs2));
2488 gen_clear_float_exceptions();
2490 gen_helper_check_ieee_exceptions();
2491 gen_op_store_QT0_fpr(QFPREG(rd));
2493 case 0x4d: /* fdivs */
2494 gen_clear_float_exceptions();
2495 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2496 gen_helper_check_ieee_exceptions();
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2499 case 0x4e: /* fdivd */
2500 gen_op_load_fpr_DT0(DFPREG(rs1));
2501 gen_op_load_fpr_DT1(DFPREG(rs2));
2502 gen_clear_float_exceptions();
2504 gen_helper_check_ieee_exceptions();
2505 gen_op_store_DT0_fpr(DFPREG(rd));
2507 case 0x4f: /* fdivq */
2508 CHECK_FPU_FEATURE(dc, FLOAT128);
2509 gen_op_load_fpr_QT0(QFPREG(rs1));
2510 gen_op_load_fpr_QT1(QFPREG(rs2));
2511 gen_clear_float_exceptions();
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_QT0_fpr(QFPREG(rd));
2516 case 0x69: /* fsmuld */
2517 CHECK_FPU_FEATURE(dc, FSMULD);
2518 gen_clear_float_exceptions();
2519 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2520 gen_helper_check_ieee_exceptions();
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2523 case 0x6e: /* fdmulq */
2524 CHECK_FPU_FEATURE(dc, FLOAT128);
2525 gen_op_load_fpr_DT0(DFPREG(rs1));
2526 gen_op_load_fpr_DT1(DFPREG(rs2));
2527 gen_clear_float_exceptions();
2528 gen_helper_fdmulq();
2529 gen_helper_check_ieee_exceptions();
2530 gen_op_store_QT0_fpr(QFPREG(rd));
2532 case 0xc4: /* fitos */
2533 gen_clear_float_exceptions();
2534 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2535 gen_helper_check_ieee_exceptions();
2536 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2538 case 0xc6: /* fdtos */
2539 gen_op_load_fpr_DT1(DFPREG(rs2));
2540 gen_clear_float_exceptions();
2541 gen_helper_fdtos(cpu_tmp32);
2542 gen_helper_check_ieee_exceptions();
2543 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2545 case 0xc7: /* fqtos */
2546 CHECK_FPU_FEATURE(dc, FLOAT128);
2547 gen_op_load_fpr_QT1(QFPREG(rs2));
2548 gen_clear_float_exceptions();
2549 gen_helper_fqtos(cpu_tmp32);
2550 gen_helper_check_ieee_exceptions();
2551 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2553 case 0xc8: /* fitod */
2554 gen_helper_fitod(cpu_fpr[rs2]);
2555 gen_op_store_DT0_fpr(DFPREG(rd));
2557 case 0xc9: /* fstod */
2558 gen_helper_fstod(cpu_fpr[rs2]);
2559 gen_op_store_DT0_fpr(DFPREG(rd));
2561 case 0xcb: /* fqtod */
2562 CHECK_FPU_FEATURE(dc, FLOAT128);
2563 gen_op_load_fpr_QT1(QFPREG(rs2));
2564 gen_clear_float_exceptions();
2566 gen_helper_check_ieee_exceptions();
2567 gen_op_store_DT0_fpr(DFPREG(rd));
2569 case 0xcc: /* fitoq */
2570 CHECK_FPU_FEATURE(dc, FLOAT128);
2571 gen_helper_fitoq(cpu_fpr[rs2]);
2572 gen_op_store_QT0_fpr(QFPREG(rd));
2574 case 0xcd: /* fstoq */
2575 CHECK_FPU_FEATURE(dc, FLOAT128);
2576 gen_helper_fstoq(cpu_fpr[rs2]);
2577 gen_op_store_QT0_fpr(QFPREG(rd));
2579 case 0xce: /* fdtoq */
2580 CHECK_FPU_FEATURE(dc, FLOAT128);
2581 gen_op_load_fpr_DT1(DFPREG(rs2));
2583 gen_op_store_QT0_fpr(QFPREG(rd));
2585 case 0xd1: /* fstoi */
2586 gen_clear_float_exceptions();
2587 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2588 gen_helper_check_ieee_exceptions();
2589 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2591 case 0xd2: /* fdtoi */
2592 gen_op_load_fpr_DT1(DFPREG(rs2));
2593 gen_clear_float_exceptions();
2594 gen_helper_fdtoi(cpu_tmp32);
2595 gen_helper_check_ieee_exceptions();
2596 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2598 case 0xd3: /* fqtoi */
2599 CHECK_FPU_FEATURE(dc, FLOAT128);
2600 gen_op_load_fpr_QT1(QFPREG(rs2));
2601 gen_clear_float_exceptions();
2602 gen_helper_fqtoi(cpu_tmp32);
2603 gen_helper_check_ieee_exceptions();
2604 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2606 #ifdef TARGET_SPARC64
2607 case 0x2: /* V9 fmovd */
2608 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2609 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2610 cpu_fpr[DFPREG(rs2) + 1]);
2612 case 0x3: /* V9 fmovq */
2613 CHECK_FPU_FEATURE(dc, FLOAT128);
2614 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2615 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2616 cpu_fpr[QFPREG(rs2) + 1]);
2617 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2618 cpu_fpr[QFPREG(rs2) + 2]);
2619 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2620 cpu_fpr[QFPREG(rs2) + 3]);
2622 case 0x6: /* V9 fnegd */
2623 gen_op_load_fpr_DT1(DFPREG(rs2));
2625 gen_op_store_DT0_fpr(DFPREG(rd));
2627 case 0x7: /* V9 fnegq */
2628 CHECK_FPU_FEATURE(dc, FLOAT128);
2629 gen_op_load_fpr_QT1(QFPREG(rs2));
2631 gen_op_store_QT0_fpr(QFPREG(rd));
2633 case 0xa: /* V9 fabsd */
2634 gen_op_load_fpr_DT1(DFPREG(rs2));
2636 gen_op_store_DT0_fpr(DFPREG(rd));
2638 case 0xb: /* V9 fabsq */
2639 CHECK_FPU_FEATURE(dc, FLOAT128);
2640 gen_op_load_fpr_QT1(QFPREG(rs2));
2642 gen_op_store_QT0_fpr(QFPREG(rd));
2644 case 0x81: /* V9 fstox */
2645 gen_clear_float_exceptions();
2646 gen_helper_fstox(cpu_fpr[rs2]);
2647 gen_helper_check_ieee_exceptions();
2648 gen_op_store_DT0_fpr(DFPREG(rd));
2650 case 0x82: /* V9 fdtox */
2651 gen_op_load_fpr_DT1(DFPREG(rs2));
2652 gen_clear_float_exceptions();
2654 gen_helper_check_ieee_exceptions();
2655 gen_op_store_DT0_fpr(DFPREG(rd));
2657 case 0x83: /* V9 fqtox */
2658 CHECK_FPU_FEATURE(dc, FLOAT128);
2659 gen_op_load_fpr_QT1(QFPREG(rs2));
2660 gen_clear_float_exceptions();
2662 gen_helper_check_ieee_exceptions();
2663 gen_op_store_DT0_fpr(DFPREG(rd));
2665 case 0x84: /* V9 fxtos */
2666 gen_op_load_fpr_DT1(DFPREG(rs2));
2667 gen_clear_float_exceptions();
2668 gen_helper_fxtos(cpu_tmp32);
2669 gen_helper_check_ieee_exceptions();
2670 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2672 case 0x88: /* V9 fxtod */
2673 gen_op_load_fpr_DT1(DFPREG(rs2));
2674 gen_clear_float_exceptions();
2676 gen_helper_check_ieee_exceptions();
2677 gen_op_store_DT0_fpr(DFPREG(rd));
2679 case 0x8c: /* V9 fxtoq */
2680 CHECK_FPU_FEATURE(dc, FLOAT128);
2681 gen_op_load_fpr_DT1(DFPREG(rs2));
2682 gen_clear_float_exceptions();
2684 gen_helper_check_ieee_exceptions();
2685 gen_op_store_QT0_fpr(QFPREG(rd));
2691 } else if (xop == 0x35) { /* FPU Operations */
2692 #ifdef TARGET_SPARC64
2695 if (gen_trap_ifnofpu(dc, cpu_cond))
2697 gen_op_clear_ieee_excp_and_FTT();
2698 rs1 = GET_FIELD(insn, 13, 17);
2699 rs2 = GET_FIELD(insn, 27, 31);
2700 xop = GET_FIELD(insn, 18, 26);
2701 #ifdef TARGET_SPARC64
2702 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2705 l1 = gen_new_label();
2706 cond = GET_FIELD_SP(insn, 14, 17);
2707 cpu_src1 = get_src1(insn, cpu_src1);
2708 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2710 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2713 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2716 l1 = gen_new_label();
2717 cond = GET_FIELD_SP(insn, 14, 17);
2718 cpu_src1 = get_src1(insn, cpu_src1);
2719 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2721 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2722 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2725 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2728 CHECK_FPU_FEATURE(dc, FLOAT128);
2729 l1 = gen_new_label();
2730 cond = GET_FIELD_SP(insn, 14, 17);
2731 cpu_src1 = get_src1(insn, cpu_src1);
2732 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2734 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2735 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2736 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2737 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2743 #ifdef TARGET_SPARC64
2744 #define FMOVSCC(fcc) \
2749 l1 = gen_new_label(); \
2750 r_cond = tcg_temp_new(); \
2751 cond = GET_FIELD_SP(insn, 14, 17); \
2752 gen_fcond(r_cond, fcc, cond); \
2753 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2755 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2756 gen_set_label(l1); \
2757 tcg_temp_free(r_cond); \
2759 #define FMOVDCC(fcc) \
2764 l1 = gen_new_label(); \
2765 r_cond = tcg_temp_new(); \
2766 cond = GET_FIELD_SP(insn, 14, 17); \
2767 gen_fcond(r_cond, fcc, cond); \
2768 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2770 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2771 cpu_fpr[DFPREG(rs2)]); \
2772 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2773 cpu_fpr[DFPREG(rs2) + 1]); \
2774 gen_set_label(l1); \
2775 tcg_temp_free(r_cond); \
2777 #define FMOVQCC(fcc) \
2782 l1 = gen_new_label(); \
2783 r_cond = tcg_temp_new(); \
2784 cond = GET_FIELD_SP(insn, 14, 17); \
2785 gen_fcond(r_cond, fcc, cond); \
2786 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2788 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2789 cpu_fpr[QFPREG(rs2)]); \
2790 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2791 cpu_fpr[QFPREG(rs2) + 1]); \
2792 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2793 cpu_fpr[QFPREG(rs2) + 2]); \
2794 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2795 cpu_fpr[QFPREG(rs2) + 3]); \
2796 gen_set_label(l1); \
2797 tcg_temp_free(r_cond); \
2799 case 0x001: /* V9 fmovscc %fcc0 */
2802 case 0x002: /* V9 fmovdcc %fcc0 */
2805 case 0x003: /* V9 fmovqcc %fcc0 */
2806 CHECK_FPU_FEATURE(dc, FLOAT128);
2809 case 0x041: /* V9 fmovscc %fcc1 */
2812 case 0x042: /* V9 fmovdcc %fcc1 */
2815 case 0x043: /* V9 fmovqcc %fcc1 */
2816 CHECK_FPU_FEATURE(dc, FLOAT128);
2819 case 0x081: /* V9 fmovscc %fcc2 */
2822 case 0x082: /* V9 fmovdcc %fcc2 */
2825 case 0x083: /* V9 fmovqcc %fcc2 */
2826 CHECK_FPU_FEATURE(dc, FLOAT128);
2829 case 0x0c1: /* V9 fmovscc %fcc3 */
2832 case 0x0c2: /* V9 fmovdcc %fcc3 */
2835 case 0x0c3: /* V9 fmovqcc %fcc3 */
2836 CHECK_FPU_FEATURE(dc, FLOAT128);
2842 #define FMOVSCC(icc) \
2847 l1 = gen_new_label(); \
2848 r_cond = tcg_temp_new(); \
2849 cond = GET_FIELD_SP(insn, 14, 17); \
2850 gen_cond(r_cond, icc, cond, dc); \
2851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2853 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2854 gen_set_label(l1); \
2855 tcg_temp_free(r_cond); \
2857 #define FMOVDCC(icc) \
2862 l1 = gen_new_label(); \
2863 r_cond = tcg_temp_new(); \
2864 cond = GET_FIELD_SP(insn, 14, 17); \
2865 gen_cond(r_cond, icc, cond, dc); \
2866 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2868 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2869 cpu_fpr[DFPREG(rs2)]); \
2870 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2871 cpu_fpr[DFPREG(rs2) + 1]); \
2872 gen_set_label(l1); \
2873 tcg_temp_free(r_cond); \
2875 #define FMOVQCC(icc) \
2880 l1 = gen_new_label(); \
2881 r_cond = tcg_temp_new(); \
2882 cond = GET_FIELD_SP(insn, 14, 17); \
2883 gen_cond(r_cond, icc, cond, dc); \
2884 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2886 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2887 cpu_fpr[QFPREG(rs2)]); \
2888 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2889 cpu_fpr[QFPREG(rs2) + 1]); \
2890 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2891 cpu_fpr[QFPREG(rs2) + 2]); \
2892 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2893 cpu_fpr[QFPREG(rs2) + 3]); \
2894 gen_set_label(l1); \
2895 tcg_temp_free(r_cond); \
2898 case 0x101: /* V9 fmovscc %icc */
2901 case 0x102: /* V9 fmovdcc %icc */
2903 case 0x103: /* V9 fmovqcc %icc */
2904 CHECK_FPU_FEATURE(dc, FLOAT128);
2907 case 0x181: /* V9 fmovscc %xcc */
2910 case 0x182: /* V9 fmovdcc %xcc */
2913 case 0x183: /* V9 fmovqcc %xcc */
2914 CHECK_FPU_FEATURE(dc, FLOAT128);
2921 case 0x51: /* fcmps, V9 %fcc */
2922 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2924 case 0x52: /* fcmpd, V9 %fcc */
2925 gen_op_load_fpr_DT0(DFPREG(rs1));
2926 gen_op_load_fpr_DT1(DFPREG(rs2));
2927 gen_op_fcmpd(rd & 3);
2929 case 0x53: /* fcmpq, V9 %fcc */
2930 CHECK_FPU_FEATURE(dc, FLOAT128);
2931 gen_op_load_fpr_QT0(QFPREG(rs1));
2932 gen_op_load_fpr_QT1(QFPREG(rs2));
2933 gen_op_fcmpq(rd & 3);
2935 case 0x55: /* fcmpes, V9 %fcc */
2936 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2938 case 0x56: /* fcmped, V9 %fcc */
2939 gen_op_load_fpr_DT0(DFPREG(rs1));
2940 gen_op_load_fpr_DT1(DFPREG(rs2));
2941 gen_op_fcmped(rd & 3);
2943 case 0x57: /* fcmpeq, V9 %fcc */
2944 CHECK_FPU_FEATURE(dc, FLOAT128);
2945 gen_op_load_fpr_QT0(QFPREG(rs1));
2946 gen_op_load_fpr_QT1(QFPREG(rs2));
2947 gen_op_fcmpeq(rd & 3);
2952 } else if (xop == 0x2) {
2955 rs1 = GET_FIELD(insn, 13, 17);
2957 // or %g0, x, y -> mov T0, x; mov y, T0
2958 if (IS_IMM) { /* immediate */
2961 simm = GET_FIELDs(insn, 19, 31);
2962 r_const = tcg_const_tl(simm);
2963 gen_movl_TN_reg(rd, r_const);
2964 tcg_temp_free(r_const);
2965 } else { /* register */
2966 rs2 = GET_FIELD(insn, 27, 31);
2967 gen_movl_reg_TN(rs2, cpu_dst);
2968 gen_movl_TN_reg(rd, cpu_dst);
2971 cpu_src1 = get_src1(insn, cpu_src1);
2972 if (IS_IMM) { /* immediate */
2973 simm = GET_FIELDs(insn, 19, 31);
2974 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2975 gen_movl_TN_reg(rd, cpu_dst);
2976 } else { /* register */
2977 // or x, %g0, y -> mov T1, x; mov y, T1
2978 rs2 = GET_FIELD(insn, 27, 31);
2980 gen_movl_reg_TN(rs2, cpu_src2);
2981 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2982 gen_movl_TN_reg(rd, cpu_dst);
2984 gen_movl_TN_reg(rd, cpu_src1);
2987 #ifdef TARGET_SPARC64
2988 } else if (xop == 0x25) { /* sll, V9 sllx */
2989 cpu_src1 = get_src1(insn, cpu_src1);
2990 if (IS_IMM) { /* immediate */
2991 simm = GET_FIELDs(insn, 20, 31);
2992 if (insn & (1 << 12)) {
2993 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2995 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2997 } else { /* register */
2998 rs2 = GET_FIELD(insn, 27, 31);
2999 gen_movl_reg_TN(rs2, cpu_src2);
3000 if (insn & (1 << 12)) {
3001 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3003 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3005 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3007 gen_movl_TN_reg(rd, cpu_dst);
3008 } else if (xop == 0x26) { /* srl, V9 srlx */
3009 cpu_src1 = get_src1(insn, cpu_src1);
3010 if (IS_IMM) { /* immediate */
3011 simm = GET_FIELDs(insn, 20, 31);
3012 if (insn & (1 << 12)) {
3013 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3015 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3016 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3018 } else { /* register */
3019 rs2 = GET_FIELD(insn, 27, 31);
3020 gen_movl_reg_TN(rs2, cpu_src2);
3021 if (insn & (1 << 12)) {
3022 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3023 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3025 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3026 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3027 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3030 gen_movl_TN_reg(rd, cpu_dst);
3031 } else if (xop == 0x27) { /* sra, V9 srax */
3032 cpu_src1 = get_src1(insn, cpu_src1);
3033 if (IS_IMM) { /* immediate */
3034 simm = GET_FIELDs(insn, 20, 31);
3035 if (insn & (1 << 12)) {
3036 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3038 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3039 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3040 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3042 } else { /* register */
3043 rs2 = GET_FIELD(insn, 27, 31);
3044 gen_movl_reg_TN(rs2, cpu_src2);
3045 if (insn & (1 << 12)) {
3046 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3047 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3049 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3050 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3051 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3052 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3055 gen_movl_TN_reg(rd, cpu_dst);
3057 } else if (xop < 0x36) {
3059 cpu_src1 = get_src1(insn, cpu_src1);
3060 cpu_src2 = get_src2(insn, cpu_src2);
3061 switch (xop & ~0x10) {
3064 simm = GET_FIELDs(insn, 19, 31);
3066 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3067 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3068 dc->cc_op = CC_OP_ADD;
3070 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3074 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3075 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3076 dc->cc_op = CC_OP_ADD;
3078 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3084 simm = GET_FIELDs(insn, 19, 31);
3085 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3087 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3090 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3091 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3092 dc->cc_op = CC_OP_LOGIC;
3097 simm = GET_FIELDs(insn, 19, 31);
3098 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3100 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3103 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3104 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3105 dc->cc_op = CC_OP_LOGIC;
3110 simm = GET_FIELDs(insn, 19, 31);
3111 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3113 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3116 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3117 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3118 dc->cc_op = CC_OP_LOGIC;
3123 simm = GET_FIELDs(insn, 19, 31);
3125 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3127 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3131 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3132 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3133 dc->cc_op = CC_OP_SUB;
3135 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3139 case 0x5: /* andn */
3141 simm = GET_FIELDs(insn, 19, 31);
3142 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3144 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3147 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3148 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3149 dc->cc_op = CC_OP_LOGIC;
3154 simm = GET_FIELDs(insn, 19, 31);
3155 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3157 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3160 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3161 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3162 dc->cc_op = CC_OP_LOGIC;
3165 case 0x7: /* xorn */
3167 simm = GET_FIELDs(insn, 19, 31);
3168 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3170 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3171 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3174 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3175 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3176 dc->cc_op = CC_OP_LOGIC;
3179 case 0x8: /* addx, V9 addc */
3181 simm = GET_FIELDs(insn, 19, 31);
3183 gen_helper_compute_psr();
3184 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3185 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3186 dc->cc_op = CC_OP_ADDX;
3188 gen_helper_compute_psr();
3189 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3190 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3191 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3195 gen_helper_compute_psr();
3196 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3197 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3198 dc->cc_op = CC_OP_ADDX;
3200 gen_helper_compute_psr();
3201 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3202 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3203 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3207 #ifdef TARGET_SPARC64
3208 case 0x9: /* V9 mulx */
3210 simm = GET_FIELDs(insn, 19, 31);
3211 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3213 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3217 case 0xa: /* umul */
3218 CHECK_IU_FEATURE(dc, MUL);
3219 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3221 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3222 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3223 dc->cc_op = CC_OP_LOGIC;
3226 case 0xb: /* smul */
3227 CHECK_IU_FEATURE(dc, MUL);
3228 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3230 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3231 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3232 dc->cc_op = CC_OP_LOGIC;
3235 case 0xc: /* subx, V9 subc */
3237 simm = GET_FIELDs(insn, 19, 31);
3239 gen_helper_compute_psr();
3240 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3241 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3242 dc->cc_op = CC_OP_SUBX;
3244 gen_helper_compute_psr();
3245 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3246 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3247 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3251 gen_helper_compute_psr();
3252 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3253 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3254 dc->cc_op = CC_OP_SUBX;
3256 gen_helper_compute_psr();
3257 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3258 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3259 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3263 #ifdef TARGET_SPARC64
3264 case 0xd: /* V9 udivx */
3265 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3266 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3267 gen_trap_ifdivzero_tl(cpu_cc_src2);
3268 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3271 case 0xe: /* udiv */
3272 CHECK_IU_FEATURE(dc, DIV);
3273 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3275 gen_op_div_cc(cpu_dst);
3276 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3277 dc->cc_op = CC_OP_FLAGS;
3280 case 0xf: /* sdiv */
3281 CHECK_IU_FEATURE(dc, DIV);
3282 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3284 gen_op_div_cc(cpu_dst);
3285 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3286 dc->cc_op = CC_OP_FLAGS;
3292 gen_movl_TN_reg(rd, cpu_dst);
3294 cpu_src1 = get_src1(insn, cpu_src1);
3295 cpu_src2 = get_src2(insn, cpu_src2);
3297 case 0x20: /* taddcc */
3298 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3299 gen_movl_TN_reg(rd, cpu_dst);
3300 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3301 dc->cc_op = CC_OP_FLAGS;
3303 case 0x21: /* tsubcc */
3304 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3305 gen_movl_TN_reg(rd, cpu_dst);
3306 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3307 dc->cc_op = CC_OP_FLAGS;
3309 case 0x22: /* taddcctv */
3310 save_state(dc, cpu_cond);
3311 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3312 gen_movl_TN_reg(rd, cpu_dst);
3313 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3314 dc->cc_op = CC_OP_FLAGS;
3316 case 0x23: /* tsubcctv */
3317 save_state(dc, cpu_cond);
3318 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3319 gen_movl_TN_reg(rd, cpu_dst);
3320 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3321 dc->cc_op = CC_OP_FLAGS;
3323 case 0x24: /* mulscc */
3324 gen_helper_compute_psr();
3325 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3326 gen_movl_TN_reg(rd, cpu_dst);
3327 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3328 dc->cc_op = CC_OP_FLAGS;
3330 #ifndef TARGET_SPARC64
3331 case 0x25: /* sll */
3332 if (IS_IMM) { /* immediate */
3333 simm = GET_FIELDs(insn, 20, 31);
3334 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3335 } else { /* register */
3336 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3337 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3339 gen_movl_TN_reg(rd, cpu_dst);
3341 case 0x26: /* srl */
3342 if (IS_IMM) { /* immediate */
3343 simm = GET_FIELDs(insn, 20, 31);
3344 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3345 } else { /* register */
3346 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3347 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3349 gen_movl_TN_reg(rd, cpu_dst);
3351 case 0x27: /* sra */
3352 if (IS_IMM) { /* immediate */
3353 simm = GET_FIELDs(insn, 20, 31);
3354 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3355 } else { /* register */
3356 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3357 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3359 gen_movl_TN_reg(rd, cpu_dst);
3366 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3367 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3369 #ifndef TARGET_SPARC64
3370 case 0x01 ... 0x0f: /* undefined in the
3374 case 0x10 ... 0x1f: /* implementation-dependent
3380 case 0x2: /* V9 wrccr */
3381 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3382 gen_helper_wrccr(cpu_dst);
3383 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3384 dc->cc_op = CC_OP_FLAGS;
3386 case 0x3: /* V9 wrasi */
3387 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3388 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3390 case 0x6: /* V9 wrfprs */
3391 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3392 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3393 save_state(dc, cpu_cond);
3398 case 0xf: /* V9 sir, nop if user */
3399 #if !defined(CONFIG_USER_ONLY)
3404 case 0x13: /* Graphics Status */
3405 if (gen_trap_ifnofpu(dc, cpu_cond))
3407 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3409 case 0x14: /* Softint set */
3410 if (!supervisor(dc))
3412 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3413 gen_helper_set_softint(cpu_tmp64);
3415 case 0x15: /* Softint clear */
3416 if (!supervisor(dc))
3418 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3419 gen_helper_clear_softint(cpu_tmp64);
3421 case 0x16: /* Softint write */
3422 if (!supervisor(dc))
3424 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3425 gen_helper_write_softint(cpu_tmp64);
3427 case 0x17: /* Tick compare */
3428 #if !defined(CONFIG_USER_ONLY)
3429 if (!supervisor(dc))
3435 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3437 r_tickptr = tcg_temp_new_ptr();
3438 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3439 offsetof(CPUState, tick));
3440 gen_helper_tick_set_limit(r_tickptr,
3442 tcg_temp_free_ptr(r_tickptr);
3445 case 0x18: /* System tick */
3446 #if !defined(CONFIG_USER_ONLY)
3447 if (!supervisor(dc))
3453 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3455 r_tickptr = tcg_temp_new_ptr();
3456 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3457 offsetof(CPUState, stick));
3458 gen_helper_tick_set_count(r_tickptr,
3460 tcg_temp_free_ptr(r_tickptr);
3463 case 0x19: /* System tick compare */
3464 #if !defined(CONFIG_USER_ONLY)
3465 if (!supervisor(dc))
3471 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3473 r_tickptr = tcg_temp_new_ptr();
3474 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3475 offsetof(CPUState, stick));
3476 gen_helper_tick_set_limit(r_tickptr,
3478 tcg_temp_free_ptr(r_tickptr);
3482 case 0x10: /* Performance Control */
3483 case 0x11: /* Performance Instrumentation
3485 case 0x12: /* Dispatch Control */
3492 #if !defined(CONFIG_USER_ONLY)
3493 case 0x31: /* wrpsr, V9 saved, restored */
3495 if (!supervisor(dc))
3497 #ifdef TARGET_SPARC64
3503 gen_helper_restored();
3505 case 2: /* UA2005 allclean */
3506 case 3: /* UA2005 otherw */
3507 case 4: /* UA2005 normalw */
3508 case 5: /* UA2005 invalw */
3514 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3515 gen_helper_wrpsr(cpu_dst);
3516 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3517 dc->cc_op = CC_OP_FLAGS;
3518 save_state(dc, cpu_cond);
3525 case 0x32: /* wrwim, V9 wrpr */
3527 if (!supervisor(dc))
3529 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3530 #ifdef TARGET_SPARC64
3536 r_tsptr = tcg_temp_new_ptr();
3537 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3538 offsetof(CPUState, tsptr));
3539 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3540 offsetof(trap_state, tpc));
3541 tcg_temp_free_ptr(r_tsptr);
3548 r_tsptr = tcg_temp_new_ptr();
3549 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3550 offsetof(CPUState, tsptr));
3551 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3552 offsetof(trap_state, tnpc));
3553 tcg_temp_free_ptr(r_tsptr);
3560 r_tsptr = tcg_temp_new_ptr();
3561 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3562 offsetof(CPUState, tsptr));
3563 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3564 offsetof(trap_state,
3566 tcg_temp_free_ptr(r_tsptr);
3573 r_tsptr = tcg_temp_new_ptr();
3574 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3575 offsetof(CPUState, tsptr));
3576 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3577 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3578 offsetof(trap_state, tt));
3579 tcg_temp_free_ptr(r_tsptr);
3586 r_tickptr = tcg_temp_new_ptr();
3587 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3588 offsetof(CPUState, tick));
3589 gen_helper_tick_set_count(r_tickptr,
3591 tcg_temp_free_ptr(r_tickptr);
3595 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3598 save_state(dc, cpu_cond);
3599 gen_helper_wrpstate(cpu_tmp0);
3605 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3606 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3607 offsetof(CPUSPARCState, tl));
3610 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3611 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3612 offsetof(CPUSPARCState,
3616 gen_helper_wrcwp(cpu_tmp0);
3619 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3620 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3621 offsetof(CPUSPARCState,
3624 case 11: // canrestore
3625 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3626 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3627 offsetof(CPUSPARCState,
3630 case 12: // cleanwin
3631 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3632 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3633 offsetof(CPUSPARCState,
3636 case 13: // otherwin
3637 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3638 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3639 offsetof(CPUSPARCState,
3643 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3644 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3645 offsetof(CPUSPARCState,
3648 case 16: // UA2005 gl
3649 CHECK_IU_FEATURE(dc, GL);
3650 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3651 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3652 offsetof(CPUSPARCState, gl));
3654 case 26: // UA2005 strand status
3655 CHECK_IU_FEATURE(dc, HYPV);
3656 if (!hypervisor(dc))
3658 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3664 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3665 if (dc->def->nwindows != 32)
3666 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3667 (1 << dc->def->nwindows) - 1);
3668 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3672 case 0x33: /* wrtbr, UA2005 wrhpr */
3674 #ifndef TARGET_SPARC64
3675 if (!supervisor(dc))
3677 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3679 CHECK_IU_FEATURE(dc, HYPV);
3680 if (!hypervisor(dc))
3682 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3685 // XXX gen_op_wrhpstate();
3686 save_state(dc, cpu_cond);
3692 // XXX gen_op_wrhtstate();
3695 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3698 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3700 case 31: // hstick_cmpr
3704 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3705 r_tickptr = tcg_temp_new_ptr();
3706 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3707 offsetof(CPUState, hstick));
3708 gen_helper_tick_set_limit(r_tickptr,
3710 tcg_temp_free_ptr(r_tickptr);
3713 case 6: // hver readonly
3721 #ifdef TARGET_SPARC64
3722 case 0x2c: /* V9 movcc */
3724 int cc = GET_FIELD_SP(insn, 11, 12);
3725 int cond = GET_FIELD_SP(insn, 14, 17);
3729 r_cond = tcg_temp_new();
3730 if (insn & (1 << 18)) {
3732 gen_cond(r_cond, 0, cond, dc);
3734 gen_cond(r_cond, 1, cond, dc);
3738 gen_fcond(r_cond, cc, cond);
3741 l1 = gen_new_label();
3743 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3744 if (IS_IMM) { /* immediate */
3747 simm = GET_FIELD_SPs(insn, 0, 10);
3748 r_const = tcg_const_tl(simm);
3749 gen_movl_TN_reg(rd, r_const);
3750 tcg_temp_free(r_const);
3752 rs2 = GET_FIELD_SP(insn, 0, 4);
3753 gen_movl_reg_TN(rs2, cpu_tmp0);
3754 gen_movl_TN_reg(rd, cpu_tmp0);
3757 tcg_temp_free(r_cond);
3760 case 0x2d: /* V9 sdivx */
3761 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3762 gen_movl_TN_reg(rd, cpu_dst);
3764 case 0x2e: /* V9 popc */
3766 cpu_src2 = get_src2(insn, cpu_src2);
3767 gen_helper_popc(cpu_dst, cpu_src2);
3768 gen_movl_TN_reg(rd, cpu_dst);
3770 case 0x2f: /* V9 movr */
3772 int cond = GET_FIELD_SP(insn, 10, 12);
3775 cpu_src1 = get_src1(insn, cpu_src1);
3777 l1 = gen_new_label();
3779 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3781 if (IS_IMM) { /* immediate */
3784 simm = GET_FIELD_SPs(insn, 0, 9);
3785 r_const = tcg_const_tl(simm);
3786 gen_movl_TN_reg(rd, r_const);
3787 tcg_temp_free(r_const);
3789 rs2 = GET_FIELD_SP(insn, 0, 4);
3790 gen_movl_reg_TN(rs2, cpu_tmp0);
3791 gen_movl_TN_reg(rd, cpu_tmp0);
3801 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3802 #ifdef TARGET_SPARC64
3803 int opf = GET_FIELD_SP(insn, 5, 13);
3804 rs1 = GET_FIELD(insn, 13, 17);
3805 rs2 = GET_FIELD(insn, 27, 31);
3806 if (gen_trap_ifnofpu(dc, cpu_cond))
3810 case 0x000: /* VIS I edge8cc */
3811 case 0x001: /* VIS II edge8n */
3812 case 0x002: /* VIS I edge8lcc */
3813 case 0x003: /* VIS II edge8ln */
3814 case 0x004: /* VIS I edge16cc */
3815 case 0x005: /* VIS II edge16n */
3816 case 0x006: /* VIS I edge16lcc */
3817 case 0x007: /* VIS II edge16ln */
3818 case 0x008: /* VIS I edge32cc */
3819 case 0x009: /* VIS II edge32n */
3820 case 0x00a: /* VIS I edge32lcc */
3821 case 0x00b: /* VIS II edge32ln */
3824 case 0x010: /* VIS I array8 */
3825 CHECK_FPU_FEATURE(dc, VIS1);
3826 cpu_src1 = get_src1(insn, cpu_src1);
3827 gen_movl_reg_TN(rs2, cpu_src2);
3828 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3829 gen_movl_TN_reg(rd, cpu_dst);
3831 case 0x012: /* VIS I array16 */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 cpu_src1 = get_src1(insn, cpu_src1);
3834 gen_movl_reg_TN(rs2, cpu_src2);
3835 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3836 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3837 gen_movl_TN_reg(rd, cpu_dst);
3839 case 0x014: /* VIS I array32 */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 cpu_src1 = get_src1(insn, cpu_src1);
3842 gen_movl_reg_TN(rs2, cpu_src2);
3843 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3844 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3845 gen_movl_TN_reg(rd, cpu_dst);
3847 case 0x018: /* VIS I alignaddr */
3848 CHECK_FPU_FEATURE(dc, VIS1);
3849 cpu_src1 = get_src1(insn, cpu_src1);
3850 gen_movl_reg_TN(rs2, cpu_src2);
3851 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3852 gen_movl_TN_reg(rd, cpu_dst);
3854 case 0x019: /* VIS II bmask */
3855 case 0x01a: /* VIS I alignaddrl */
3858 case 0x020: /* VIS I fcmple16 */
3859 CHECK_FPU_FEATURE(dc, VIS1);
3860 gen_op_load_fpr_DT0(DFPREG(rs1));
3861 gen_op_load_fpr_DT1(DFPREG(rs2));
3862 gen_helper_fcmple16();
3863 gen_op_store_DT0_fpr(DFPREG(rd));
3865 case 0x022: /* VIS I fcmpne16 */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_DT0(DFPREG(rs1));
3868 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_helper_fcmpne16();
3870 gen_op_store_DT0_fpr(DFPREG(rd));
3872 case 0x024: /* VIS I fcmple32 */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 gen_helper_fcmple32();
3877 gen_op_store_DT0_fpr(DFPREG(rd));
3879 case 0x026: /* VIS I fcmpne32 */
3880 CHECK_FPU_FEATURE(dc, VIS1);
3881 gen_op_load_fpr_DT0(DFPREG(rs1));
3882 gen_op_load_fpr_DT1(DFPREG(rs2));
3883 gen_helper_fcmpne32();
3884 gen_op_store_DT0_fpr(DFPREG(rd));
3886 case 0x028: /* VIS I fcmpgt16 */
3887 CHECK_FPU_FEATURE(dc, VIS1);
3888 gen_op_load_fpr_DT0(DFPREG(rs1));
3889 gen_op_load_fpr_DT1(DFPREG(rs2));
3890 gen_helper_fcmpgt16();
3891 gen_op_store_DT0_fpr(DFPREG(rd));
3893 case 0x02a: /* VIS I fcmpeq16 */
3894 CHECK_FPU_FEATURE(dc, VIS1);
3895 gen_op_load_fpr_DT0(DFPREG(rs1));
3896 gen_op_load_fpr_DT1(DFPREG(rs2));
3897 gen_helper_fcmpeq16();
3898 gen_op_store_DT0_fpr(DFPREG(rd));
3900 case 0x02c: /* VIS I fcmpgt32 */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 gen_op_load_fpr_DT0(DFPREG(rs1));
3903 gen_op_load_fpr_DT1(DFPREG(rs2));
3904 gen_helper_fcmpgt32();
3905 gen_op_store_DT0_fpr(DFPREG(rd));
3907 case 0x02e: /* VIS I fcmpeq32 */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 gen_op_load_fpr_DT0(DFPREG(rs1));
3910 gen_op_load_fpr_DT1(DFPREG(rs2));
3911 gen_helper_fcmpeq32();
3912 gen_op_store_DT0_fpr(DFPREG(rd));
3914 case 0x031: /* VIS I fmul8x16 */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 gen_op_load_fpr_DT0(DFPREG(rs1));
3917 gen_op_load_fpr_DT1(DFPREG(rs2));
3918 gen_helper_fmul8x16();
3919 gen_op_store_DT0_fpr(DFPREG(rd));
3921 case 0x033: /* VIS I fmul8x16au */
3922 CHECK_FPU_FEATURE(dc, VIS1);
3923 gen_op_load_fpr_DT0(DFPREG(rs1));
3924 gen_op_load_fpr_DT1(DFPREG(rs2));
3925 gen_helper_fmul8x16au();
3926 gen_op_store_DT0_fpr(DFPREG(rd));
3928 case 0x035: /* VIS I fmul8x16al */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 gen_op_load_fpr_DT0(DFPREG(rs1));
3931 gen_op_load_fpr_DT1(DFPREG(rs2));
3932 gen_helper_fmul8x16al();
3933 gen_op_store_DT0_fpr(DFPREG(rd));
3935 case 0x036: /* VIS I fmul8sux16 */
3936 CHECK_FPU_FEATURE(dc, VIS1);
3937 gen_op_load_fpr_DT0(DFPREG(rs1));
3938 gen_op_load_fpr_DT1(DFPREG(rs2));
3939 gen_helper_fmul8sux16();
3940 gen_op_store_DT0_fpr(DFPREG(rd));
3942 case 0x037: /* VIS I fmul8ulx16 */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 gen_op_load_fpr_DT0(DFPREG(rs1));
3945 gen_op_load_fpr_DT1(DFPREG(rs2));
3946 gen_helper_fmul8ulx16();
3947 gen_op_store_DT0_fpr(DFPREG(rd));
3949 case 0x038: /* VIS I fmuld8sux16 */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 gen_op_load_fpr_DT0(DFPREG(rs1));
3952 gen_op_load_fpr_DT1(DFPREG(rs2));
3953 gen_helper_fmuld8sux16();
3954 gen_op_store_DT0_fpr(DFPREG(rd));
3956 case 0x039: /* VIS I fmuld8ulx16 */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 gen_op_load_fpr_DT0(DFPREG(rs1));
3959 gen_op_load_fpr_DT1(DFPREG(rs2));
3960 gen_helper_fmuld8ulx16();
3961 gen_op_store_DT0_fpr(DFPREG(rd));
3963 case 0x03a: /* VIS I fpack32 */
3964 case 0x03b: /* VIS I fpack16 */
3965 case 0x03d: /* VIS I fpackfix */
3966 case 0x03e: /* VIS I pdist */
3969 case 0x048: /* VIS I faligndata */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 gen_op_load_fpr_DT0(DFPREG(rs1));
3972 gen_op_load_fpr_DT1(DFPREG(rs2));
3973 gen_helper_faligndata();
3974 gen_op_store_DT0_fpr(DFPREG(rd));
3976 case 0x04b: /* VIS I fpmerge */
3977 CHECK_FPU_FEATURE(dc, VIS1);
3978 gen_op_load_fpr_DT0(DFPREG(rs1));
3979 gen_op_load_fpr_DT1(DFPREG(rs2));
3980 gen_helper_fpmerge();
3981 gen_op_store_DT0_fpr(DFPREG(rd));
3983 case 0x04c: /* VIS II bshuffle */
3986 case 0x04d: /* VIS I fexpand */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_DT0(DFPREG(rs1));
3989 gen_op_load_fpr_DT1(DFPREG(rs2));
3990 gen_helper_fexpand();
3991 gen_op_store_DT0_fpr(DFPREG(rd));
3993 case 0x050: /* VIS I fpadd16 */
3994 CHECK_FPU_FEATURE(dc, VIS1);
3995 gen_op_load_fpr_DT0(DFPREG(rs1));
3996 gen_op_load_fpr_DT1(DFPREG(rs2));
3997 gen_helper_fpadd16();
3998 gen_op_store_DT0_fpr(DFPREG(rd));
4000 case 0x051: /* VIS I fpadd16s */
4001 CHECK_FPU_FEATURE(dc, VIS1);
4002 gen_helper_fpadd16s(cpu_fpr[rd],
4003 cpu_fpr[rs1], cpu_fpr[rs2]);
4005 case 0x052: /* VIS I fpadd32 */
4006 CHECK_FPU_FEATURE(dc, VIS1);
4007 gen_op_load_fpr_DT0(DFPREG(rs1));
4008 gen_op_load_fpr_DT1(DFPREG(rs2));
4009 gen_helper_fpadd32();
4010 gen_op_store_DT0_fpr(DFPREG(rd));
4012 case 0x053: /* VIS I fpadd32s */
4013 CHECK_FPU_FEATURE(dc, VIS1);
4014 gen_helper_fpadd32s(cpu_fpr[rd],
4015 cpu_fpr[rs1], cpu_fpr[rs2]);
4017 case 0x054: /* VIS I fpsub16 */
4018 CHECK_FPU_FEATURE(dc, VIS1);
4019 gen_op_load_fpr_DT0(DFPREG(rs1));
4020 gen_op_load_fpr_DT1(DFPREG(rs2));
4021 gen_helper_fpsub16();
4022 gen_op_store_DT0_fpr(DFPREG(rd));
4024 case 0x055: /* VIS I fpsub16s */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 gen_helper_fpsub16s(cpu_fpr[rd],
4027 cpu_fpr[rs1], cpu_fpr[rs2]);
4029 case 0x056: /* VIS I fpsub32 */
4030 CHECK_FPU_FEATURE(dc, VIS1);
4031 gen_op_load_fpr_DT0(DFPREG(rs1));
4032 gen_op_load_fpr_DT1(DFPREG(rs2));
4033 gen_helper_fpsub32();
4034 gen_op_store_DT0_fpr(DFPREG(rd));
4036 case 0x057: /* VIS I fpsub32s */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 gen_helper_fpsub32s(cpu_fpr[rd],
4039 cpu_fpr[rs1], cpu_fpr[rs2]);
4041 case 0x060: /* VIS I fzero */
4042 CHECK_FPU_FEATURE(dc, VIS1);
4043 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4044 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4046 case 0x061: /* VIS I fzeros */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4050 case 0x062: /* VIS I fnor */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4053 cpu_fpr[DFPREG(rs2)]);
4054 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4055 cpu_fpr[DFPREG(rs2) + 1]);
4057 case 0x063: /* VIS I fnors */
4058 CHECK_FPU_FEATURE(dc, VIS1);
4059 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4061 case 0x064: /* VIS I fandnot2 */
4062 CHECK_FPU_FEATURE(dc, VIS1);
4063 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4064 cpu_fpr[DFPREG(rs2)]);
4065 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4066 cpu_fpr[DFPREG(rs1) + 1],
4067 cpu_fpr[DFPREG(rs2) + 1]);
4069 case 0x065: /* VIS I fandnot2s */
4070 CHECK_FPU_FEATURE(dc, VIS1);
4071 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4073 case 0x066: /* VIS I fnot2 */
4074 CHECK_FPU_FEATURE(dc, VIS1);
4075 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4076 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4077 cpu_fpr[DFPREG(rs2) + 1]);
4079 case 0x067: /* VIS I fnot2s */
4080 CHECK_FPU_FEATURE(dc, VIS1);
4081 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4083 case 0x068: /* VIS I fandnot1 */
4084 CHECK_FPU_FEATURE(dc, VIS1);
4085 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4086 cpu_fpr[DFPREG(rs1)]);
4087 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4088 cpu_fpr[DFPREG(rs2) + 1],
4089 cpu_fpr[DFPREG(rs1) + 1]);
4091 case 0x069: /* VIS I fandnot1s */
4092 CHECK_FPU_FEATURE(dc, VIS1);
4093 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4095 case 0x06a: /* VIS I fnot1 */
4096 CHECK_FPU_FEATURE(dc, VIS1);
4097 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4098 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4099 cpu_fpr[DFPREG(rs1) + 1]);
4101 case 0x06b: /* VIS I fnot1s */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4105 case 0x06c: /* VIS I fxor */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4108 cpu_fpr[DFPREG(rs2)]);
4109 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4110 cpu_fpr[DFPREG(rs1) + 1],
4111 cpu_fpr[DFPREG(rs2) + 1]);
4113 case 0x06d: /* VIS I fxors */
4114 CHECK_FPU_FEATURE(dc, VIS1);
4115 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4117 case 0x06e: /* VIS I fnand */
4118 CHECK_FPU_FEATURE(dc, VIS1);
4119 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4120 cpu_fpr[DFPREG(rs2)]);
4121 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4122 cpu_fpr[DFPREG(rs2) + 1]);
4124 case 0x06f: /* VIS I fnands */
4125 CHECK_FPU_FEATURE(dc, VIS1);
4126 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4128 case 0x070: /* VIS I fand */
4129 CHECK_FPU_FEATURE(dc, VIS1);
4130 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4131 cpu_fpr[DFPREG(rs2)]);
4132 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4133 cpu_fpr[DFPREG(rs1) + 1],
4134 cpu_fpr[DFPREG(rs2) + 1]);
4136 case 0x071: /* VIS I fands */
4137 CHECK_FPU_FEATURE(dc, VIS1);
4138 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4140 case 0x072: /* VIS I fxnor */
4141 CHECK_FPU_FEATURE(dc, VIS1);
4142 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4143 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4144 cpu_fpr[DFPREG(rs1)]);
4145 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4146 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4147 cpu_fpr[DFPREG(rs1) + 1]);
4149 case 0x073: /* VIS I fxnors */
4150 CHECK_FPU_FEATURE(dc, VIS1);
4151 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4152 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4154 case 0x074: /* VIS I fsrc1 */
4155 CHECK_FPU_FEATURE(dc, VIS1);
4156 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4157 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4158 cpu_fpr[DFPREG(rs1) + 1]);
4160 case 0x075: /* VIS I fsrc1s */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4164 case 0x076: /* VIS I fornot2 */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4167 cpu_fpr[DFPREG(rs2)]);
4168 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4169 cpu_fpr[DFPREG(rs1) + 1],
4170 cpu_fpr[DFPREG(rs2) + 1]);
4172 case 0x077: /* VIS I fornot2s */
4173 CHECK_FPU_FEATURE(dc, VIS1);
4174 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4176 case 0x078: /* VIS I fsrc2 */
4177 CHECK_FPU_FEATURE(dc, VIS1);
4178 gen_op_load_fpr_DT0(DFPREG(rs2));
4179 gen_op_store_DT0_fpr(DFPREG(rd));
4181 case 0x079: /* VIS I fsrc2s */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4185 case 0x07a: /* VIS I fornot1 */
4186 CHECK_FPU_FEATURE(dc, VIS1);
4187 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4188 cpu_fpr[DFPREG(rs1)]);
4189 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4190 cpu_fpr[DFPREG(rs2) + 1],
4191 cpu_fpr[DFPREG(rs1) + 1]);
4193 case 0x07b: /* VIS I fornot1s */
4194 CHECK_FPU_FEATURE(dc, VIS1);
4195 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4197 case 0x07c: /* VIS I for */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4200 cpu_fpr[DFPREG(rs2)]);
4201 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4202 cpu_fpr[DFPREG(rs1) + 1],
4203 cpu_fpr[DFPREG(rs2) + 1]);
4205 case 0x07d: /* VIS I fors */
4206 CHECK_FPU_FEATURE(dc, VIS1);
4207 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4209 case 0x07e: /* VIS I fone */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4212 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4214 case 0x07f: /* VIS I fones */
4215 CHECK_FPU_FEATURE(dc, VIS1);
4216 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4218 case 0x080: /* VIS I shutdown */
4219 case 0x081: /* VIS II siam */
4228 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4229 #ifdef TARGET_SPARC64
4234 #ifdef TARGET_SPARC64
4235 } else if (xop == 0x39) { /* V9 return */
4238 save_state(dc, cpu_cond);
4239 cpu_src1 = get_src1(insn, cpu_src1);
4240 if (IS_IMM) { /* immediate */
4241 simm = GET_FIELDs(insn, 19, 31);
4242 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4243 } else { /* register */
4244 rs2 = GET_FIELD(insn, 27, 31);
4246 gen_movl_reg_TN(rs2, cpu_src2);
4247 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4249 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4251 gen_helper_restore();
4252 gen_mov_pc_npc(dc, cpu_cond);
4253 r_const = tcg_const_i32(3);
4254 gen_helper_check_align(cpu_dst, r_const);
4255 tcg_temp_free_i32(r_const);
4256 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4257 dc->npc = DYNAMIC_PC;
4261 cpu_src1 = get_src1(insn, cpu_src1);
4262 if (IS_IMM) { /* immediate */
4263 simm = GET_FIELDs(insn, 19, 31);
4264 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4265 } else { /* register */
4266 rs2 = GET_FIELD(insn, 27, 31);
4268 gen_movl_reg_TN(rs2, cpu_src2);
4269 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4271 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4274 case 0x38: /* jmpl */
4279 r_pc = tcg_const_tl(dc->pc);
4280 gen_movl_TN_reg(rd, r_pc);
4281 tcg_temp_free(r_pc);
4282 gen_mov_pc_npc(dc, cpu_cond);
4283 r_const = tcg_const_i32(3);
4284 gen_helper_check_align(cpu_dst, r_const);
4285 tcg_temp_free_i32(r_const);
4286 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4287 dc->npc = DYNAMIC_PC;
4290 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4291 case 0x39: /* rett, V9 return */
4295 if (!supervisor(dc))
4297 gen_mov_pc_npc(dc, cpu_cond);
4298 r_const = tcg_const_i32(3);
4299 gen_helper_check_align(cpu_dst, r_const);
4300 tcg_temp_free_i32(r_const);
4301 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4302 dc->npc = DYNAMIC_PC;
4307 case 0x3b: /* flush */
4308 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4310 gen_helper_flush(cpu_dst);
4312 case 0x3c: /* save */
4313 save_state(dc, cpu_cond);
4315 gen_movl_TN_reg(rd, cpu_dst);
4317 case 0x3d: /* restore */
4318 save_state(dc, cpu_cond);
4319 gen_helper_restore();
4320 gen_movl_TN_reg(rd, cpu_dst);
4322 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4323 case 0x3e: /* V9 done/retry */
4327 if (!supervisor(dc))
4329 dc->npc = DYNAMIC_PC;
4330 dc->pc = DYNAMIC_PC;
4334 if (!supervisor(dc))
4336 dc->npc = DYNAMIC_PC;
4337 dc->pc = DYNAMIC_PC;
4353 case 3: /* load/store instructions */
4355 unsigned int xop = GET_FIELD(insn, 7, 12);
4357 cpu_src1 = get_src1(insn, cpu_src1);
4358 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4359 rs2 = GET_FIELD(insn, 27, 31);
4360 gen_movl_reg_TN(rs2, cpu_src2);
4361 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4362 } else if (IS_IMM) { /* immediate */
4363 simm = GET_FIELDs(insn, 19, 31);
4364 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4365 } else { /* register */
4366 rs2 = GET_FIELD(insn, 27, 31);
4368 gen_movl_reg_TN(rs2, cpu_src2);
4369 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4371 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4373 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4374 (xop > 0x17 && xop <= 0x1d ) ||
4375 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4377 case 0x0: /* ld, V9 lduw, load unsigned word */
4378 gen_address_mask(dc, cpu_addr);
4379 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4381 case 0x1: /* ldub, load unsigned byte */
4382 gen_address_mask(dc, cpu_addr);
4383 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4385 case 0x2: /* lduh, load unsigned halfword */
4386 gen_address_mask(dc, cpu_addr);
4387 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4389 case 0x3: /* ldd, load double word */
4395 save_state(dc, cpu_cond);
4396 r_const = tcg_const_i32(7);
4397 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4398 tcg_temp_free_i32(r_const);
4399 gen_address_mask(dc, cpu_addr);
4400 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4401 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4402 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4403 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4404 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4405 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4406 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4409 case 0x9: /* ldsb, load signed byte */
4410 gen_address_mask(dc, cpu_addr);
4411 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4413 case 0xa: /* ldsh, load signed halfword */
4414 gen_address_mask(dc, cpu_addr);
4415 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4417 case 0xd: /* ldstub -- XXX: should be atomically */
4421 gen_address_mask(dc, cpu_addr);
4422 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4423 r_const = tcg_const_tl(0xff);
4424 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4425 tcg_temp_free(r_const);
4428 case 0x0f: /* swap, swap register with memory. Also
4430 CHECK_IU_FEATURE(dc, SWAP);
4431 gen_movl_reg_TN(rd, cpu_val);
4432 gen_address_mask(dc, cpu_addr);
4433 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4434 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4435 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4437 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4438 case 0x10: /* lda, V9 lduwa, load word alternate */
4439 #ifndef TARGET_SPARC64
4442 if (!supervisor(dc))
4445 save_state(dc, cpu_cond);
4446 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4448 case 0x11: /* lduba, load unsigned byte alternate */
4449 #ifndef TARGET_SPARC64
4452 if (!supervisor(dc))
4455 save_state(dc, cpu_cond);
4456 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4458 case 0x12: /* lduha, load unsigned halfword alternate */
4459 #ifndef TARGET_SPARC64
4462 if (!supervisor(dc))
4465 save_state(dc, cpu_cond);
4466 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4468 case 0x13: /* ldda, load double word alternate */
4469 #ifndef TARGET_SPARC64
4472 if (!supervisor(dc))
4477 save_state(dc, cpu_cond);
4478 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4480 case 0x19: /* ldsba, load signed byte alternate */
4481 #ifndef TARGET_SPARC64
4484 if (!supervisor(dc))
4487 save_state(dc, cpu_cond);
4488 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4490 case 0x1a: /* ldsha, load signed halfword alternate */
4491 #ifndef TARGET_SPARC64
4494 if (!supervisor(dc))
4497 save_state(dc, cpu_cond);
4498 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4500 case 0x1d: /* ldstuba -- XXX: should be atomically */
4501 #ifndef TARGET_SPARC64
4504 if (!supervisor(dc))
4507 save_state(dc, cpu_cond);
4508 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4510 case 0x1f: /* swapa, swap reg with alt. memory. Also
4512 CHECK_IU_FEATURE(dc, SWAP);
4513 #ifndef TARGET_SPARC64
4516 if (!supervisor(dc))
4519 save_state(dc, cpu_cond);
4520 gen_movl_reg_TN(rd, cpu_val);
4521 gen_swap_asi(cpu_val, cpu_addr, insn);
4524 #ifndef TARGET_SPARC64
4525 case 0x30: /* ldc */
4526 case 0x31: /* ldcsr */
4527 case 0x33: /* lddc */
4531 #ifdef TARGET_SPARC64
4532 case 0x08: /* V9 ldsw */
4533 gen_address_mask(dc, cpu_addr);
4534 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4536 case 0x0b: /* V9 ldx */
4537 gen_address_mask(dc, cpu_addr);
4538 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4540 case 0x18: /* V9 ldswa */
4541 save_state(dc, cpu_cond);
4542 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4544 case 0x1b: /* V9 ldxa */
4545 save_state(dc, cpu_cond);
4546 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4548 case 0x2d: /* V9 prefetch, no effect */
4550 case 0x30: /* V9 ldfa */
4551 save_state(dc, cpu_cond);
4552 gen_ldf_asi(cpu_addr, insn, 4, rd);
4554 case 0x33: /* V9 lddfa */
4555 save_state(dc, cpu_cond);
4556 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4558 case 0x3d: /* V9 prefetcha, no effect */
4560 case 0x32: /* V9 ldqfa */
4561 CHECK_FPU_FEATURE(dc, FLOAT128);
4562 save_state(dc, cpu_cond);
4563 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4569 gen_movl_TN_reg(rd, cpu_val);
4570 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4573 } else if (xop >= 0x20 && xop < 0x24) {
4574 if (gen_trap_ifnofpu(dc, cpu_cond))
4576 save_state(dc, cpu_cond);
4578 case 0x20: /* ldf, load fpreg */
4579 gen_address_mask(dc, cpu_addr);
4580 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4581 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4583 case 0x21: /* ldfsr, V9 ldxfsr */
4584 #ifdef TARGET_SPARC64
4585 gen_address_mask(dc, cpu_addr);
4587 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4588 gen_helper_ldxfsr(cpu_tmp64);
4592 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4593 gen_helper_ldfsr(cpu_tmp32);
4597 case 0x22: /* ldqf, load quad fpreg */
4601 CHECK_FPU_FEATURE(dc, FLOAT128);
4602 r_const = tcg_const_i32(dc->mem_idx);
4603 gen_helper_ldqf(cpu_addr, r_const);
4604 tcg_temp_free_i32(r_const);
4605 gen_op_store_QT0_fpr(QFPREG(rd));
4608 case 0x23: /* lddf, load double fpreg */
4612 r_const = tcg_const_i32(dc->mem_idx);
4613 gen_helper_lddf(cpu_addr, r_const);
4614 tcg_temp_free_i32(r_const);
4615 gen_op_store_DT0_fpr(DFPREG(rd));
4621 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4622 xop == 0xe || xop == 0x1e) {
4623 gen_movl_reg_TN(rd, cpu_val);
4625 case 0x4: /* st, store word */
4626 gen_address_mask(dc, cpu_addr);
4627 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4629 case 0x5: /* stb, store byte */
4630 gen_address_mask(dc, cpu_addr);
4631 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4633 case 0x6: /* sth, store halfword */
4634 gen_address_mask(dc, cpu_addr);
4635 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4637 case 0x7: /* std, store double word */
4643 save_state(dc, cpu_cond);
4644 gen_address_mask(dc, cpu_addr);
4645 r_const = tcg_const_i32(7);
4646 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4647 tcg_temp_free_i32(r_const);
4648 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4649 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4650 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4653 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4654 case 0x14: /* sta, V9 stwa, store word alternate */
4655 #ifndef TARGET_SPARC64
4658 if (!supervisor(dc))
4661 save_state(dc, cpu_cond);
4662 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4664 case 0x15: /* stba, store byte alternate */
4665 #ifndef TARGET_SPARC64
4668 if (!supervisor(dc))
4671 save_state(dc, cpu_cond);
4672 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4674 case 0x16: /* stha, store halfword alternate */
4675 #ifndef TARGET_SPARC64
4678 if (!supervisor(dc))
4681 save_state(dc, cpu_cond);
4682 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4684 case 0x17: /* stda, store double word alternate */
4685 #ifndef TARGET_SPARC64
4688 if (!supervisor(dc))
4694 save_state(dc, cpu_cond);
4695 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4699 #ifdef TARGET_SPARC64
4700 case 0x0e: /* V9 stx */
4701 gen_address_mask(dc, cpu_addr);
4702 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4704 case 0x1e: /* V9 stxa */
4705 save_state(dc, cpu_cond);
4706 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4712 } else if (xop > 0x23 && xop < 0x28) {
4713 if (gen_trap_ifnofpu(dc, cpu_cond))
4715 save_state(dc, cpu_cond);
4717 case 0x24: /* stf, store fpreg */
4718 gen_address_mask(dc, cpu_addr);
4719 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4720 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4722 case 0x25: /* stfsr, V9 stxfsr */
4723 #ifdef TARGET_SPARC64
4724 gen_address_mask(dc, cpu_addr);
4725 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4727 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4729 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4731 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4732 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4736 #ifdef TARGET_SPARC64
4737 /* V9 stqf, store quad fpreg */
4741 CHECK_FPU_FEATURE(dc, FLOAT128);
4742 gen_op_load_fpr_QT0(QFPREG(rd));
4743 r_const = tcg_const_i32(dc->mem_idx);
4744 gen_helper_stqf(cpu_addr, r_const);
4745 tcg_temp_free_i32(r_const);
4748 #else /* !TARGET_SPARC64 */
4749 /* stdfq, store floating point queue */
4750 #if defined(CONFIG_USER_ONLY)
4753 if (!supervisor(dc))
4755 if (gen_trap_ifnofpu(dc, cpu_cond))
4760 case 0x27: /* stdf, store double fpreg */
4764 gen_op_load_fpr_DT0(DFPREG(rd));
4765 r_const = tcg_const_i32(dc->mem_idx);
4766 gen_helper_stdf(cpu_addr, r_const);
4767 tcg_temp_free_i32(r_const);
4773 } else if (xop > 0x33 && xop < 0x3f) {
4774 save_state(dc, cpu_cond);
4776 #ifdef TARGET_SPARC64
4777 case 0x34: /* V9 stfa */
4778 gen_stf_asi(cpu_addr, insn, 4, rd);
4780 case 0x36: /* V9 stqfa */
4784 CHECK_FPU_FEATURE(dc, FLOAT128);
4785 r_const = tcg_const_i32(7);
4786 gen_helper_check_align(cpu_addr, r_const);
4787 tcg_temp_free_i32(r_const);
4788 gen_op_load_fpr_QT0(QFPREG(rd));
4789 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4792 case 0x37: /* V9 stdfa */
4793 gen_op_load_fpr_DT0(DFPREG(rd));
4794 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4796 case 0x3c: /* V9 casa */
4797 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4798 gen_movl_TN_reg(rd, cpu_val);
4800 case 0x3e: /* V9 casxa */
4801 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4802 gen_movl_TN_reg(rd, cpu_val);
4805 case 0x34: /* stc */
4806 case 0x35: /* stcsr */
4807 case 0x36: /* stdcq */
4808 case 0x37: /* stdc */
4819 /* default case for non jump instructions */
4820 if (dc->npc == DYNAMIC_PC) {
4821 dc->pc = DYNAMIC_PC;
4823 } else if (dc->npc == JUMP_PC) {
4824 /* we can do a static jump */
4825 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4829 dc->npc = dc->npc + 4;
4837 save_state(dc, cpu_cond);
4838 r_const = tcg_const_i32(TT_ILL_INSN);
4839 gen_helper_raise_exception(r_const);
4840 tcg_temp_free_i32(r_const);
4848 save_state(dc, cpu_cond);
4849 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4850 gen_helper_raise_exception(r_const);
4851 tcg_temp_free_i32(r_const);
4855 #if !defined(CONFIG_USER_ONLY)
4860 save_state(dc, cpu_cond);
4861 r_const = tcg_const_i32(TT_PRIV_INSN);
4862 gen_helper_raise_exception(r_const);
4863 tcg_temp_free_i32(r_const);
4869 save_state(dc, cpu_cond);
4870 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4873 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4875 save_state(dc, cpu_cond);
4876 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4880 #ifndef TARGET_SPARC64
4885 save_state(dc, cpu_cond);
4886 r_const = tcg_const_i32(TT_NCP_INSN);
4887 gen_helper_raise_exception(r_const);
4888 tcg_temp_free(r_const);
/* Core translation loop: disassembles guest SPARC instructions starting at
   tb->cs_base-derived PC into a TCG opcode stream for one TranslationBlock.
   'spc' selects the searched-PC mode used by gen_intermediate_code_pc()
   (per-insn bookkeeping in gen_opc_pc/gen_opc_npc/gen_opc_icount).
   NOTE(review): this chunk is elided — several braces, else-arms and
   assignments between the visible lines are not shown here. */
4895 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4896                                                   int spc, CPUSPARCState *env)
4898     target_ulong pc_start, last_pc;
4899     uint16_t *gen_opc_end;
4900     DisasContext dc1, *dc = &dc1;
     /* Per-TB disassembly context lives on the stack; zero it first. */
4906     memset(dc, 0, sizeof(DisasContext));
     /* npc of the first insn is carried in via tb->cs_base (SPARC has a
        delayed-branch pc/npc pair). */
4911     dc->npc = (target_ulong) tb->cs_base;
4912     dc->cc_op = CC_OP_DYNAMIC;
4913     dc->mem_idx = cpu_mmu_index(env);
     /* FPU availability is a per-TB constant; insns trap via
        gen_trap_ifnofpu() when disabled.  NOTE(review): the else-line of
        this if/else pair is visible but the 'else' keyword itself is on an
        elided line. */
4915     if ((dc->def->features & CPU_FEATURE_FLOAT))
4916         dc->fpu_enabled = cpu_fpu_enabled(env);
4918         dc->fpu_enabled = 0;
4919 #ifdef TARGET_SPARC64
     /* PSTATE.AM masks addresses to 32 bits; see gen_address_mask(). */
4920     dc->address_mask_32bit = env->pstate & PS_AM;
4922     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
     /* Scratch temporaries shared by the whole TB.  cpu_dst/cpu_val/cpu_addr
        are "local" temps because their values must survive the branches the
        generated code may contain. */
4924     cpu_tmp0 = tcg_temp_new();
4925     cpu_tmp32 = tcg_temp_new_i32();
4926     cpu_tmp64 = tcg_temp_new_i64();
4928     cpu_dst = tcg_temp_local_new();
4931     cpu_val = tcg_temp_local_new();
4932     cpu_addr = tcg_temp_local_new();
     /* Icount budget for this TB; 0 in cflags means "no limit", replaced by
        the mask value below. */
4935     max_insns = tb->cflags & CF_COUNT_MASK;
4937         max_insns = CF_COUNT_MASK;
     /* Stop translation on a debugger breakpoint at the current pc so the
        exception is raised at the right instruction boundary. */
4940         if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4941             TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4942                 if (bp->pc == dc->pc) {
4943                     if (dc->pc != pc_start)
4944                         save_state(dc, cpu_cond);
     /* Searched-PC mode: record pc/npc/icount for each generated op so
        gen_pc_load() can restore CPU state from a host-pc lookup.
        NOTE(review): the lj/j gap-filling logic is partly elided. */
4953                 qemu_log("Search PC...\n");
4954             j = gen_opc_ptr - gen_opc_buf;
4958                     gen_opc_instr_start[lj++] = 0;
4959             gen_opc_pc[lj] = dc->pc;
4960             gen_opc_npc[lj] = dc->npc;
4961             gen_opc_instr_start[lj] = 1;
4962             gen_opc_icount[lj] = num_insns;
4965         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
     /* Translate exactly one guest instruction; updates dc->pc/dc->npc. */
4968         disas_sparc_insn(dc);
4973         /* if the next PC is different, we abort now */
4974         if (dc->pc != (last_pc + 4))
4976         /* if we reach a page boundary, we stop generation so that the
4977            PC of a TT_TFAULT exception is always in the right page */
4978         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4980         /* if single step mode, we generate only one instruction and
4981            generate an exception */
4982         if (env->singlestep_enabled || singlestep) {
4983             tcg_gen_movi_tl(cpu_pc, dc->pc);
     /* Loop until the opcode buffer, the page budget or the icount budget
        is exhausted. */
4987     } while ((gen_opc_ptr < gen_opc_end) &&
4988              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4989              num_insns < max_insns);
     /* Release the TB-wide temporaries in reverse allocation order. */
4992     tcg_temp_free(cpu_addr);
4993     tcg_temp_free(cpu_val);
4994     tcg_temp_free(cpu_dst);
4995     tcg_temp_free_i64(cpu_tmp64);
4996     tcg_temp_free_i32(cpu_tmp32);
4997     tcg_temp_free(cpu_tmp0);
4998     if (tb->cflags & CF_LAST_IO)
     /* Epilogue: chain directly when both pc and npc are compile-time
        constants, otherwise store them for the dynamic-jump path. */
5001     if (dc->pc != DYNAMIC_PC &&
5002         (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5003         /* static PC and NPC: we can use direct chaining */
5004         gen_goto_tb(dc, 0, dc->pc, dc->npc);
5006         if (dc->pc != DYNAMIC_PC)
5007             tcg_gen_movi_tl(cpu_pc, dc->pc);
5008         save_npc(dc, cpu_cond);
5012     gen_icount_end(tb, num_insns);
5013     *gen_opc_ptr = INDEX_op_end;
     /* Searched-PC mode: pad the instr_start table out to the last op and
        export the conditional-branch targets for gen_pc_load(). */
5015         j = gen_opc_ptr - gen_opc_buf;
5018             gen_opc_instr_start[lj++] = 0;
5022         gen_opc_jump_pc[0] = dc->jump_pc[0];
5023         gen_opc_jump_pc[1] = dc->jump_pc[1];
5025     tb->size = last_pc + 4 - pc_start;
5026     tb->icount = num_insns;
     /* Optional disassembly dump of the guest code just translated. */
5029     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5030         qemu_log("--------------\n");
5031         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5032         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point for normal translation: spc == 0, so no per-op
   searched-PC bookkeeping is recorded. */
5038 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5040     gen_intermediate_code_internal(tb, 0, env);
/* Public entry point for searched-PC retranslation (used to recover guest
   pc/npc after an exception): spc == 1 enables the gen_opc_* tables that
   gen_pc_load() consumes. */
5043 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5045     gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialisation: binds every file-scope TCGv declared
   at the top of this file (cpu_pc, cpu_npc, cpu_fpr[], ...) to its backing
   field inside CPUState, so generated code reads/writes the CPU struct
   directly.  NOTE(review): this chunk is elided — the name-string argument
   of most tcg_global_mem_new*() calls and the run-once guard around the
   table setup are on lines not shown here. */
5048 void gen_intermediate_code_init(CPUSPARCState *env)
     /* Register-name tables used only for TCG debug dumps. */
5052     static const char * const gregnames[8] = {
5053         NULL, // g0 not used
5062     static const char * const fregnames[64] = {
5063         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5064         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5065         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5066         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5067         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5068         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5069         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5070         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5073     /* init various static tables */
     /* env itself lives in a fixed host register (TCG_AREG0); everything
        else is a memory-backed global at a CPUState offset. */
5077         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5078         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5079                                              offsetof(CPUState, regwptr),
     /* SPARC64-only state: extended condition codes, ASI, FPRS and the
        various tick/hypervisor registers. */
5081 #ifdef TARGET_SPARC64
5082         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5084         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5086         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5088         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5090         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5091                                            offsetof(CPUState, tick_cmpr),
5093         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5094                                             offsetof(CPUState, stick_cmpr),
5096         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5097                                              offsetof(CPUState, hstick_cmpr),
5099         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5101         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5103         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5105         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5106                                      offsetof(CPUState, ssr), "ssr");
5107         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5108                                      offsetof(CPUState, version), "ver");
5109         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5110                                              offsetof(CPUState, softint),
     /* 32-bit SPARC window-invalid mask (the #else arm of the block above;
        the '#else' line itself is elided). */
5113         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
     /* State common to both 32- and 64-bit targets: condition-code
        sources, psr/fsr, and the pc/npc pair. */
5116         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5118         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5120         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5121                                          offsetof(CPUState, cc_src2),
5123         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5125         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5127         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5129         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5131         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5133         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5135         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5136 #ifndef CONFIG_USER_ONLY
5137         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
     /* g0 is hardwired to zero, so globals start at index 1. */
5140         for (i = 1; i < 8; i++)
5141             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5142                                               offsetof(CPUState, gregs[i]),
     /* All FP registers are exposed as 32-bit TCG globals; 64/128-bit FP
        ops operate on adjacent pairs/quads (see DFPREG/QFPREG uses). */
5144         for (i = 0; i < TARGET_FPREGS; i++)
5145             cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5146                                                 offsetof(CPUState, fpr[i]),
5149     /* register helpers */
5151 #define GEN_HELPER 2
5156 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5157 unsigned long searched_pc, int pc_pos, void *puc)
5160 env->pc = gen_opc_pc[pc_pos];
5161 npc = gen_opc_npc[pc_pos];
5163 /* dynamic NPC: already stored */
5164 } else if (npc == 2) {
5165 target_ulong t2 = (target_ulong)(unsigned long)puc;
5166 /* jump PC: use T2 and the jump targets of the translation */
5168 env->npc = gen_opc_jump_pc[0];
5170 env->npc = gen_opc_jump_pc[1];