4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver, cpu_softint;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
/* Sign-extend the low LEN bits of X to a full signed int.
   Used by GET_FIELDs/GET_FIELD_SPs to turn raw instruction fields into
   signed immediates.  The extract had lost the "len = 32 - len" step of
   the canonical QEMU implementation; this version restores the intended
   semantics without relying on implementation-defined shifts of signed
   negative values.  */
static int sign_extend(int x, int len)
{
    uint32_t sign_bit, value;

    if (len <= 0 || len >= 32)
        return x;                       /* nothing to extend */
    sign_bit = 1u << (len - 1);
    value = (uint32_t)x & ((sign_bit << 1) - 1);   /* keep low LEN bits */
    /* (v ^ m) - m propagates the sign bit into the high bits.  */
    return (int)((value ^ sign_bit) - sign_bit);
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
108 offsetof(CPU_DoubleU, l.upper));
109 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
110 offsetof(CPU_DoubleU, l.lower));
113 static void gen_op_load_fpr_DT1(unsigned int src)
115 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
116 offsetof(CPU_DoubleU, l.upper));
117 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
118 offsetof(CPU_DoubleU, l.lower));
121 static void gen_op_store_DT0_fpr(unsigned int dst)
123 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.upper));
125 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
126 offsetof(CPU_DoubleU, l.lower));
129 static void gen_op_load_fpr_QT0(unsigned int src)
131 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
132 offsetof(CPU_QuadU, l.upmost));
133 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
134 offsetof(CPU_QuadU, l.upper));
135 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
136 offsetof(CPU_QuadU, l.lower));
137 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
138 offsetof(CPU_QuadU, l.lowest));
141 static void gen_op_load_fpr_QT1(unsigned int src)
143 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
144 offsetof(CPU_QuadU, l.upmost));
145 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
146 offsetof(CPU_QuadU, l.upper));
147 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
148 offsetof(CPU_QuadU, l.lower));
149 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
150 offsetof(CPU_QuadU, l.lowest));
153 static void gen_op_store_QT0_fpr(unsigned int dst)
155 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
156 offsetof(CPU_QuadU, l.upmost));
157 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
158 offsetof(CPU_QuadU, l.upper));
159 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
160 offsetof(CPU_QuadU, l.lower));
161 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
162 offsetof(CPU_QuadU, l.lowest));
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
179 #ifdef TARGET_SPARC64
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
183 #define AM_CHECK(dc) (1)
187 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
189 #ifdef TARGET_SPARC64
191 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
195 static inline void gen_movl_reg_TN(int reg, TCGv tn)
198 tcg_gen_movi_tl(tn, 0);
200 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
202 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
206 static inline void gen_movl_TN_reg(int reg, TCGv tn)
211 tcg_gen_mov_tl(cpu_gregs[reg], tn);
213 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_goto_tb(DisasContext *s, int tb_num,
218 target_ulong pc, target_ulong npc)
220 TranslationBlock *tb;
223 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
224 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num);
227 tcg_gen_movi_tl(cpu_pc, pc);
228 tcg_gen_movi_tl(cpu_npc, npc);
229 tcg_gen_exit_tb((long)tb + tb_num);
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc, pc);
233 tcg_gen_movi_tl(cpu_npc, npc);
239 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
241 tcg_gen_extu_i32_tl(reg, src);
242 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
243 tcg_gen_andi_tl(reg, reg, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
248 tcg_gen_extu_i32_tl(reg, src);
249 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
250 tcg_gen_andi_tl(reg, reg, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
255 tcg_gen_extu_i32_tl(reg, src);
256 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
257 tcg_gen_andi_tl(reg, reg, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
262 tcg_gen_extu_i32_tl(reg, src);
263 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
264 tcg_gen_andi_tl(reg, reg, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr, 0);
272 #ifdef TARGET_SPARC64
273 static inline void gen_cc_clear_xcc(void)
275 tcg_gen_movi_i32(cpu_xcc, 0);
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
285 static inline void gen_cc_NZ_icc(TCGv dst)
290 l1 = gen_new_label();
291 l2 = gen_new_label();
292 r_temp = tcg_temp_new(TCG_TYPE_TL);
293 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
294 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
295 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
297 tcg_gen_ext_i32_tl(r_temp, dst);
298 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
299 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
301 tcg_temp_free(r_temp);
304 #ifdef TARGET_SPARC64
305 static inline void gen_cc_NZ_xcc(TCGv dst)
309 l1 = gen_new_label();
310 l2 = gen_new_label();
311 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
312 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
314 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
315 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
326 TCGv r_temp1, r_temp2;
329 l1 = gen_new_label();
330 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
331 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
332 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
333 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
334 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
335 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
337 tcg_temp_free(r_temp1);
338 tcg_temp_free(r_temp2);
341 #ifdef TARGET_SPARC64
342 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
346 l1 = gen_new_label();
347 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
348 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
357 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
361 r_temp = tcg_temp_new(TCG_TYPE_TL);
362 tcg_gen_xor_tl(r_temp, src1, src2);
363 tcg_gen_xori_tl(r_temp, r_temp, -1);
364 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
365 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
366 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
368 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
369 tcg_temp_free(r_temp);
370 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
373 #ifdef TARGET_SPARC64
374 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
378 r_temp = tcg_temp_new(TCG_TYPE_TL);
379 tcg_gen_xor_tl(r_temp, src1, src2);
380 tcg_gen_xori_tl(r_temp, r_temp, -1);
381 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
382 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
383 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
384 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
385 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
386 tcg_temp_free(r_temp);
387 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
391 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
393 TCGv r_temp, r_const;
396 l1 = gen_new_label();
398 r_temp = tcg_temp_new(TCG_TYPE_TL);
399 tcg_gen_xor_tl(r_temp, src1, src2);
400 tcg_gen_xori_tl(r_temp, r_temp, -1);
401 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
402 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
403 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
405 r_const = tcg_const_i32(TT_TOVF);
406 tcg_gen_helper_0_1(raise_exception, r_const);
407 tcg_temp_free(r_const);
409 tcg_temp_free(r_temp);
412 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
416 l1 = gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0, src1, src2);
418 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
420 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
424 static inline void gen_tag_tv(TCGv src1, TCGv src2)
429 l1 = gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0, src1, src2);
431 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
433 r_const = tcg_const_i32(TT_TOVF);
434 tcg_gen_helper_0_1(raise_exception, r_const);
435 tcg_temp_free(r_const);
439 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
441 tcg_gen_mov_tl(cpu_cc_src, src1);
442 tcg_gen_mov_tl(cpu_cc_src2, src2);
443 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
445 gen_cc_NZ_icc(cpu_cc_dst);
446 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
447 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 #ifdef TARGET_SPARC64
450 gen_cc_NZ_xcc(cpu_cc_dst);
451 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
452 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
454 tcg_gen_mov_tl(dst, cpu_cc_dst);
457 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
459 tcg_gen_mov_tl(cpu_cc_src, src1);
460 tcg_gen_mov_tl(cpu_cc_src2, src2);
461 gen_mov_reg_C(cpu_tmp0, cpu_psr);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
464 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
465 #ifdef TARGET_SPARC64
467 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
469 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
470 gen_cc_NZ_icc(cpu_cc_dst);
471 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
472 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst);
475 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
476 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
478 tcg_gen_mov_tl(dst, cpu_cc_dst);
481 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
483 tcg_gen_mov_tl(cpu_cc_src, src1);
484 tcg_gen_mov_tl(cpu_cc_src2, src2);
485 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
487 gen_cc_NZ_icc(cpu_cc_dst);
488 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
489 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
491 #ifdef TARGET_SPARC64
493 gen_cc_NZ_xcc(cpu_cc_dst);
494 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
495 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
497 tcg_gen_mov_tl(dst, cpu_cc_dst);
500 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
502 tcg_gen_mov_tl(cpu_cc_src, src1);
503 tcg_gen_mov_tl(cpu_cc_src2, src2);
504 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
505 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
506 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
508 gen_cc_NZ_icc(cpu_cc_dst);
509 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
510 #ifdef TARGET_SPARC64
512 gen_cc_NZ_xcc(cpu_cc_dst);
513 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
514 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
516 tcg_gen_mov_tl(dst, cpu_cc_dst);
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
525 TCGv r_temp1, r_temp2;
528 l1 = gen_new_label();
529 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
530 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
531 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
532 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
533 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
534 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
536 tcg_temp_free(r_temp1);
537 tcg_temp_free(r_temp2);
540 #ifdef TARGET_SPARC64
541 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
545 l1 = gen_new_label();
546 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
547 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
556 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
560 r_temp = tcg_temp_new(TCG_TYPE_TL);
561 tcg_gen_xor_tl(r_temp, src1, src2);
562 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
563 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
564 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
566 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
567 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
568 tcg_temp_free(r_temp);
571 #ifdef TARGET_SPARC64
572 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
576 r_temp = tcg_temp_new(TCG_TYPE_TL);
577 tcg_gen_xor_tl(r_temp, src1, src2);
578 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
579 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
580 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
581 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
582 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
583 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
584 tcg_temp_free(r_temp);
588 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
590 TCGv r_temp, r_const;
593 l1 = gen_new_label();
595 r_temp = tcg_temp_new(TCG_TYPE_TL);
596 tcg_gen_xor_tl(r_temp, src1, src2);
597 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
598 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
599 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
601 r_const = tcg_const_i32(TT_TOVF);
602 tcg_gen_helper_0_1(raise_exception, r_const);
603 tcg_temp_free(r_const);
605 tcg_temp_free(r_temp);
608 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
610 tcg_gen_mov_tl(cpu_cc_src, src1);
611 tcg_gen_mov_tl(cpu_cc_src2, src2);
612 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
614 gen_cc_NZ_icc(cpu_cc_dst);
615 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
616 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617 #ifdef TARGET_SPARC64
619 gen_cc_NZ_xcc(cpu_cc_dst);
620 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
621 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
623 tcg_gen_mov_tl(dst, cpu_cc_dst);
626 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
628 tcg_gen_mov_tl(cpu_cc_src, src1);
629 tcg_gen_mov_tl(cpu_cc_src2, src2);
630 gen_mov_reg_C(cpu_tmp0, cpu_psr);
631 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
633 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
634 #ifdef TARGET_SPARC64
636 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
638 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
639 gen_cc_NZ_icc(cpu_cc_dst);
640 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
641 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst);
644 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
645 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
647 tcg_gen_mov_tl(dst, cpu_cc_dst);
650 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
652 tcg_gen_mov_tl(cpu_cc_src, src1);
653 tcg_gen_mov_tl(cpu_cc_src2, src2);
654 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
656 gen_cc_NZ_icc(cpu_cc_dst);
657 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
658 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
660 #ifdef TARGET_SPARC64
662 gen_cc_NZ_xcc(cpu_cc_dst);
663 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
664 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666 tcg_gen_mov_tl(dst, cpu_cc_dst);
669 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
671 tcg_gen_mov_tl(cpu_cc_src, src1);
672 tcg_gen_mov_tl(cpu_cc_src2, src2);
673 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
674 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
675 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
677 gen_cc_NZ_icc(cpu_cc_dst);
678 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
679 #ifdef TARGET_SPARC64
681 gen_cc_NZ_xcc(cpu_cc_dst);
682 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
683 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
685 tcg_gen_mov_tl(dst, cpu_cc_dst);
688 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
693 l1 = gen_new_label();
694 r_temp = tcg_temp_new(TCG_TYPE_TL);
700 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
701 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
704 tcg_gen_movi_tl(cpu_cc_src2, 0);
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
710 tcg_gen_shli_tl(r_temp, r_temp, 31);
711 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
712 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
714 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
717 gen_mov_reg_N(cpu_tmp0, cpu_psr);
718 gen_mov_reg_V(r_temp, cpu_psr);
719 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
720 tcg_temp_free(r_temp);
722 // T0 = (b1 << 31) | (T0 >> 1);
724 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
725 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
726 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
732 gen_cc_NZ_icc(cpu_cc_dst);
733 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
734 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
735 tcg_gen_mov_tl(dst, cpu_cc_dst);
738 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
740 TCGv r_temp, r_temp2;
742 r_temp = tcg_temp_new(TCG_TYPE_I64);
743 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
745 tcg_gen_extu_i32_i64(r_temp, src2);
746 tcg_gen_extu_i32_i64(r_temp2, src1);
747 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
749 tcg_gen_shri_i64(r_temp, r_temp2, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
751 tcg_temp_free(r_temp);
752 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst, r_temp2);
756 tcg_gen_trunc_i64_tl(dst, r_temp2);
758 tcg_temp_free(r_temp2);
761 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
763 TCGv r_temp, r_temp2;
765 r_temp = tcg_temp_new(TCG_TYPE_I64);
766 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
768 tcg_gen_ext_i32_i64(r_temp, src2);
769 tcg_gen_ext_i32_i64(r_temp2, src1);
770 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
772 tcg_gen_shri_i64(r_temp, r_temp2, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
774 tcg_temp_free(r_temp);
775 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst, r_temp2);
779 tcg_gen_trunc_i64_tl(dst, r_temp2);
781 tcg_temp_free(r_temp2);
784 #ifdef TARGET_SPARC64
785 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
790 l1 = gen_new_label();
791 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
792 r_const = tcg_const_i32(TT_DIV_ZERO);
793 tcg_gen_helper_0_1(raise_exception, r_const);
794 tcg_temp_free(r_const);
798 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
802 l1 = gen_new_label();
803 l2 = gen_new_label();
804 tcg_gen_mov_tl(cpu_cc_src, src1);
805 tcg_gen_mov_tl(cpu_cc_src2, src2);
806 gen_trap_ifdivzero_tl(cpu_cc_src2);
807 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
808 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
809 tcg_gen_movi_i64(dst, INT64_MIN);
812 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
817 static inline void gen_op_div_cc(TCGv dst)
821 tcg_gen_mov_tl(cpu_cc_dst, dst);
823 gen_cc_NZ_icc(cpu_cc_dst);
824 l1 = gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
826 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
830 static inline void gen_op_logic_cc(TCGv dst)
832 tcg_gen_mov_tl(cpu_cc_dst, dst);
835 gen_cc_NZ_icc(cpu_cc_dst);
836 #ifdef TARGET_SPARC64
838 gen_cc_NZ_xcc(cpu_cc_dst);
843 static inline void gen_op_eval_ba(TCGv dst)
845 tcg_gen_movi_tl(dst, 1);
849 static inline void gen_op_eval_be(TCGv dst, TCGv src)
851 gen_mov_reg_Z(dst, src);
855 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
857 gen_mov_reg_N(cpu_tmp0, src);
858 gen_mov_reg_V(dst, src);
859 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
860 gen_mov_reg_Z(cpu_tmp0, src);
861 tcg_gen_or_tl(dst, dst, cpu_tmp0);
865 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
867 gen_mov_reg_V(cpu_tmp0, src);
868 gen_mov_reg_N(dst, src);
869 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
873 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
875 gen_mov_reg_Z(cpu_tmp0, src);
876 gen_mov_reg_C(dst, src);
877 tcg_gen_or_tl(dst, dst, cpu_tmp0);
881 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
883 gen_mov_reg_C(dst, src);
887 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
889 gen_mov_reg_V(dst, src);
893 static inline void gen_op_eval_bn(TCGv dst)
895 tcg_gen_movi_tl(dst, 0);
899 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
901 gen_mov_reg_N(dst, src);
905 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
907 gen_mov_reg_Z(dst, src);
908 tcg_gen_xori_tl(dst, dst, 0x1);
912 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
914 gen_mov_reg_N(cpu_tmp0, src);
915 gen_mov_reg_V(dst, src);
916 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
917 gen_mov_reg_Z(cpu_tmp0, src);
918 tcg_gen_or_tl(dst, dst, cpu_tmp0);
919 tcg_gen_xori_tl(dst, dst, 0x1);
923 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
925 gen_mov_reg_V(cpu_tmp0, src);
926 gen_mov_reg_N(dst, src);
927 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
928 tcg_gen_xori_tl(dst, dst, 0x1);
932 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
934 gen_mov_reg_Z(cpu_tmp0, src);
935 gen_mov_reg_C(dst, src);
936 tcg_gen_or_tl(dst, dst, cpu_tmp0);
937 tcg_gen_xori_tl(dst, dst, 0x1);
941 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
943 gen_mov_reg_C(dst, src);
944 tcg_gen_xori_tl(dst, dst, 0x1);
948 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
950 gen_mov_reg_N(dst, src);
951 tcg_gen_xori_tl(dst, dst, 0x1);
955 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
957 gen_mov_reg_V(dst, src);
958 tcg_gen_xori_tl(dst, dst, 0x1);
962 FPSR bit field FCC1 | FCC0:
968 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
969 unsigned int fcc_offset)
971 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
972 tcg_gen_andi_tl(reg, reg, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
976 unsigned int fcc_offset)
978 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
979 tcg_gen_andi_tl(reg, reg, 0x1);
983 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
984 unsigned int fcc_offset)
986 gen_mov_reg_FCC0(dst, src, fcc_offset);
987 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
988 tcg_gen_or_tl(dst, dst, cpu_tmp0);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
993 unsigned int fcc_offset)
995 gen_mov_reg_FCC0(dst, src, fcc_offset);
996 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
997 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1001 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1002 unsigned int fcc_offset)
1004 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1009 unsigned int fcc_offset)
1011 gen_mov_reg_FCC0(dst, src, fcc_offset);
1012 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1013 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1014 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1018 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1019 unsigned int fcc_offset)
1021 gen_mov_reg_FCC1(dst, src, fcc_offset);
1025 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1026 unsigned int fcc_offset)
1028 gen_mov_reg_FCC0(dst, src, fcc_offset);
1029 tcg_gen_xori_tl(dst, dst, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1031 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1035 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1036 unsigned int fcc_offset)
1038 gen_mov_reg_FCC0(dst, src, fcc_offset);
1039 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1040 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1045 unsigned int fcc_offset)
1047 gen_mov_reg_FCC0(dst, src, fcc_offset);
1048 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1049 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1050 tcg_gen_xori_tl(dst, dst, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1055 unsigned int fcc_offset)
1057 gen_mov_reg_FCC0(dst, src, fcc_offset);
1058 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1059 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1060 tcg_gen_xori_tl(dst, dst, 0x1);
1064 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1065 unsigned int fcc_offset)
1067 gen_mov_reg_FCC0(dst, src, fcc_offset);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1073 unsigned int fcc_offset)
1075 gen_mov_reg_FCC0(dst, src, fcc_offset);
1076 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1077 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1078 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1079 tcg_gen_xori_tl(dst, dst, 0x1);
1083 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1084 unsigned int fcc_offset)
1086 gen_mov_reg_FCC1(dst, src, fcc_offset);
1087 tcg_gen_xori_tl(dst, dst, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1092 unsigned int fcc_offset)
1094 gen_mov_reg_FCC0(dst, src, fcc_offset);
1095 tcg_gen_xori_tl(dst, dst, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1097 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1098 tcg_gen_xori_tl(dst, dst, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1103 unsigned int fcc_offset)
1105 gen_mov_reg_FCC0(dst, src, fcc_offset);
1106 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1107 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1111 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1112 target_ulong pc2, TCGv r_cond)
1116 l1 = gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1120 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1123 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1126 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1127 target_ulong pc2, TCGv r_cond)
1131 l1 = gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1135 gen_goto_tb(dc, 0, pc2, pc1);
1138 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1141 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1146 l1 = gen_new_label();
1147 l2 = gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1151 tcg_gen_movi_tl(cpu_npc, npc1);
1155 tcg_gen_movi_tl(cpu_npc, npc2);
1159 /* call this function before using the condition register as it may
1160 have been set for a jump */
1161 static inline void flush_cond(DisasContext *dc, TCGv cond)
1163 if (dc->npc == JUMP_PC) {
1164 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1165 dc->npc = DYNAMIC_PC;
1169 static inline void save_npc(DisasContext *dc, TCGv cond)
1171 if (dc->npc == JUMP_PC) {
1172 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1173 dc->npc = DYNAMIC_PC;
1174 } else if (dc->npc != DYNAMIC_PC) {
1175 tcg_gen_movi_tl(cpu_npc, dc->npc);
1179 static inline void save_state(DisasContext *dc, TCGv cond)
1181 tcg_gen_movi_tl(cpu_pc, dc->pc);
1185 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1187 if (dc->npc == JUMP_PC) {
1188 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1189 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1190 dc->pc = DYNAMIC_PC;
1191 } else if (dc->npc == DYNAMIC_PC) {
1192 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1193 dc->pc = DYNAMIC_PC;
1199 static inline void gen_op_next_insn(void)
1201 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1202 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1205 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1209 #ifdef TARGET_SPARC64
1219 gen_op_eval_bn(r_dst);
1222 gen_op_eval_be(r_dst, r_src);
1225 gen_op_eval_ble(r_dst, r_src);
1228 gen_op_eval_bl(r_dst, r_src);
1231 gen_op_eval_bleu(r_dst, r_src);
1234 gen_op_eval_bcs(r_dst, r_src);
1237 gen_op_eval_bneg(r_dst, r_src);
1240 gen_op_eval_bvs(r_dst, r_src);
1243 gen_op_eval_ba(r_dst);
1246 gen_op_eval_bne(r_dst, r_src);
1249 gen_op_eval_bg(r_dst, r_src);
1252 gen_op_eval_bge(r_dst, r_src);
1255 gen_op_eval_bgu(r_dst, r_src);
1258 gen_op_eval_bcc(r_dst, r_src);
1261 gen_op_eval_bpos(r_dst, r_src);
1264 gen_op_eval_bvc(r_dst, r_src);
1269 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1271 unsigned int offset;
1291 gen_op_eval_bn(r_dst);
1294 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1297 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1300 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1303 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1306 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1309 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1312 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1315 gen_op_eval_ba(r_dst);
1318 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1321 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1324 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1327 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1330 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1333 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1336 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1341 #ifdef TARGET_SPARC64
1343 static const int gen_tcg_cond_reg[8] = {
1354 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1358 l1 = gen_new_label();
1359 tcg_gen_movi_tl(r_dst, 0);
1360 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1361 tcg_gen_movi_tl(r_dst, 1);
/*
 * do_branch: translate a conditional integer branch (Bicc / V9 BPcc).
 * 'offset' is the sign-extended, pre-scaled displacement; 'cc' selects
 * the condition-code set passed to gen_cond.  cond == 0x0 is "branch
 * never" and cond == 0x8 is "branch always"; bit 29 ('a') is the annul
 * bit.  The general case materializes the condition into r_cond and
 * either emits a static taken/not-taken pair (gen_branch_a) or records
 * a JUMP_PC target pair in dc->jump_pc.
 * NOTE(review): the if/else scaffolding and parts of the signature
 * (the r_cond parameter line) are missing from this extraction.
 */
1366 /* XXX: potentially incorrect if dynamic npc */
1367 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1370 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1371 target_ulong target = dc->pc + offset;
1374 /* unconditional not taken */
1376 dc->pc = dc->npc + 4;
1377 dc->npc = dc->pc + 4;
1380 dc->npc = dc->pc + 4;
1382 } else if (cond == 0x8) {
1383 /* unconditional taken */
1386 dc->npc = dc->pc + 4;
1392 flush_cond(dc, r_cond);
1393 gen_cond(r_cond, cc, cond);
1395 gen_branch_a(dc, target, dc->npc, r_cond);
1399 dc->jump_pc[0] = target;
1400 dc->jump_pc[1] = dc->npc + 4;
/*
 * do_fbranch: translate a floating-point conditional branch
 * (FBfcc / V9 FBPfcc).  Same structure as do_branch above, but the
 * condition is evaluated with gen_fcond against FP condition-code
 * set 'cc'.
 * NOTE(review): the if/else scaffolding and the r_cond parameter line
 * are missing from this extraction.
 */
1406 /* XXX: potentially incorrect if dynamic npc */
1407 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1410 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1411 target_ulong target = dc->pc + offset;
1414 /* unconditional not taken */
1416 dc->pc = dc->npc + 4;
1417 dc->npc = dc->pc + 4;
1420 dc->npc = dc->pc + 4;
1422 } else if (cond == 0x8) {
1423 /* unconditional taken */
1426 dc->npc = dc->pc + 4;
1432 flush_cond(dc, r_cond);
1433 gen_fcond(r_cond, cc, cond);
1435 gen_branch_a(dc, target, dc->npc, r_cond);
1439 dc->jump_pc[0] = target;
1440 dc->jump_pc[1] = dc->npc + 4;
1446 #ifdef TARGET_SPARC64
/*
 * do_branch_reg: translate a V9 branch-on-register-contents (BPr)
 * instruction.  The condition tests r_reg against zero via
 * gen_cond_reg; unlike do_branch there are no "never"/"always"
 * shortcuts for this form.
 * NOTE(review): the annul/taken control flow between these lines is
 * missing from this extraction.
 */
1447 /* XXX: potentially incorrect if dynamic npc */
1448 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1449 TCGv r_cond, TCGv r_reg)
1451 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1452 target_ulong target = dc->pc + offset;
1454 flush_cond(dc, r_cond);
1455 gen_cond_reg(r_cond, cond, r_reg);
1457 gen_branch_a(dc, target, dc->npc, r_cond);
1461 dc->jump_pc[0] = target;
1462 dc->jump_pc[1] = dc->npc + 4;
/*
 * Per-fcc dispatch tables for the double/quad precision (and their
 * signaling-compare) FP compare helpers, indexed by fcc number (0-3).
 * NOTE(review): the initializer lines of all four tables are missing
 * from this extraction.
 */
1467 static GenOpFunc * const gen_fcmpd[4] = {
1474 static GenOpFunc * const gen_fcmpq[4] = {
1481 static GenOpFunc * const gen_fcmped[4] = {
1488 static GenOpFunc * const gen_fcmpeq[4] = {
/*
 * TARGET_SPARC64 FP-compare emitters.  Single-precision compares call
 * a per-fcc helper_fcmps{,_fccN} variant on the two register operands;
 * double/quad forms take no TCG operands (helper_0_0) and use the
 * dispatch tables above -- their inputs are presumably the DT0/DT1 and
 * QT0/QT1 temporaries loaded by the callers (see the load/store
 * pattern used elsewhere in this file) -- TODO confirm.
 * NOTE(review): the switch scaffolding around the fcmps/fcmpes
 * variants is missing from this extraction.
 */
1495 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1499 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1502 tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
1505 tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
1508 tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
/* Double-precision compare, dispatched per fcc set. */
1513 static inline void gen_op_fcmpd(int fccno)
1515 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
/* Quad-precision compare, dispatched per fcc set. */
1518 static inline void gen_op_fcmpq(int fccno)
1520 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
/* Signaling single-precision compare (fcmpes variants). */
1523 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1527 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1530 tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
1533 tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
1536 tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
/* Signaling double-precision compare, dispatched per fcc set. */
1541 static inline void gen_op_fcmped(int fccno)
1543 tcg_gen_helper_0_0(gen_fcmped[fccno]);
/* Signaling quad-precision compare, dispatched per fcc set. */
1546 static inline void gen_op_fcmpeq(int fccno)
1548 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
/*
 * Pre-V9 FP-compare emitters: SPARCv8 has a single FP condition field,
 * so the fccno argument is accepted for interface symmetry with the
 * TARGET_SPARC64 variants but is not used.
 */
1553 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1555 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1558 static inline void gen_op_fcmpd(int fccno)
1560 tcg_gen_helper_0_0(helper_fcmpd);
1563 static inline void gen_op_fcmpq(int fccno)
1565 tcg_gen_helper_0_0(helper_fcmpq);
/* Signaling variants (raise on unordered per helper implementation). */
1568 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1570 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1573 static inline void gen_op_fcmped(int fccno)
1575 tcg_gen_helper_0_0(helper_fcmped);
1578 static inline void gen_op_fcmpeq(int fccno)
1580 tcg_gen_helper_0_0(helper_fcmpeq);
/*
 * gen_op_fpexception_im: raise an FP exception with an immediate FSR
 * trap-type: clear the old FTT bits of cpu_fsr, OR in fsr_flags, then
 * invoke the raise_exception helper with TT_FP_EXCP.
 * NOTE(review): the declaration of 'r_const' is missing from this
 * extraction.
 */
1584 static inline void gen_op_fpexception_im(int fsr_flags)
1588 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1589 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1590 r_const = tcg_const_i32(TT_FP_EXCP);
1591 tcg_gen_helper_0_1(raise_exception, r_const);
1592 tcg_temp_free(r_const);
/*
 * gen_trap_ifnofpu: when the FPU is disabled, flush translation state
 * and emit a TT_NFPU_INSN trap.  The non-zero return presumably tells
 * the caller "trap emitted, skip translating this insn" (callers test
 * it, e.g. before FP ops) -- TODO confirm against the elided returns.
 * Compiled out for CONFIG_USER_ONLY, where the FPU is always enabled.
 * NOTE(review): the return statements, 'r_const' declaration and
 * closing braces are missing from this extraction.
 */
1595 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1597 #if !defined(CONFIG_USER_ONLY)
1598 if (!dc->fpu_enabled) {
1601 save_state(dc, r_cond);
1602 r_const = tcg_const_i32(TT_NFPU_INSN);
1603 tcg_gen_helper_0_1(raise_exception, r_const);
1604 tcg_temp_free(r_const);
/* Clear the FTT field and current-exception bits of the FSR. */
1612 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1614 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset pending float-exception state via helper before an FP op. */
1617 static inline void gen_clear_float_exceptions(void)
1619 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1623 #ifdef TARGET_SPARC64
/*
 * gen_get_asi: return a 32-bit TCG value holding the ASI for an
 * alternate-space access: either a copy of the %asi register or the
 * immediate ASI field of the instruction (bits 19-26).  The caller
 * frees the returned temp (see tcg_temp_free(r_asi) at call sites).
 * NOTE(review): the IS_IMM if/else scaffolding, local declarations and
 * return statement are missing from this extraction.
 */
1624 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1630 r_asi = tcg_temp_new(TCG_TYPE_I32);
1631 tcg_gen_mov_i32(r_asi, cpu_asi);
1633 asi = GET_FIELD(insn, 19, 26);
1634 r_asi = tcg_const_i32(asi);
/*
 * gen_ld_asi (TARGET_SPARC64): alternate-space load of 'size' bytes
 * from 'addr' into 'dst' via the helper_ld_asi slow path; 'sign'
 * selects sign extension.  All constant temps are freed after the
 * call.
 */
1639 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1642 TCGv r_asi, r_size, r_sign;
1644 r_asi = gen_get_asi(insn, addr);
1645 r_size = tcg_const_i32(size);
1646 r_sign = tcg_const_i32(sign);
1647 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1648 tcg_temp_free(r_sign);
1649 tcg_temp_free(r_size);
1650 tcg_temp_free(r_asi);
/*
 * gen_st_asi (TARGET_SPARC64): alternate-space store of 'size' bytes
 * of 'src' to 'addr' via the helper_st_asi slow path.
 * NOTE(review): the local declarations line is missing from this
 * extraction.
 */
1653 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1657 r_asi = gen_get_asi(insn, addr);
1658 r_size = tcg_const_i32(size);
1659 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1660 tcg_temp_free(r_size);
1661 tcg_temp_free(r_asi);
/*
 * gen_ldf_asi: alternate-space floating-point load.  The destination
 * FP register number 'rd' is passed to the helper as an immediate, so
 * the helper writes the FP register file directly.
 */
1664 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1666 TCGv r_asi, r_size, r_rd;
1668 r_asi = gen_get_asi(insn, addr);
1669 r_size = tcg_const_i32(size);
1670 r_rd = tcg_const_i32(rd);
1671 tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
1672 tcg_temp_free(r_rd);
1673 tcg_temp_free(r_size);
1674 tcg_temp_free(r_asi);
/*
 * gen_stf_asi: alternate-space floating-point store; mirror image of
 * gen_ldf_asi, with the source FP register number passed as an
 * immediate to the helper.
 */
1677 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1679 TCGv r_asi, r_size, r_rd;
1681 r_asi = gen_get_asi(insn, addr);
1682 r_size = tcg_const_i32(size);
1683 r_rd = tcg_const_i32(rd);
1684 tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
1685 tcg_temp_free(r_rd);
1686 tcg_temp_free(r_size);
1687 tcg_temp_free(r_asi);
/*
 * gen_swap_asi (TARGET_SPARC64): SWAPA -- exchange 'dst' with the
 * 32-bit word at 'addr' in the given address space.  Implemented as a
 * helper load into cpu_tmp64 followed by a helper store of the old
 * 'dst', then the loaded word is truncated into 'dst'; i.e. two
 * separate memory operations, not a single atomic access.
 */
1690 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1692 TCGv r_asi, r_size, r_sign;
1694 r_asi = gen_get_asi(insn, addr);
1695 r_size = tcg_const_i32(4);
1696 r_sign = tcg_const_i32(0);
1697 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1698 tcg_temp_free(r_sign);
1699 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1700 tcg_temp_free(r_size);
1701 tcg_temp_free(r_asi);
1702 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/*
 * gen_ldda_asi (TARGET_SPARC64): LDDA -- the helper receives the
 * destination register number and writes the register pair itself.
 * The 'hi' parameter is unused in the lines shown; it exists for
 * signature parity with the non-SPARC64 variant below.
 * NOTE(review): local declarations are missing from this extraction.
 */
1705 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1709 r_asi = gen_get_asi(insn, addr);
1710 r_rd = tcg_const_i32(rd);
1711 tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
1712 tcg_temp_free(r_rd);
1713 tcg_temp_free(r_asi);
/*
 * gen_stda_asi (TARGET_SPARC64): STDA -- concatenate the low word
 * (register rd + 1, loaded into cpu_tmp0) with the high word 'hi'
 * into a 64-bit value and store it with a single 8-byte helper store.
 * NOTE(review): local declarations are missing from this extraction.
 */
1716 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1720 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1721 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1722 r_asi = gen_get_asi(insn, addr);
1723 r_size = tcg_const_i32(8);
1724 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1725 tcg_temp_free(r_size);
1726 tcg_temp_free(r_asi);
/*
 * gen_cas_asi: CASA -- 32-bit compare-and-swap in an alternate address
 * space.  The comparison value comes from register 'rd' (copied into a
 * fresh temp r_val1); the helper performs the memory operation and
 * returns the old memory value in 'dst'.
 */
1729 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1734 r_val1 = tcg_temp_new(TCG_TYPE_TL);
1735 gen_movl_reg_TN(rd, r_val1);
1736 r_asi = gen_get_asi(insn, addr);
1737 tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
1738 tcg_temp_free(r_asi);
1739 tcg_temp_free(r_val1);
/*
 * gen_casx_asi: CASXA -- 64-bit compare-and-swap in an alternate
 * address space.  The comparison value from register 'rd' is staged in
 * cpu_tmp64 instead of a fresh temp (contrast gen_cas_asi above).
 */
1742 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1747 gen_movl_reg_TN(rd, cpu_tmp64);
1748 r_asi = gen_get_asi(insn, addr);
1749 tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
1750 tcg_temp_free(r_asi);
1753 #elif !defined(CONFIG_USER_ONLY)
/*
 * gen_ld_asi (system, non-SPARC64): alternate-space load.  The ASI is
 * always the immediate instruction field here (no %asi register on
 * pre-V9).  The helper produces a 64-bit value in cpu_tmp64 which is
 * truncated to target width into 'dst'.
 */
1755 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1758 TCGv r_asi, r_size, r_sign;
1760 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1761 r_size = tcg_const_i32(size);
1762 r_sign = tcg_const_i32(sign);
1763 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1764 tcg_temp_free(r_sign);
1765 tcg_temp_free(r_size);
1766 tcg_temp_free(r_asi);
1767 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/*
 * gen_st_asi (system, non-SPARC64): alternate-space store.  'src' is
 * zero-extended to 64 bits in cpu_tmp64 before the helper store.
 * NOTE(review): the local declarations line is missing from this
 * extraction.
 */
1770 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1774 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1775 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1776 r_size = tcg_const_i32(size);
1777 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1778 tcg_temp_free(r_size);
1779 tcg_temp_free(r_asi);
/*
 * gen_swap_asi (system, non-SPARC64): SWAPA -- exchange 'dst' with the
 * 32-bit word at 'addr'.  As in the SPARC64 variant, this is a helper
 * load followed by a helper store (two memory operations, not one
 * atomic access), with the loaded word truncated into 'dst' at the
 * end.
 */
1782 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1784 TCGv r_asi, r_size, r_sign;
1786 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1787 r_size = tcg_const_i32(4);
1788 r_sign = tcg_const_i32(0);
1789 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1790 tcg_temp_free(r_sign);
1791 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1792 tcg_temp_free(r_size);
1793 tcg_temp_free(r_asi);
1794 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/*
 * gen_ldda_asi (system, non-SPARC64): LDDA -- one 8-byte helper load
 * into cpu_tmp64, then split: the low 32 bits go to register rd + 1
 * and the high 32 bits to register rd (written through 'hi').
 */
1797 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1799 TCGv r_asi, r_size, r_sign;
1801 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1802 r_size = tcg_const_i32(8);
1803 r_sign = tcg_const_i32(0);
1804 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1805 tcg_temp_free(r_sign);
1806 tcg_temp_free(r_size);
1807 tcg_temp_free(r_asi);
/* low word -> rd + 1 */
1808 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1809 gen_movl_TN_reg(rd + 1, cpu_tmp0);
/* high word -> rd */
1810 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1811 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1812 gen_movl_TN_reg(rd, hi);
/*
 * gen_stda_asi (system, non-SPARC64): STDA -- concatenate the low word
 * (register rd + 1) with the high word 'hi' into cpu_tmp64 and store
 * 8 bytes with one helper call.
 * NOTE(review): local declarations are missing from this extraction.
 */
1815 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1819 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1820 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1821 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1822 r_size = tcg_const_i32(8);
1823 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1824 tcg_temp_free(r_size);
1825 tcg_temp_free(r_asi);
1829 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/*
 * gen_ldstub_asi: LDSTUBA -- load the unsigned byte at 'addr' into
 * 'dst', then store 0xff back to the same location.  Shared between
 * TARGET_SPARC64 and system-mode builds.
 */
1830 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1832 TCGv r_val, r_asi, r_size;
1834 gen_ld_asi(dst, addr, insn, 1, 0);
1836 r_val = tcg_const_i64(0xffULL);
1837 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1838 r_size = tcg_const_i32(1);
1839 tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
1840 tcg_temp_free(r_size);
1841 tcg_temp_free(r_asi);
1842 tcg_temp_free(r_val);
/*
 * get_src1: return a TCGv holding source register rs1 (insn bits
 * 13-17): a constant 0 for %g0, the global-register TCG variable for
 * the other globals, or 'def' filled by a load from the register
 * window pointer (cpu_regwptr) for windowed registers (rs1 >= 8).
 * The tcg_const_tl(0) temp is knowingly leaked (see the XXX).
 * NOTE(review): the if/else scaffolding, local declarations and return
 * statement are missing from this extraction.
 */
1846 static inline TCGv get_src1(unsigned int insn, TCGv def)
1851 rs1 = GET_FIELD(insn, 13, 17);
1853 r_rs1 = tcg_const_tl(0); // XXX how to free?
1855 r_rs1 = cpu_gregs[rs1];
1857 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/*
 * get_src2: return a TCGv for the second operand.  With IS_IMM it is a
 * constant built from the sign-extended simm13 field (bits 19-31);
 * otherwise register rs2 is resolved exactly as in get_src1 (constant
 * 0 for %g0, global variable, or window load into 'def').  The
 * tcg_const_tl temps are knowingly leaked (see the XXXs).
 * NOTE(review): the if/else scaffolding, local declarations and return
 * statement are missing from this extraction.
 */
1861 static inline TCGv get_src2(unsigned int insn, TCGv def)
1866 if (IS_IMM) { /* immediate */
1867 rs2 = GET_FIELDs(insn, 19, 31);
1868 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1869 } else { /* register */
1870 rs2 = GET_FIELD(insn, 27, 31);
1872 r_rs2 = tcg_const_tl(0); // XXX how to free?
1874 r_rs2 = cpu_gregs[rs2];
1876 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/*
 * Feature guards used throughout disas_sparc_insn: bail out of the
 * current instruction when the CPU model lacks the named integer-unit
 * or FPU feature bit in dc->def->features.
 * NOTE(review): the macro bodies' action lines (what happens on the
 * missing-feature path) are missing from this extraction.
 */
1881 #define CHECK_IU_FEATURE(dc, FEATURE) \
1882 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1884 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1885 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1888 /* before an instruction, dc->pc must be static */
1889 static void disas_sparc_insn(DisasContext * dc)
1891 unsigned int insn, opc, rs1, rs2, rd;
1893 if (unlikely(loglevel & CPU_LOG_TB_OP))
1894 tcg_gen_debug_insn_start(dc->pc);
1895 insn = ldl_code(dc->pc);
1896 opc = GET_FIELD(insn, 0, 1);
1898 rd = GET_FIELD(insn, 2, 6);
1900 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1901 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1904 case 0: /* branches/sethi */
1906 unsigned int xop = GET_FIELD(insn, 7, 9);
1909 #ifdef TARGET_SPARC64
1910 case 0x1: /* V9 BPcc */
1914 target = GET_FIELD_SP(insn, 0, 18);
1915 target = sign_extend(target, 18);
1917 cc = GET_FIELD_SP(insn, 20, 21);
1919 do_branch(dc, target, insn, 0, cpu_cond);
1921 do_branch(dc, target, insn, 1, cpu_cond);
1926 case 0x3: /* V9 BPr */
1928 target = GET_FIELD_SP(insn, 0, 13) |
1929 (GET_FIELD_SP(insn, 20, 21) << 14);
1930 target = sign_extend(target, 16);
1932 cpu_src1 = get_src1(insn, cpu_src1);
1933 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1936 case 0x5: /* V9 FBPcc */
1938 int cc = GET_FIELD_SP(insn, 20, 21);
1939 if (gen_trap_ifnofpu(dc, cpu_cond))
1941 target = GET_FIELD_SP(insn, 0, 18);
1942 target = sign_extend(target, 19);
1944 do_fbranch(dc, target, insn, cc, cpu_cond);
1948 case 0x7: /* CBN+x */
1953 case 0x2: /* BN+x */
1955 target = GET_FIELD(insn, 10, 31);
1956 target = sign_extend(target, 22);
1958 do_branch(dc, target, insn, 0, cpu_cond);
1961 case 0x6: /* FBN+x */
1963 if (gen_trap_ifnofpu(dc, cpu_cond))
1965 target = GET_FIELD(insn, 10, 31);
1966 target = sign_extend(target, 22);
1968 do_fbranch(dc, target, insn, 0, cpu_cond);
1971 case 0x4: /* SETHI */
1973 uint32_t value = GET_FIELD(insn, 10, 31);
1976 r_const = tcg_const_tl(value << 10);
1977 gen_movl_TN_reg(rd, r_const);
1978 tcg_temp_free(r_const);
1981 case 0x0: /* UNIMPL */
1990 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1993 r_const = tcg_const_tl(dc->pc);
1994 gen_movl_TN_reg(15, r_const);
1995 tcg_temp_free(r_const);
1997 gen_mov_pc_npc(dc, cpu_cond);
2001 case 2: /* FPU & Logical Operations */
2003 unsigned int xop = GET_FIELD(insn, 7, 12);
2004 if (xop == 0x3a) { /* generate trap */
2007 cpu_src1 = get_src1(insn, cpu_src1);
2009 rs2 = GET_FIELD(insn, 25, 31);
2010 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2012 rs2 = GET_FIELD(insn, 27, 31);
2014 gen_movl_reg_TN(rs2, cpu_src2);
2015 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2017 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2019 cond = GET_FIELD(insn, 3, 6);
2021 save_state(dc, cpu_cond);
2022 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2023 } else if (cond != 0) {
2024 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2025 #ifdef TARGET_SPARC64
2027 int cc = GET_FIELD_SP(insn, 11, 12);
2029 save_state(dc, cpu_cond);
2031 gen_cond(r_cond, 0, cond);
2033 gen_cond(r_cond, 1, cond);
2037 save_state(dc, cpu_cond);
2038 gen_cond(r_cond, 0, cond);
2040 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2041 tcg_temp_free(r_cond);
2047 } else if (xop == 0x28) {
2048 rs1 = GET_FIELD(insn, 13, 17);
2051 #ifndef TARGET_SPARC64
2052 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2053 manual, rdy on the microSPARC
2055 case 0x0f: /* stbar in the SPARCv8 manual,
2056 rdy on the microSPARC II */
2057 case 0x10 ... 0x1f: /* implementation-dependent in the
2058 SPARCv8 manual, rdy on the
2061 gen_movl_TN_reg(rd, cpu_y);
2063 #ifdef TARGET_SPARC64
2064 case 0x2: /* V9 rdccr */
2065 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2066 gen_movl_TN_reg(rd, cpu_dst);
2068 case 0x3: /* V9 rdasi */
2069 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2070 gen_movl_TN_reg(rd, cpu_dst);
2072 case 0x4: /* V9 rdtick */
2076 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2077 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2078 offsetof(CPUState, tick));
2079 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2081 tcg_temp_free(r_tickptr);
2082 gen_movl_TN_reg(rd, cpu_dst);
2085 case 0x5: /* V9 rdpc */
2089 r_const = tcg_const_tl(dc->pc);
2090 gen_movl_TN_reg(rd, r_const);
2091 tcg_temp_free(r_const);
2094 case 0x6: /* V9 rdfprs */
2095 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2096 gen_movl_TN_reg(rd, cpu_dst);
2098 case 0xf: /* V9 membar */
2099 break; /* no effect */
2100 case 0x13: /* Graphics Status */
2101 if (gen_trap_ifnofpu(dc, cpu_cond))
2103 gen_movl_TN_reg(rd, cpu_gsr);
2105 case 0x16: /* Softint */
2106 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2107 gen_movl_TN_reg(rd, cpu_dst);
2109 case 0x17: /* Tick compare */
2110 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2112 case 0x18: /* System tick */
2116 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2117 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2118 offsetof(CPUState, stick));
2119 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2121 tcg_temp_free(r_tickptr);
2122 gen_movl_TN_reg(rd, cpu_dst);
2125 case 0x19: /* System tick compare */
2126 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2128 case 0x10: /* Performance Control */
2129 case 0x11: /* Performance Instrumentation Counter */
2130 case 0x12: /* Dispatch Control */
2131 case 0x14: /* Softint set, WO */
2132 case 0x15: /* Softint clear, WO */
2137 #if !defined(CONFIG_USER_ONLY)
2138 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2139 #ifndef TARGET_SPARC64
2140 if (!supervisor(dc))
2142 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2144 CHECK_IU_FEATURE(dc, HYPV);
2145 if (!hypervisor(dc))
2147 rs1 = GET_FIELD(insn, 13, 17);
2150 // gen_op_rdhpstate();
2153 // gen_op_rdhtstate();
2156 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2159 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2162 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2164 case 31: // hstick_cmpr
2165 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2171 gen_movl_TN_reg(rd, cpu_dst);
2173 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2174 if (!supervisor(dc))
2176 #ifdef TARGET_SPARC64
2177 rs1 = GET_FIELD(insn, 13, 17);
2183 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2184 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2185 offsetof(CPUState, tsptr));
2186 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2187 offsetof(trap_state, tpc));
2188 tcg_temp_free(r_tsptr);
2195 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2196 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2197 offsetof(CPUState, tsptr));
2198 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2199 offsetof(trap_state, tnpc));
2200 tcg_temp_free(r_tsptr);
2207 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2208 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2209 offsetof(CPUState, tsptr));
2210 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2211 offsetof(trap_state, tstate));
2212 tcg_temp_free(r_tsptr);
2219 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2220 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2221 offsetof(CPUState, tsptr));
2222 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2223 offsetof(trap_state, tt));
2224 tcg_temp_free(r_tsptr);
2231 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2232 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2233 offsetof(CPUState, tick));
2234 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2236 gen_movl_TN_reg(rd, cpu_tmp0);
2237 tcg_temp_free(r_tickptr);
2241 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2244 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2245 offsetof(CPUSPARCState, pstate));
2246 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2249 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2250 offsetof(CPUSPARCState, tl));
2251 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2254 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2255 offsetof(CPUSPARCState, psrpil));
2256 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2259 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2262 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2263 offsetof(CPUSPARCState, cansave));
2264 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2266 case 11: // canrestore
2267 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2268 offsetof(CPUSPARCState, canrestore));
2269 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2271 case 12: // cleanwin
2272 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2273 offsetof(CPUSPARCState, cleanwin));
2274 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2276 case 13: // otherwin
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, otherwin));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, wstate));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286 case 16: // UA2005 gl
2287 CHECK_IU_FEATURE(dc, GL);
2288 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2289 offsetof(CPUSPARCState, gl));
2290 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 case 26: // UA2005 strand status
2293 CHECK_IU_FEATURE(dc, HYPV);
2294 if (!hypervisor(dc))
2296 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2299 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2306 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2308 gen_movl_TN_reg(rd, cpu_tmp0);
2310 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2311 #ifdef TARGET_SPARC64
2312 save_state(dc, cpu_cond);
2313 tcg_gen_helper_0_0(helper_flushw);
2315 if (!supervisor(dc))
2317 gen_movl_TN_reg(rd, cpu_tbr);
2321 } else if (xop == 0x34) { /* FPU Operations */
2322 if (gen_trap_ifnofpu(dc, cpu_cond))
2324 gen_op_clear_ieee_excp_and_FTT();
2325 rs1 = GET_FIELD(insn, 13, 17);
2326 rs2 = GET_FIELD(insn, 27, 31);
2327 xop = GET_FIELD(insn, 18, 26);
2329 case 0x1: /* fmovs */
2330 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2332 case 0x5: /* fnegs */
2333 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2336 case 0x9: /* fabss */
2337 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2340 case 0x29: /* fsqrts */
2341 CHECK_FPU_FEATURE(dc, FSQRT);
2342 gen_clear_float_exceptions();
2343 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2345 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2346 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2348 case 0x2a: /* fsqrtd */
2349 CHECK_FPU_FEATURE(dc, FSQRT);
2350 gen_op_load_fpr_DT1(DFPREG(rs2));
2351 gen_clear_float_exceptions();
2352 tcg_gen_helper_0_0(helper_fsqrtd);
2353 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2354 gen_op_store_DT0_fpr(DFPREG(rd));
2356 case 0x2b: /* fsqrtq */
2357 CHECK_FPU_FEATURE(dc, FLOAT128);
2358 gen_op_load_fpr_QT1(QFPREG(rs2));
2359 gen_clear_float_exceptions();
2360 tcg_gen_helper_0_0(helper_fsqrtq);
2361 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2362 gen_op_store_QT0_fpr(QFPREG(rd));
2364 case 0x41: /* fadds */
2365 gen_clear_float_exceptions();
2366 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2367 cpu_fpr[rs1], cpu_fpr[rs2]);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2369 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2372 gen_op_load_fpr_DT0(DFPREG(rs1));
2373 gen_op_load_fpr_DT1(DFPREG(rs2));
2374 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_faddd);
2376 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2377 gen_op_store_DT0_fpr(DFPREG(rd));
2379 case 0x43: /* faddq */
2380 CHECK_FPU_FEATURE(dc, FLOAT128);
2381 gen_op_load_fpr_QT0(QFPREG(rs1));
2382 gen_op_load_fpr_QT1(QFPREG(rs2));
2383 gen_clear_float_exceptions();
2384 tcg_gen_helper_0_0(helper_faddq);
2385 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2386 gen_op_store_QT0_fpr(QFPREG(rd));
2388 case 0x45: /* fsubs */
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2391 cpu_fpr[rs1], cpu_fpr[rs2]);
2392 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2393 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2396 gen_op_load_fpr_DT0(DFPREG(rs1));
2397 gen_op_load_fpr_DT1(DFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 tcg_gen_helper_0_0(helper_fsubd);
2400 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2401 gen_op_store_DT0_fpr(DFPREG(rd));
2403 case 0x47: /* fsubq */
2404 CHECK_FPU_FEATURE(dc, FLOAT128);
2405 gen_op_load_fpr_QT0(QFPREG(rs1));
2406 gen_op_load_fpr_QT1(QFPREG(rs2));
2407 gen_clear_float_exceptions();
2408 tcg_gen_helper_0_0(helper_fsubq);
2409 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2410 gen_op_store_QT0_fpr(QFPREG(rd));
2412 case 0x49: /* fmuls */
2413 CHECK_FPU_FEATURE(dc, FMUL);
2414 gen_clear_float_exceptions();
2415 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2416 cpu_fpr[rs1], cpu_fpr[rs2]);
2417 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2418 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2420 case 0x4a: /* fmuld */
2421 CHECK_FPU_FEATURE(dc, FMUL);
2422 gen_op_load_fpr_DT0(DFPREG(rs1));
2423 gen_op_load_fpr_DT1(DFPREG(rs2));
2424 gen_clear_float_exceptions();
2425 tcg_gen_helper_0_0(helper_fmuld);
2426 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2427 gen_op_store_DT0_fpr(DFPREG(rd));
2429 case 0x4b: /* fmulq */
2430 CHECK_FPU_FEATURE(dc, FLOAT128);
2431 CHECK_FPU_FEATURE(dc, FMUL);
2432 gen_op_load_fpr_QT0(QFPREG(rs1));
2433 gen_op_load_fpr_QT1(QFPREG(rs2));
2434 gen_clear_float_exceptions();
2435 tcg_gen_helper_0_0(helper_fmulq);
2436 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2437 gen_op_store_QT0_fpr(QFPREG(rd));
2439 case 0x4d: /* fdivs */
2440 gen_clear_float_exceptions();
2441 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2442 cpu_fpr[rs1], cpu_fpr[rs2]);
2443 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2444 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2447 gen_op_load_fpr_DT0(DFPREG(rs1));
2448 gen_op_load_fpr_DT1(DFPREG(rs2));
2449 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_fdivd);
2451 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2452 gen_op_store_DT0_fpr(DFPREG(rd));
2454 case 0x4f: /* fdivq */
2455 CHECK_FPU_FEATURE(dc, FLOAT128);
2456 gen_op_load_fpr_QT0(QFPREG(rs1));
2457 gen_op_load_fpr_QT1(QFPREG(rs2));
2458 gen_clear_float_exceptions();
2459 tcg_gen_helper_0_0(helper_fdivq);
2460 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2461 gen_op_store_QT0_fpr(QFPREG(rd));
2463 case 0x69: /* fsmuld */
2464 CHECK_FPU_FEATURE(dc, FSMULD);
2465 gen_clear_float_exceptions();
2466 tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
2468 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2469 gen_op_store_DT0_fpr(DFPREG(rd));
2471 case 0x6e: /* fdmulq */
2472 CHECK_FPU_FEATURE(dc, FLOAT128);
2473 gen_op_load_fpr_DT0(DFPREG(rs1));
2474 gen_op_load_fpr_DT1(DFPREG(rs2));
2475 gen_clear_float_exceptions();
2476 tcg_gen_helper_0_0(helper_fdmulq);
2477 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2478 gen_op_store_QT0_fpr(QFPREG(rd));
2480 case 0xc4: /* fitos */
2481 gen_clear_float_exceptions();
2482 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2487 case 0xc6: /* fdtos */
2488 gen_op_load_fpr_DT1(DFPREG(rs2));
2489 gen_clear_float_exceptions();
2490 tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
2491 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2492 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2494 case 0xc7: /* fqtos */
2495 CHECK_FPU_FEATURE(dc, FLOAT128);
2496 gen_op_load_fpr_QT1(QFPREG(rs2));
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2499 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2500 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2502 case 0xc8: /* fitod */
2503 tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
2504 gen_op_store_DT0_fpr(DFPREG(rd));
2506 case 0xc9: /* fstod */
2507 tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
2508 gen_op_store_DT0_fpr(DFPREG(rd));
2510 case 0xcb: /* fqtod */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_0_0(helper_fqtod);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2516 gen_op_store_DT0_fpr(DFPREG(rd));
2518 case 0xcc: /* fitoq */
2519 CHECK_FPU_FEATURE(dc, FLOAT128);
2520 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2521 gen_op_store_QT0_fpr(QFPREG(rd));
2523 case 0xcd: /* fstoq */
2524 CHECK_FPU_FEATURE(dc, FLOAT128);
2525 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2526 gen_op_store_QT0_fpr(QFPREG(rd));
2528 case 0xce: /* fdtoq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_DT1(DFPREG(rs2));
2531 tcg_gen_helper_0_0(helper_fdtoq);
2532 gen_op_store_QT0_fpr(QFPREG(rd));
2534 case 0xd1: /* fstoi */
2535 gen_clear_float_exceptions();
2536 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2538 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2539 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2541 case 0xd2: /* fdtoi */
2542 gen_op_load_fpr_DT1(DFPREG(rs2));
2543 gen_clear_float_exceptions();
2544 tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
2545 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2548 case 0xd3: /* fqtoi */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_QT1(QFPREG(rs2));
2551 gen_clear_float_exceptions();
2552 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2553 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2554 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2556 #ifdef TARGET_SPARC64
2557 case 0x2: /* V9 fmovd */
2558 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2559 cpu_fpr[DFPREG(rs2)]);
2560 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2561 cpu_fpr[DFPREG(rs2) + 1]);
2563 case 0x3: /* V9 fmovq */
2564 CHECK_FPU_FEATURE(dc, FLOAT128);
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2566 cpu_fpr[QFPREG(rs2)]);
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2568 cpu_fpr[QFPREG(rs2) + 1]);
2569 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2570 cpu_fpr[QFPREG(rs2) + 2]);
2571 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2572 cpu_fpr[QFPREG(rs2) + 3]);
2574 case 0x6: /* V9 fnegd */
2575 gen_op_load_fpr_DT1(DFPREG(rs2));
2576 tcg_gen_helper_0_0(helper_fnegd);
2577 gen_op_store_DT0_fpr(DFPREG(rd));
2579 case 0x7: /* V9 fnegq */
2580 CHECK_FPU_FEATURE(dc, FLOAT128);
2581 gen_op_load_fpr_QT1(QFPREG(rs2));
2582 tcg_gen_helper_0_0(helper_fnegq);
2583 gen_op_store_QT0_fpr(QFPREG(rd));
2585 case 0xa: /* V9 fabsd */
2586 gen_op_load_fpr_DT1(DFPREG(rs2));
2587 tcg_gen_helper_0_0(helper_fabsd);
2588 gen_op_store_DT0_fpr(DFPREG(rd));
2590 case 0xb: /* V9 fabsq */
2591 CHECK_FPU_FEATURE(dc, FLOAT128);
2592 gen_op_load_fpr_QT1(QFPREG(rs2));
2593 tcg_gen_helper_0_0(helper_fabsq);
2594 gen_op_store_QT0_fpr(QFPREG(rd));
2596 case 0x81: /* V9 fstox */
2597 gen_clear_float_exceptions();
2598 tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
2599 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2600 gen_op_store_DT0_fpr(DFPREG(rd));
2602 case 0x82: /* V9 fdtox */
2603 gen_op_load_fpr_DT1(DFPREG(rs2));
2604 gen_clear_float_exceptions();
2605 tcg_gen_helper_0_0(helper_fdtox);
2606 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2607 gen_op_store_DT0_fpr(DFPREG(rd));
2609 case 0x83: /* V9 fqtox */
2610 CHECK_FPU_FEATURE(dc, FLOAT128);
2611 gen_op_load_fpr_QT1(QFPREG(rs2));
2612 gen_clear_float_exceptions();
2613 tcg_gen_helper_0_0(helper_fqtox);
2614 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2615 gen_op_store_DT0_fpr(DFPREG(rd));
2617 case 0x84: /* V9 fxtos */
2618 gen_op_load_fpr_DT1(DFPREG(rs2));
2619 gen_clear_float_exceptions();
2620 tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
2621 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2622 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2624 case 0x88: /* V9 fxtod */
2625 gen_op_load_fpr_DT1(DFPREG(rs2));
2626 gen_clear_float_exceptions();
2627 tcg_gen_helper_0_0(helper_fxtod);
2628 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2631 case 0x8c: /* V9 fxtoq */
2632 CHECK_FPU_FEATURE(dc, FLOAT128);
2633 gen_op_load_fpr_DT1(DFPREG(rs2));
2634 gen_clear_float_exceptions();
2635 tcg_gen_helper_0_0(helper_fxtoq);
2636 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2637 gen_op_store_QT0_fpr(QFPREG(rd));
2643 } else if (xop == 0x35) { /* FPU Operations */
2644 #ifdef TARGET_SPARC64
2647 if (gen_trap_ifnofpu(dc, cpu_cond))
2649 gen_op_clear_ieee_excp_and_FTT();
2650 rs1 = GET_FIELD(insn, 13, 17);
2651 rs2 = GET_FIELD(insn, 27, 31);
2652 xop = GET_FIELD(insn, 18, 26);
2653 #ifdef TARGET_SPARC64
2654 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2657 l1 = gen_new_label();
2658 cond = GET_FIELD_SP(insn, 14, 17);
2659 cpu_src1 = get_src1(insn, cpu_src1);
2660 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2662 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2665 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2668 l1 = gen_new_label();
2669 cond = GET_FIELD_SP(insn, 14, 17);
2670 cpu_src1 = get_src1(insn, cpu_src1);
2671 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2673 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2674 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2677 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2680 CHECK_FPU_FEATURE(dc, FLOAT128);
2681 l1 = gen_new_label();
2682 cond = GET_FIELD_SP(insn, 14, 17);
2683 cpu_src1 = get_src1(insn, cpu_src1);
2684 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2686 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2687 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2688 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2689 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2695 #ifdef TARGET_SPARC64
2696 #define FMOVSCC(fcc) \
2701 l1 = gen_new_label(); \
2702 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2703 cond = GET_FIELD_SP(insn, 14, 17); \
2704 gen_fcond(r_cond, fcc, cond); \
2705 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2707 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2708 gen_set_label(l1); \
2709 tcg_temp_free(r_cond); \
2711 #define FMOVDCC(fcc) \
2716 l1 = gen_new_label(); \
2717 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2718 cond = GET_FIELD_SP(insn, 14, 17); \
2719 gen_fcond(r_cond, fcc, cond); \
2720 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2722 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2723 cpu_fpr[DFPREG(rs2)]); \
2724 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2725 cpu_fpr[DFPREG(rs2) + 1]); \
2726 gen_set_label(l1); \
2727 tcg_temp_free(r_cond); \
2729 #define FMOVQCC(fcc) \
2734 l1 = gen_new_label(); \
2735 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2736 cond = GET_FIELD_SP(insn, 14, 17); \
2737 gen_fcond(r_cond, fcc, cond); \
2738 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2740 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2741 cpu_fpr[QFPREG(rs2)]); \
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2743 cpu_fpr[QFPREG(rs2) + 1]); \
2744 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2745 cpu_fpr[QFPREG(rs2) + 2]); \
2746 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2747 cpu_fpr[QFPREG(rs2) + 3]); \
2748 gen_set_label(l1); \
2749 tcg_temp_free(r_cond); \
2751 case 0x001: /* V9 fmovscc %fcc0 */
2754 case 0x002: /* V9 fmovdcc %fcc0 */
2757 case 0x003: /* V9 fmovqcc %fcc0 */
2758 CHECK_FPU_FEATURE(dc, FLOAT128);
2761 case 0x041: /* V9 fmovscc %fcc1 */
2764 case 0x042: /* V9 fmovdcc %fcc1 */
2767 case 0x043: /* V9 fmovqcc %fcc1 */
2768 CHECK_FPU_FEATURE(dc, FLOAT128);
2771 case 0x081: /* V9 fmovscc %fcc2 */
2774 case 0x082: /* V9 fmovdcc %fcc2 */
2777 case 0x083: /* V9 fmovqcc %fcc2 */
2778 CHECK_FPU_FEATURE(dc, FLOAT128);
2781 case 0x0c1: /* V9 fmovscc %fcc3 */
2784 case 0x0c2: /* V9 fmovdcc %fcc3 */
2787 case 0x0c3: /* V9 fmovqcc %fcc3 */
2788 CHECK_FPU_FEATURE(dc, FLOAT128);
2794 #define FMOVCC(size_FDQ, icc) \
2799 l1 = gen_new_label(); \
2800 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2801 cond = GET_FIELD_SP(insn, 14, 17); \
2802 gen_cond(r_cond, icc, cond); \
2803 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2805 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2806 (glue(size_FDQ, FPREG(rs2))); \
2807 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2808 (glue(size_FDQ, FPREG(rd))); \
2809 gen_set_label(l1); \
2810 tcg_temp_free(r_cond); \
2812 #define FMOVSCC(icc) \
2817 l1 = gen_new_label(); \
2818 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2819 cond = GET_FIELD_SP(insn, 14, 17); \
2820 gen_cond(r_cond, icc, cond); \
2821 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2823 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2824 gen_set_label(l1); \
2825 tcg_temp_free(r_cond); \
2827 #define FMOVDCC(icc) \
2832 l1 = gen_new_label(); \
2833 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2834 cond = GET_FIELD_SP(insn, 14, 17); \
2835 gen_cond(r_cond, icc, cond); \
2836 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2838 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2839 cpu_fpr[DFPREG(rs2)]); \
2840 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2841 cpu_fpr[DFPREG(rs2) + 1]); \
2842 gen_set_label(l1); \
2843 tcg_temp_free(r_cond); \
2845 #define FMOVQCC(icc) \
2850 l1 = gen_new_label(); \
2851 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2852 cond = GET_FIELD_SP(insn, 14, 17); \
2853 gen_cond(r_cond, icc, cond); \
2854 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2856 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2857 cpu_fpr[QFPREG(rs2)]); \
2858 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2859 cpu_fpr[QFPREG(rs2) + 1]); \
2860 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2861 cpu_fpr[QFPREG(rs2) + 2]); \
2862 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2863 cpu_fpr[QFPREG(rs2) + 3]); \
2864 gen_set_label(l1); \
2865 tcg_temp_free(r_cond); \
2868 case 0x101: /* V9 fmovscc %icc */
2871 case 0x102: /* V9 fmovdcc %icc */
2873 case 0x103: /* V9 fmovqcc %icc */
2874 CHECK_FPU_FEATURE(dc, FLOAT128);
2877 case 0x181: /* V9 fmovscc %xcc */
2880 case 0x182: /* V9 fmovdcc %xcc */
2883 case 0x183: /* V9 fmovqcc %xcc */
2884 CHECK_FPU_FEATURE(dc, FLOAT128);
2891 case 0x51: /* fcmps, V9 %fcc */
2892 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2894 case 0x52: /* fcmpd, V9 %fcc */
2895 gen_op_load_fpr_DT0(DFPREG(rs1));
2896 gen_op_load_fpr_DT1(DFPREG(rs2));
2897 gen_op_fcmpd(rd & 3);
2899 case 0x53: /* fcmpq, V9 %fcc */
2900 CHECK_FPU_FEATURE(dc, FLOAT128);
2901 gen_op_load_fpr_QT0(QFPREG(rs1));
2902 gen_op_load_fpr_QT1(QFPREG(rs2));
2903 gen_op_fcmpq(rd & 3);
2905 case 0x55: /* fcmpes, V9 %fcc */
2906 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2908 case 0x56: /* fcmped, V9 %fcc */
2909 gen_op_load_fpr_DT0(DFPREG(rs1));
2910 gen_op_load_fpr_DT1(DFPREG(rs2));
2911 gen_op_fcmped(rd & 3);
2913 case 0x57: /* fcmpeq, V9 %fcc */
2914 CHECK_FPU_FEATURE(dc, FLOAT128);
2915 gen_op_load_fpr_QT0(QFPREG(rs1));
2916 gen_op_load_fpr_QT1(QFPREG(rs2));
2917 gen_op_fcmpeq(rd & 3);
2922 } else if (xop == 0x2) {
2925 rs1 = GET_FIELD(insn, 13, 17);
2927 // or %g0, x, y -> mov T0, x; mov y, T0
2928 if (IS_IMM) { /* immediate */
2931 rs2 = GET_FIELDs(insn, 19, 31);
2932 r_const = tcg_const_tl((int)rs2);
2933 gen_movl_TN_reg(rd, r_const);
2934 tcg_temp_free(r_const);
2935 } else { /* register */
2936 rs2 = GET_FIELD(insn, 27, 31);
2937 gen_movl_reg_TN(rs2, cpu_dst);
2938 gen_movl_TN_reg(rd, cpu_dst);
2941 cpu_src1 = get_src1(insn, cpu_src1);
2942 if (IS_IMM) { /* immediate */
2943 rs2 = GET_FIELDs(insn, 19, 31);
2944 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2945 gen_movl_TN_reg(rd, cpu_dst);
2946 } else { /* register */
2947 // or x, %g0, y -> mov T1, x; mov y, T1
2948 rs2 = GET_FIELD(insn, 27, 31);
2950 gen_movl_reg_TN(rs2, cpu_src2);
2951 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2952 gen_movl_TN_reg(rd, cpu_dst);
2954 gen_movl_TN_reg(rd, cpu_src1);
2957 #ifdef TARGET_SPARC64
2958 } else if (xop == 0x25) { /* sll, V9 sllx */
2959 cpu_src1 = get_src1(insn, cpu_src1);
2960 if (IS_IMM) { /* immediate */
2961 rs2 = GET_FIELDs(insn, 20, 31);
2962 if (insn & (1 << 12)) {
2963 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2965 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2967 } else { /* register */
2968 rs2 = GET_FIELD(insn, 27, 31);
2969 gen_movl_reg_TN(rs2, cpu_src2);
2970 if (insn & (1 << 12)) {
2971 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2973 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2975 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2977 gen_movl_TN_reg(rd, cpu_dst);
2978 } else if (xop == 0x26) { /* srl, V9 srlx */
2979 cpu_src1 = get_src1(insn, cpu_src1);
2980 if (IS_IMM) { /* immediate */
2981 rs2 = GET_FIELDs(insn, 20, 31);
2982 if (insn & (1 << 12)) {
2983 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2985 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2986 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2988 } else { /* register */
2989 rs2 = GET_FIELD(insn, 27, 31);
2990 gen_movl_reg_TN(rs2, cpu_src2);
2991 if (insn & (1 << 12)) {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2993 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2995 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2996 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2997 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3000 gen_movl_TN_reg(rd, cpu_dst);
3001 } else if (xop == 0x27) { /* sra, V9 srax */
3002 cpu_src1 = get_src1(insn, cpu_src1);
3003 if (IS_IMM) { /* immediate */
3004 rs2 = GET_FIELDs(insn, 20, 31);
3005 if (insn & (1 << 12)) {
3006 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3008 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3009 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3010 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3012 } else { /* register */
3013 rs2 = GET_FIELD(insn, 27, 31);
3014 gen_movl_reg_TN(rs2, cpu_src2);
3015 if (insn & (1 << 12)) {
3016 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3017 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3019 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3020 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3021 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3022 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3025 gen_movl_TN_reg(rd, cpu_dst);
3027 } else if (xop < 0x36) {
3028 cpu_src1 = get_src1(insn, cpu_src1);
3029 cpu_src2 = get_src2(insn, cpu_src2);
3031 switch (xop & ~0x10) {
3034 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3036 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3039 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3041 gen_op_logic_cc(cpu_dst);
3044 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3046 gen_op_logic_cc(cpu_dst);
3049 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3051 gen_op_logic_cc(cpu_dst);
3055 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3057 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3060 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3061 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3063 gen_op_logic_cc(cpu_dst);
3066 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3067 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3069 gen_op_logic_cc(cpu_dst);
3072 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3073 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3075 gen_op_logic_cc(cpu_dst);
3079 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3081 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3082 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3083 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3086 #ifdef TARGET_SPARC64
3087 case 0x9: /* V9 mulx */
3088 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3092 CHECK_IU_FEATURE(dc, MUL);
3093 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3095 gen_op_logic_cc(cpu_dst);
3098 CHECK_IU_FEATURE(dc, MUL);
3099 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3101 gen_op_logic_cc(cpu_dst);
3105 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3107 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3108 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3109 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3112 #ifdef TARGET_SPARC64
3113 case 0xd: /* V9 udivx */
3114 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3115 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3116 gen_trap_ifdivzero_tl(cpu_cc_src2);
3117 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3121 CHECK_IU_FEATURE(dc, DIV);
3122 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3125 gen_op_div_cc(cpu_dst);
3128 CHECK_IU_FEATURE(dc, DIV);
3129 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3132 gen_op_div_cc(cpu_dst);
3137 gen_movl_TN_reg(rd, cpu_dst);
3140 case 0x20: /* taddcc */
3141 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3142 gen_movl_TN_reg(rd, cpu_dst);
3144 case 0x21: /* tsubcc */
3145 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3146 gen_movl_TN_reg(rd, cpu_dst);
3148 case 0x22: /* taddcctv */
3149 save_state(dc, cpu_cond);
3150 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3151 gen_movl_TN_reg(rd, cpu_dst);
3153 case 0x23: /* tsubcctv */
3154 save_state(dc, cpu_cond);
3155 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3156 gen_movl_TN_reg(rd, cpu_dst);
3158 case 0x24: /* mulscc */
3159 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3160 gen_movl_TN_reg(rd, cpu_dst);
3162 #ifndef TARGET_SPARC64
3163 case 0x25: /* sll */
3164 if (IS_IMM) { /* immediate */
3165 rs2 = GET_FIELDs(insn, 20, 31);
3166 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3167 } else { /* register */
3168 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3169 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3171 gen_movl_TN_reg(rd, cpu_dst);
3173 case 0x26: /* srl */
3174 if (IS_IMM) { /* immediate */
3175 rs2 = GET_FIELDs(insn, 20, 31);
3176 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3177 } else { /* register */
3178 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3179 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3181 gen_movl_TN_reg(rd, cpu_dst);
3183 case 0x27: /* sra */
3184 if (IS_IMM) { /* immediate */
3185 rs2 = GET_FIELDs(insn, 20, 31);
3186 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3187 } else { /* register */
3188 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3189 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3191 gen_movl_TN_reg(rd, cpu_dst);
3198 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3199 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3201 #ifndef TARGET_SPARC64
3202 case 0x01 ... 0x0f: /* undefined in the
3206 case 0x10 ... 0x1f: /* implementation-dependent
3212 case 0x2: /* V9 wrccr */
3213 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3214 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3216 case 0x3: /* V9 wrasi */
3217 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3218 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3220 case 0x6: /* V9 wrfprs */
3221 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3222 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3223 save_state(dc, cpu_cond);
3228 case 0xf: /* V9 sir, nop if user */
3229 #if !defined(CONFIG_USER_ONLY)
3234 case 0x13: /* Graphics Status */
3235 if (gen_trap_ifnofpu(dc, cpu_cond))
3237 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3239 case 0x14: /* Softint set */
3240 if (!supervisor(dc))
3242 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3243 tcg_gen_helper_0_1(helper_set_softint,
3246 case 0x15: /* Softint clear */
3247 if (!supervisor(dc))
3249 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3250 tcg_gen_helper_0_1(helper_clear_softint,
3253 case 0x16: /* Softint write */
3254 if (!supervisor(dc))
3256 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3257 tcg_gen_helper_0_1(helper_write_softint,
3260 case 0x17: /* Tick compare */
3261 #if !defined(CONFIG_USER_ONLY)
3262 if (!supervisor(dc))
3268 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3270 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3271 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3272 offsetof(CPUState, tick));
3273 tcg_gen_helper_0_2(helper_tick_set_limit,
3274 r_tickptr, cpu_tick_cmpr);
3275 tcg_temp_free(r_tickptr);
3278 case 0x18: /* System tick */
3279 #if !defined(CONFIG_USER_ONLY)
3280 if (!supervisor(dc))
3286 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3288 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3289 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3290 offsetof(CPUState, stick));
3291 tcg_gen_helper_0_2(helper_tick_set_count,
3292 r_tickptr, cpu_dst);
3293 tcg_temp_free(r_tickptr);
3296 case 0x19: /* System tick compare */
3297 #if !defined(CONFIG_USER_ONLY)
3298 if (!supervisor(dc))
3304 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3306 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3307 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3308 offsetof(CPUState, stick));
3309 tcg_gen_helper_0_2(helper_tick_set_limit,
3310 r_tickptr, cpu_stick_cmpr);
3311 tcg_temp_free(r_tickptr);
3315 case 0x10: /* Performance Control */
3316 case 0x11: /* Performance Instrumentation
3318 case 0x12: /* Dispatch Control */
3325 #if !defined(CONFIG_USER_ONLY)
3326 case 0x31: /* wrpsr, V9 saved, restored */
3328 if (!supervisor(dc))
3330 #ifdef TARGET_SPARC64
3333 tcg_gen_helper_0_0(helper_saved);
3336 tcg_gen_helper_0_0(helper_restored);
3338 case 2: /* UA2005 allclean */
3339 case 3: /* UA2005 otherw */
3340 case 4: /* UA2005 normalw */
3341 case 5: /* UA2005 invalw */
3347 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3348 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3349 save_state(dc, cpu_cond);
3356 case 0x32: /* wrwim, V9 wrpr */
3358 if (!supervisor(dc))
3360 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3361 #ifdef TARGET_SPARC64
3367 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3368 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3369 offsetof(CPUState, tsptr));
3370 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3371 offsetof(trap_state, tpc));
3372 tcg_temp_free(r_tsptr);
3379 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3380 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3381 offsetof(CPUState, tsptr));
3382 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3383 offsetof(trap_state, tnpc));
3384 tcg_temp_free(r_tsptr);
3391 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3392 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3393 offsetof(CPUState, tsptr));
3394 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3395 offsetof(trap_state,
3397 tcg_temp_free(r_tsptr);
3404 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3405 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3406 offsetof(CPUState, tsptr));
3407 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3408 offsetof(trap_state, tt));
3409 tcg_temp_free(r_tsptr);
3416 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3417 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3418 offsetof(CPUState, tick));
3419 tcg_gen_helper_0_2(helper_tick_set_count,
3420 r_tickptr, cpu_tmp0);
3421 tcg_temp_free(r_tickptr);
3425 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3428 save_state(dc, cpu_cond);
3429 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3435 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3436 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3437 offsetof(CPUSPARCState, tl));
3440 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3441 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3442 offsetof(CPUSPARCState,
3446 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3449 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3450 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3451 offsetof(CPUSPARCState,
3454 case 11: // canrestore
3455 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3456 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3457 offsetof(CPUSPARCState,
3460 case 12: // cleanwin
3461 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3462 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3463 offsetof(CPUSPARCState,
3466 case 13: // otherwin
3467 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3468 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3469 offsetof(CPUSPARCState,
3473 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3474 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3475 offsetof(CPUSPARCState,
3478 case 16: // UA2005 gl
3479 CHECK_IU_FEATURE(dc, GL);
3480 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3481 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3482 offsetof(CPUSPARCState, gl));
3484 case 26: // UA2005 strand status
3485 CHECK_IU_FEATURE(dc, HYPV);
3486 if (!hypervisor(dc))
3488 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3494 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3495 if (dc->def->nwindows != 32)
3496 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3497 (1 << dc->def->nwindows) - 1);
3498 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3502 case 0x33: /* wrtbr, UA2005 wrhpr */
3504 #ifndef TARGET_SPARC64
3505 if (!supervisor(dc))
3507 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3509 CHECK_IU_FEATURE(dc, HYPV);
3510 if (!hypervisor(dc))
3512 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3515 // XXX gen_op_wrhpstate();
3516 save_state(dc, cpu_cond);
3522 // XXX gen_op_wrhtstate();
3525 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3528 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3530 case 31: // hstick_cmpr
3534 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3535 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3536 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3537 offsetof(CPUState, hstick));
3538 tcg_gen_helper_0_2(helper_tick_set_limit,
3539 r_tickptr, cpu_hstick_cmpr);
3540 tcg_temp_free(r_tickptr);
3543 case 6: // hver readonly
3551 #ifdef TARGET_SPARC64
3552 case 0x2c: /* V9 movcc */
3554 int cc = GET_FIELD_SP(insn, 11, 12);
3555 int cond = GET_FIELD_SP(insn, 14, 17);
3559 r_cond = tcg_temp_new(TCG_TYPE_TL);
3560 if (insn & (1 << 18)) {
3562 gen_cond(r_cond, 0, cond);
3564 gen_cond(r_cond, 1, cond);
3568 gen_fcond(r_cond, cc, cond);
3571 l1 = gen_new_label();
3573 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3574 if (IS_IMM) { /* immediate */
3577 rs2 = GET_FIELD_SPs(insn, 0, 10);
3578 r_const = tcg_const_tl((int)rs2);
3579 gen_movl_TN_reg(rd, r_const);
3580 tcg_temp_free(r_const);
3582 rs2 = GET_FIELD_SP(insn, 0, 4);
3583 gen_movl_reg_TN(rs2, cpu_tmp0);
3584 gen_movl_TN_reg(rd, cpu_tmp0);
3587 tcg_temp_free(r_cond);
3590 case 0x2d: /* V9 sdivx */
3591 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3592 gen_movl_TN_reg(rd, cpu_dst);
3594 case 0x2e: /* V9 popc */
3596 cpu_src2 = get_src2(insn, cpu_src2);
3597 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3599 gen_movl_TN_reg(rd, cpu_dst);
3601 case 0x2f: /* V9 movr */
3603 int cond = GET_FIELD_SP(insn, 10, 12);
3606 cpu_src1 = get_src1(insn, cpu_src1);
3608 l1 = gen_new_label();
3610 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3612 if (IS_IMM) { /* immediate */
3615 rs2 = GET_FIELD_SPs(insn, 0, 9);
3616 r_const = tcg_const_tl((int)rs2);
3617 gen_movl_TN_reg(rd, r_const);
3618 tcg_temp_free(r_const);
3620 rs2 = GET_FIELD_SP(insn, 0, 4);
3621 gen_movl_reg_TN(rs2, cpu_tmp0);
3622 gen_movl_TN_reg(rd, cpu_tmp0);
3632 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3633 #ifdef TARGET_SPARC64
3634 int opf = GET_FIELD_SP(insn, 5, 13);
3635 rs1 = GET_FIELD(insn, 13, 17);
3636 rs2 = GET_FIELD(insn, 27, 31);
3637 if (gen_trap_ifnofpu(dc, cpu_cond))
3641 case 0x000: /* VIS I edge8cc */
3642 case 0x001: /* VIS II edge8n */
3643 case 0x002: /* VIS I edge8lcc */
3644 case 0x003: /* VIS II edge8ln */
3645 case 0x004: /* VIS I edge16cc */
3646 case 0x005: /* VIS II edge16n */
3647 case 0x006: /* VIS I edge16lcc */
3648 case 0x007: /* VIS II edge16ln */
3649 case 0x008: /* VIS I edge32cc */
3650 case 0x009: /* VIS II edge32n */
3651 case 0x00a: /* VIS I edge32lcc */
3652 case 0x00b: /* VIS II edge32ln */
3655 case 0x010: /* VIS I array8 */
3656 CHECK_FPU_FEATURE(dc, VIS1);
3657 cpu_src1 = get_src1(insn, cpu_src1);
3658 gen_movl_reg_TN(rs2, cpu_src2);
3659 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3661 gen_movl_TN_reg(rd, cpu_dst);
3663 case 0x012: /* VIS I array16 */
3664 CHECK_FPU_FEATURE(dc, VIS1);
3665 cpu_src1 = get_src1(insn, cpu_src1);
3666 gen_movl_reg_TN(rs2, cpu_src2);
3667 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3669 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3670 gen_movl_TN_reg(rd, cpu_dst);
3672 case 0x014: /* VIS I array32 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 cpu_src1 = get_src1(insn, cpu_src1);
3675 gen_movl_reg_TN(rs2, cpu_src2);
3676 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3678 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3679 gen_movl_TN_reg(rd, cpu_dst);
3681 case 0x018: /* VIS I alignaddr */
3682 CHECK_FPU_FEATURE(dc, VIS1);
3683 cpu_src1 = get_src1(insn, cpu_src1);
3684 gen_movl_reg_TN(rs2, cpu_src2);
3685 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3687 gen_movl_TN_reg(rd, cpu_dst);
3689 case 0x019: /* VIS II bmask */
3690 case 0x01a: /* VIS I alignaddrl */
3693 case 0x020: /* VIS I fcmple16 */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 tcg_gen_helper_0_0(helper_fcmple16);
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3700 case 0x022: /* VIS I fcmpne16 */
3701 CHECK_FPU_FEATURE(dc, VIS1);
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3704 tcg_gen_helper_0_0(helper_fcmpne16);
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3707 case 0x024: /* VIS I fcmple32 */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 tcg_gen_helper_0_0(helper_fcmple32);
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3714 case 0x026: /* VIS I fcmpne32 */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 tcg_gen_helper_0_0(helper_fcmpne32);
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3721 case 0x028: /* VIS I fcmpgt16 */
3722 CHECK_FPU_FEATURE(dc, VIS1);
3723 gen_op_load_fpr_DT0(DFPREG(rs1));
3724 gen_op_load_fpr_DT1(DFPREG(rs2));
3725 tcg_gen_helper_0_0(helper_fcmpgt16);
3726 gen_op_store_DT0_fpr(DFPREG(rd));
3728 case 0x02a: /* VIS I fcmpeq16 */
3729 CHECK_FPU_FEATURE(dc, VIS1);
3730 gen_op_load_fpr_DT0(DFPREG(rs1));
3731 gen_op_load_fpr_DT1(DFPREG(rs2));
3732 tcg_gen_helper_0_0(helper_fcmpeq16);
3733 gen_op_store_DT0_fpr(DFPREG(rd));
3735 case 0x02c: /* VIS I fcmpgt32 */
3736 CHECK_FPU_FEATURE(dc, VIS1);
3737 gen_op_load_fpr_DT0(DFPREG(rs1));
3738 gen_op_load_fpr_DT1(DFPREG(rs2));
3739 tcg_gen_helper_0_0(helper_fcmpgt32);
3740 gen_op_store_DT0_fpr(DFPREG(rd));
3742 case 0x02e: /* VIS I fcmpeq32 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 tcg_gen_helper_0_0(helper_fcmpeq32);
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3749 case 0x031: /* VIS I fmul8x16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 tcg_gen_helper_0_0(helper_fmul8x16);
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3756 case 0x033: /* VIS I fmul8x16au */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_op_load_fpr_DT0(DFPREG(rs1));
3759 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 tcg_gen_helper_0_0(helper_fmul8x16au);
3761 gen_op_store_DT0_fpr(DFPREG(rd));
3763 case 0x035: /* VIS I fmul8x16al */
3764 CHECK_FPU_FEATURE(dc, VIS1);
3765 gen_op_load_fpr_DT0(DFPREG(rs1));
3766 gen_op_load_fpr_DT1(DFPREG(rs2));
3767 tcg_gen_helper_0_0(helper_fmul8x16al);
3768 gen_op_store_DT0_fpr(DFPREG(rd));
3770 case 0x036: /* VIS I fmul8sux16 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 gen_op_load_fpr_DT0(DFPREG(rs1));
3773 gen_op_load_fpr_DT1(DFPREG(rs2));
3774 tcg_gen_helper_0_0(helper_fmul8sux16);
3775 gen_op_store_DT0_fpr(DFPREG(rd));
3777 case 0x037: /* VIS I fmul8ulx16 */
3778 CHECK_FPU_FEATURE(dc, VIS1);
3779 gen_op_load_fpr_DT0(DFPREG(rs1));
3780 gen_op_load_fpr_DT1(DFPREG(rs2));
3781 tcg_gen_helper_0_0(helper_fmul8ulx16);
3782 gen_op_store_DT0_fpr(DFPREG(rd));
3784 case 0x038: /* VIS I fmuld8sux16 */
3785 CHECK_FPU_FEATURE(dc, VIS1);
3786 gen_op_load_fpr_DT0(DFPREG(rs1));
3787 gen_op_load_fpr_DT1(DFPREG(rs2));
3788 tcg_gen_helper_0_0(helper_fmuld8sux16);
3789 gen_op_store_DT0_fpr(DFPREG(rd));
3791 case 0x039: /* VIS I fmuld8ulx16 */
3792 CHECK_FPU_FEATURE(dc, VIS1);
3793 gen_op_load_fpr_DT0(DFPREG(rs1));
3794 gen_op_load_fpr_DT1(DFPREG(rs2));
3795 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3796 gen_op_store_DT0_fpr(DFPREG(rd));
3798 case 0x03a: /* VIS I fpack32 */
3799 case 0x03b: /* VIS I fpack16 */
3800 case 0x03d: /* VIS I fpackfix */
3801 case 0x03e: /* VIS I pdist */
3804 case 0x048: /* VIS I faligndata */
3805 CHECK_FPU_FEATURE(dc, VIS1);
3806 gen_op_load_fpr_DT0(DFPREG(rs1));
3807 gen_op_load_fpr_DT1(DFPREG(rs2));
3808 tcg_gen_helper_0_0(helper_faligndata);
3809 gen_op_store_DT0_fpr(DFPREG(rd));
3811 case 0x04b: /* VIS I fpmerge */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 gen_op_load_fpr_DT0(DFPREG(rs1));
3814 gen_op_load_fpr_DT1(DFPREG(rs2));
3815 tcg_gen_helper_0_0(helper_fpmerge);
3816 gen_op_store_DT0_fpr(DFPREG(rd));
3818 case 0x04c: /* VIS II bshuffle */
3821 case 0x04d: /* VIS I fexpand */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 tcg_gen_helper_0_0(helper_fexpand);
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3828 case 0x050: /* VIS I fpadd16 */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_DT0(DFPREG(rs1));
3831 gen_op_load_fpr_DT1(DFPREG(rs2));
3832 tcg_gen_helper_0_0(helper_fpadd16);
3833 gen_op_store_DT0_fpr(DFPREG(rd));
3835 case 0x051: /* VIS I fpadd16s */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3838 cpu_fpr[rs1], cpu_fpr[rs2]);
3840 case 0x052: /* VIS I fpadd32 */
3841 CHECK_FPU_FEATURE(dc, VIS1);
3842 gen_op_load_fpr_DT0(DFPREG(rs1));
3843 gen_op_load_fpr_DT1(DFPREG(rs2));
3844 tcg_gen_helper_0_0(helper_fpadd32);
3845 gen_op_store_DT0_fpr(DFPREG(rd));
3847 case 0x053: /* VIS I fpadd32s */
3848 CHECK_FPU_FEATURE(dc, VIS1);
3849 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3850 cpu_fpr[rs1], cpu_fpr[rs2]);
3852 case 0x054: /* VIS I fpsub16 */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 tcg_gen_helper_0_0(helper_fpsub16);
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3859 case 0x055: /* VIS I fpsub16s */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3862 cpu_fpr[rs1], cpu_fpr[rs2]);
3864 case 0x056: /* VIS I fpsub32 */
3865 CHECK_FPU_FEATURE(dc, VIS1);
3866 gen_op_load_fpr_DT0(DFPREG(rs1));
3867 gen_op_load_fpr_DT1(DFPREG(rs2));
3868 tcg_gen_helper_0_0(helper_fpsub32);
3869 gen_op_store_DT0_fpr(DFPREG(rd));
3871 case 0x057: /* VIS I fpsub32s */
3872 CHECK_FPU_FEATURE(dc, VIS1);
3873 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3874 cpu_fpr[rs1], cpu_fpr[rs2]);
3876 case 0x060: /* VIS I fzero */
3877 CHECK_FPU_FEATURE(dc, VIS1);
3878 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3879 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3881 case 0x061: /* VIS I fzeros */
3882 CHECK_FPU_FEATURE(dc, VIS1);
3883 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3885 case 0x062: /* VIS I fnor */
3886 CHECK_FPU_FEATURE(dc, VIS1);
3887 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3888 cpu_fpr[DFPREG(rs2)]);
3889 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3890 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3891 cpu_fpr[DFPREG(rs2) + 1]);
3892 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3894 case 0x063: /* VIS I fnors */
3895 CHECK_FPU_FEATURE(dc, VIS1);
3896 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3897 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3899 case 0x064: /* VIS I fandnot2 */
3900 CHECK_FPU_FEATURE(dc, VIS1);
3901 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3902 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3903 cpu_fpr[DFPREG(rs2)]);
3904 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3905 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3906 cpu_fpr[DFPREG(rs2) + 1]);
3908 case 0x065: /* VIS I fandnot2s */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3911 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3913 case 0x066: /* VIS I fnot2 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3917 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3918 cpu_fpr[DFPREG(rs2) + 1], -1);
3920 case 0x067: /* VIS I fnot2s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3924 case 0x068: /* VIS I fandnot1 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3927 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3928 cpu_fpr[DFPREG(rs1)]);
3929 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3930 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3931 cpu_fpr[DFPREG(rs1) + 1]);
3933 case 0x069: /* VIS I fandnot1s */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3936 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3938 case 0x06a: /* VIS I fnot1 */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3942 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3943 cpu_fpr[DFPREG(rs1) + 1], -1);
3945 case 0x06b: /* VIS I fnot1s */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3949 case 0x06c: /* VIS I fxor */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
/* Fragment of disas_sparc_insn(): VIS I/II bitwise FP ops and V9/V8
 * control-transfer forms (return, jmpl, rett, flush, save, restore,
 * done/retry).  NOTE(review): this extract is elided -- the `break;`
 * statements and several braces between cases are not shown here;
 * confirm against the full file before editing.
 *
 * Double-width VIS ops operate on a 64-bit FP register pair as two
 * 32-bit halves: cpu_fpr[DFPREG(x)] (even half) and
 * cpu_fpr[DFPREG(x) + 1] (odd half). */
3952 cpu_fpr[DFPREG(rs2)]);
3953 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3954 cpu_fpr[DFPREG(rs1) + 1],
3955 cpu_fpr[DFPREG(rs2) + 1]);
3957 case 0x06d: /* VIS I fxors */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3961 case 0x06e: /* VIS I fnand */
3962 CHECK_FPU_FEATURE(dc, VIS1);
/* NAND is built as AND into a temp followed by xori with -1 (bitwise NOT). */
3963 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3964 cpu_fpr[DFPREG(rs2)]);
3965 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3966 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3967 cpu_fpr[DFPREG(rs2) + 1]);
3968 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3970 case 0x06f: /* VIS I fnands */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3973 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3975 case 0x070: /* VIS I fand */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3978 cpu_fpr[DFPREG(rs2)]);
3979 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3980 cpu_fpr[DFPREG(rs1) + 1],
3981 cpu_fpr[DFPREG(rs2) + 1]);
3983 case 0x071: /* VIS I fands */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3987 case 0x072: /* VIS I fxnor */
3988 CHECK_FPU_FEATURE(dc, VIS1);
/* XNOR: invert one source, then xor with the other. */
3989 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3990 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3991 cpu_fpr[DFPREG(rs1)]);
3992 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3993 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3994 cpu_fpr[DFPREG(rs1) + 1]);
3996 case 0x073: /* VIS I fxnors */
3997 CHECK_FPU_FEATURE(dc, VIS1);
3998 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3999 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4001 case 0x074: /* VIS I fsrc1 */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4004 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4005 cpu_fpr[DFPREG(rs1) + 1]);
4007 case 0x075: /* VIS I fsrc1s */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4011 case 0x076: /* VIS I fornot2 */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
4014 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4015 cpu_fpr[DFPREG(rs2)]);
4016 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
4017 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4018 cpu_fpr[DFPREG(rs2) + 1]);
4020 case 0x077: /* VIS I fornot2s */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4023 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4025 case 0x078: /* VIS I fsrc2 */
4026 CHECK_FPU_FEATURE(dc, VIS1);
/* NOTE(review): fsrc2 goes through the DT0 load/store helpers while the
 * parallel fsrc1 case above uses two tcg_gen_mov_i32 ops directly --
 * functionally a register-pair copy either way, but inconsistent;
 * consider unifying in the full file. */
4027 gen_op_load_fpr_DT0(DFPREG(rs2));
4028 gen_op_store_DT0_fpr(DFPREG(rd));
4030 case 0x079: /* VIS I fsrc2s */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4034 case 0x07a: /* VIS I fornot1 */
4035 CHECK_FPU_FEATURE(dc, VIS1);
4036 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4037 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4038 cpu_fpr[DFPREG(rs1)]);
4039 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4040 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4041 cpu_fpr[DFPREG(rs1) + 1]);
4043 case 0x07b: /* VIS I fornot1s */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4046 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4048 case 0x07c: /* VIS I for */
4049 CHECK_FPU_FEATURE(dc, VIS1);
4050 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4051 cpu_fpr[DFPREG(rs2)]);
4052 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4053 cpu_fpr[DFPREG(rs1) + 1],
4054 cpu_fpr[DFPREG(rs2) + 1]);
4056 case 0x07d: /* VIS I fors */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4060 case 0x07e: /* VIS I fone */
4061 CHECK_FPU_FEATURE(dc, VIS1);
/* fone: set both halves of the destination pair to all-ones. */
4062 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4063 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4065 case 0x07f: /* VIS I fones */
4066 CHECK_FPU_FEATURE(dc, VIS1);
4067 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4069 case 0x080: /* VIS I shutdown */
4070 case 0x081: /* VIS II siam */
4079 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4080 #ifdef TARGET_SPARC64
4085 #ifdef TARGET_SPARC64
4086 } else if (xop == 0x39) { /* V9 return */
/* return: restore the register window and jump to rs1 (+ rs2 or simm),
 * with a 3-byte alignment check on the target. */
4089 save_state(dc, cpu_cond);
4090 cpu_src1 = get_src1(insn, cpu_src1);
4091 if (IS_IMM) { /* immediate */
4092 rs2 = GET_FIELDs(insn, 19, 31);
4093 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4094 } else { /* register */
4095 rs2 = GET_FIELD(insn, 27, 31);
4097 gen_movl_reg_TN(rs2, cpu_src2);
4098 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4100 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4102 tcg_gen_helper_0_0(helper_restore);
4103 gen_mov_pc_npc(dc, cpu_cond);
4104 r_const = tcg_const_i32(3);
4105 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4106 tcg_temp_free(r_const);
4107 tcg_gen_mov_tl(cpu_npc, cpu_dst);
/* Target is computed at runtime, so npc becomes dynamic. */
4108 dc->npc = DYNAMIC_PC;
4112 cpu_src1 = get_src1(insn, cpu_src1);
4113 if (IS_IMM) { /* immediate */
4114 rs2 = GET_FIELDs(insn, 19, 31);
4115 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4116 } else { /* register */
4117 rs2 = GET_FIELD(insn, 27, 31);
4119 gen_movl_reg_TN(rs2, cpu_src2);
4120 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4122 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4125 case 0x38: /* jmpl */
/* jmpl: write the address of this insn into rd, then jump. */
4129 r_const = tcg_const_tl(dc->pc);
4130 gen_movl_TN_reg(rd, r_const);
4131 tcg_temp_free(r_const);
4132 gen_mov_pc_npc(dc, cpu_cond);
4133 r_const = tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4136 tcg_temp_free(r_const);
4137 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4138 dc->npc = DYNAMIC_PC;
4141 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4142 case 0x39: /* rett, V9 return */
/* rett is privileged: trap if not in supervisor mode. */
4146 if (!supervisor(dc))
4148 gen_mov_pc_npc(dc, cpu_cond);
4149 r_const = tcg_const_i32(3);
4150 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4152 tcg_temp_free(r_const);
4153 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4154 dc->npc = DYNAMIC_PC;
4155 tcg_gen_helper_0_0(helper_rett);
4159 case 0x3b: /* flush */
4160 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4162 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4164 case 0x3c: /* save */
4165 save_state(dc, cpu_cond);
4166 tcg_gen_helper_0_0(helper_save);
4167 gen_movl_TN_reg(rd, cpu_dst);
4169 case 0x3d: /* restore */
4170 save_state(dc, cpu_cond);
4171 tcg_gen_helper_0_0(helper_restore);
4172 gen_movl_TN_reg(rd, cpu_dst);
4174 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4175 case 0x3e: /* V9 done/retry */
4179 if (!supervisor(dc))
/* done/retry change the trap state; both pc and npc become dynamic. */
4181 dc->npc = DYNAMIC_PC;
4182 dc->pc = DYNAMIC_PC;
4183 tcg_gen_helper_0_0(helper_done);
4186 if (!supervisor(dc))
4188 dc->npc = DYNAMIC_PC;
4189 dc->pc = DYNAMIC_PC;
4190 tcg_gen_helper_0_0(helper_retry);
/* Fragment of disas_sparc_insn(): format-3 load/store decoding.
 * First the effective address is formed in cpu_addr (rs1 + simm13 or
 * rs1 + rs2; casa/casxa use rs1 directly), then xop ranges dispatch to
 * loads, FP loads, stores, FP stores, and ASI variants.
 * NOTE(review): this extract is elided -- `break;` lines, goto targets
 * and several #else/#endif arms are not shown; consult the full file. */
4205 case 3: /* load/store instructions */
4207 unsigned int xop = GET_FIELD(insn, 7, 12);
4209 cpu_src1 = get_src1(insn, cpu_src1);
4210 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4211 rs2 = GET_FIELD(insn, 27, 31);
4212 gen_movl_reg_TN(rs2, cpu_src2);
4213 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4214 } else if (IS_IMM) { /* immediate */
4215 rs2 = GET_FIELDs(insn, 19, 31);
4216 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4217 } else { /* register */
4218 rs2 = GET_FIELD(insn, 27, 31);
4220 gen_movl_reg_TN(rs2, cpu_src2);
4221 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4223 tcg_gen_mov_tl(cpu_addr, cpu_src1);
/* Load group: result lands in cpu_val and is written back to rd below. */
4225 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4226 (xop > 0x17 && xop <= 0x1d ) ||
4227 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4229 case 0x0: /* load unsigned word */
4230 gen_address_mask(dc, cpu_addr);
4231 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4233 case 0x1: /* load unsigned byte */
4234 gen_address_mask(dc, cpu_addr);
4235 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4237 case 0x2: /* load unsigned halfword */
4238 gen_address_mask(dc, cpu_addr);
4239 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4241 case 0x3: /* load double word */
/* ldd: one 64-bit load split across the even/odd register pair;
 * address must be 8-byte aligned (mask 7). */
4247 save_state(dc, cpu_cond);
4248 r_const = tcg_const_i32(7);
4249 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4250 r_const); // XXX remove
4251 tcg_temp_free(r_const);
4252 gen_address_mask(dc, cpu_addr);
4253 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4254 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4255 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4256 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4257 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4258 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4259 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4262 case 0x9: /* load signed byte */
4263 gen_address_mask(dc, cpu_addr);
4264 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4266 case 0xa: /* load signed halfword */
4267 gen_address_mask(dc, cpu_addr);
4268 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4270 case 0xd: /* ldstub -- XXX: should be atomically */
/* ldstub: load byte then store 0xff back -- not atomic as generated. */
4274 gen_address_mask(dc, cpu_addr)
4275 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4276 r_const = tcg_const_tl(0xff);
4277 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4278 tcg_temp_free(r_const);
4281 case 0x0f: /* swap register with memory. Also
4283 CHECK_IU_FEATURE(dc, SWAP);
4284 gen_movl_reg_TN(rd, cpu_val);
4285 gen_address_mask(dc, cpu_addr);
4286 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4287 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4288 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
/* Alternate-space loads: privileged on pre-V9 (supervisor check). */
4290 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4291 case 0x10: /* load word alternate */
4292 #ifndef TARGET_SPARC64
4295 if (!supervisor(dc))
4298 save_state(dc, cpu_cond);
4299 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4301 case 0x11: /* load unsigned byte alternate */
4302 #ifndef TARGET_SPARC64
4305 if (!supervisor(dc))
4308 save_state(dc, cpu_cond);
4309 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4311 case 0x12: /* load unsigned halfword alternate */
4312 #ifndef TARGET_SPARC64
4315 if (!supervisor(dc))
4318 save_state(dc, cpu_cond);
4319 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4321 case 0x13: /* load double word alternate */
4322 #ifndef TARGET_SPARC64
4325 if (!supervisor(dc))
4330 save_state(dc, cpu_cond);
4331 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4333 case 0x19: /* load signed byte alternate */
4334 #ifndef TARGET_SPARC64
4337 if (!supervisor(dc))
4340 save_state(dc, cpu_cond);
4341 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4343 case 0x1a: /* load signed halfword alternate */
4344 #ifndef TARGET_SPARC64
4347 if (!supervisor(dc))
4350 save_state(dc, cpu_cond);
4351 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4353 case 0x1d: /* ldstuba -- XXX: should be atomically */
4354 #ifndef TARGET_SPARC64
4357 if (!supervisor(dc))
4360 save_state(dc, cpu_cond);
4361 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4363 case 0x1f: /* swap reg with alt. memory. Also
4365 CHECK_IU_FEATURE(dc, SWAP);
4366 #ifndef TARGET_SPARC64
4369 if (!supervisor(dc))
4372 save_state(dc, cpu_cond);
4373 gen_movl_reg_TN(rd, cpu_val);
4374 gen_swap_asi(cpu_val, cpu_addr, insn);
4377 #ifndef TARGET_SPARC64
4378 case 0x30: /* ldc */
4379 case 0x31: /* ldcsr */
4380 case 0x33: /* lddc */
4384 #ifdef TARGET_SPARC64
4385 case 0x08: /* V9 ldsw */
4386 gen_address_mask(dc, cpu_addr);
4387 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4389 case 0x0b: /* V9 ldx */
4390 gen_address_mask(dc, cpu_addr);
4391 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4393 case 0x18: /* V9 ldswa */
4394 save_state(dc, cpu_cond);
4395 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4397 case 0x1b: /* V9 ldxa */
4398 save_state(dc, cpu_cond);
4399 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4401 case 0x2d: /* V9 prefetch, no effect */
4403 case 0x30: /* V9 ldfa */
4404 save_state(dc, cpu_cond);
4405 gen_ldf_asi(cpu_addr, insn, 4, rd);
4407 case 0x33: /* V9 lddfa */
4408 save_state(dc, cpu_cond);
4409 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4411 case 0x3d: /* V9 prefetcha, no effect */
4413 case 0x32: /* V9 ldqfa */
4414 CHECK_FPU_FEATURE(dc, FLOAT128);
4415 save_state(dc, cpu_cond);
4416 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
/* Common write-back for the integer load group. */
4422 gen_movl_TN_reg(rd, cpu_val);
4423 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* FP load group (ldf/ldfsr/ldqf/lddf): traps first if FPU disabled. */
4426 } else if (xop >= 0x20 && xop < 0x24) {
4427 if (gen_trap_ifnofpu(dc, cpu_cond))
4429 save_state(dc, cpu_cond);
4431 case 0x20: /* load fpreg */
4432 gen_address_mask(dc, cpu_addr);
4433 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4435 case 0x21: /* ldfsr, V9 ldxfsr */
4436 #ifdef TARGET_SPARC64
4437 gen_address_mask(dc, cpu_addr);
4439 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4440 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4444 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4445 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4449 case 0x22: /* load quad fpreg */
4453 CHECK_FPU_FEATURE(dc, FLOAT128);
4454 r_const = tcg_const_i32(dc->mem_idx);
4455 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4456 tcg_temp_free(r_const);
4457 gen_op_store_QT0_fpr(QFPREG(rd));
4460 case 0x23: /* load double fpreg */
4464 r_const = tcg_const_i32(dc->mem_idx);
4465 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4466 tcg_temp_free(r_const);
4467 gen_op_store_DT0_fpr(DFPREG(rd));
/* Integer store group: rd is read into cpu_val, then stored. */
4473 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4474 xop == 0xe || xop == 0x1e) {
4475 gen_movl_reg_TN(rd, cpu_val);
4477 case 0x4: /* store word */
4478 gen_address_mask(dc, cpu_addr);
4479 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4481 case 0x5: /* store byte */
4482 gen_address_mask(dc, cpu_addr);
4483 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4485 case 0x6: /* store halfword */
4486 gen_address_mask(dc, cpu_addr);
4487 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4489 case 0x7: /* store double word */
/* std: concat the odd/even register pair into a 64-bit value;
 * 8-byte alignment is enforced via helper_check_align. */
4495 save_state(dc, cpu_cond);
4496 gen_address_mask(dc, cpu_addr);
4497 r_const = tcg_const_i32(7);
4498 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4499 r_const); // XXX remove
4500 tcg_temp_free(r_const);
4501 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4502 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4503 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4506 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4507 case 0x14: /* store word alternate */
4508 #ifndef TARGET_SPARC64
4511 if (!supervisor(dc))
4514 save_state(dc, cpu_cond);
4515 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4517 case 0x15: /* store byte alternate */
4518 #ifndef TARGET_SPARC64
4521 if (!supervisor(dc))
4524 save_state(dc, cpu_cond);
4525 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4527 case 0x16: /* store halfword alternate */
4528 #ifndef TARGET_SPARC64
4531 if (!supervisor(dc))
4534 save_state(dc, cpu_cond);
4535 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4537 case 0x17: /* store double word alternate */
4538 #ifndef TARGET_SPARC64
4541 if (!supervisor(dc))
4547 save_state(dc, cpu_cond);
4548 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4552 #ifdef TARGET_SPARC64
4553 case 0x0e: /* V9 stx */
4554 gen_address_mask(dc, cpu_addr);
4555 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4557 case 0x1e: /* V9 stxa */
4558 save_state(dc, cpu_cond);
4559 gen_st_asi(cpu_val, cpu_addr, insn, 8);
/* FP store group (stf/stfsr/stqf/stdf). */
4565 } else if (xop > 0x23 && xop < 0x28) {
4566 if (gen_trap_ifnofpu(dc, cpu_cond))
4568 save_state(dc, cpu_cond);
4570 case 0x24: /* store fpreg */
4571 gen_address_mask(dc, cpu_addr);
4572 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4574 case 0x25: /* stfsr, V9 stxfsr */
4575 #ifdef TARGET_SPARC64
4576 gen_address_mask(dc, cpu_addr);
4577 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4579 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4581 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4582 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4585 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4586 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4590 #ifdef TARGET_SPARC64
4591 /* V9 stqf, store quad fpreg */
4595 CHECK_FPU_FEATURE(dc, FLOAT128);
4596 gen_op_load_fpr_QT0(QFPREG(rd));
4597 r_const = tcg_const_i32(dc->mem_idx);
4598 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4599 tcg_temp_free(r_const);
4602 #else /* !TARGET_SPARC64 */
4603 /* stdfq, store floating point queue */
4604 #if defined(CONFIG_USER_ONLY)
4607 if (!supervisor(dc))
4609 if (gen_trap_ifnofpu(dc, cpu_cond))
4614 case 0x27: /* store double fpreg */
4618 gen_op_load_fpr_DT0(DFPREG(rd));
4619 r_const = tcg_const_i32(dc->mem_idx);
4620 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4621 tcg_temp_free(r_const);
/* ASI FP stores and V9 compare-and-swap. */
4627 } else if (xop > 0x33 && xop < 0x3f) {
4628 save_state(dc, cpu_cond);
4630 #ifdef TARGET_SPARC64
4631 case 0x34: /* V9 stfa */
4632 gen_stf_asi(cpu_addr, insn, 4, rd);
4634 case 0x36: /* V9 stqfa */
4638 CHECK_FPU_FEATURE(dc, FLOAT128);
4639 r_const = tcg_const_i32(7);
4640 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4642 tcg_temp_free(r_const);
4643 gen_op_load_fpr_QT0(QFPREG(rd));
4644 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4647 case 0x37: /* V9 stdfa */
4648 gen_op_load_fpr_DT0(DFPREG(rd));
4649 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4651 case 0x3c: /* V9 casa */
4652 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4653 gen_movl_TN_reg(rd, cpu_val);
4655 case 0x3e: /* V9 casxa */
4656 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4657 gen_movl_TN_reg(rd, cpu_val);
4660 case 0x34: /* stc */
4661 case 0x35: /* stcsr */
4662 case 0x36: /* stdcq */
4663 case 0x37: /* stdc */
/* After decoding: advance pc/npc, or emit the pending conditional
 * branch when npc == JUMP_PC. */
4675 /* default case for non jump instructions */
4676 if (dc->npc == DYNAMIC_PC) {
4677 dc->pc = DYNAMIC_PC;
4679 } else if (dc->npc == JUMP_PC) {
4680 /* we can do a static jump */
4681 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4685 dc->npc = dc->npc + 4;
/* Exception epilogues reached by goto from the cases above:
 * illegal insn, unimplemented flush, privileged insn, FP exceptions,
 * and (pre-V9) NCP insn. */
4693 save_state(dc, cpu_cond);
4694 r_const = tcg_const_i32(TT_ILL_INSN);
4695 tcg_gen_helper_0_1(raise_exception, r_const);
4696 tcg_temp_free(r_const);
4704 save_state(dc, cpu_cond);
4705 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4706 tcg_gen_helper_0_1(raise_exception, r_const);
4707 tcg_temp_free(r_const);
4711 #if !defined(CONFIG_USER_ONLY)
4716 save_state(dc, cpu_cond);
4717 r_const = tcg_const_i32(TT_PRIV_INSN);
4718 tcg_gen_helper_0_1(raise_exception, r_const);
4719 tcg_temp_free(r_const);
4725 save_state(dc, cpu_cond);
4726 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4729 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4731 save_state(dc, cpu_cond);
4732 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4736 #ifndef TARGET_SPARC64
4741 save_state(dc, cpu_cond);
4742 r_const = tcg_const_i32(TT_NCP_INSN);
4743 tcg_gen_helper_0_1(raise_exception, r_const);
4744 tcg_temp_free(r_const);
/* Translate a basic block of guest SPARC code into TCG ops.
 * tb:  the translation block being filled in (pc/cs_base give pc/npc).
 * spc: "search pc" mode flag -- when set, per-insn pc/npc/icount are
 *      recorded in the gen_opc_* arrays so gen_pc_load() can restore
 *      CPU state from a host pc (grounded by the "Search PC" path and
 *      the gen_opc_jump_pc writes below).
 * env: the CPU state being translated for.
 * NOTE(review): this extract is elided; several declarations and loop
 * framing lines are missing from the visible text. */
4751 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4752 int spc, CPUSPARCState *env)
4754 target_ulong pc_start, last_pc;
4755 uint16_t *gen_opc_end;
4756 DisasContext dc1, *dc = &dc1;
4761 memset(dc, 0, sizeof(DisasContext));
4766 dc->npc = (target_ulong) tb->cs_base;
4767 dc->mem_idx = cpu_mmu_index(env);
4769 if ((dc->def->features & CPU_FEATURE_FLOAT))
4770 dc->fpu_enabled = cpu_fpu_enabled(env);
4772 dc->fpu_enabled = 0;
4773 #ifdef TARGET_SPARC64
4774 dc->address_mask_32bit = env->pstate & PS_AM;
4776 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Per-translation temporaries; the locals (cpu_dst/val/addr) survive
 * across branches, the plain temps do not. */
4778 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4779 cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4780 cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4782 cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4785 cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4786 cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4789 max_insns = tb->cflags & CF_COUNT_MASK;
4791 max_insns = CF_COUNT_MASK;
/* Emit a debug trap if a breakpoint sits on the current pc. */
4794 if (env->nb_breakpoints > 0) {
4795 for(j = 0; j < env->nb_breakpoints; j++) {
4796 if (env->breakpoints[j] == dc->pc) {
4797 if (dc->pc != pc_start)
4798 save_state(dc, cpu_cond);
4799 tcg_gen_helper_0_0(helper_debug);
4808 fprintf(logfile, "Search PC...\n");
4809 j = gen_opc_ptr - gen_opc_buf;
4813 gen_opc_instr_start[lj++] = 0;
4814 gen_opc_pc[lj] = dc->pc;
4815 gen_opc_npc[lj] = dc->npc;
4816 gen_opc_instr_start[lj] = 1;
4817 gen_opc_icount[lj] = num_insns;
4820 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4823 disas_sparc_insn(dc);
4828 /* if the next PC is different, we abort now */
4829 if (dc->pc != (last_pc + 4))
4831 /* if we reach a page boundary, we stop generation so that the
4832 PC of a TT_TFAULT exception is always in the right page */
4833 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4835 /* if single step mode, we generate only one instruction and
4836 generate an exception */
4837 if (env->singlestep_enabled) {
4838 tcg_gen_movi_tl(cpu_pc, dc->pc);
4842 } while ((gen_opc_ptr < gen_opc_end) &&
4843 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4844 num_insns < max_insns);
/* Release the per-translation temporaries in reverse order. */
4847 tcg_temp_free(cpu_addr);
4848 tcg_temp_free(cpu_val);
4849 tcg_temp_free(cpu_dst);
4850 tcg_temp_free(cpu_tmp64);
4851 tcg_temp_free(cpu_tmp32);
4852 tcg_temp_free(cpu_tmp0);
4853 if (tb->cflags & CF_LAST_IO)
/* Static pc AND npc permit direct TB chaining; otherwise store them. */
4856 if (dc->pc != DYNAMIC_PC &&
4857 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4858 /* static PC and NPC: we can use direct chaining */
4859 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4861 if (dc->pc != DYNAMIC_PC)
4862 tcg_gen_movi_tl(cpu_pc, dc->pc);
4863 save_npc(dc, cpu_cond);
4867 gen_icount_end(tb, num_insns);
4868 *gen_opc_ptr = INDEX_op_end;
4870 j = gen_opc_ptr - gen_opc_buf;
4873 gen_opc_instr_start[lj++] = 0;
/* Record the two possible jump targets for gen_pc_load()'s JUMP_PC case. */
4879 gen_opc_jump_pc[0] = dc->jump_pc[0];
4880 gen_opc_jump_pc[1] = dc->jump_pc[1];
4882 tb->size = last_pc + 4 - pc_start;
4883 tb->icount = num_insns;
4886 if (loglevel & CPU_LOG_TB_IN_ASM) {
4887 fprintf(logfile, "--------------\n");
4888 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4889 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4890 fprintf(logfile, "\n");
/* Public entry points: translate a TB normally (spc == 0) or in
 * search-pc mode (spc == 1) for CPU-state reconstruction. */
4895 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4897 gen_intermediate_code_internal(tb, 0, env);
4900 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4902 gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialisation: create the TCG global variables
 * that mirror CPUState fields (condition codes, pc/npc, control
 * registers, the 8 global integer registers and all FP registers),
 * then register the helper functions.
 * NOTE(review): this extract is elided; the guard around re-entry and
 * several name-string arguments are not visible here. */
4905 void gen_intermediate_code_init(CPUSPARCState *env)
4909 static const char * const gregnames[8] = {
4910 NULL, // g0 not used
4919 static const char * const fregnames[64] = {
4920 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4921 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4922 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4923 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4924 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4925 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4926 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4927 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4930 /* init various static tables */
4934 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4935 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4936 offsetof(CPUState, regwptr),
/* SPARC64-only globals (xcc, asi, fprs, gsr, timers, hypervisor regs). */
4938 #ifdef TARGET_SPARC64
4939 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4940 TCG_AREG0, offsetof(CPUState, xcc),
4942 cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4943 TCG_AREG0, offsetof(CPUState, asi),
4945 cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4946 TCG_AREG0, offsetof(CPUState, fprs),
4948 cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4949 TCG_AREG0, offsetof(CPUState, gsr),
4951 cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4953 offsetof(CPUState, tick_cmpr),
4955 cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4957 offsetof(CPUState, stick_cmpr),
4959 cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4961 offsetof(CPUState, hstick_cmpr),
4963 cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4964 offsetof(CPUState, hintp),
4966 cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4967 offsetof(CPUState, htba),
4969 cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4970 offsetof(CPUState, hver),
4972 cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4973 offsetof(CPUState, ssr), "ssr");
4974 cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4975 offsetof(CPUState, version), "ver");
4976 cpu_softint = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
4977 offsetof(CPUState, softint),
/* Common (V8 and V9) globals. */
4980 cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
4981 TCG_AREG0, offsetof(CPUState, wim),
4984 cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
4985 TCG_AREG0, offsetof(CPUState, cond),
4987 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4988 TCG_AREG0, offsetof(CPUState, cc_src),
4990 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4991 offsetof(CPUState, cc_src2),
4993 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4994 TCG_AREG0, offsetof(CPUState, cc_dst),
4996 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4997 TCG_AREG0, offsetof(CPUState, psr),
4999 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
5000 TCG_AREG0, offsetof(CPUState, fsr),
5002 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
5003 TCG_AREG0, offsetof(CPUState, pc),
5005 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
5006 TCG_AREG0, offsetof(CPUState, npc),
5008 cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
5009 TCG_AREG0, offsetof(CPUState, y), "y");
5010 #ifndef CONFIG_USER_ONLY
5011 cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
5012 TCG_AREG0, offsetof(CPUState, tbr),
/* g0 is hardwired zero, so globals start at index 1. */
5015 for (i = 1; i < 8; i++)
5016 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
5017 offsetof(CPUState, gregs[i]),
5019 for (i = 0; i < TARGET_FPREGS; i++)
5020 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
5021 offsetof(CPUState, fpr[i]),
5024 /* register helpers */
5027 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* Restore guest pc/npc for the instruction at gen_opc index pc_pos
 * (used when an exception is raised mid-TB).  npc encodings follow the
 * DYNAMIC_PC/JUMP_PC scheme: 1 = already stored dynamically, 2 = pick
 * one of the two recorded jump targets based on the condition value
 * passed through puc. */
5032 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5033 unsigned long searched_pc, int pc_pos, void *puc)
5036 env->pc = gen_opc_pc[pc_pos];
5037 npc = gen_opc_npc[pc_pos];
5039 /* dynamic NPC: already stored */
5040 } else if (npc == 2) {
5041 target_ulong t2 = (target_ulong)(unsigned long)puc;
5042 /* jump PC: use T2 and the jump targets of the translation */
5044 env->npc = gen_opc_jump_pc[0];
5046 env->npc = gen_opc_jump_pc[1];