4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
96 static int sign_extend(int x, int len)
99 return (x << len) >> len;
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_FT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft0));
110 static void gen_op_load_fpr_FT1(unsigned int src)
112 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft1));
115 static void gen_op_store_FT0_fpr(unsigned int dst)
117 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, ft0));
120 static void gen_op_load_fpr_DT0(unsigned int src)
122 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
123 offsetof(CPU_DoubleU, l.upper));
124 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
125 offsetof(CPU_DoubleU, l.lower));
128 static void gen_op_load_fpr_DT1(unsigned int src)
130 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
131 offsetof(CPU_DoubleU, l.upper));
132 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
133 offsetof(CPU_DoubleU, l.lower));
136 static void gen_op_store_DT0_fpr(unsigned int dst)
138 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
139 offsetof(CPU_DoubleU, l.upper));
140 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
141 offsetof(CPU_DoubleU, l.lower));
144 static void gen_op_load_fpr_QT0(unsigned int src)
146 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.upmost));
148 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
149 offsetof(CPU_QuadU, l.upper));
150 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
151 offsetof(CPU_QuadU, l.lower));
152 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
153 offsetof(CPU_QuadU, l.lowest));
156 static void gen_op_load_fpr_QT1(unsigned int src)
158 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.upmost));
160 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
161 offsetof(CPU_QuadU, l.upper));
162 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
163 offsetof(CPU_QuadU, l.lower));
164 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
165 offsetof(CPU_QuadU, l.lowest));
168 static void gen_op_store_QT0_fpr(unsigned int dst)
170 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.upmost));
172 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
173 offsetof(CPU_QuadU, l.upper));
174 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
175 offsetof(CPU_QuadU, l.lower));
176 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
177 offsetof(CPU_QuadU, l.lowest));
181 #ifdef CONFIG_USER_ONLY
182 #define supervisor(dc) 0
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) 0
187 #define supervisor(dc) (dc->mem_idx >= 1)
188 #ifdef TARGET_SPARC64
189 #define hypervisor(dc) (dc->mem_idx == 2)
194 #ifdef TARGET_SPARC64
196 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
198 #define AM_CHECK(dc) (1)
202 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
204 #ifdef TARGET_SPARC64
206 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
210 static inline void gen_movl_reg_TN(int reg, TCGv tn)
213 tcg_gen_movi_tl(tn, 0);
215 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
217 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
221 static inline void gen_movl_TN_reg(int reg, TCGv tn)
226 tcg_gen_mov_tl(cpu_gregs[reg], tn);
228 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
232 static inline void gen_goto_tb(DisasContext *s, int tb_num,
233 target_ulong pc, target_ulong npc)
235 TranslationBlock *tb;
238 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
239 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num);
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb((long)tb + tb_num);
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc, pc);
248 tcg_gen_movi_tl(cpu_npc, npc);
254 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
256 tcg_gen_extu_i32_tl(reg, src);
257 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
258 tcg_gen_andi_tl(reg, reg, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
263 tcg_gen_extu_i32_tl(reg, src);
264 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
265 tcg_gen_andi_tl(reg, reg, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
270 tcg_gen_extu_i32_tl(reg, src);
271 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
272 tcg_gen_andi_tl(reg, reg, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
277 tcg_gen_extu_i32_tl(reg, src);
278 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
279 tcg_gen_andi_tl(reg, reg, 0x1);
282 static inline void gen_cc_clear_icc(void)
284 tcg_gen_movi_i32(cpu_psr, 0);
287 #ifdef TARGET_SPARC64
288 static inline void gen_cc_clear_xcc(void)
290 tcg_gen_movi_i32(cpu_xcc, 0);
296 env->psr |= PSR_ZERO;
297 if ((int32_t) T0 < 0)
300 static inline void gen_cc_NZ_icc(TCGv dst)
305 l1 = gen_new_label();
306 l2 = gen_new_label();
307 r_temp = tcg_temp_new(TCG_TYPE_TL);
308 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
309 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
310 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
312 tcg_gen_ext_i32_tl(r_temp, dst);
313 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
314 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
316 tcg_temp_free(r_temp);
319 #ifdef TARGET_SPARC64
320 static inline void gen_cc_NZ_xcc(TCGv dst)
324 l1 = gen_new_label();
325 l2 = gen_new_label();
326 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
327 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
329 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
330 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
337 env->psr |= PSR_CARRY;
339 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
341 TCGv r_temp1, r_temp2;
344 l1 = gen_new_label();
345 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
346 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
347 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
348 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
349 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
350 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
352 tcg_temp_free(r_temp1);
353 tcg_temp_free(r_temp2);
356 #ifdef TARGET_SPARC64
357 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
361 l1 = gen_new_label();
362 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
363 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
369 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
372 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
376 r_temp = tcg_temp_new(TCG_TYPE_TL);
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xori_tl(r_temp, r_temp, -1);
379 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
380 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
381 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
382 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
383 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
384 tcg_temp_free(r_temp);
385 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
388 #ifdef TARGET_SPARC64
389 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
393 r_temp = tcg_temp_new(TCG_TYPE_TL);
394 tcg_gen_xor_tl(r_temp, src1, src2);
395 tcg_gen_xori_tl(r_temp, r_temp, -1);
396 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
397 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
398 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
399 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
400 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
401 tcg_temp_free(r_temp);
402 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
406 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
408 TCGv r_temp, r_const;
411 l1 = gen_new_label();
413 r_temp = tcg_temp_new(TCG_TYPE_TL);
414 tcg_gen_xor_tl(r_temp, src1, src2);
415 tcg_gen_xori_tl(r_temp, r_temp, -1);
416 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
417 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
418 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
419 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
420 r_const = tcg_const_i32(TT_TOVF);
421 tcg_gen_helper_0_1(raise_exception, r_const);
422 tcg_temp_free(r_const);
424 tcg_temp_free(r_temp);
427 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
431 l1 = gen_new_label();
432 tcg_gen_or_tl(cpu_tmp0, src1, src2);
433 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
434 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
435 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
439 static inline void gen_tag_tv(TCGv src1, TCGv src2)
444 l1 = gen_new_label();
445 tcg_gen_or_tl(cpu_tmp0, src1, src2);
446 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
447 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
448 r_const = tcg_const_i32(TT_TOVF);
449 tcg_gen_helper_0_1(raise_exception, r_const);
450 tcg_temp_free(r_const);
454 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
456 tcg_gen_mov_tl(cpu_cc_src, src1);
457 tcg_gen_mov_tl(cpu_cc_src2, src2);
458 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
460 gen_cc_NZ_icc(cpu_cc_dst);
461 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
462 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463 #ifdef TARGET_SPARC64
465 gen_cc_NZ_xcc(cpu_cc_dst);
466 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
467 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
469 tcg_gen_mov_tl(dst, cpu_cc_dst);
472 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
474 tcg_gen_mov_tl(cpu_cc_src, src1);
475 tcg_gen_mov_tl(cpu_cc_src2, src2);
476 gen_mov_reg_C(cpu_tmp0, cpu_psr);
477 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
479 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
480 #ifdef TARGET_SPARC64
482 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
484 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
485 gen_cc_NZ_icc(cpu_cc_dst);
486 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
487 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
488 #ifdef TARGET_SPARC64
489 gen_cc_NZ_xcc(cpu_cc_dst);
490 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
491 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
493 tcg_gen_mov_tl(dst, cpu_cc_dst);
496 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
498 tcg_gen_mov_tl(cpu_cc_src, src1);
499 tcg_gen_mov_tl(cpu_cc_src2, src2);
500 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502 gen_cc_NZ_icc(cpu_cc_dst);
503 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
504 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
505 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
506 #ifdef TARGET_SPARC64
508 gen_cc_NZ_xcc(cpu_cc_dst);
509 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
510 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
512 tcg_gen_mov_tl(dst, cpu_cc_dst);
515 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
517 tcg_gen_mov_tl(cpu_cc_src, src1);
518 tcg_gen_mov_tl(cpu_cc_src2, src2);
519 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
520 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
521 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
523 gen_cc_NZ_icc(cpu_cc_dst);
524 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
525 #ifdef TARGET_SPARC64
527 gen_cc_NZ_xcc(cpu_cc_dst);
528 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
529 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
531 tcg_gen_mov_tl(dst, cpu_cc_dst);
536 env->psr |= PSR_CARRY;
538 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
540 TCGv r_temp1, r_temp2;
543 l1 = gen_new_label();
544 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
545 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
546 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
547 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
548 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
549 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
551 tcg_temp_free(r_temp1);
552 tcg_temp_free(r_temp2);
555 #ifdef TARGET_SPARC64
556 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
560 l1 = gen_new_label();
561 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
562 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
568 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
571 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
575 r_temp = tcg_temp_new(TCG_TYPE_TL);
576 tcg_gen_xor_tl(r_temp, src1, src2);
577 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
578 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
579 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
580 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
581 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
582 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
583 tcg_temp_free(r_temp);
586 #ifdef TARGET_SPARC64
587 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
591 r_temp = tcg_temp_new(TCG_TYPE_TL);
592 tcg_gen_xor_tl(r_temp, src1, src2);
593 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
594 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
595 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
596 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
597 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
598 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
599 tcg_temp_free(r_temp);
603 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
605 TCGv r_temp, r_const;
608 l1 = gen_new_label();
610 r_temp = tcg_temp_new(TCG_TYPE_TL);
611 tcg_gen_xor_tl(r_temp, src1, src2);
612 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
613 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
614 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
615 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
616 r_const = tcg_const_i32(TT_TOVF);
617 tcg_gen_helper_0_1(raise_exception, r_const);
618 tcg_temp_free(r_const);
620 tcg_temp_free(r_temp);
623 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
625 tcg_gen_mov_tl(cpu_cc_src, src1);
626 tcg_gen_mov_tl(cpu_cc_src2, src2);
627 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
629 gen_cc_NZ_icc(cpu_cc_dst);
630 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
631 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
632 #ifdef TARGET_SPARC64
634 gen_cc_NZ_xcc(cpu_cc_dst);
635 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
636 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
638 tcg_gen_mov_tl(dst, cpu_cc_dst);
641 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
643 tcg_gen_mov_tl(cpu_cc_src, src1);
644 tcg_gen_mov_tl(cpu_cc_src2, src2);
645 gen_mov_reg_C(cpu_tmp0, cpu_psr);
646 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
648 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
649 #ifdef TARGET_SPARC64
651 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
653 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
654 gen_cc_NZ_icc(cpu_cc_dst);
655 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
656 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 #ifdef TARGET_SPARC64
658 gen_cc_NZ_xcc(cpu_cc_dst);
659 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
660 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
662 tcg_gen_mov_tl(dst, cpu_cc_dst);
665 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
667 tcg_gen_mov_tl(cpu_cc_src, src1);
668 tcg_gen_mov_tl(cpu_cc_src2, src2);
669 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
671 gen_cc_NZ_icc(cpu_cc_dst);
672 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
673 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
674 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
675 #ifdef TARGET_SPARC64
677 gen_cc_NZ_xcc(cpu_cc_dst);
678 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
679 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
681 tcg_gen_mov_tl(dst, cpu_cc_dst);
684 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
686 tcg_gen_mov_tl(cpu_cc_src, src1);
687 tcg_gen_mov_tl(cpu_cc_src2, src2);
688 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
689 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
690 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
692 gen_cc_NZ_icc(cpu_cc_dst);
693 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
694 #ifdef TARGET_SPARC64
696 gen_cc_NZ_xcc(cpu_cc_dst);
697 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
698 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
700 tcg_gen_mov_tl(dst, cpu_cc_dst);
703 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
708 l1 = gen_new_label();
709 r_temp = tcg_temp_new(TCG_TYPE_TL);
715 tcg_gen_mov_tl(cpu_cc_src, src1);
716 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
717 tcg_gen_mov_tl(cpu_cc_src2, src2);
718 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
719 tcg_gen_movi_tl(cpu_cc_src2, 0);
723 // env->y = (b2 << 31) | (env->y >> 1);
724 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
725 tcg_gen_shli_tl(r_temp, r_temp, 31);
726 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
727 tcg_gen_or_tl(cpu_y, cpu_tmp0, r_temp);
730 gen_mov_reg_N(cpu_tmp0, cpu_psr);
731 gen_mov_reg_V(r_temp, cpu_psr);
732 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
733 tcg_temp_free(r_temp);
735 // T0 = (b1 << 31) | (T0 >> 1);
737 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
738 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
739 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
741 /* do addition and update flags */
742 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
745 gen_cc_NZ_icc(cpu_cc_dst);
746 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
747 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
748 tcg_gen_mov_tl(dst, cpu_cc_dst);
751 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753 TCGv r_temp, r_temp2;
755 r_temp = tcg_temp_new(TCG_TYPE_I64);
756 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
758 tcg_gen_extu_i32_i64(r_temp, src2);
759 tcg_gen_extu_i32_i64(r_temp2, src1);
760 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
762 tcg_gen_shri_i64(r_temp, r_temp2, 32);
763 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
764 tcg_temp_free(r_temp);
765 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
766 #ifdef TARGET_SPARC64
767 tcg_gen_mov_i64(dst, r_temp2);
769 tcg_gen_trunc_i64_tl(dst, r_temp2);
771 tcg_temp_free(r_temp2);
774 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
776 TCGv r_temp, r_temp2;
778 r_temp = tcg_temp_new(TCG_TYPE_I64);
779 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
781 tcg_gen_ext_i32_i64(r_temp, src2);
782 tcg_gen_ext_i32_i64(r_temp2, src1);
783 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
785 tcg_gen_shri_i64(r_temp, r_temp2, 32);
786 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
787 tcg_temp_free(r_temp);
788 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
789 #ifdef TARGET_SPARC64
790 tcg_gen_mov_i64(dst, r_temp2);
792 tcg_gen_trunc_i64_tl(dst, r_temp2);
794 tcg_temp_free(r_temp2);
797 #ifdef TARGET_SPARC64
798 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
803 l1 = gen_new_label();
804 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
805 r_const = tcg_const_i32(TT_DIV_ZERO);
806 tcg_gen_helper_0_1(raise_exception, r_const);
807 tcg_temp_free(r_const);
811 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
815 l1 = gen_new_label();
816 l2 = gen_new_label();
817 tcg_gen_mov_tl(cpu_cc_src, src1);
818 tcg_gen_mov_tl(cpu_cc_src2, src2);
819 gen_trap_ifdivzero_tl(cpu_cc_src2);
820 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
821 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
822 tcg_gen_movi_i64(dst, INT64_MIN);
825 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
830 static inline void gen_op_div_cc(TCGv dst)
834 tcg_gen_mov_tl(cpu_cc_dst, dst);
836 gen_cc_NZ_icc(cpu_cc_dst);
837 l1 = gen_new_label();
838 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
839 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
843 static inline void gen_op_logic_cc(TCGv dst)
845 tcg_gen_mov_tl(cpu_cc_dst, dst);
848 gen_cc_NZ_icc(cpu_cc_dst);
849 #ifdef TARGET_SPARC64
851 gen_cc_NZ_xcc(cpu_cc_dst);
856 static inline void gen_op_eval_ba(TCGv dst)
858 tcg_gen_movi_tl(dst, 1);
862 static inline void gen_op_eval_be(TCGv dst, TCGv src)
864 gen_mov_reg_Z(dst, src);
868 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
870 gen_mov_reg_N(cpu_tmp0, src);
871 gen_mov_reg_V(dst, src);
872 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
873 gen_mov_reg_Z(cpu_tmp0, src);
874 tcg_gen_or_tl(dst, dst, cpu_tmp0);
878 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
880 gen_mov_reg_V(cpu_tmp0, src);
881 gen_mov_reg_N(dst, src);
882 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
886 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
888 gen_mov_reg_Z(cpu_tmp0, src);
889 gen_mov_reg_C(dst, src);
890 tcg_gen_or_tl(dst, dst, cpu_tmp0);
894 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
896 gen_mov_reg_C(dst, src);
900 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
902 gen_mov_reg_V(dst, src);
906 static inline void gen_op_eval_bn(TCGv dst)
908 tcg_gen_movi_tl(dst, 0);
912 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
914 gen_mov_reg_N(dst, src);
918 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
920 gen_mov_reg_Z(dst, src);
921 tcg_gen_xori_tl(dst, dst, 0x1);
925 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
927 gen_mov_reg_N(cpu_tmp0, src);
928 gen_mov_reg_V(dst, src);
929 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
930 gen_mov_reg_Z(cpu_tmp0, src);
931 tcg_gen_or_tl(dst, dst, cpu_tmp0);
932 tcg_gen_xori_tl(dst, dst, 0x1);
936 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
938 gen_mov_reg_V(cpu_tmp0, src);
939 gen_mov_reg_N(dst, src);
940 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
941 tcg_gen_xori_tl(dst, dst, 0x1);
945 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
947 gen_mov_reg_Z(cpu_tmp0, src);
948 gen_mov_reg_C(dst, src);
949 tcg_gen_or_tl(dst, dst, cpu_tmp0);
950 tcg_gen_xori_tl(dst, dst, 0x1);
954 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
956 gen_mov_reg_C(dst, src);
957 tcg_gen_xori_tl(dst, dst, 0x1);
961 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
963 gen_mov_reg_N(dst, src);
964 tcg_gen_xori_tl(dst, dst, 0x1);
968 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
970 gen_mov_reg_V(dst, src);
971 tcg_gen_xori_tl(dst, dst, 0x1);
975 FPSR bit field FCC1 | FCC0:
981 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
982 unsigned int fcc_offset)
984 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
985 tcg_gen_andi_tl(reg, reg, 0x1);
988 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
989 unsigned int fcc_offset)
991 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
992 tcg_gen_andi_tl(reg, reg, 0x1);
996 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
997 unsigned int fcc_offset)
999 gen_mov_reg_FCC0(dst, src, fcc_offset);
1000 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1001 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1006 unsigned int fcc_offset)
1008 gen_mov_reg_FCC0(dst, src, fcc_offset);
1009 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1010 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1014 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1021 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1022 unsigned int fcc_offset)
1024 gen_mov_reg_FCC0(dst, src, fcc_offset);
1025 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1026 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1027 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1031 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1032 unsigned int fcc_offset)
1034 gen_mov_reg_FCC1(dst, src, fcc_offset);
1038 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1039 unsigned int fcc_offset)
1041 gen_mov_reg_FCC0(dst, src, fcc_offset);
1042 tcg_gen_xori_tl(dst, dst, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1044 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1048 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1049 unsigned int fcc_offset)
1051 gen_mov_reg_FCC0(dst, src, fcc_offset);
1052 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1053 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1058 unsigned int fcc_offset)
1060 gen_mov_reg_FCC0(dst, src, fcc_offset);
1061 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1062 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1063 tcg_gen_xori_tl(dst, dst, 0x1);
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1068 unsigned int fcc_offset)
1070 gen_mov_reg_FCC0(dst, src, fcc_offset);
1071 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1072 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1073 tcg_gen_xori_tl(dst, dst, 0x1);
1077 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1084 // !1: !(FCC0 & !FCC1)
1085 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1086 unsigned int fcc_offset)
1088 gen_mov_reg_FCC0(dst, src, fcc_offset);
1089 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1090 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1091 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1092 tcg_gen_xori_tl(dst, dst, 0x1);
1096 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1097 unsigned int fcc_offset)
1099 gen_mov_reg_FCC1(dst, src, fcc_offset);
1100 tcg_gen_xori_tl(dst, dst, 0x1);
1103 // !2: !(!FCC0 & FCC1)
1104 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1107 gen_mov_reg_FCC0(dst, src, fcc_offset);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1111 tcg_gen_xori_tl(dst, dst, 0x1);
1114 // !3: !(FCC0 & FCC1)
1115 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1116 unsigned int fcc_offset)
1118 gen_mov_reg_FCC0(dst, src, fcc_offset);
1119 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1120 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1121 tcg_gen_xori_tl(dst, dst, 0x1);
1124 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1125 target_ulong pc2, TCGv r_cond)
1129 l1 = gen_new_label();
1131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1133 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1136 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1139 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1140 target_ulong pc2, TCGv r_cond)
1144 l1 = gen_new_label();
1146 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1148 gen_goto_tb(dc, 0, pc2, pc1);
1151 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1154 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1159 l1 = gen_new_label();
1160 l2 = gen_new_label();
1162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1164 tcg_gen_movi_tl(cpu_npc, npc1);
1168 tcg_gen_movi_tl(cpu_npc, npc2);
1172 /* call this function before using the condition register as it may
1173 have been set for a jump */
1174 static inline void flush_cond(DisasContext *dc, TCGv cond)
1176 if (dc->npc == JUMP_PC) {
1177 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1178 dc->npc = DYNAMIC_PC;
1182 static inline void save_npc(DisasContext *dc, TCGv cond)
1184 if (dc->npc == JUMP_PC) {
1185 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1186 dc->npc = DYNAMIC_PC;
1187 } else if (dc->npc != DYNAMIC_PC) {
1188 tcg_gen_movi_tl(cpu_npc, dc->npc);
1192 static inline void save_state(DisasContext *dc, TCGv cond)
1194 tcg_gen_movi_tl(cpu_pc, dc->pc);
1198 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1200 if (dc->npc == JUMP_PC) {
1201 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1202 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1203 dc->pc = DYNAMIC_PC;
1204 } else if (dc->npc == DYNAMIC_PC) {
1205 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1206 dc->pc = DYNAMIC_PC;
1212 static inline void gen_op_next_insn(void)
1214 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1215 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1218 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1222 #ifdef TARGET_SPARC64
1232 gen_op_eval_bn(r_dst);
1235 gen_op_eval_be(r_dst, r_src);
1238 gen_op_eval_ble(r_dst, r_src);
1241 gen_op_eval_bl(r_dst, r_src);
1244 gen_op_eval_bleu(r_dst, r_src);
1247 gen_op_eval_bcs(r_dst, r_src);
1250 gen_op_eval_bneg(r_dst, r_src);
1253 gen_op_eval_bvs(r_dst, r_src);
1256 gen_op_eval_ba(r_dst);
1259 gen_op_eval_bne(r_dst, r_src);
1262 gen_op_eval_bg(r_dst, r_src);
1265 gen_op_eval_bge(r_dst, r_src);
1268 gen_op_eval_bgu(r_dst, r_src);
1271 gen_op_eval_bcc(r_dst, r_src);
1274 gen_op_eval_bpos(r_dst, r_src);
1277 gen_op_eval_bvc(r_dst, r_src);
1282 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1284 unsigned int offset;
1304 gen_op_eval_bn(r_dst);
1307 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1310 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1313 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1316 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1319 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1322 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1325 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1328 gen_op_eval_ba(r_dst);
1331 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1334 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1337 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1340 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1343 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1346 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1349 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1354 #ifdef TARGET_SPARC64
1356 static const int gen_tcg_cond_reg[8] = {
1367 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1371 l1 = gen_new_label();
1372 tcg_gen_movi_tl(r_dst, 0);
1373 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1374 tcg_gen_movi_tl(r_dst, 1);
1379 /* XXX: potentially incorrect if dynamic npc */
1380 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1383 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1384 target_ulong target = dc->pc + offset;
1387 /* unconditional not taken */
1389 dc->pc = dc->npc + 4;
1390 dc->npc = dc->pc + 4;
1393 dc->npc = dc->pc + 4;
1395 } else if (cond == 0x8) {
1396 /* unconditional taken */
1399 dc->npc = dc->pc + 4;
1405 flush_cond(dc, r_cond);
1406 gen_cond(r_cond, cc, cond);
1408 gen_branch_a(dc, target, dc->npc, r_cond);
1412 dc->jump_pc[0] = target;
1413 dc->jump_pc[1] = dc->npc + 4;
1419 /* XXX: potentially incorrect if dynamic npc */
1420 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1423 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1424 target_ulong target = dc->pc + offset;
1427 /* unconditional not taken */
1429 dc->pc = dc->npc + 4;
1430 dc->npc = dc->pc + 4;
1433 dc->npc = dc->pc + 4;
1435 } else if (cond == 0x8) {
1436 /* unconditional taken */
1439 dc->npc = dc->pc + 4;
1445 flush_cond(dc, r_cond);
1446 gen_fcond(r_cond, cc, cond);
1448 gen_branch_a(dc, target, dc->npc, r_cond);
1452 dc->jump_pc[0] = target;
1453 dc->jump_pc[1] = dc->npc + 4;
1459 #ifdef TARGET_SPARC64
1460 /* XXX: potentially incorrect if dynamic npc */
1461 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1462 TCGv r_cond, TCGv r_reg)
1464 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1465 target_ulong target = dc->pc + offset;
1467 flush_cond(dc, r_cond);
1468 gen_cond_reg(r_cond, cond, r_reg);
1470 gen_branch_a(dc, target, dc->npc, r_cond);
1474 dc->jump_pc[0] = target;
1475 dc->jump_pc[1] = dc->npc + 4;
1480 static GenOpFunc * const gen_fcmpd[4] = {
1487 static GenOpFunc * const gen_fcmpq[4] = {
1494 static GenOpFunc * const gen_fcmped[4] = {
1501 static GenOpFunc * const gen_fcmpeq[4] = {
1508 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1512 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1515 tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
1518 tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
1521 tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
1526 static inline void gen_op_fcmpd(int fccno)
1528 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1531 static inline void gen_op_fcmpq(int fccno)
1533 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
1536 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1540 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1543 tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
1546 tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
1549 tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
1554 static inline void gen_op_fcmped(int fccno)
1556 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1559 static inline void gen_op_fcmpeq(int fccno)
1561 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1566 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1568 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1571 static inline void gen_op_fcmpd(int fccno)
1573 tcg_gen_helper_0_0(helper_fcmpd);
1576 static inline void gen_op_fcmpq(int fccno)
1578 tcg_gen_helper_0_0(helper_fcmpq);
1581 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1583 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1586 static inline void gen_op_fcmped(int fccno)
1588 tcg_gen_helper_0_0(helper_fcmped);
1591 static inline void gen_op_fcmpeq(int fccno)
1593 tcg_gen_helper_0_0(helper_fcmpeq);
1597 static inline void gen_op_fpexception_im(int fsr_flags)
1601 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1602 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1603 r_const = tcg_const_i32(TT_FP_EXCP);
1604 tcg_gen_helper_0_1(raise_exception, r_const);
1605 tcg_temp_free(r_const);
1608 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1610 #if !defined(CONFIG_USER_ONLY)
1611 if (!dc->fpu_enabled) {
1614 save_state(dc, r_cond);
1615 r_const = tcg_const_i32(TT_NFPU_INSN);
1616 tcg_gen_helper_0_1(raise_exception, r_const);
1617 tcg_temp_free(r_const);
1625 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1627 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1630 static inline void gen_clear_float_exceptions(void)
1632 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1636 #ifdef TARGET_SPARC64
1637 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1643 r_asi = tcg_temp_new(TCG_TYPE_I32);
1644 tcg_gen_mov_i32(r_asi, cpu_asi);
1646 asi = GET_FIELD(insn, 19, 26);
1647 r_asi = tcg_const_i32(asi);
1652 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1655 TCGv r_asi, r_size, r_sign;
1657 r_asi = gen_get_asi(insn, addr);
1658 r_size = tcg_const_i32(size);
1659 r_sign = tcg_const_i32(sign);
1660 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1661 tcg_temp_free(r_sign);
1662 tcg_temp_free(r_size);
1663 tcg_temp_free(r_asi);
1666 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1670 r_asi = gen_get_asi(insn, addr);
1671 r_size = tcg_const_i32(size);
1672 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1673 tcg_temp_free(r_size);
1674 tcg_temp_free(r_asi);
1677 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1679 TCGv r_asi, r_size, r_rd;
1681 r_asi = gen_get_asi(insn, addr);
1682 r_size = tcg_const_i32(size);
1683 r_rd = tcg_const_i32(rd);
1684 tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
1685 tcg_temp_free(r_rd);
1686 tcg_temp_free(r_size);
1687 tcg_temp_free(r_asi);
1690 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1692 TCGv r_asi, r_size, r_rd;
1694 r_asi = gen_get_asi(insn, addr);
1695 r_size = tcg_const_i32(size);
1696 r_rd = tcg_const_i32(rd);
1697 tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
1698 tcg_temp_free(r_rd);
1699 tcg_temp_free(r_size);
1700 tcg_temp_free(r_asi);
1703 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1705 TCGv r_asi, r_size, r_sign;
1707 r_asi = gen_get_asi(insn, addr);
1708 r_size = tcg_const_i32(4);
1709 r_sign = tcg_const_i32(0);
1710 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1711 tcg_temp_free(r_sign);
1712 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1713 tcg_temp_free(r_size);
1714 tcg_temp_free(r_asi);
1715 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1718 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1722 r_asi = gen_get_asi(insn, addr);
1723 r_rd = tcg_const_i32(rd);
1724 tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
1725 tcg_temp_free(r_rd);
1726 tcg_temp_free(r_asi);
1729 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1731 TCGv r_temp, r_asi, r_size;
1733 r_temp = tcg_temp_new(TCG_TYPE_TL);
1734 gen_movl_reg_TN(rd + 1, r_temp);
1735 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
1737 tcg_temp_free(r_temp);
1738 r_asi = gen_get_asi(insn, addr);
1739 r_size = tcg_const_i32(8);
1740 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1741 tcg_temp_free(r_size);
1742 tcg_temp_free(r_asi);
1745 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1750 r_val1 = tcg_temp_new(TCG_TYPE_TL);
1751 gen_movl_reg_TN(rd, r_val1);
1752 r_asi = gen_get_asi(insn, addr);
1753 tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
1754 tcg_temp_free(r_asi);
1755 tcg_temp_free(r_val1);
1758 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1763 gen_movl_reg_TN(rd, cpu_tmp64);
1764 r_asi = gen_get_asi(insn, addr);
1765 tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
1766 tcg_temp_free(r_asi);
1769 #elif !defined(CONFIG_USER_ONLY)
1771 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1774 TCGv r_asi, r_size, r_sign;
1776 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1777 r_size = tcg_const_i32(size);
1778 r_sign = tcg_const_i32(sign);
1779 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1780 tcg_temp_free(r_sign);
1781 tcg_temp_free(r_size);
1782 tcg_temp_free(r_asi);
1783 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1786 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1790 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1791 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1792 r_size = tcg_const_i32(size);
1793 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1794 tcg_temp_free(r_size);
1795 tcg_temp_free(r_asi);
1798 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1800 TCGv r_asi, r_size, r_sign;
1802 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1803 r_size = tcg_const_i32(4);
1804 r_sign = tcg_const_i32(0);
1805 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1806 tcg_temp_free(r_sign);
1807 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1808 tcg_temp_free(r_size);
1809 tcg_temp_free(r_asi);
1810 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1813 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1815 TCGv r_asi, r_size, r_sign;
1817 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1818 r_size = tcg_const_i32(8);
1819 r_sign = tcg_const_i32(0);
1820 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1821 tcg_temp_free(r_sign);
1822 tcg_temp_free(r_size);
1823 tcg_temp_free(r_asi);
1824 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1825 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1826 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1827 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1828 gen_movl_TN_reg(rd, hi);
1831 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1833 TCGv r_temp, r_asi, r_size;
1835 r_temp = tcg_temp_new(TCG_TYPE_TL);
1836 gen_movl_reg_TN(rd + 1, r_temp);
1837 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
1838 tcg_temp_free(r_temp);
1839 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1840 r_size = tcg_const_i32(8);
1841 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1842 tcg_temp_free(r_size);
1843 tcg_temp_free(r_asi);
1847 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1848 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1850 TCGv r_val, r_asi, r_size;
1852 gen_ld_asi(dst, addr, insn, 1, 0);
1854 r_val = tcg_const_i64(0xffULL);
1855 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1856 r_size = tcg_const_i32(1);
1857 tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
1858 tcg_temp_free(r_size);
1859 tcg_temp_free(r_asi);
1860 tcg_temp_free(r_val);
1864 static inline TCGv get_src1(unsigned int insn, TCGv def)
1869 rs1 = GET_FIELD(insn, 13, 17);
1871 r_rs1 = tcg_const_tl(0); // XXX how to free?
1873 r_rs1 = cpu_gregs[rs1];
1875 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1879 static inline TCGv get_src2(unsigned int insn, TCGv def)
1884 if (IS_IMM) { /* immediate */
1885 rs2 = GET_FIELDs(insn, 19, 31);
1886 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1887 } else { /* register */
1888 rs2 = GET_FIELD(insn, 27, 31);
1890 r_rs2 = tcg_const_tl(0); // XXX how to free?
1892 r_rs2 = cpu_gregs[rs2];
1894 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1899 #define CHECK_IU_FEATURE(dc, FEATURE) \
1900 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1902 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1903 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1906 /* before an instruction, dc->pc must be static */
1907 static void disas_sparc_insn(DisasContext * dc)
1909 unsigned int insn, opc, rs1, rs2, rd;
1911 if (unlikely(loglevel & CPU_LOG_TB_OP))
1912 tcg_gen_debug_insn_start(dc->pc);
1913 insn = ldl_code(dc->pc);
1914 opc = GET_FIELD(insn, 0, 1);
1916 rd = GET_FIELD(insn, 2, 6);
1918 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1919 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1922 case 0: /* branches/sethi */
1924 unsigned int xop = GET_FIELD(insn, 7, 9);
1927 #ifdef TARGET_SPARC64
1928 case 0x1: /* V9 BPcc */
1932 target = GET_FIELD_SP(insn, 0, 18);
1933 target = sign_extend(target, 18);
1935 cc = GET_FIELD_SP(insn, 20, 21);
1937 do_branch(dc, target, insn, 0, cpu_cond);
1939 do_branch(dc, target, insn, 1, cpu_cond);
1944 case 0x3: /* V9 BPr */
1946 target = GET_FIELD_SP(insn, 0, 13) |
1947 (GET_FIELD_SP(insn, 20, 21) << 14);
1948 target = sign_extend(target, 16);
1950 cpu_src1 = get_src1(insn, cpu_src1);
1951 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1954 case 0x5: /* V9 FBPcc */
1956 int cc = GET_FIELD_SP(insn, 20, 21);
1957 if (gen_trap_ifnofpu(dc, cpu_cond))
1959 target = GET_FIELD_SP(insn, 0, 18);
1960 target = sign_extend(target, 19);
1962 do_fbranch(dc, target, insn, cc, cpu_cond);
1966 case 0x7: /* CBN+x */
1971 case 0x2: /* BN+x */
1973 target = GET_FIELD(insn, 10, 31);
1974 target = sign_extend(target, 22);
1976 do_branch(dc, target, insn, 0, cpu_cond);
1979 case 0x6: /* FBN+x */
1981 if (gen_trap_ifnofpu(dc, cpu_cond))
1983 target = GET_FIELD(insn, 10, 31);
1984 target = sign_extend(target, 22);
1986 do_fbranch(dc, target, insn, 0, cpu_cond);
1989 case 0x4: /* SETHI */
1991 uint32_t value = GET_FIELD(insn, 10, 31);
1994 r_const = tcg_const_tl(value << 10);
1995 gen_movl_TN_reg(rd, r_const);
1996 tcg_temp_free(r_const);
1999 case 0x0: /* UNIMPL */
2008 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2011 r_const = tcg_const_tl(dc->pc);
2012 gen_movl_TN_reg(15, r_const);
2013 tcg_temp_free(r_const);
2015 gen_mov_pc_npc(dc, cpu_cond);
2019 case 2: /* FPU & Logical Operations */
2021 unsigned int xop = GET_FIELD(insn, 7, 12);
2022 if (xop == 0x3a) { /* generate trap */
2025 cpu_src1 = get_src1(insn, cpu_src1);
2027 rs2 = GET_FIELD(insn, 25, 31);
2028 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2030 rs2 = GET_FIELD(insn, 27, 31);
2032 gen_movl_reg_TN(rs2, cpu_src2);
2033 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2035 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2037 cond = GET_FIELD(insn, 3, 6);
2039 save_state(dc, cpu_cond);
2040 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2041 } else if (cond != 0) {
2042 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2043 #ifdef TARGET_SPARC64
2045 int cc = GET_FIELD_SP(insn, 11, 12);
2047 save_state(dc, cpu_cond);
2049 gen_cond(r_cond, 0, cond);
2051 gen_cond(r_cond, 1, cond);
2055 save_state(dc, cpu_cond);
2056 gen_cond(r_cond, 0, cond);
2058 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2059 tcg_temp_free(r_cond);
2065 } else if (xop == 0x28) {
2066 rs1 = GET_FIELD(insn, 13, 17);
2069 #ifndef TARGET_SPARC64
2070 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2071 manual, rdy on the microSPARC
2073 case 0x0f: /* stbar in the SPARCv8 manual,
2074 rdy on the microSPARC II */
2075 case 0x10 ... 0x1f: /* implementation-dependent in the
2076 SPARCv8 manual, rdy on the
2079 gen_movl_TN_reg(rd, cpu_y);
2081 #ifdef TARGET_SPARC64
2082 case 0x2: /* V9 rdccr */
2083 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2084 gen_movl_TN_reg(rd, cpu_dst);
2086 case 0x3: /* V9 rdasi */
2087 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2088 gen_movl_TN_reg(rd, cpu_dst);
2090 case 0x4: /* V9 rdtick */
2094 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2095 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2096 offsetof(CPUState, tick));
2097 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2099 tcg_temp_free(r_tickptr);
2100 gen_movl_TN_reg(rd, cpu_dst);
2103 case 0x5: /* V9 rdpc */
2107 r_const = tcg_const_tl(dc->pc);
2108 gen_movl_TN_reg(rd, r_const);
2109 tcg_temp_free(r_const);
2112 case 0x6: /* V9 rdfprs */
2113 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2114 gen_movl_TN_reg(rd, cpu_dst);
2116 case 0xf: /* V9 membar */
2117 break; /* no effect */
2118 case 0x13: /* Graphics Status */
2119 if (gen_trap_ifnofpu(dc, cpu_cond))
2121 gen_movl_TN_reg(rd, cpu_gsr);
2123 case 0x17: /* Tick compare */
2124 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2126 case 0x18: /* System tick */
2130 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2131 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2132 offsetof(CPUState, stick));
2133 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2135 tcg_temp_free(r_tickptr);
2136 gen_movl_TN_reg(rd, cpu_dst);
2139 case 0x19: /* System tick compare */
2140 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2142 case 0x10: /* Performance Control */
2143 case 0x11: /* Performance Instrumentation Counter */
2144 case 0x12: /* Dispatch Control */
2145 case 0x14: /* Softint set, WO */
2146 case 0x15: /* Softint clear, WO */
2147 case 0x16: /* Softint write */
2152 #if !defined(CONFIG_USER_ONLY)
2153 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2154 #ifndef TARGET_SPARC64
2155 if (!supervisor(dc))
2157 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2159 CHECK_IU_FEATURE(dc, HYPV);
2160 if (!hypervisor(dc))
2162 rs1 = GET_FIELD(insn, 13, 17);
2165 // gen_op_rdhpstate();
2168 // gen_op_rdhtstate();
2171 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2174 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2177 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2179 case 31: // hstick_cmpr
2180 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2186 gen_movl_TN_reg(rd, cpu_dst);
2188 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2189 if (!supervisor(dc))
2191 #ifdef TARGET_SPARC64
2192 rs1 = GET_FIELD(insn, 13, 17);
2198 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2199 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2200 offsetof(CPUState, tsptr));
2201 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2202 offsetof(trap_state, tpc));
2203 tcg_temp_free(r_tsptr);
2210 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2211 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2212 offsetof(CPUState, tsptr));
2213 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2214 offsetof(trap_state, tnpc));
2215 tcg_temp_free(r_tsptr);
2222 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2223 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2224 offsetof(CPUState, tsptr));
2225 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2226 offsetof(trap_state, tstate));
2227 tcg_temp_free(r_tsptr);
2234 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2235 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2236 offsetof(CPUState, tsptr));
2237 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2238 offsetof(trap_state, tt));
2239 tcg_temp_free(r_tsptr);
2246 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2247 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2248 offsetof(CPUState, tick));
2249 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2251 gen_movl_TN_reg(rd, cpu_tmp0);
2252 tcg_temp_free(r_tickptr);
2256 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2259 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2260 offsetof(CPUSPARCState, pstate));
2261 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2264 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2265 offsetof(CPUSPARCState, tl));
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, psrpil));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2274 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, cansave));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2281 case 11: // canrestore
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, canrestore));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286 case 12: // cleanwin
2287 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2288 offsetof(CPUSPARCState, cleanwin));
2289 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2291 case 13: // otherwin
2292 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2293 offsetof(CPUSPARCState, otherwin));
2294 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2297 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2298 offsetof(CPUSPARCState, wstate));
2299 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2301 case 16: // UA2005 gl
2302 CHECK_IU_FEATURE(dc, GL);
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, gl));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2307 case 26: // UA2005 strand status
2308 CHECK_IU_FEATURE(dc, HYPV);
2309 if (!hypervisor(dc))
2311 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2314 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2321 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2323 gen_movl_TN_reg(rd, cpu_tmp0);
2325 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2326 #ifdef TARGET_SPARC64
2327 save_state(dc, cpu_cond);
2328 tcg_gen_helper_0_0(helper_flushw);
2330 if (!supervisor(dc))
2332 gen_movl_TN_reg(rd, cpu_tbr);
2336 } else if (xop == 0x34) { /* FPU Operations */
2337 if (gen_trap_ifnofpu(dc, cpu_cond))
2339 gen_op_clear_ieee_excp_and_FTT();
2340 rs1 = GET_FIELD(insn, 13, 17);
2341 rs2 = GET_FIELD(insn, 27, 31);
2342 xop = GET_FIELD(insn, 18, 26);
2344 case 0x1: /* fmovs */
2345 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2347 case 0x5: /* fnegs */
2348 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2351 case 0x9: /* fabss */
2352 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2355 case 0x29: /* fsqrts */
2356 CHECK_FPU_FEATURE(dc, FSQRT);
2357 gen_clear_float_exceptions();
2358 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2360 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc, FSQRT);
2365 gen_op_load_fpr_DT1(DFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 tcg_gen_helper_0_0(helper_fsqrtd);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2369 gen_op_store_DT0_fpr(DFPREG(rd));
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc, FLOAT128);
2373 gen_op_load_fpr_QT1(QFPREG(rs2));
2374 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_fsqrtq);
2376 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2377 gen_op_store_QT0_fpr(QFPREG(rd));
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2382 cpu_fpr[rs1], cpu_fpr[rs2]);
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2387 gen_op_load_fpr_DT0(DFPREG(rs1));
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_0_0(helper_faddd);
2391 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT0(QFPREG(rs1));
2397 gen_op_load_fpr_QT1(QFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 tcg_gen_helper_0_0(helper_faddq);
2400 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2401 gen_op_store_QT0_fpr(QFPREG(rd));
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2406 cpu_fpr[rs1], cpu_fpr[rs2]);
2407 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2408 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2411 gen_op_load_fpr_DT0(DFPREG(rs1));
2412 gen_op_load_fpr_DT1(DFPREG(rs2));
2413 gen_clear_float_exceptions();
2414 tcg_gen_helper_0_0(helper_fsubd);
2415 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2416 gen_op_store_DT0_fpr(DFPREG(rd));
2418 case 0x47: /* fsubq */
2419 CHECK_FPU_FEATURE(dc, FLOAT128);
2420 gen_op_load_fpr_QT0(QFPREG(rs1));
2421 gen_op_load_fpr_QT1(QFPREG(rs2));
2422 gen_clear_float_exceptions();
2423 tcg_gen_helper_0_0(helper_fsubq);
2424 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2425 gen_op_store_QT0_fpr(QFPREG(rd));
2427 case 0x49: /* fmuls */
2428 CHECK_FPU_FEATURE(dc, FMUL);
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2431 cpu_fpr[rs1], cpu_fpr[rs2]);
2432 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2433 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2435 case 0x4a: /* fmuld */
2436 CHECK_FPU_FEATURE(dc, FMUL);
2437 gen_op_load_fpr_DT0(DFPREG(rs1));
2438 gen_op_load_fpr_DT1(DFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmuld);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2442 gen_op_store_DT0_fpr(DFPREG(rd));
2444 case 0x4b: /* fmulq */
2445 CHECK_FPU_FEATURE(dc, FLOAT128);
2446 CHECK_FPU_FEATURE(dc, FMUL);
2447 gen_op_load_fpr_QT0(QFPREG(rs1));
2448 gen_op_load_fpr_QT1(QFPREG(rs2));
2449 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_fmulq);
2451 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2452 gen_op_store_QT0_fpr(QFPREG(rd));
2454 case 0x4d: /* fdivs */
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2457 cpu_fpr[rs1], cpu_fpr[rs2]);
2458 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2459 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2462 gen_op_load_fpr_DT0(DFPREG(rs1));
2463 gen_op_load_fpr_DT1(DFPREG(rs2));
2464 gen_clear_float_exceptions();
2465 tcg_gen_helper_0_0(helper_fdivd);
2466 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2467 gen_op_store_DT0_fpr(DFPREG(rd));
2469 case 0x4f: /* fdivq */
2470 CHECK_FPU_FEATURE(dc, FLOAT128);
2471 gen_op_load_fpr_QT0(QFPREG(rs1));
2472 gen_op_load_fpr_QT1(QFPREG(rs2));
2473 gen_clear_float_exceptions();
2474 tcg_gen_helper_0_0(helper_fdivq);
2475 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2476 gen_op_store_QT0_fpr(QFPREG(rd));
2479 CHECK_FPU_FEATURE(dc, FSMULD);
2480 gen_op_load_fpr_FT0(rs1);
2481 gen_op_load_fpr_FT1(rs2);
2482 gen_clear_float_exceptions();
2483 tcg_gen_helper_0_0(helper_fsmuld);
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2487 case 0x6e: /* fdmulq */
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 gen_op_load_fpr_DT0(DFPREG(rs1));
2490 gen_op_load_fpr_DT1(DFPREG(rs2));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_0_0(helper_fdmulq);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2494 gen_op_store_QT0_fpr(QFPREG(rd));
2496 case 0xc4: /* fitos */
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2500 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2501 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdtos);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2508 gen_op_store_FT0_fpr(rd);
2510 case 0xc7: /* fqtos */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_0_0(helper_fqtos);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2516 gen_op_store_FT0_fpr(rd);
2519 gen_op_load_fpr_FT1(rs2);
2520 tcg_gen_helper_0_0(helper_fitod);
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2524 gen_op_load_fpr_FT1(rs2);
2525 tcg_gen_helper_0_0(helper_fstod);
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2528 case 0xcb: /* fqtod */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_QT1(QFPREG(rs2));
2531 gen_clear_float_exceptions();
2532 tcg_gen_helper_0_0(helper_fqtod);
2533 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2534 gen_op_store_DT0_fpr(DFPREG(rd));
2536 case 0xcc: /* fitoq */
2537 CHECK_FPU_FEATURE(dc, FLOAT128);
2538 gen_op_load_fpr_FT1(rs2);
2539 tcg_gen_helper_0_0(helper_fitoq);
2540 gen_op_store_QT0_fpr(QFPREG(rd));
2542 case 0xcd: /* fstoq */
2543 CHECK_FPU_FEATURE(dc, FLOAT128);
2544 gen_op_load_fpr_FT1(rs2);
2545 tcg_gen_helper_0_0(helper_fstoq);
2546 gen_op_store_QT0_fpr(QFPREG(rd));
2548 case 0xce: /* fdtoq */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_DT1(DFPREG(rs2));
2551 tcg_gen_helper_0_0(helper_fdtoq);
2552 gen_op_store_QT0_fpr(QFPREG(rd));
2554 case 0xd1: /* fstoi */
2555 gen_clear_float_exceptions();
2556 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2558 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2559 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2562 gen_op_load_fpr_DT1(DFPREG(rs2));
2563 gen_clear_float_exceptions();
2564 tcg_gen_helper_0_0(helper_fdtoi);
2565 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2566 gen_op_store_FT0_fpr(rd);
2568 case 0xd3: /* fqtoi */
2569 CHECK_FPU_FEATURE(dc, FLOAT128);
2570 gen_op_load_fpr_QT1(QFPREG(rs2));
2571 gen_clear_float_exceptions();
2572 tcg_gen_helper_0_0(helper_fqtoi);
2573 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2574 gen_op_store_FT0_fpr(rd);
2576 #ifdef TARGET_SPARC64
2577 case 0x2: /* V9 fmovd */
2578 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2579 cpu_fpr[DFPREG(rs2)]);
2580 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2581 cpu_fpr[DFPREG(rs2) + 1]);
2583 case 0x3: /* V9 fmovq */
2584 CHECK_FPU_FEATURE(dc, FLOAT128);
2585 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2586 cpu_fpr[QFPREG(rs2)]);
2587 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2588 cpu_fpr[QFPREG(rs2) + 1]);
2589 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2590 cpu_fpr[QFPREG(rs2) + 2]);
2591 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2592 cpu_fpr[QFPREG(rs2) + 3]);
2594 case 0x6: /* V9 fnegd */
2595 gen_op_load_fpr_DT1(DFPREG(rs2));
2596 tcg_gen_helper_0_0(helper_fnegd);
2597 gen_op_store_DT0_fpr(DFPREG(rd));
2599 case 0x7: /* V9 fnegq */
2600 CHECK_FPU_FEATURE(dc, FLOAT128);
2601 gen_op_load_fpr_QT1(QFPREG(rs2));
2602 tcg_gen_helper_0_0(helper_fnegq);
2603 gen_op_store_QT0_fpr(QFPREG(rd));
2605 case 0xa: /* V9 fabsd */
2606 gen_op_load_fpr_DT1(DFPREG(rs2));
2607 tcg_gen_helper_0_0(helper_fabsd);
2608 gen_op_store_DT0_fpr(DFPREG(rd));
2610 case 0xb: /* V9 fabsq */
2611 CHECK_FPU_FEATURE(dc, FLOAT128);
2612 gen_op_load_fpr_QT1(QFPREG(rs2));
2613 tcg_gen_helper_0_0(helper_fabsq);
2614 gen_op_store_QT0_fpr(QFPREG(rd));
2616 case 0x81: /* V9 fstox */
2617 gen_op_load_fpr_FT1(rs2);
2618 gen_clear_float_exceptions();
2619 tcg_gen_helper_0_0(helper_fstox);
2620 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2621 gen_op_store_DT0_fpr(DFPREG(rd));
2623 case 0x82: /* V9 fdtox */
2624 gen_op_load_fpr_DT1(DFPREG(rs2));
2625 gen_clear_float_exceptions();
2626 tcg_gen_helper_0_0(helper_fdtox);
2627 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2628 gen_op_store_DT0_fpr(DFPREG(rd));
2630 case 0x83: /* V9 fqtox */
2631 CHECK_FPU_FEATURE(dc, FLOAT128);
2632 gen_op_load_fpr_QT1(QFPREG(rs2));
2633 gen_clear_float_exceptions();
2634 tcg_gen_helper_0_0(helper_fqtox);
2635 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2636 gen_op_store_DT0_fpr(DFPREG(rd));
2638 case 0x84: /* V9 fxtos */
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_clear_float_exceptions();
2641 tcg_gen_helper_0_0(helper_fxtos);
2642 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2643 gen_op_store_FT0_fpr(rd);
2645 case 0x88: /* V9 fxtod */
2646 gen_op_load_fpr_DT1(DFPREG(rs2));
2647 gen_clear_float_exceptions();
2648 tcg_gen_helper_0_0(helper_fxtod);
2649 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2650 gen_op_store_DT0_fpr(DFPREG(rd));
2652 case 0x8c: /* V9 fxtoq */
2653 CHECK_FPU_FEATURE(dc, FLOAT128);
2654 gen_op_load_fpr_DT1(DFPREG(rs2));
2655 gen_clear_float_exceptions();
2656 tcg_gen_helper_0_0(helper_fxtoq);
2657 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2658 gen_op_store_QT0_fpr(QFPREG(rd));
2664 } else if (xop == 0x35) { /* FPU Operations */
2665 #ifdef TARGET_SPARC64
2668 if (gen_trap_ifnofpu(dc, cpu_cond))
2670 gen_op_clear_ieee_excp_and_FTT();
2671 rs1 = GET_FIELD(insn, 13, 17);
2672 rs2 = GET_FIELD(insn, 27, 31);
2673 xop = GET_FIELD(insn, 18, 26);
2674 #ifdef TARGET_SPARC64
2675 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2678 l1 = gen_new_label();
2679 cond = GET_FIELD_SP(insn, 14, 17);
2680 cpu_src1 = get_src1(insn, cpu_src1);
2681 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2683 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2686 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2689 l1 = gen_new_label();
2690 cond = GET_FIELD_SP(insn, 14, 17);
2691 cpu_src1 = get_src1(insn, cpu_src1);
2692 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2694 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2695 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2698 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2701 CHECK_FPU_FEATURE(dc, FLOAT128);
2702 l1 = gen_new_label();
2703 cond = GET_FIELD_SP(insn, 14, 17);
2704 cpu_src1 = get_src1(insn, cpu_src1);
2705 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2707 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2708 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2709 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2710 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2716 #ifdef TARGET_SPARC64
2717 #define FMOVSCC(fcc) \
2722 l1 = gen_new_label(); \
2723 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2724 cond = GET_FIELD_SP(insn, 14, 17); \
2725 gen_fcond(r_cond, fcc, cond); \
2726 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2728 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2729 gen_set_label(l1); \
2730 tcg_temp_free(r_cond); \
2732 #define FMOVDCC(fcc) \
2737 l1 = gen_new_label(); \
2738 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2739 cond = GET_FIELD_SP(insn, 14, 17); \
2740 gen_fcond(r_cond, fcc, cond); \
2741 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2743 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2744 cpu_fpr[DFPREG(rs2)]); \
2745 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2746 cpu_fpr[DFPREG(rs2) + 1]); \
2747 gen_set_label(l1); \
2748 tcg_temp_free(r_cond); \
2750 #define FMOVQCC(fcc) \
2755 l1 = gen_new_label(); \
2756 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2757 cond = GET_FIELD_SP(insn, 14, 17); \
2758 gen_fcond(r_cond, fcc, cond); \
2759 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2761 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2762 cpu_fpr[QFPREG(rs2)]); \
2763 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2764 cpu_fpr[QFPREG(rs2) + 1]); \
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2766 cpu_fpr[QFPREG(rs2) + 2]); \
2767 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2768 cpu_fpr[QFPREG(rs2) + 3]); \
2769 gen_set_label(l1); \
2770 tcg_temp_free(r_cond); \
2772 case 0x001: /* V9 fmovscc %fcc0 */
2775 case 0x002: /* V9 fmovdcc %fcc0 */
2778 case 0x003: /* V9 fmovqcc %fcc0 */
2779 CHECK_FPU_FEATURE(dc, FLOAT128);
2782 case 0x041: /* V9 fmovscc %fcc1 */
2785 case 0x042: /* V9 fmovdcc %fcc1 */
2788 case 0x043: /* V9 fmovqcc %fcc1 */
2789 CHECK_FPU_FEATURE(dc, FLOAT128);
2792 case 0x081: /* V9 fmovscc %fcc2 */
2795 case 0x082: /* V9 fmovdcc %fcc2 */
2798 case 0x083: /* V9 fmovqcc %fcc2 */
2799 CHECK_FPU_FEATURE(dc, FLOAT128);
2802 case 0x0c1: /* V9 fmovscc %fcc3 */
2805 case 0x0c2: /* V9 fmovdcc %fcc3 */
2808 case 0x0c3: /* V9 fmovqcc %fcc3 */
2809 CHECK_FPU_FEATURE(dc, FLOAT128);
2815 #define FMOVCC(size_FDQ, icc) \
2820 l1 = gen_new_label(); \
2821 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2822 cond = GET_FIELD_SP(insn, 14, 17); \
2823 gen_cond(r_cond, icc, cond); \
2824 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2826 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2827 (glue(size_FDQ, FPREG(rs2))); \
2828 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2829 (glue(size_FDQ, FPREG(rd))); \
2830 gen_set_label(l1); \
2831 tcg_temp_free(r_cond); \
2833 #define FMOVSCC(icc) \
2838 l1 = gen_new_label(); \
2839 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2840 cond = GET_FIELD_SP(insn, 14, 17); \
2841 gen_cond(r_cond, icc, cond); \
2842 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2844 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2845 gen_set_label(l1); \
2846 tcg_temp_free(r_cond); \
2848 #define FMOVDCC(icc) \
2853 l1 = gen_new_label(); \
2854 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2855 cond = GET_FIELD_SP(insn, 14, 17); \
2856 gen_cond(r_cond, icc, cond); \
2857 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2859 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2860 cpu_fpr[DFPREG(rs2)]); \
2861 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2862 cpu_fpr[DFPREG(rs2) + 1]); \
2863 gen_set_label(l1); \
2864 tcg_temp_free(r_cond); \
2866 #define FMOVQCC(icc) \
2871 l1 = gen_new_label(); \
2872 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2873 cond = GET_FIELD_SP(insn, 14, 17); \
2874 gen_cond(r_cond, icc, cond); \
2875 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2877 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2878 cpu_fpr[QFPREG(rs2)]); \
2879 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2880 cpu_fpr[QFPREG(rs2) + 1]); \
2881 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2882 cpu_fpr[QFPREG(rs2) + 2]); \
2883 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2884 cpu_fpr[QFPREG(rs2) + 3]); \
2885 gen_set_label(l1); \
2886 tcg_temp_free(r_cond); \
2889 case 0x101: /* V9 fmovscc %icc */
2892 case 0x102: /* V9 fmovdcc %icc */
2894 case 0x103: /* V9 fmovqcc %icc */
2895 CHECK_FPU_FEATURE(dc, FLOAT128);
2898 case 0x181: /* V9 fmovscc %xcc */
2901 case 0x182: /* V9 fmovdcc %xcc */
2904 case 0x183: /* V9 fmovqcc %xcc */
2905 CHECK_FPU_FEATURE(dc, FLOAT128);
2912 case 0x51: /* fcmps, V9 %fcc */
2913 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2915 case 0x52: /* fcmpd, V9 %fcc */
2916 gen_op_load_fpr_DT0(DFPREG(rs1));
2917 gen_op_load_fpr_DT1(DFPREG(rs2));
2918 gen_op_fcmpd(rd & 3);
2920 case 0x53: /* fcmpq, V9 %fcc */
2921 CHECK_FPU_FEATURE(dc, FLOAT128);
2922 gen_op_load_fpr_QT0(QFPREG(rs1));
2923 gen_op_load_fpr_QT1(QFPREG(rs2));
2924 gen_op_fcmpq(rd & 3);
2926 case 0x55: /* fcmpes, V9 %fcc */
2927 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2929 case 0x56: /* fcmped, V9 %fcc */
2930 gen_op_load_fpr_DT0(DFPREG(rs1));
2931 gen_op_load_fpr_DT1(DFPREG(rs2));
2932 gen_op_fcmped(rd & 3);
2934 case 0x57: /* fcmpeq, V9 %fcc */
2935 CHECK_FPU_FEATURE(dc, FLOAT128);
2936 gen_op_load_fpr_QT0(QFPREG(rs1));
2937 gen_op_load_fpr_QT1(QFPREG(rs2));
2938 gen_op_fcmpeq(rd & 3);
2943 } else if (xop == 0x2) {
2946 rs1 = GET_FIELD(insn, 13, 17);
2948 // or %g0, x, y -> mov T0, x; mov y, T0
2949 if (IS_IMM) { /* immediate */
2952 rs2 = GET_FIELDs(insn, 19, 31);
2953 r_const = tcg_const_tl((int)rs2);
2954 gen_movl_TN_reg(rd, r_const);
2955 tcg_temp_free(r_const);
2956 } else { /* register */
2957 rs2 = GET_FIELD(insn, 27, 31);
2958 gen_movl_reg_TN(rs2, cpu_dst);
2959 gen_movl_TN_reg(rd, cpu_dst);
2962 cpu_src1 = get_src1(insn, cpu_src1);
2963 if (IS_IMM) { /* immediate */
2964 rs2 = GET_FIELDs(insn, 19, 31);
2965 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2966 gen_movl_TN_reg(rd, cpu_dst);
2967 } else { /* register */
2968 // or x, %g0, y -> mov T1, x; mov y, T1
2969 rs2 = GET_FIELD(insn, 27, 31);
2971 gen_movl_reg_TN(rs2, cpu_src2);
2972 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2973 gen_movl_TN_reg(rd, cpu_dst);
2975 gen_movl_TN_reg(rd, cpu_src1);
2978 #ifdef TARGET_SPARC64
2979 } else if (xop == 0x25) { /* sll, V9 sllx */
2980 cpu_src1 = get_src1(insn, cpu_src1);
2981 if (IS_IMM) { /* immediate */
2982 rs2 = GET_FIELDs(insn, 20, 31);
2983 if (insn & (1 << 12)) {
2984 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2986 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2988 } else { /* register */
2989 rs2 = GET_FIELD(insn, 27, 31);
2990 gen_movl_reg_TN(rs2, cpu_src2);
2991 if (insn & (1 << 12)) {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2994 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2996 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2998 gen_movl_TN_reg(rd, cpu_dst);
2999 } else if (xop == 0x26) { /* srl, V9 srlx */
3000 cpu_src1 = get_src1(insn, cpu_src1);
3001 if (IS_IMM) { /* immediate */
3002 rs2 = GET_FIELDs(insn, 20, 31);
3003 if (insn & (1 << 12)) {
3004 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3006 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3007 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3009 } else { /* register */
3010 rs2 = GET_FIELD(insn, 27, 31);
3011 gen_movl_reg_TN(rs2, cpu_src2);
3012 if (insn & (1 << 12)) {
3013 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3014 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3016 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3017 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3018 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3021 gen_movl_TN_reg(rd, cpu_dst);
3022 } else if (xop == 0x27) { /* sra, V9 srax */
3023 cpu_src1 = get_src1(insn, cpu_src1);
3024 if (IS_IMM) { /* immediate */
3025 rs2 = GET_FIELDs(insn, 20, 31);
3026 if (insn & (1 << 12)) {
3027 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3029 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3030 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3031 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3033 } else { /* register */
3034 rs2 = GET_FIELD(insn, 27, 31);
3035 gen_movl_reg_TN(rs2, cpu_src2);
3036 if (insn & (1 << 12)) {
3037 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3038 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3040 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3041 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3042 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3043 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3046 gen_movl_TN_reg(rd, cpu_dst);
3048 } else if (xop < 0x36) {
3049 cpu_src1 = get_src1(insn, cpu_src1);
3050 cpu_src2 = get_src2(insn, cpu_src2);
3052 switch (xop & ~0x10) {
3055 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3057 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3060 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3062 gen_op_logic_cc(cpu_dst);
3065 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3067 gen_op_logic_cc(cpu_dst);
3070 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3072 gen_op_logic_cc(cpu_dst);
3076 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3078 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3081 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3082 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3084 gen_op_logic_cc(cpu_dst);
3087 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3088 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3090 gen_op_logic_cc(cpu_dst);
3093 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3094 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3096 gen_op_logic_cc(cpu_dst);
3100 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3102 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3103 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3104 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3107 #ifdef TARGET_SPARC64
3108 case 0x9: /* V9 mulx */
3109 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3113 CHECK_IU_FEATURE(dc, MUL);
3114 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3116 gen_op_logic_cc(cpu_dst);
3119 CHECK_IU_FEATURE(dc, MUL);
3120 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3122 gen_op_logic_cc(cpu_dst);
3126 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3128 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3129 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3130 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3133 #ifdef TARGET_SPARC64
3134 case 0xd: /* V9 udivx */
3135 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3136 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3137 gen_trap_ifdivzero_tl(cpu_cc_src2);
3138 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3142 CHECK_IU_FEATURE(dc, DIV);
3143 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3146 gen_op_div_cc(cpu_dst);
3149 CHECK_IU_FEATURE(dc, DIV);
3150 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3153 gen_op_div_cc(cpu_dst);
3158 gen_movl_TN_reg(rd, cpu_dst);
3161 case 0x20: /* taddcc */
3162 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3163 gen_movl_TN_reg(rd, cpu_dst);
3165 case 0x21: /* tsubcc */
3166 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3167 gen_movl_TN_reg(rd, cpu_dst);
3169 case 0x22: /* taddcctv */
3170 save_state(dc, cpu_cond);
3171 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3172 gen_movl_TN_reg(rd, cpu_dst);
3174 case 0x23: /* tsubcctv */
3175 save_state(dc, cpu_cond);
3176 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3177 gen_movl_TN_reg(rd, cpu_dst);
3179 case 0x24: /* mulscc */
3180 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3181 gen_movl_TN_reg(rd, cpu_dst);
3183 #ifndef TARGET_SPARC64
3184 case 0x25: /* sll */
3185 if (IS_IMM) { /* immediate */
3186 rs2 = GET_FIELDs(insn, 20, 31);
3187 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3188 } else { /* register */
3189 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3190 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3192 gen_movl_TN_reg(rd, cpu_dst);
3194 case 0x26: /* srl */
3195 if (IS_IMM) { /* immediate */
3196 rs2 = GET_FIELDs(insn, 20, 31);
3197 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3198 } else { /* register */
3199 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3200 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3202 gen_movl_TN_reg(rd, cpu_dst);
3204 case 0x27: /* sra */
3205 if (IS_IMM) { /* immediate */
3206 rs2 = GET_FIELDs(insn, 20, 31);
3207 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3208 } else { /* register */
3209 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3210 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3212 gen_movl_TN_reg(rd, cpu_dst);
3219 tcg_gen_xor_tl(cpu_y, cpu_src1, cpu_src2);
3221 #ifndef TARGET_SPARC64
3222 case 0x01 ... 0x0f: /* undefined in the
3226 case 0x10 ... 0x1f: /* implementation-dependent
3232 case 0x2: /* V9 wrccr */
3233 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3234 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3236 case 0x3: /* V9 wrasi */
3237 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3238 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3240 case 0x6: /* V9 wrfprs */
3241 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3242 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3243 save_state(dc, cpu_cond);
3248 case 0xf: /* V9 sir, nop if user */
3249 #if !defined(CONFIG_USER_ONLY)
3254 case 0x13: /* Graphics Status */
3255 if (gen_trap_ifnofpu(dc, cpu_cond))
3257 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3259 case 0x17: /* Tick compare */
3260 #if !defined(CONFIG_USER_ONLY)
3261 if (!supervisor(dc))
3267 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3269 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3270 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3271 offsetof(CPUState, tick));
3272 tcg_gen_helper_0_2(helper_tick_set_limit,
3273 r_tickptr, cpu_tick_cmpr);
3274 tcg_temp_free(r_tickptr);
3277 case 0x18: /* System tick */
3278 #if !defined(CONFIG_USER_ONLY)
3279 if (!supervisor(dc))
3285 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3287 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3288 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3289 offsetof(CPUState, stick));
3290 tcg_gen_helper_0_2(helper_tick_set_count,
3291 r_tickptr, cpu_dst);
3292 tcg_temp_free(r_tickptr);
3295 case 0x19: /* System tick compare */
3296 #if !defined(CONFIG_USER_ONLY)
3297 if (!supervisor(dc))
3303 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3305 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3306 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3307 offsetof(CPUState, stick));
3308 tcg_gen_helper_0_2(helper_tick_set_limit,
3309 r_tickptr, cpu_stick_cmpr);
3310 tcg_temp_free(r_tickptr);
3314 case 0x10: /* Performance Control */
3315 case 0x11: /* Performance Instrumentation
3317 case 0x12: /* Dispatch Control */
3318 case 0x14: /* Softint set */
3319 case 0x15: /* Softint clear */
3320 case 0x16: /* Softint write */
3327 #if !defined(CONFIG_USER_ONLY)
3328 case 0x31: /* wrpsr, V9 saved, restored */
3330 if (!supervisor(dc))
3332 #ifdef TARGET_SPARC64
3335 tcg_gen_helper_0_0(helper_saved);
3338 tcg_gen_helper_0_0(helper_restored);
3340 case 2: /* UA2005 allclean */
3341 case 3: /* UA2005 otherw */
3342 case 4: /* UA2005 normalw */
3343 case 5: /* UA2005 invalw */
3349 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3350 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3351 save_state(dc, cpu_cond);
3358 case 0x32: /* wrwim, V9 wrpr */
3360 if (!supervisor(dc))
3362 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3363 #ifdef TARGET_SPARC64
3369 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3370 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3371 offsetof(CPUState, tsptr));
3372 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3373 offsetof(trap_state, tpc));
3374 tcg_temp_free(r_tsptr);
3381 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3382 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3383 offsetof(CPUState, tsptr));
3384 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3385 offsetof(trap_state, tnpc));
3386 tcg_temp_free(r_tsptr);
3393 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3394 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3395 offsetof(CPUState, tsptr));
3396 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3397 offsetof(trap_state,
3399 tcg_temp_free(r_tsptr);
3406 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3407 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3408 offsetof(CPUState, tsptr));
3409 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3410 offsetof(trap_state, tt));
3411 tcg_temp_free(r_tsptr);
3418 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3419 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3420 offsetof(CPUState, tick));
3421 tcg_gen_helper_0_2(helper_tick_set_count,
3422 r_tickptr, cpu_tmp0);
3423 tcg_temp_free(r_tickptr);
3427 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3430 save_state(dc, cpu_cond);
3431 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3437 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3438 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3439 offsetof(CPUSPARCState, tl));
3442 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3443 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3444 offsetof(CPUSPARCState,
3448 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3451 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3452 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3453 offsetof(CPUSPARCState,
3456 case 11: // canrestore
3457 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3458 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3459 offsetof(CPUSPARCState,
3462 case 12: // cleanwin
3463 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3464 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3465 offsetof(CPUSPARCState,
3468 case 13: // otherwin
3469 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3470 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3471 offsetof(CPUSPARCState,
3475 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3476 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3477 offsetof(CPUSPARCState,
3480 case 16: // UA2005 gl
3481 CHECK_IU_FEATURE(dc, GL);
3482 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3483 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3484 offsetof(CPUSPARCState, gl));
3486 case 26: // UA2005 strand status
3487 CHECK_IU_FEATURE(dc, HYPV);
3488 if (!hypervisor(dc))
3490 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3496 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3497 if (dc->def->nwindows != 32)
3498 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3499 (1 << dc->def->nwindows) - 1);
3500 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3504 case 0x33: /* wrtbr, UA2005 wrhpr */
3506 #ifndef TARGET_SPARC64
3507 if (!supervisor(dc))
3509 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3511 CHECK_IU_FEATURE(dc, HYPV);
3512 if (!hypervisor(dc))
3514 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3517 // XXX gen_op_wrhpstate();
3518 save_state(dc, cpu_cond);
3524 // XXX gen_op_wrhtstate();
3527 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3530 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3532 case 31: // hstick_cmpr
3536 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3537 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3538 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3539 offsetof(CPUState, hstick));
3540 tcg_gen_helper_0_2(helper_tick_set_limit,
3541 r_tickptr, cpu_hstick_cmpr);
3542 tcg_temp_free(r_tickptr);
3545 case 6: // hver readonly
3553 #ifdef TARGET_SPARC64
3554 case 0x2c: /* V9 movcc */
3556 int cc = GET_FIELD_SP(insn, 11, 12);
3557 int cond = GET_FIELD_SP(insn, 14, 17);
3561 r_cond = tcg_temp_new(TCG_TYPE_TL);
3562 if (insn & (1 << 18)) {
3564 gen_cond(r_cond, 0, cond);
3566 gen_cond(r_cond, 1, cond);
3570 gen_fcond(r_cond, cc, cond);
3573 l1 = gen_new_label();
3575 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3576 if (IS_IMM) { /* immediate */
3579 rs2 = GET_FIELD_SPs(insn, 0, 10);
3580 r_const = tcg_const_tl((int)rs2);
3581 gen_movl_TN_reg(rd, r_const);
3582 tcg_temp_free(r_const);
3584 rs2 = GET_FIELD_SP(insn, 0, 4);
3585 gen_movl_reg_TN(rs2, cpu_tmp0);
3586 gen_movl_TN_reg(rd, cpu_tmp0);
3589 tcg_temp_free(r_cond);
3592 case 0x2d: /* V9 sdivx */
3593 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3594 gen_movl_TN_reg(rd, cpu_dst);
3596 case 0x2e: /* V9 popc */
3598 cpu_src2 = get_src2(insn, cpu_src2);
3599 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3601 gen_movl_TN_reg(rd, cpu_dst);
3603 case 0x2f: /* V9 movr */
3605 int cond = GET_FIELD_SP(insn, 10, 12);
3608 cpu_src1 = get_src1(insn, cpu_src1);
3610 l1 = gen_new_label();
3612 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3614 if (IS_IMM) { /* immediate */
3617 rs2 = GET_FIELD_SPs(insn, 0, 9);
3618 r_const = tcg_const_tl((int)rs2);
3619 gen_movl_TN_reg(rd, r_const);
3620 tcg_temp_free(r_const);
3622 rs2 = GET_FIELD_SP(insn, 0, 4);
3623 gen_movl_reg_TN(rs2, cpu_tmp0);
3624 gen_movl_TN_reg(rd, cpu_tmp0);
3634 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3635 #ifdef TARGET_SPARC64
3636 int opf = GET_FIELD_SP(insn, 5, 13);
3637 rs1 = GET_FIELD(insn, 13, 17);
3638 rs2 = GET_FIELD(insn, 27, 31);
3639 if (gen_trap_ifnofpu(dc, cpu_cond))
3643 case 0x000: /* VIS I edge8cc */
3644 case 0x001: /* VIS II edge8n */
3645 case 0x002: /* VIS I edge8lcc */
3646 case 0x003: /* VIS II edge8ln */
3647 case 0x004: /* VIS I edge16cc */
3648 case 0x005: /* VIS II edge16n */
3649 case 0x006: /* VIS I edge16lcc */
3650 case 0x007: /* VIS II edge16ln */
3651 case 0x008: /* VIS I edge32cc */
3652 case 0x009: /* VIS II edge32n */
3653 case 0x00a: /* VIS I edge32lcc */
3654 case 0x00b: /* VIS II edge32ln */
3657 case 0x010: /* VIS I array8 */
3658 CHECK_FPU_FEATURE(dc, VIS1);
3659 cpu_src1 = get_src1(insn, cpu_src1);
3660 gen_movl_reg_TN(rs2, cpu_src2);
3661 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3663 gen_movl_TN_reg(rd, cpu_dst);
3665 case 0x012: /* VIS I array16 */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 cpu_src1 = get_src1(insn, cpu_src1);
3668 gen_movl_reg_TN(rs2, cpu_src2);
3669 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3671 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3672 gen_movl_TN_reg(rd, cpu_dst);
3674 case 0x014: /* VIS I array32 */
3675 CHECK_FPU_FEATURE(dc, VIS1);
3676 cpu_src1 = get_src1(insn, cpu_src1);
3677 gen_movl_reg_TN(rs2, cpu_src2);
3678 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3680 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3681 gen_movl_TN_reg(rd, cpu_dst);
3683 case 0x018: /* VIS I alignaddr */
3684 CHECK_FPU_FEATURE(dc, VIS1);
3685 cpu_src1 = get_src1(insn, cpu_src1);
3686 gen_movl_reg_TN(rs2, cpu_src2);
3687 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3689 gen_movl_TN_reg(rd, cpu_dst);
3691 case 0x019: /* VIS II bmask */
3692 case 0x01a: /* VIS I alignaddrl */
3695 case 0x020: /* VIS I fcmple16 */
3696 CHECK_FPU_FEATURE(dc, VIS1);
3697 gen_op_load_fpr_DT0(DFPREG(rs1));
3698 gen_op_load_fpr_DT1(DFPREG(rs2));
3699 tcg_gen_helper_0_0(helper_fcmple16);
3700 gen_op_store_DT0_fpr(DFPREG(rd));
3702 case 0x022: /* VIS I fcmpne16 */
3703 CHECK_FPU_FEATURE(dc, VIS1);
3704 gen_op_load_fpr_DT0(DFPREG(rs1));
3705 gen_op_load_fpr_DT1(DFPREG(rs2));
3706 tcg_gen_helper_0_0(helper_fcmpne16);
3707 gen_op_store_DT0_fpr(DFPREG(rd));
3709 case 0x024: /* VIS I fcmple32 */
3710 CHECK_FPU_FEATURE(dc, VIS1);
3711 gen_op_load_fpr_DT0(DFPREG(rs1));
3712 gen_op_load_fpr_DT1(DFPREG(rs2));
3713 tcg_gen_helper_0_0(helper_fcmple32);
3714 gen_op_store_DT0_fpr(DFPREG(rd));
3716 case 0x026: /* VIS I fcmpne32 */
3717 CHECK_FPU_FEATURE(dc, VIS1);
3718 gen_op_load_fpr_DT0(DFPREG(rs1));
3719 gen_op_load_fpr_DT1(DFPREG(rs2));
3720 tcg_gen_helper_0_0(helper_fcmpne32);
3721 gen_op_store_DT0_fpr(DFPREG(rd));
3723 case 0x028: /* VIS I fcmpgt16 */
3724 CHECK_FPU_FEATURE(dc, VIS1);
3725 gen_op_load_fpr_DT0(DFPREG(rs1));
3726 gen_op_load_fpr_DT1(DFPREG(rs2));
3727 tcg_gen_helper_0_0(helper_fcmpgt16);
3728 gen_op_store_DT0_fpr(DFPREG(rd));
3730 case 0x02a: /* VIS I fcmpeq16 */
3731 CHECK_FPU_FEATURE(dc, VIS1);
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 tcg_gen_helper_0_0(helper_fcmpeq16);
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3737 case 0x02c: /* VIS I fcmpgt32 */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 tcg_gen_helper_0_0(helper_fcmpgt32);
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3744 case 0x02e: /* VIS I fcmpeq32 */
3745 CHECK_FPU_FEATURE(dc, VIS1);
3746 gen_op_load_fpr_DT0(DFPREG(rs1));
3747 gen_op_load_fpr_DT1(DFPREG(rs2));
3748 tcg_gen_helper_0_0(helper_fcmpeq32);
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3751 case 0x031: /* VIS I fmul8x16 */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 tcg_gen_helper_0_0(helper_fmul8x16);
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3758 case 0x033: /* VIS I fmul8x16au */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 tcg_gen_helper_0_0(helper_fmul8x16au);
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3765 case 0x035: /* VIS I fmul8x16al */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 tcg_gen_helper_0_0(helper_fmul8x16al);
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3772 case 0x036: /* VIS I fmul8sux16 */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_op_load_fpr_DT0(DFPREG(rs1));
3775 gen_op_load_fpr_DT1(DFPREG(rs2));
3776 tcg_gen_helper_0_0(helper_fmul8sux16);
3777 gen_op_store_DT0_fpr(DFPREG(rd));
3779 case 0x037: /* VIS I fmul8ulx16 */
3780 CHECK_FPU_FEATURE(dc, VIS1);
3781 gen_op_load_fpr_DT0(DFPREG(rs1));
3782 gen_op_load_fpr_DT1(DFPREG(rs2));
3783 tcg_gen_helper_0_0(helper_fmul8ulx16);
3784 gen_op_store_DT0_fpr(DFPREG(rd));
3786 case 0x038: /* VIS I fmuld8sux16 */
3787 CHECK_FPU_FEATURE(dc, VIS1);
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 tcg_gen_helper_0_0(helper_fmuld8sux16);
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3793 case 0x039: /* VIS I fmuld8ulx16 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3800 case 0x03a: /* VIS I fpack32 */
3801 case 0x03b: /* VIS I fpack16 */
3802 case 0x03d: /* VIS I fpackfix */
3803 case 0x03e: /* VIS I pdist */
3806 case 0x048: /* VIS I faligndata */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 gen_op_load_fpr_DT0(DFPREG(rs1));
3809 gen_op_load_fpr_DT1(DFPREG(rs2));
3810 tcg_gen_helper_0_0(helper_faligndata);
3811 gen_op_store_DT0_fpr(DFPREG(rd));
3813 case 0x04b: /* VIS I fpmerge */
3814 CHECK_FPU_FEATURE(dc, VIS1);
3815 gen_op_load_fpr_DT0(DFPREG(rs1));
3816 gen_op_load_fpr_DT1(DFPREG(rs2));
3817 tcg_gen_helper_0_0(helper_fpmerge);
3818 gen_op_store_DT0_fpr(DFPREG(rd));
3820 case 0x04c: /* VIS II bshuffle */
3823 case 0x04d: /* VIS I fexpand */
3824 CHECK_FPU_FEATURE(dc, VIS1);
3825 gen_op_load_fpr_DT0(DFPREG(rs1));
3826 gen_op_load_fpr_DT1(DFPREG(rs2));
3827 tcg_gen_helper_0_0(helper_fexpand);
3828 gen_op_store_DT0_fpr(DFPREG(rd));
3830 case 0x050: /* VIS I fpadd16 */
3831 CHECK_FPU_FEATURE(dc, VIS1);
3832 gen_op_load_fpr_DT0(DFPREG(rs1));
3833 gen_op_load_fpr_DT1(DFPREG(rs2));
3834 tcg_gen_helper_0_0(helper_fpadd16);
3835 gen_op_store_DT0_fpr(DFPREG(rd));
3837 case 0x051: /* VIS I fpadd16s */
3838 CHECK_FPU_FEATURE(dc, VIS1);
3839 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3840 cpu_fpr[rs1], cpu_fpr[rs2]);
3842 case 0x052: /* VIS I fpadd32 */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 gen_op_load_fpr_DT0(DFPREG(rs1));
3845 gen_op_load_fpr_DT1(DFPREG(rs2));
3846 tcg_gen_helper_0_0(helper_fpadd32);
3847 gen_op_store_DT0_fpr(DFPREG(rd));
3849 case 0x053: /* VIS I fpadd32s */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3852 cpu_fpr[rs1], cpu_fpr[rs2]);
3854 case 0x054: /* VIS I fpsub16 */
3855 CHECK_FPU_FEATURE(dc, VIS1);
3856 gen_op_load_fpr_DT0(DFPREG(rs1));
3857 gen_op_load_fpr_DT1(DFPREG(rs2));
3858 tcg_gen_helper_0_0(helper_fpsub16);
3859 gen_op_store_DT0_fpr(DFPREG(rd));
3861 case 0x055: /* VIS I fpsub16s */
3862 CHECK_FPU_FEATURE(dc, VIS1);
3863 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3864 cpu_fpr[rs1], cpu_fpr[rs2]);
3866 case 0x056: /* VIS I fpsub32 */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 gen_op_load_fpr_DT0(DFPREG(rs1));
3869 gen_op_load_fpr_DT1(DFPREG(rs2));
3870 tcg_gen_helper_0_0(helper_fpsub32);
3871 gen_op_store_DT0_fpr(DFPREG(rd));
3873 case 0x057: /* VIS I fpsub32s */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3876 cpu_fpr[rs1], cpu_fpr[rs2]);
3878 case 0x060: /* VIS I fzero */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3881 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3883 case 0x061: /* VIS I fzeros */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3887 case 0x062: /* VIS I fnor */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3890 cpu_fpr[DFPREG(rs2)]);
3891 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3892 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3893 cpu_fpr[DFPREG(rs2) + 1]);
3894 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3896 case 0x063: /* VIS I fnors */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3899 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3901 case 0x064: /* VIS I fandnot2 */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3904 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3905 cpu_fpr[DFPREG(rs2)]);
3906 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3907 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3908 cpu_fpr[DFPREG(rs2) + 1]);
3910 case 0x065: /* VIS I fandnot2s */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3913 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3915 case 0x066: /* VIS I fnot2 */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3919 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3920 cpu_fpr[DFPREG(rs2) + 1], -1);
3922 case 0x067: /* VIS I fnot2s */
3923 CHECK_FPU_FEATURE(dc, VIS1);
3924 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3926 case 0x068: /* VIS I fandnot1 */
3927 CHECK_FPU_FEATURE(dc, VIS1);
3928 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3929 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3930 cpu_fpr[DFPREG(rs1)]);
3931 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3932 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3933 cpu_fpr[DFPREG(rs1) + 1]);
3935 case 0x069: /* VIS I fandnot1s */
3936 CHECK_FPU_FEATURE(dc, VIS1);
3937 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3938 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3940 case 0x06a: /* VIS I fnot1 */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3944 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3945 cpu_fpr[DFPREG(rs1) + 1], -1);
3947 case 0x06b: /* VIS I fnot1s */
3948 CHECK_FPU_FEATURE(dc, VIS1);
3949 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3951 case 0x06c: /* VIS I fxor */
3952 CHECK_FPU_FEATURE(dc, VIS1);
3953 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3954 cpu_fpr[DFPREG(rs2)]);
3955 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3956 cpu_fpr[DFPREG(rs1) + 1],
3957 cpu_fpr[DFPREG(rs2) + 1]);
3959 case 0x06d: /* VIS I fxors */
3960 CHECK_FPU_FEATURE(dc, VIS1);
3961 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3963 case 0x06e: /* VIS I fnand */
3964 CHECK_FPU_FEATURE(dc, VIS1);
3965 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3966 cpu_fpr[DFPREG(rs2)]);
3967 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3968 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3969 cpu_fpr[DFPREG(rs2) + 1]);
3970 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3972 case 0x06f: /* VIS I fnands */
3973 CHECK_FPU_FEATURE(dc, VIS1);
3974 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3975 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3977 case 0x070: /* VIS I fand */
3978 CHECK_FPU_FEATURE(dc, VIS1);
3979 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3980 cpu_fpr[DFPREG(rs2)]);
3981 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3982 cpu_fpr[DFPREG(rs1) + 1],
3983 cpu_fpr[DFPREG(rs2) + 1]);
3985 case 0x071: /* VIS I fands */
3986 CHECK_FPU_FEATURE(dc, VIS1);
3987 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3989 case 0x072: /* VIS I fxnor */
3990 CHECK_FPU_FEATURE(dc, VIS1);
3991 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3992 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3993 cpu_fpr[DFPREG(rs1)]);
3994 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3995 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3996 cpu_fpr[DFPREG(rs1) + 1]);
3998 case 0x073: /* VIS I fxnors */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4001 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4003 case 0x074: /* VIS I fsrc1 */
4004 CHECK_FPU_FEATURE(dc, VIS1);
4005 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4006 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4007 cpu_fpr[DFPREG(rs1) + 1]);
4009 case 0x075: /* VIS I fsrc1s */
4010 CHECK_FPU_FEATURE(dc, VIS1);
4011 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4013 case 0x076: /* VIS I fornot2 */
4014 CHECK_FPU_FEATURE(dc, VIS1);
4015 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
4016 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4017 cpu_fpr[DFPREG(rs2)]);
4018 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
4019 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4020 cpu_fpr[DFPREG(rs2) + 1]);
4022 case 0x077: /* VIS I fornot2s */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4025 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4027 case 0x078: /* VIS I fsrc2 */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 gen_op_load_fpr_DT0(DFPREG(rs2));
4030 gen_op_store_DT0_fpr(DFPREG(rd));
4032 case 0x079: /* VIS I fsrc2s */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4036 case 0x07a: /* VIS I fornot1 */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4039 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4040 cpu_fpr[DFPREG(rs1)]);
4041 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4042 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4043 cpu_fpr[DFPREG(rs1) + 1]);
4045 case 0x07b: /* VIS I fornot1s */
4046 CHECK_FPU_FEATURE(dc, VIS1);
4047 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4048 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4050 case 0x07c: /* VIS I for */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4053 cpu_fpr[DFPREG(rs2)]);
4054 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4055 cpu_fpr[DFPREG(rs1) + 1],
4056 cpu_fpr[DFPREG(rs2) + 1]);
4058 case 0x07d: /* VIS I fors */
4059 CHECK_FPU_FEATURE(dc, VIS1);
4060 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4062 case 0x07e: /* VIS I fone */
4063 CHECK_FPU_FEATURE(dc, VIS1);
4064 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4065 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4067 case 0x07f: /* VIS I fones */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4071 case 0x080: /* VIS I shutdown */
4072 case 0x081: /* VIS II siam */
4081 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4082 #ifdef TARGET_SPARC64
4087 #ifdef TARGET_SPARC64
4088 } else if (xop == 0x39) { /* V9 return */
4091 save_state(dc, cpu_cond);
4092 cpu_src1 = get_src1(insn, cpu_src1);
4093 if (IS_IMM) { /* immediate */
4094 rs2 = GET_FIELDs(insn, 19, 31);
4095 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4096 } else { /* register */
4097 rs2 = GET_FIELD(insn, 27, 31);
4099 gen_movl_reg_TN(rs2, cpu_src2);
4100 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4102 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4104 tcg_gen_helper_0_0(helper_restore);
4105 gen_mov_pc_npc(dc, cpu_cond);
4106 r_const = tcg_const_i32(3);
4107 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4108 tcg_temp_free(r_const);
4109 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4110 dc->npc = DYNAMIC_PC;
4114 cpu_src1 = get_src1(insn, cpu_src1);
4115 if (IS_IMM) { /* immediate */
4116 rs2 = GET_FIELDs(insn, 19, 31);
4117 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4118 } else { /* register */
4119 rs2 = GET_FIELD(insn, 27, 31);
4121 gen_movl_reg_TN(rs2, cpu_src2);
4122 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4124 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4127 case 0x38: /* jmpl */
4131 r_const = tcg_const_tl(dc->pc);
4132 gen_movl_TN_reg(rd, r_const);
4133 tcg_temp_free(r_const);
4134 gen_mov_pc_npc(dc, cpu_cond);
4135 r_const = tcg_const_i32(3);
4136 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4138 tcg_temp_free(r_const);
4139 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4140 dc->npc = DYNAMIC_PC;
4143 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4144 case 0x39: /* rett, V9 return */
4148 if (!supervisor(dc))
4150 gen_mov_pc_npc(dc, cpu_cond);
4151 r_const = tcg_const_i32(3);
4152 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4154 tcg_temp_free(r_const);
4155 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4156 dc->npc = DYNAMIC_PC;
4157 tcg_gen_helper_0_0(helper_rett);
4161 case 0x3b: /* flush */
4162 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4164 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4166 case 0x3c: /* save */
4167 save_state(dc, cpu_cond);
4168 tcg_gen_helper_0_0(helper_save);
4169 gen_movl_TN_reg(rd, cpu_dst);
4171 case 0x3d: /* restore */
4172 save_state(dc, cpu_cond);
4173 tcg_gen_helper_0_0(helper_restore);
4174 gen_movl_TN_reg(rd, cpu_dst);
4176 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4177 case 0x3e: /* V9 done/retry */
4181 if (!supervisor(dc))
4183 dc->npc = DYNAMIC_PC;
4184 dc->pc = DYNAMIC_PC;
4185 tcg_gen_helper_0_0(helper_done);
4188 if (!supervisor(dc))
4190 dc->npc = DYNAMIC_PC;
4191 dc->pc = DYNAMIC_PC;
4192 tcg_gen_helper_0_0(helper_retry);
4207 case 3: /* load/store instructions */
4209 unsigned int xop = GET_FIELD(insn, 7, 12);
4211 cpu_src1 = get_src1(insn, cpu_src1);
4212 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4213 rs2 = GET_FIELD(insn, 27, 31);
4214 gen_movl_reg_TN(rs2, cpu_src2);
4215 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4216 } else if (IS_IMM) { /* immediate */
4217 rs2 = GET_FIELDs(insn, 19, 31);
4218 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4219 } else { /* register */
4220 rs2 = GET_FIELD(insn, 27, 31);
4222 gen_movl_reg_TN(rs2, cpu_src2);
4223 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4225 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4227 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4228 (xop > 0x17 && xop <= 0x1d ) ||
4229 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4231 case 0x0: /* load unsigned word */
4232 gen_address_mask(dc, cpu_addr);
4233 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4235 case 0x1: /* load unsigned byte */
4236 gen_address_mask(dc, cpu_addr);
4237 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4239 case 0x2: /* load unsigned halfword */
4240 gen_address_mask(dc, cpu_addr);
4241 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4243 case 0x3: /* load double word */
4249 save_state(dc, cpu_cond);
4250 r_const = tcg_const_i32(7);
4251 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4252 r_const); // XXX remove
4253 tcg_temp_free(r_const);
4254 gen_address_mask(dc, cpu_addr);
4255 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4256 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4257 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4258 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4259 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4260 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4261 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4264 case 0x9: /* load signed byte */
4265 gen_address_mask(dc, cpu_addr);
4266 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4268 case 0xa: /* load signed halfword */
4269 gen_address_mask(dc, cpu_addr);
4270 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4272 case 0xd: /* ldstub -- XXX: should be atomically */
4276 gen_address_mask(dc, cpu_addr);
4277 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4278 r_const = tcg_const_tl(0xff);
4279 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4280 tcg_temp_free(r_const);
4283 case 0x0f: /* swap register with memory. Also
4285 CHECK_IU_FEATURE(dc, SWAP);
4286 gen_movl_reg_TN(rd, cpu_val);
4287 gen_address_mask(dc, cpu_addr);
4288 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4289 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4290 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4292 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4293 case 0x10: /* load word alternate */
4294 #ifndef TARGET_SPARC64
4297 if (!supervisor(dc))
4300 save_state(dc, cpu_cond);
4301 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4303 case 0x11: /* load unsigned byte alternate */
4304 #ifndef TARGET_SPARC64
4307 if (!supervisor(dc))
4310 save_state(dc, cpu_cond);
4311 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4313 case 0x12: /* load unsigned halfword alternate */
4314 #ifndef TARGET_SPARC64
4317 if (!supervisor(dc))
4320 save_state(dc, cpu_cond);
4321 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4323 case 0x13: /* load double word alternate */
4324 #ifndef TARGET_SPARC64
4327 if (!supervisor(dc))
4332 save_state(dc, cpu_cond);
4333 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4335 case 0x19: /* load signed byte alternate */
4336 #ifndef TARGET_SPARC64
4339 if (!supervisor(dc))
4342 save_state(dc, cpu_cond);
4343 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4345 case 0x1a: /* load signed halfword alternate */
4346 #ifndef TARGET_SPARC64
4349 if (!supervisor(dc))
4352 save_state(dc, cpu_cond);
4353 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4355 case 0x1d: /* ldstuba -- XXX: should be atomically */
4356 #ifndef TARGET_SPARC64
4359 if (!supervisor(dc))
4362 save_state(dc, cpu_cond);
4363 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4365 case 0x1f: /* swap reg with alt. memory. Also
4367 CHECK_IU_FEATURE(dc, SWAP);
4368 #ifndef TARGET_SPARC64
4371 if (!supervisor(dc))
4374 save_state(dc, cpu_cond);
4375 gen_movl_reg_TN(rd, cpu_val);
4376 gen_swap_asi(cpu_val, cpu_addr, insn);
4379 #ifndef TARGET_SPARC64
4380 case 0x30: /* ldc */
4381 case 0x31: /* ldcsr */
4382 case 0x33: /* lddc */
4386 #ifdef TARGET_SPARC64
4387 case 0x08: /* V9 ldsw */
4388 gen_address_mask(dc, cpu_addr);
4389 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4391 case 0x0b: /* V9 ldx */
4392 gen_address_mask(dc, cpu_addr);
4393 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4395 case 0x18: /* V9 ldswa */
4396 save_state(dc, cpu_cond);
4397 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4399 case 0x1b: /* V9 ldxa */
4400 save_state(dc, cpu_cond);
4401 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4403 case 0x2d: /* V9 prefetch, no effect */
4405 case 0x30: /* V9 ldfa */
4406 save_state(dc, cpu_cond);
4407 gen_ldf_asi(cpu_addr, insn, 4, rd);
4409 case 0x33: /* V9 lddfa */
4410 save_state(dc, cpu_cond);
4411 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4413 case 0x3d: /* V9 prefetcha, no effect */
4415 case 0x32: /* V9 ldqfa */
4416 CHECK_FPU_FEATURE(dc, FLOAT128);
4417 save_state(dc, cpu_cond);
4418 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4424 gen_movl_TN_reg(rd, cpu_val);
4425 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4428 } else if (xop >= 0x20 && xop < 0x24) {
4429 if (gen_trap_ifnofpu(dc, cpu_cond))
4431 save_state(dc, cpu_cond);
4433 case 0x20: /* load fpreg */
4434 gen_address_mask(dc, cpu_addr);
4435 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4437 case 0x21: /* ldfsr, V9 ldxfsr */
4438 #ifdef TARGET_SPARC64
4439 gen_address_mask(dc, cpu_addr);
4441 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4442 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4446 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4447 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4451 case 0x22: /* load quad fpreg */
4455 CHECK_FPU_FEATURE(dc, FLOAT128);
4456 r_const = tcg_const_i32(dc->mem_idx);
4457 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4458 tcg_temp_free(r_const);
4459 gen_op_store_QT0_fpr(QFPREG(rd));
4462 case 0x23: /* load double fpreg */
4466 r_const = tcg_const_i32(dc->mem_idx);
4467 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4468 tcg_temp_free(r_const);
4469 gen_op_store_DT0_fpr(DFPREG(rd));
4475 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4476 xop == 0xe || xop == 0x1e) {
4477 gen_movl_reg_TN(rd, cpu_val);
4479 case 0x4: /* store word */
4480 gen_address_mask(dc, cpu_addr);
4481 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4483 case 0x5: /* store byte */
4484 gen_address_mask(dc, cpu_addr);
4485 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4487 case 0x6: /* store halfword */
4488 gen_address_mask(dc, cpu_addr);
4489 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4491 case 0x7: /* store double word */
4495 TCGv r_low, r_const;
4497 save_state(dc, cpu_cond);
4498 gen_address_mask(dc, cpu_addr);
4499 r_const = tcg_const_i32(7);
4500 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4501 r_const); // XXX remove
4502 tcg_temp_free(r_const);
4503 r_low = tcg_temp_new(TCG_TYPE_TL);
4504 gen_movl_reg_TN(rd + 1, r_low);
4505 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4507 tcg_temp_free(r_low);
4508 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4511 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4512 case 0x14: /* store word alternate */
4513 #ifndef TARGET_SPARC64
4516 if (!supervisor(dc))
4519 save_state(dc, cpu_cond);
4520 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4522 case 0x15: /* store byte alternate */
4523 #ifndef TARGET_SPARC64
4526 if (!supervisor(dc))
4529 save_state(dc, cpu_cond);
4530 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4532 case 0x16: /* store halfword alternate */
4533 #ifndef TARGET_SPARC64
4536 if (!supervisor(dc))
4539 save_state(dc, cpu_cond);
4540 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4542 case 0x17: /* store double word alternate */
4543 #ifndef TARGET_SPARC64
4546 if (!supervisor(dc))
4552 save_state(dc, cpu_cond);
4553 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4557 #ifdef TARGET_SPARC64
4558 case 0x0e: /* V9 stx */
4559 gen_address_mask(dc, cpu_addr);
4560 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4562 case 0x1e: /* V9 stxa */
4563 save_state(dc, cpu_cond);
4564 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4570 } else if (xop > 0x23 && xop < 0x28) {
4571 if (gen_trap_ifnofpu(dc, cpu_cond))
4573 save_state(dc, cpu_cond);
4575 case 0x24: /* store fpreg */
4576 gen_address_mask(dc, cpu_addr);
4577 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4579 case 0x25: /* stfsr, V9 stxfsr */
4580 #ifdef TARGET_SPARC64
4581 gen_address_mask(dc, cpu_addr);
4582 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4584 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4586 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4587 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4590 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4591 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4595 #ifdef TARGET_SPARC64
4596 /* V9 stqf, store quad fpreg */
4600 CHECK_FPU_FEATURE(dc, FLOAT128);
4601 gen_op_load_fpr_QT0(QFPREG(rd));
4602 r_const = tcg_const_i32(dc->mem_idx);
4603 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4604 tcg_temp_free(r_const);
4607 #else /* !TARGET_SPARC64 */
4608 /* stdfq, store floating point queue */
4609 #if defined(CONFIG_USER_ONLY)
4612 if (!supervisor(dc))
4614 if (gen_trap_ifnofpu(dc, cpu_cond))
4619 case 0x27: /* store double fpreg */
4623 gen_op_load_fpr_DT0(DFPREG(rd));
4624 r_const = tcg_const_i32(dc->mem_idx);
4625 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4626 tcg_temp_free(r_const);
4632 } else if (xop > 0x33 && xop < 0x3f) {
4633 save_state(dc, cpu_cond);
4635 #ifdef TARGET_SPARC64
4636 case 0x34: /* V9 stfa */
4637 gen_stf_asi(cpu_addr, insn, 4, rd);
4639 case 0x36: /* V9 stqfa */
4643 CHECK_FPU_FEATURE(dc, FLOAT128);
4644 r_const = tcg_const_i32(7);
4645 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4647 tcg_temp_free(r_const);
4648 gen_op_load_fpr_QT0(QFPREG(rd));
4649 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4652 case 0x37: /* V9 stdfa */
4653 gen_op_load_fpr_DT0(DFPREG(rd));
4654 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4656 case 0x3c: /* V9 casa */
4657 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4658 gen_movl_TN_reg(rd, cpu_val);
4660 case 0x3e: /* V9 casxa */
4661 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4662 gen_movl_TN_reg(rd, cpu_val);
4665 case 0x34: /* stc */
4666 case 0x35: /* stcsr */
4667 case 0x36: /* stdcq */
4668 case 0x37: /* stdc */
4680 /* default case for non jump instructions */
4681 if (dc->npc == DYNAMIC_PC) {
4682 dc->pc = DYNAMIC_PC;
4684 } else if (dc->npc == JUMP_PC) {
4685 /* we can do a static jump */
4686 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4690 dc->npc = dc->npc + 4;
4698 save_state(dc, cpu_cond);
4699 r_const = tcg_const_i32(TT_ILL_INSN);
4700 tcg_gen_helper_0_1(raise_exception, r_const);
4701 tcg_temp_free(r_const);
4709 save_state(dc, cpu_cond);
4710 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4711 tcg_gen_helper_0_1(raise_exception, r_const);
4712 tcg_temp_free(r_const);
4716 #if !defined(CONFIG_USER_ONLY)
4721 save_state(dc, cpu_cond);
4722 r_const = tcg_const_i32(TT_PRIV_INSN);
4723 tcg_gen_helper_0_1(raise_exception, r_const);
4724 tcg_temp_free(r_const);
4730 save_state(dc, cpu_cond);
4731 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4734 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4736 save_state(dc, cpu_cond);
4737 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4741 #ifndef TARGET_SPARC64
4746 save_state(dc, cpu_cond);
4747 r_const = tcg_const_i32(TT_NCP_INSN);
4748 tcg_gen_helper_0_1(raise_exception, r_const);
4749 tcg_temp_free(r_const);
4756 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4757 int spc, CPUSPARCState *env)
4759 target_ulong pc_start, last_pc;
4760 uint16_t *gen_opc_end;
4761 DisasContext dc1, *dc = &dc1;
4766 memset(dc, 0, sizeof(DisasContext));
4771 dc->npc = (target_ulong) tb->cs_base;
4772 dc->mem_idx = cpu_mmu_index(env);
4774 if ((dc->def->features & CPU_FEATURE_FLOAT))
4775 dc->fpu_enabled = cpu_fpu_enabled(env);
4777 dc->fpu_enabled = 0;
4778 #ifdef TARGET_SPARC64
4779 dc->address_mask_32bit = env->pstate & PS_AM;
4781 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4783 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4784 cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4785 cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4787 cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4790 cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4791 cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4794 max_insns = tb->cflags & CF_COUNT_MASK;
4796 max_insns = CF_COUNT_MASK;
4799 if (env->nb_breakpoints > 0) {
4800 for(j = 0; j < env->nb_breakpoints; j++) {
4801 if (env->breakpoints[j] == dc->pc) {
4802 if (dc->pc != pc_start)
4803 save_state(dc, cpu_cond);
4804 tcg_gen_helper_0_0(helper_debug);
4813 fprintf(logfile, "Search PC...\n");
4814 j = gen_opc_ptr - gen_opc_buf;
4818 gen_opc_instr_start[lj++] = 0;
4819 gen_opc_pc[lj] = dc->pc;
4820 gen_opc_npc[lj] = dc->npc;
4821 gen_opc_instr_start[lj] = 1;
4822 gen_opc_icount[lj] = num_insns;
4825 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4828 disas_sparc_insn(dc);
4833 /* if the next PC is different, we abort now */
4834 if (dc->pc != (last_pc + 4))
4836 /* if we reach a page boundary, we stop generation so that the
4837 PC of a TT_TFAULT exception is always in the right page */
4838 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4840 /* if single step mode, we generate only one instruction and
4841 generate an exception */
4842 if (env->singlestep_enabled) {
4843 tcg_gen_movi_tl(cpu_pc, dc->pc);
4847 } while ((gen_opc_ptr < gen_opc_end) &&
4848 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4849 num_insns < max_insns);
4852 tcg_temp_free(cpu_addr);
4853 tcg_temp_free(cpu_val);
4854 tcg_temp_free(cpu_dst);
4855 tcg_temp_free(cpu_tmp64);
4856 tcg_temp_free(cpu_tmp32);
4857 tcg_temp_free(cpu_tmp0);
4858 if (tb->cflags & CF_LAST_IO)
4861 if (dc->pc != DYNAMIC_PC &&
4862 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4863 /* static PC and NPC: we can use direct chaining */
4864 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4866 if (dc->pc != DYNAMIC_PC)
4867 tcg_gen_movi_tl(cpu_pc, dc->pc);
4868 save_npc(dc, cpu_cond);
4872 gen_icount_end(tb, num_insns);
4873 *gen_opc_ptr = INDEX_op_end;
4875 j = gen_opc_ptr - gen_opc_buf;
4878 gen_opc_instr_start[lj++] = 0;
4884 gen_opc_jump_pc[0] = dc->jump_pc[0];
4885 gen_opc_jump_pc[1] = dc->jump_pc[1];
4887 tb->size = last_pc + 4 - pc_start;
4888 tb->icount = num_insns;
4891 if (loglevel & CPU_LOG_TB_IN_ASM) {
4892 fprintf(logfile, "--------------\n");
4893 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4894 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4895 fprintf(logfile, "\n");
4900 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4902 gen_intermediate_code_internal(tb, 0, env);
4905 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4907 gen_intermediate_code_internal(tb, 1, env);
4910 void gen_intermediate_code_init(CPUSPARCState *env)
4914 static const char * const gregnames[8] = {
4915 NULL, // g0 not used
4924 static const char * const fregnames[64] = {
4925 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4926 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4927 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4928 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4929 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4930 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4931 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4932 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4935 /* init various static tables */
4939 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4940 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4941 offsetof(CPUState, regwptr),
4943 #ifdef TARGET_SPARC64
4944 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4945 TCG_AREG0, offsetof(CPUState, xcc),
4947 cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4948 TCG_AREG0, offsetof(CPUState, asi),
4950 cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4951 TCG_AREG0, offsetof(CPUState, fprs),
4953 cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4954 TCG_AREG0, offsetof(CPUState, gsr),
4956 cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4958 offsetof(CPUState, tick_cmpr),
4960 cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4962 offsetof(CPUState, stick_cmpr),
4964 cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4966 offsetof(CPUState, hstick_cmpr),
4968 cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4969 offsetof(CPUState, hintp),
4971 cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4972 offsetof(CPUState, htba),
4974 cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4975 offsetof(CPUState, hver),
4977 cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4978 offsetof(CPUState, ssr), "ssr");
4979 cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4980 offsetof(CPUState, version), "ver");
4982 cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
4983 TCG_AREG0, offsetof(CPUState, wim),
4986 cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
4987 TCG_AREG0, offsetof(CPUState, cond),
4989 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4990 TCG_AREG0, offsetof(CPUState, cc_src),
4992 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4993 offsetof(CPUState, cc_src2),
4995 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4996 TCG_AREG0, offsetof(CPUState, cc_dst),
4998 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4999 TCG_AREG0, offsetof(CPUState, psr),
5001 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
5002 TCG_AREG0, offsetof(CPUState, fsr),
5004 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
5005 TCG_AREG0, offsetof(CPUState, pc),
5007 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
5008 TCG_AREG0, offsetof(CPUState, npc),
5010 cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
5011 TCG_AREG0, offsetof(CPUState, y), "y");
5012 #ifndef CONFIG_USER_ONLY
5013 cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
5014 TCG_AREG0, offsetof(CPUState, tbr),
5017 for (i = 1; i < 8; i++)
5018 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
5019 offsetof(CPUState, gregs[i]),
5021 for (i = 0; i < TARGET_FPREGS; i++)
5022 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
5023 offsetof(CPUState, fpr[i]),
5026 /* register helpers */
5029 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5034 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5035 unsigned long searched_pc, int pc_pos, void *puc)
5038 env->pc = gen_opc_pc[pc_pos];
5039 npc = gen_opc_npc[pc_pos];
5041 /* dynamic NPC: already stored */
5042 } else if (npc == 2) {
5043 target_ulong t2 = (target_ulong)(unsigned long)puc;
5044 /* jump PC: use T2 and the jump targets of the translation */
5046 env->npc = gen_opc_jump_pc[0];
5048 env->npc = gen_opc_jump_pc[1];