4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
/* Sentinel values stored in DisasContext.pc/npc instead of a real address:
   DYNAMIC_PC means the PC is only known at run time (held in cpu_pc/cpu_npc);
   JUMP_PC means npc is one of the two values in jump_pc[]. */
#define DYNAMIC_PC 1 /* dynamic pc value */
#define JUMP_PC 2 /* dynamic pc value which takes only two values
according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
/* Condition-code sources/destination and current CC op selector. */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
/* NOTE(review): the matching #else/#endif for this guard is not visible in
   this extraction — confirm against the full file. */
#ifndef CONFIG_USER_ONLY
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
/* SPARC64-only state: extended condition codes, current ASI, FP status. */
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
/* local register indexes (only used inside old micro ops) */
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
#include "gen-icount.h"
/* Per-translation-block decoder state.  NOTE(review): the closing
   "} DisasContext;" and some fields referenced elsewhere (e.g. mem_idx,
   fpu_enabled) are outside this extraction — confirm against the full file. */
typedef struct DisasContext {
target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
int address_mask_32bit; /* nonzero: mask addresses to 32 bits (PSTATE.AM) */
uint32_t cc_op; /* current CC operation */
struct TranslationBlock *tb;
/* Extract the bit field [FROM..TO] of a 32-bit word, counting bit 0 as the
   MSB (non-native bit order, as used in the SPARC instruction diagrams).
   The mask is built with an unsigned right shift so that a full-width field
   (FROM == 0, TO == 31) is well defined; the previous
   "(1 << width) - 1" form invoked undefined behaviour for width == 32
   (shift by the type width) and signed-overflow for width == 31 + 1. */
#define GET_FIELD(X, FROM, TO) \
    (((X) >> (31 - (TO))) & (0xffffffffU >> (31 - (TO) + (FROM))))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extended variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map an instruction's double/quad FP register number to an index into
   cpu_fpr[].  NOTE(review): two definitions of each macro appear here; the
   guarding #ifdef TARGET_SPARC64 / #else / #endif lines are presumably
   missing from this extraction — confirm before editing. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
/* Masks applied to the trap number of a Tcc instruction. */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/* Sign-extend the low `len` bits of x.  NOTE(review): an interior line
   (and braces) appear to be missing here — as written, `len` is used
   directly as the shift count; verify against the full file.  Right shift
   of a negative int is implementation-defined (arithmetic on the targets
   QEMU supports). */
static int sign_extend(int x, int len)
return (x << len) >> len;
/* Bit 13 of an instruction word selects the immediate operand form. */
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
/* Copy FP register pair src/src+1 into the env scratch double dt0. */
static void gen_op_load_fpr_DT0(unsigned int src)
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
offsetof(CPU_DoubleU, l.upper));
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
offsetof(CPU_DoubleU, l.lower));
/* Copy FP register pair src/src+1 into the env scratch double dt1. */
static void gen_op_load_fpr_DT1(unsigned int src)
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
offsetof(CPU_DoubleU, l.upper));
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
offsetof(CPU_DoubleU, l.lower));
/* Copy the env scratch double dt0 back into FP register pair dst/dst+1. */
static void gen_op_store_DT0_fpr(unsigned int dst)
tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
offsetof(CPU_DoubleU, l.upper));
tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
offsetof(CPU_DoubleU, l.lower));
/* Copy FP register quad src..src+3 into the env scratch quad qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.upmost));
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.upper));
tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.lower));
tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.lowest));
/* Copy FP register quad src..src+3 into the env scratch quad qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
offsetof(CPU_QuadU, l.upmost));
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
offsetof(CPU_QuadU, l.upper));
tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
offsetof(CPU_QuadU, l.lower));
tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
offsetof(CPU_QuadU, l.lowest));
/* Copy the env scratch quad qt0 back into FP registers dst..dst+3. */
static void gen_op_store_QT0_fpr(unsigned int dst)
tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.upmost));
tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.upper));
tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.lower));
tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
offsetof(CPU_QuadU, l.lowest));
/* Privilege-level predicates on the DisasContext.  NOTE(review): the
   #else/#endif lines pairing these #ifdef blocks are not visible in this
   extraction — confirm against the full file. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#ifdef TARGET_SPARC64
/* AM_CHECK: should addresses be truncated to 32 bits (PSTATE.AM)? */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#define AM_CHECK(dc) (1)
/* Truncate `addr` to 32 bits when the address mask is in effect.
   NOTE(review): the `if (AM_CHECK(dc))` guard line appears to be missing
   from this extraction. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
#ifdef TARGET_SPARC64
tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
/* Load architectural register `reg` into TCG value `tn`: %g0 reads as 0,
   %g1-%g7 live in cpu_gregs[], windowed registers (reg >= 8) are loaded
   through cpu_regwptr.  NOTE(review): the if/else selection lines are
   missing from this extraction. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
tcg_gen_movi_tl(tn, 0);
tcg_gen_mov_tl(tn, cpu_gregs[reg]);
tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Store TCG value `tn` into architectural register `reg` (writes to %g0
   are discarded in the full file; selection lines missing here too). */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
tcg_gen_mov_tl(cpu_gregs[reg], tn);
tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Emit a jump to (pc, npc).  If both targets lie in the same guest page as
   the current TB, chain directly with goto_tb/exit_tb(tb | tb_num);
   otherwise just update cpu_pc/cpu_npc (the slow exit path lines are
   partially missing from this extraction, as is `tb = s->tb;`). */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
target_ulong pc, target_ulong npc)
TranslationBlock *tb;
if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
(npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
/* jump to same page: we can use a direct jump */
tcg_gen_goto_tb(tb_num);
tcg_gen_movi_tl(cpu_pc, pc);
tcg_gen_movi_tl(cpu_npc, npc);
tcg_gen_exit_tb((long)tb + tb_num);
/* jump to another page: currently not optimized */
tcg_gen_movi_tl(cpu_pc, pc);
tcg_gen_movi_tl(cpu_npc, npc);
/* reg = (src >> PSR_NEG_SHIFT) & 1 -- extract the N (negative) flag. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
/* reg = (src >> PSR_ZERO_SHIFT) & 1 -- extract the Z (zero) flag. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
/* reg = (src >> PSR_OVF_SHIFT) & 1 -- extract the V (overflow) flag. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
/* reg = (src >> PSR_CARRY_SHIFT) & 1 -- extract the C (carry) flag. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
/* Clear all 32-bit condition codes. */
static inline void gen_cc_clear_icc(void)
tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
/* Clear all 64-bit (extended) condition codes. */
static inline void gen_cc_clear_xcc(void)
tcg_gen_movi_i32(cpu_xcc, 0);
/* NOTE(review): the next two lines look like remnants of a reference
   comment describing the old micro-op — confirm against the full file. */
env->psr |= PSR_ZERO;
if ((int32_t) T0 < 0)
/* Set the icc N and Z flags from the low 32 bits of `dst`:
   Z if (dst & 0xffffffff) == 0, N if the 32-bit value is negative. */
static inline void gen_cc_NZ_icc(TCGv dst)
l1 = gen_new_label();
l2 = gen_new_label();
r_temp = tcg_temp_new();
tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
tcg_gen_ext32s_tl(r_temp, dst);
tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
/* Set the xcc N and Z flags from the full 64-bit value of `dst`. */
static inline void gen_cc_NZ_xcc(TCGv dst)
l1 = gen_new_label();
l2 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
/* NOTE(review): stray remnant of an old-op reference comment. */
env->psr |= PSR_CARRY;
/* Set icc C after an addition: carry out iff the 32-bit result (dst) is
   unsigned-less-than an operand (src1). */
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
TCGv r_temp1, r_temp2;
l1 = gen_new_label();
r_temp1 = tcg_temp_new();
r_temp2 = tcg_temp_new();
tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
tcg_temp_free(r_temp1);
tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Same for xcc, using the full 64-bit values. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
l1 = gen_new_label();
tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
/* NOTE(review): stray remnant of the overflow-formula reference comment:
   ((src1 ^ src2 ^ -1) & (src1 ^ dst)) & (1 << 31). */
if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
/* Set icc V after an addition: overflow iff the operands had equal sign
   bits and the result's sign differs (classic two's-complement test). */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
tcg_temp_free(r_temp);
tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
#ifdef TARGET_SPARC64
/* Same overflow test for xcc at bit 63. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
tcg_temp_free(r_temp);
tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
/* Raise TT_TOVF (tag overflow trap) if the addition dst = src1 + src2
   overflowed in 32 bits (same sign-bit test as gen_cc_V_add_icc). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
tcg_temp_free(r_temp);
/* Raise TT_TOVF if either operand has its low two (tag) bits set,
   as required by the tagged add/sub trapping instructions. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
l1 = gen_new_label();
tcg_gen_or_tl(cpu_tmp0, src1, src2);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
/* dst = src1 + imm, latching operands/result into cpu_cc_* for the lazy
   condition-code evaluation machinery. */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_movi_tl(cpu_cc_src2, src2);
tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 + src2, latching operands/result into cpu_cc_*. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 + imm + carry (ADDX immediate form). */
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_movi_tl(cpu_cc_src2, src2);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 + src2 + carry (ADDX register form). */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged add (TADDcc): same arithmetic as gen_op_add_cc. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged add with trap on tag/overflow (TADDccTV). */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
gen_tag_tv(cpu_cc_src, cpu_cc_src2);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Raise TT_TOVF if the subtraction dst = src1 - src2 overflowed in 32 bits
   (operand signs differ and result sign differs from src1). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
tcg_temp_free(r_temp);
/* dst = src1 - imm, updating the lazy cc state and dc->cc_op.
   NOTE(review): the `if (src2 == 0)` selection between the CC_OP_LOGIC
   fast path and the CC_OP_SUB path appears to be missing from this
   extraction — confirm against the full file. */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_movi_tl(cpu_cc_src2, src2);
tcg_gen_mov_tl(cpu_cc_dst, src1);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
dc->cc_op = CC_OP_LOGIC;
tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
dc->cc_op = CC_OP_SUB;
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 - src2, latching operands/result into cpu_cc_*. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 - imm - carry (SUBX immediate form). */
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_movi_tl(cpu_cc_src2, src2);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 - src2 - carry (SUBX register form). */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
gen_mov_reg_C(cpu_tmp0, cpu_psr);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged subtract (TSUBcc): same arithmetic as gen_op_sub_cc. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Tagged subtract with trap on tag/overflow (TSUBccTV). */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
gen_tag_tv(cpu_cc_src, cpu_cc_src2);
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* MULScc: one step of the SPARC V8 multiply-step instruction.  Shifts the
   Y register, conditionally zeroes the second operand on Y's LSB, forms
   the shifted partial sum, then adds and updates icc.  NOTE(review):
   several label/ordering lines are missing from this extraction. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
r_temp = tcg_temp_new();
/* If Y bit 0 is clear, the addend is forced to zero. */
tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
tcg_gen_movi_tl(cpu_cc_src2, 0);
// env->y = (b2 << 31) | (env->y >> 1);
tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
tcg_gen_shli_tl(r_temp, r_temp, 31);
tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
/* b1 = N ^ V: the sign of the accumulated product so far. */
gen_mov_reg_N(cpu_tmp0, cpu_psr);
gen_mov_reg_V(r_temp, cpu_psr);
tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
tcg_temp_free(r_temp);
// T0 = (b1 << 31) | (T0 >> 1);
tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
/* do addition and update flags */
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
gen_cc_NZ_icc(cpu_cc_dst);
gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Unsigned 32x32 -> 64 multiply (UMUL): product in dst (full 64 bits on
   sparc64, low 32 otherwise), upper half also written to the Y register. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
TCGv_i64 r_temp, r_temp2;
r_temp = tcg_temp_new_i64();
r_temp2 = tcg_temp_new_i64();
tcg_gen_extu_tl_i64(r_temp, src2);
tcg_gen_extu_tl_i64(r_temp2, src1);
tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
tcg_temp_free_i64(r_temp);
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(dst, r_temp2);
tcg_gen_trunc_i64_tl(dst, r_temp2);
tcg_temp_free_i64(r_temp2);
/* Signed 32x32 -> 64 multiply (SMUL); identical to gen_op_umul except the
   operands are sign-extended into 64 bits. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
TCGv_i64 r_temp, r_temp2;
r_temp = tcg_temp_new_i64();
r_temp2 = tcg_temp_new_i64();
tcg_gen_ext_tl_i64(r_temp, src2);
tcg_gen_ext_tl_i64(r_temp2, src1);
tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
tcg_temp_free_i64(r_temp);
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(dst, r_temp2);
tcg_gen_trunc_i64_tl(dst, r_temp2);
tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if the divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
r_const = tcg_const_i32(TT_DIV_ZERO);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
/* 64-bit signed division (SDIVX) with the INT64_MIN / -1 special case
   forced to INT64_MIN (hardware behaviour; the host div would trap). */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
l1 = gen_new_label();
l2 = gen_new_label();
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_mov_tl(cpu_cc_src2, src2);
gen_trap_ifdivzero_tl(cpu_cc_src2);
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
tcg_gen_movi_i64(dst, INT64_MIN);
tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
/* Update icc after a division: N/Z from the result, V from the helper's
   overflow indication in cpu_cc_src2. */
static inline void gen_op_div_cc(TCGv dst)
tcg_gen_mov_tl(cpu_cc_dst, dst);
gen_cc_NZ_icc(cpu_cc_dst);
l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
/* Integer condition evaluators: each computes a 0/1 value in `dst` from
   the condition-code word `src` (psr or xcc), per the SPARC Bicc table. */
/* BA: always. */
static inline void gen_op_eval_ba(TCGv dst)
tcg_gen_movi_tl(dst, 1);
/* BE: Z. */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
gen_mov_reg_Z(dst, src);
/* BLE: Z | (N ^ V). */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
gen_mov_reg_Z(cpu_tmp0, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
/* BL: N ^ V. */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
/* BLEU: C | Z. */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
/* BCS: C. */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
gen_mov_reg_C(dst, src);
/* BVS: V. */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
gen_mov_reg_V(dst, src);
/* BN: never. */
static inline void gen_op_eval_bn(TCGv dst)
tcg_gen_movi_tl(dst, 0);
/* BNEG: N. */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
gen_mov_reg_N(dst, src);
/* BNE: !Z. */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
gen_mov_reg_Z(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BG: !(Z | (N ^ V)). */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
gen_mov_reg_Z(cpu_tmp0, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BGE: !(N ^ V). */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BGU: !(C | Z). */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BCC: !C. */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
gen_mov_reg_C(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BPOS: !N. */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
gen_mov_reg_N(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
/* BVC: !V. */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
gen_mov_reg_V(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
/* NOTE(review): remnant of the table comment describing the two-bit
   FCC fields of the FSR (0 =, 1 <, 2 >, 3 unordered). */
FPSR bit field FCC1 | FCC0:
/* Extract FCC0 (low bit of the selected FCC field) from the FSR value. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
unsigned int fcc_offset)
tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract FCC1 (high bit of the selected FCC field) from the FSR value. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
unsigned int fcc_offset)
tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
tcg_gen_andi_tl(reg, reg, 0x1);
/* Float condition evaluators: 0/1 in `dst` from the FCC field of `src`
   (the FSR) at `fcc_offset`, per the SPARC FBfcc condition table. */
/* FBNE: FCC != 0, i.e. FCC0 | FCC1. */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
/* FBUL: 1 or 3, reduces to FCC0 alone. */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
/* FBL: exactly 1, i.e. FCC0 & !FCC1. */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* FBUG: 2 or 3, reduces to FCC1 alone. */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC1(dst, src, fcc_offset);
/* FBG: exactly 2, i.e. !FCC0 & FCC1. */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* FBU: exactly 3 (unordered), i.e. FCC0 & FCC1. */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* FBE: exactly 0, i.e. !(FCC0 | FCC1). */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* FBGE: 0 or 2, reduces to !FCC0. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* FBLE: 0 or 1, reduces to !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC1(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
tcg_gen_xori_tl(dst, dst, 0x1);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
unsigned int fcc_offset)
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, cpu_tmp0);
tcg_gen_xori_tl(dst, dst, 0x1);
/* Emit a two-way conditional branch: if r_cond != 0 go to pc1, else pc2.
   NOTE(review): gen_set_label() lines appear to be missing from this
   extraction. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
target_ulong pc2, TCGv r_cond)
l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
gen_goto_tb(dc, 0, pc1, pc1 + 4);
gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Branch with annul bit set: taken path executes the delay slot at pc1,
   untaken path skips it (pc2 + 4). */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
target_ulong pc2, TCGv r_cond)
l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
gen_goto_tb(dc, 0, pc2, pc1);
gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a JUMP_PC npc: cpu_npc = r_cond ? npc1 : npc2. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
l1 = gen_new_label();
l2 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
tcg_gen_movi_tl(cpu_npc, npc1);
tcg_gen_movi_tl(cpu_npc, npc2);
/* call this function before using the condition register as it may
have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
if (dc->npc == JUMP_PC) {
gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
dc->npc = DYNAMIC_PC;
/* Make cpu_npc match dc->npc, resolving a pending JUMP_PC first. */
static inline void save_npc(DisasContext *dc, TCGv cond)
if (dc->npc == JUMP_PC) {
gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
dc->npc = DYNAMIC_PC;
} else if (dc->npc != DYNAMIC_PC) {
tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Flush pc (and, in the full file, npc) to the CPU state before a
   possible exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
tcg_gen_movi_tl(cpu_pc, dc->pc);
/* pc = npc, resolving a pending JUMP_PC/DYNAMIC_PC npc first. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
if (dc->npc == JUMP_PC) {
gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
tcg_gen_mov_tl(cpu_pc, cpu_npc);
dc->pc = DYNAMIC_PC;
} else if (dc->npc == DYNAMIC_PC) {
tcg_gen_mov_tl(cpu_pc, cpu_npc);
dc->pc = DYNAMIC_PC;
/* Advance: pc = npc, npc += 4. */
static inline void gen_op_next_insn(void)
tcg_gen_mov_tl(cpu_pc, cpu_npc);
tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition `cond` against condition-code set `cc`
   (icc/xcc) into r_dst, forcing lazy flags to be computed first.
   NOTE(review): the switch/case labels and r_src selection lines are
   missing from this extraction — only the per-condition calls remain. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
#ifdef TARGET_SPARC64
switch (dc->cc_op) {
gen_helper_compute_psr();
dc->cc_op = CC_OP_FLAGS;
gen_op_eval_bn(r_dst);
gen_op_eval_be(r_dst, r_src);
gen_op_eval_ble(r_dst, r_src);
gen_op_eval_bl(r_dst, r_src);
gen_op_eval_bleu(r_dst, r_src);
gen_op_eval_bcs(r_dst, r_src);
gen_op_eval_bneg(r_dst, r_src);
gen_op_eval_bvs(r_dst, r_src);
gen_op_eval_ba(r_dst);
gen_op_eval_bne(r_dst, r_src);
gen_op_eval_bg(r_dst, r_src);
gen_op_eval_bge(r_dst, r_src);
gen_op_eval_bgu(r_dst, r_src);
gen_op_eval_bcc(r_dst, r_src);
gen_op_eval_bpos(r_dst, r_src);
gen_op_eval_bvc(r_dst, r_src);
/* Evaluate float condition `cond` on FCC field `cc` of the FSR into r_dst.
   NOTE(review): the switch/case labels and the offset computation are
   missing from this extraction. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
unsigned int offset;
gen_op_eval_bn(r_dst);
gen_op_eval_fbne(r_dst, cpu_fsr, offset);
gen_op_eval_fblg(r_dst, cpu_fsr, offset);
gen_op_eval_fbul(r_dst, cpu_fsr, offset);
gen_op_eval_fbl(r_dst, cpu_fsr, offset);
gen_op_eval_fbug(r_dst, cpu_fsr, offset);
gen_op_eval_fbg(r_dst, cpu_fsr, offset);
gen_op_eval_fbu(r_dst, cpu_fsr, offset);
gen_op_eval_ba(r_dst);
gen_op_eval_fbe(r_dst, cpu_fsr, offset);
gen_op_eval_fbue(r_dst, cpu_fsr, offset);
gen_op_eval_fbge(r_dst, cpu_fsr, offset);
gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
gen_op_eval_fble(r_dst, cpu_fsr, offset);
gen_op_eval_fbule(r_dst, cpu_fsr, offset);
gen_op_eval_fbo(r_dst, cpu_fsr, offset);
#ifdef TARGET_SPARC64
/* TCG comparison for each BPr register-condition encoding (entries are
   outside this extraction). */
static const int gen_tcg_cond_reg[8] = {
/* r_dst = (r_src <cond> 0) ? 1 : 0 for register branch conditions. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
l1 = gen_new_label();
tcg_gen_movi_tl(r_dst, 0);
tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
tcg_gen_movi_tl(r_dst, 1);
/* XXX: potentially incorrect if dynamic npc */
/* Decode and emit an integer conditional branch (Bicc/BPcc).  cond 0 is
   branch-never, cond 8 branch-always; otherwise evaluate the condition
   and either branch immediately (annul) or record jump_pc[] for JUMP_PC.
   NOTE(review): several lines (annul-bit handling, else arms) are missing
   from this extraction. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
/* unconditional not taken */
dc->pc = dc->npc + 4;
dc->npc = dc->pc + 4;
dc->npc = dc->pc + 4;
} else if (cond == 0x8) {
/* unconditional taken */
dc->npc = dc->pc + 4;
flush_cond(dc, r_cond);
gen_cond(r_cond, cc, cond, dc);
gen_branch_a(dc, target, dc->npc, r_cond);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
/* XXX: potentially incorrect if dynamic npc */
/* Same as do_branch for floating-point branches (FBfcc). */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
/* unconditional not taken */
dc->pc = dc->npc + 4;
dc->npc = dc->pc + 4;
dc->npc = dc->pc + 4;
} else if (cond == 0x8) {
/* unconditional taken */
dc->npc = dc->pc + 4;
flush_cond(dc, r_cond);
gen_fcond(r_cond, cc, cond);
gen_branch_a(dc, target, dc->npc, r_cond);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Register-contents conditional branch (BPr, sparc64 only). */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
TCGv r_cond, TCGv r_reg)
unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
target_ulong target = dc->pc + offset;
flush_cond(dc, r_cond);
gen_cond_reg(r_cond, cond, r_reg);
gen_branch_a(dc, target, dc->npc, r_cond);
dc->jump_pc[0] = target;
dc->jump_pc[1] = dc->npc + 4;
/* FP compare dispatchers: call the helper that updates FCC field `fccno`.
   NOTE(review): the switch/case labels and the #ifdef TARGET_SPARC64 /
   #else split are partially missing from this extraction — the second set
   of definitions below is presumably the non-sparc64 branch. */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
gen_helper_fcmps(r_rs1, r_rs2);
gen_helper_fcmps_fcc1(r_rs1, r_rs2);
gen_helper_fcmps_fcc2(r_rs1, r_rs2);
gen_helper_fcmps_fcc3(r_rs1, r_rs2);
/* Double compare on FCC field fccno (operands taken from dt0/dt1). */
static inline void gen_op_fcmpd(int fccno)
gen_helper_fcmpd_fcc1();
gen_helper_fcmpd_fcc2();
gen_helper_fcmpd_fcc3();
/* Quad compare on FCC field fccno (operands taken from qt0/qt1). */
static inline void gen_op_fcmpq(int fccno)
gen_helper_fcmpq_fcc1();
gen_helper_fcmpq_fcc2();
gen_helper_fcmpq_fcc3();
/* "E" variants signal an exception on unordered operands. */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
gen_helper_fcmpes(r_rs1, r_rs2);
gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
static inline void gen_op_fcmped(int fccno)
gen_helper_fcmped();
gen_helper_fcmped_fcc1();
gen_helper_fcmped_fcc2();
gen_helper_fcmped_fcc3();
static inline void gen_op_fcmpeq(int fccno)
gen_helper_fcmpeq();
gen_helper_fcmpeq_fcc1();
gen_helper_fcmpeq_fcc2();
gen_helper_fcmpeq_fcc3();
/* Non-sparc64 variants: only FCC0 exists, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
gen_helper_fcmps(r_rs1, r_rs2);
static inline void gen_op_fcmpd(int fccno)
static inline void gen_op_fcmpq(int fccno)
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
gen_helper_fcmpes(r_rs1, r_rs2);
static inline void gen_op_fcmped(int fccno)
gen_helper_fcmped();
static inline void gen_op_fcmpeq(int fccno)
gen_helper_fcmpeq();
1492 static inline void gen_op_fpexception_im(int fsr_flags)
1496 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1497 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1498 r_const = tcg_const_i32(TT_FP_EXCP);
1499 gen_helper_raise_exception(r_const);
1500 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), save translator state
 * and raise TT_NFPU_INSN.  Returns nonzero to make the caller abandon
 * the instruction — NOTE(review): the return statements are elided in
 * this listing; confirm against the full source. */
1503 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1505 #if !defined(CONFIG_USER_ONLY)
1506 if (!dc->fpu_enabled) {
1509 save_state(dc, r_cond);
1510 r_const = tcg_const_i32(TT_NFPU_INSN);
1511 gen_helper_raise_exception(r_const);
1512 tcg_temp_free_i32(r_const);
/* Clear the accumulated IEEE current-exception (cexc) and ftt fields of
 * FSR before translating a new FP operation. */
1520 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1522 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset softfloat's pending exception flags via helper, so the next FP
 * op starts from a clean slate before check_ieee_exceptions runs. */
1525 static inline void gen_clear_float_exceptions(void)
1527 gen_helper_clear_float_exceptions();
1531 #ifdef TARGET_SPARC64
/* Return a TCGv_i32 holding the effective ASI for an alternate-space
 * access: either a copy of the %asi register (register-form insn) or a
 * constant taken from insn bits 19..26.  Caller must free the temp.
 * NOTE(review): the IS_IMM branch structure is elided in this listing. */
1532 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1538 r_asi = tcg_temp_new_i32();
1539 tcg_gen_mov_i32(r_asi, cpu_asi);
1541 asi = GET_FIELD(insn, 19, 26);
1542 r_asi = tcg_const_i32(asi);
/* V9: load `size` bytes (sign-extended if `sign`) from an alternate
 * address space into dst.  ASI resolved via gen_get_asi(). */
1547 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1550 TCGv_i32 r_asi, r_size, r_sign;
1552 r_asi = gen_get_asi(insn, addr);
1553 r_size = tcg_const_i32(size);
1554 r_sign = tcg_const_i32(sign);
1555 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1556 tcg_temp_free_i32(r_sign);
1557 tcg_temp_free_i32(r_size);
1558 tcg_temp_free_i32(r_asi);
/* V9: store `size` bytes of src to an alternate address space. */
1561 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1563 TCGv_i32 r_asi, r_size;
1565 r_asi = gen_get_asi(insn, addr);
1566 r_size = tcg_const_i32(size);
1567 gen_helper_st_asi(addr, src, r_asi, r_size);
1568 tcg_temp_free_i32(r_size);
1569 tcg_temp_free_i32(r_asi);
/* V9: FP load from an alternate address space; destination FP register
 * number rd is passed to the helper, which writes the FP regs itself. */
1572 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1574 TCGv_i32 r_asi, r_size, r_rd;
1576 r_asi = gen_get_asi(insn, addr);
1577 r_size = tcg_const_i32(size);
1578 r_rd = tcg_const_i32(rd);
1579 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1580 tcg_temp_free_i32(r_rd);
1581 tcg_temp_free_i32(r_size);
1582 tcg_temp_free_i32(r_asi);
/* V9: FP store to an alternate address space; source FP register number
 * rd is passed to the helper, which reads the FP regs itself. */
1585 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1587 TCGv_i32 r_asi, r_size, r_rd;
1589 r_asi = gen_get_asi(insn, addr);
1590 r_size = tcg_const_i32(size);
1591 r_rd = tcg_const_i32(rd);
1592 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1593 tcg_temp_free_i32(r_rd);
1594 tcg_temp_free_i32(r_size);
1595 tcg_temp_free_i32(r_asi);
/* V9 SWAPA: atomically (on real hw; sequential load+store here) exchange
 * the 32-bit word at [addr] with dst in the given alternate space. */
1598 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1600 TCGv_i32 r_asi, r_size, r_sign;
1602 r_asi = gen_get_asi(insn, addr);
1603 r_size = tcg_const_i32(4);
1604 r_sign = tcg_const_i32(0);
1605 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1606 tcg_temp_free_i32(r_sign);
1607 gen_helper_st_asi(addr, dst, r_asi, r_size);
1608 tcg_temp_free_i32(r_size);
1609 tcg_temp_free_i32(r_asi);
1610 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* V9 LDDA: doubleword load from an alternate space; the helper writes
 * the rd/rd+1 register pair directly (rd passed as a constant). */
1613 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1615 TCGv_i32 r_asi, r_rd;
1617 r_asi = gen_get_asi(insn, addr);
1618 r_rd = tcg_const_i32(rd);
1619 gen_helper_ldda_asi(addr, r_asi, r_rd);
1620 tcg_temp_free_i32(r_rd);
1621 tcg_temp_free_i32(r_asi);
/* V9 STDA: doubleword store to an alternate space.  Concatenates the
 * rd+1 (low) and hi (high) halves into a 64-bit value, then stores it. */
1624 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1626 TCGv_i32 r_asi, r_size;
1628 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1629 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1630 r_asi = gen_get_asi(insn, addr);
1631 r_size = tcg_const_i32(8);
1632 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1633 tcg_temp_free_i32(r_size);
1634 tcg_temp_free_i32(r_asi);
/* V9 CASA: 32-bit compare-and-swap in an alternate space.  The helper
 * compares [addr] with rd's value (r_val1) and conditionally stores
 * val2, returning the old memory value in dst. */
1637 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1643 r_val1 = tcg_temp_new();
1644 gen_movl_reg_TN(rd, r_val1);
1645 r_asi = gen_get_asi(insn, addr);
1646 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1647 tcg_temp_free_i32(r_asi);
1648 tcg_temp_free(r_val1);
/* V9 CASXA: 64-bit compare-and-swap in an alternate space; rd's value
 * is staged in cpu_tmp64 as the compare operand. */
1651 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1656 gen_movl_reg_TN(rd, cpu_tmp64);
1657 r_asi = gen_get_asi(insn, addr);
1658 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1659 tcg_temp_free_i32(r_asi);
1662 #elif !defined(CONFIG_USER_ONLY)
/* SPARC32: load `size` bytes from an alternate address space into dst.
 * The ASI comes straight from insn bits 19..26; the 64-bit helper
 * result is truncated to target-long width.
 * Fix: r_asi/r_size/r_sign are TCGv_i32 (tcg_const_i32), so release
 * them with tcg_temp_free_i32 — matching the TARGET_SPARC64 section and
 * gen_ldstub_asi.  Untyped tcg_temp_free only happened to work because
 * TCGv == TCGv_i32 when TARGET_LONG_BITS is 32. */
1664 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1667 TCGv_i32 r_asi, r_size, r_sign;
1669 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1670 r_size = tcg_const_i32(size);
1671 r_sign = tcg_const_i32(sign);
1672 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1673 tcg_temp_free_i32(r_sign);
1674 tcg_temp_free_i32(r_size);
1675 tcg_temp_free_i32(r_asi);
1676 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* SPARC32: store `size` bytes of src (zero-extended to 64 bits for the
 * helper) to an alternate address space.
 * Fix: free the TCGv_i32 temporaries with tcg_temp_free_i32, consistent
 * with the TARGET_SPARC64 code path (untyped tcg_temp_free relied on
 * TCGv == TCGv_i32 on 32-bit targets). */
1679 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1681 TCGv_i32 r_asi, r_size;
1683 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1684 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1685 r_size = tcg_const_i32(size);
1686 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1687 tcg_temp_free_i32(r_size);
1688 tcg_temp_free_i32(r_asi);
/* SPARC32 SWAPA: exchange the 32-bit word at [addr] in an alternate
 * space with dst (sequential load-then-store here; atomic on real hw).
 * Fix: release the TCGv_i32 temporaries with tcg_temp_free_i32 (r_val
 * already correctly uses tcg_temp_free_i64), consistent with the
 * TARGET_SPARC64 section. */
1691 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1693 TCGv_i32 r_asi, r_size, r_sign;
1696 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1697 r_size = tcg_const_i32(4);
1698 r_sign = tcg_const_i32(0);
1699 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1700 tcg_temp_free_i32(r_sign);
1701 r_val = tcg_temp_new_i64();
1702 tcg_gen_extu_tl_i64(r_val, dst);
1703 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1704 tcg_temp_free_i64(r_val);
1705 tcg_temp_free_i32(r_size);
1706 tcg_temp_free_i32(r_asi);
1707 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* SPARC32 LDDA: doubleword load from an alternate space.  The 64-bit
 * helper result is split: low word into register rd+1, high word into
 * rd (via `hi`).
 * Fix: free the TCGv_i32 temporaries with tcg_temp_free_i32, consistent
 * with the TARGET_SPARC64 section. */
1710 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1712 TCGv_i32 r_asi, r_size, r_sign;
1714 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1715 r_size = tcg_const_i32(8);
1716 r_sign = tcg_const_i32(0);
1717 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1718 tcg_temp_free_i32(r_sign);
1719 tcg_temp_free_i32(r_size);
1720 tcg_temp_free_i32(r_asi);
1721 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1722 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1723 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1724 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1725 gen_movl_TN_reg(rd, hi);
/* SPARC32 STDA: doubleword store to an alternate space.  Concatenates
 * the rd+1 (low) and hi (high) halves into a 64-bit value, then stores.
 * Fix: free the TCGv_i32 temporaries with tcg_temp_free_i32, consistent
 * with the TARGET_SPARC64 section. */
1728 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1730 TCGv_i32 r_asi, r_size;
1732 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1733 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1734 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1735 r_size = tcg_const_i32(8);
1736 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1737 tcg_temp_free_i32(r_size);
1738 tcg_temp_free_i32(r_asi);
1742 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at [addr] in an alternate space into dst, then
 * store 0xff to the same location (atomic on real hardware; generated
 * here as a sequential load + store). */
1743 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1746 TCGv_i32 r_asi, r_size;
1748 gen_ld_asi(dst, addr, insn, 1, 0);
1750 r_val = tcg_const_i64(0xffULL);
1751 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1752 r_size = tcg_const_i32(1);
1753 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1754 tcg_temp_free_i32(r_size);
1755 tcg_temp_free_i32(r_asi);
1756 tcg_temp_free_i64(r_val);
/* Resolve the rs1 operand (insn bits 13..17) to a TCGv: %g0 becomes a
 * constant 0 (leaked — see XXX), globals alias cpu_gregs[], and
 * windowed registers are loaded through cpu_regwptr into `def`.
 * NOTE(review): branch scaffolding and return are elided in listing. */
1760 static inline TCGv get_src1(unsigned int insn, TCGv def)
1765 rs1 = GET_FIELD(insn, 13, 17);
1767 r_rs1 = tcg_const_tl(0); // XXX how to free?
1769 r_rs1 = cpu_gregs[rs1];
1771 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
/* Resolve the second operand to a TCGv: a sign-extended immediate
 * (insn bits 19..31) for the IS_IMM form, otherwise register rs2 (bits
 * 27..31) resolved exactly like get_src1 (%g0 const, globals, or a
 * windowed-register load through cpu_regwptr into `def`).
 * NOTE(review): branch scaffolding and return are elided in listing. */
1775 static inline TCGv get_src2(unsigned int insn, TCGv def)
1779 if (IS_IMM) { /* immediate */
1782 simm = GET_FIELDs(insn, 19, 31);
1783 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1784 } else { /* register */
1787 rs2 = GET_FIELD(insn, 27, 31);
1789 r_rs2 = tcg_const_tl(0); // XXX how to free?
1791 r_rs2 = cpu_gregs[rs2];
1793 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
/* Guard macros: abandon the current instruction (the continuation lines
 * with the goto target are elided in this listing) when the CPU model
 * lacks the given integer-unit / FPU feature bit. */
1798 #define CHECK_IU_FEATURE(dc, FEATURE) \
1799 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1801 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1802 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1805 /* before an instruction, dc->pc must be static */
1806 static void disas_sparc_insn(DisasContext * dc)
1808 unsigned int insn, opc, rs1, rs2, rd;
1811 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1812 tcg_gen_debug_insn_start(dc->pc);
1813 insn = ldl_code(dc->pc);
1814 opc = GET_FIELD(insn, 0, 1);
1816 rd = GET_FIELD(insn, 2, 6);
1818 cpu_src1 = tcg_temp_new(); // const
1819 cpu_src2 = tcg_temp_new(); // const
1822 case 0: /* branches/sethi */
1824 unsigned int xop = GET_FIELD(insn, 7, 9);
1827 #ifdef TARGET_SPARC64
1828 case 0x1: /* V9 BPcc */
1832 target = GET_FIELD_SP(insn, 0, 18);
1833 target = sign_extend(target, 18);
1835 cc = GET_FIELD_SP(insn, 20, 21);
1837 do_branch(dc, target, insn, 0, cpu_cond);
1839 do_branch(dc, target, insn, 1, cpu_cond);
1844 case 0x3: /* V9 BPr */
1846 target = GET_FIELD_SP(insn, 0, 13) |
1847 (GET_FIELD_SP(insn, 20, 21) << 14);
1848 target = sign_extend(target, 16);
1850 cpu_src1 = get_src1(insn, cpu_src1);
1851 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1854 case 0x5: /* V9 FBPcc */
1856 int cc = GET_FIELD_SP(insn, 20, 21);
1857 if (gen_trap_ifnofpu(dc, cpu_cond))
1859 target = GET_FIELD_SP(insn, 0, 18);
1860 target = sign_extend(target, 19);
1862 do_fbranch(dc, target, insn, cc, cpu_cond);
1866 case 0x7: /* CBN+x */
1871 case 0x2: /* BN+x */
1873 target = GET_FIELD(insn, 10, 31);
1874 target = sign_extend(target, 22);
1876 do_branch(dc, target, insn, 0, cpu_cond);
1879 case 0x6: /* FBN+x */
1881 if (gen_trap_ifnofpu(dc, cpu_cond))
1883 target = GET_FIELD(insn, 10, 31);
1884 target = sign_extend(target, 22);
1886 do_fbranch(dc, target, insn, 0, cpu_cond);
1889 case 0x4: /* SETHI */
1891 uint32_t value = GET_FIELD(insn, 10, 31);
1894 r_const = tcg_const_tl(value << 10);
1895 gen_movl_TN_reg(rd, r_const);
1896 tcg_temp_free(r_const);
1899 case 0x0: /* UNIMPL */
1908 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1911 r_const = tcg_const_tl(dc->pc);
1912 gen_movl_TN_reg(15, r_const);
1913 tcg_temp_free(r_const);
1915 gen_mov_pc_npc(dc, cpu_cond);
1919 case 2: /* FPU & Logical Operations */
1921 unsigned int xop = GET_FIELD(insn, 7, 12);
1922 if (xop == 0x3a) { /* generate trap */
1925 cpu_src1 = get_src1(insn, cpu_src1);
1927 rs2 = GET_FIELD(insn, 25, 31);
1928 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1930 rs2 = GET_FIELD(insn, 27, 31);
1932 gen_movl_reg_TN(rs2, cpu_src2);
1933 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1935 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1937 cond = GET_FIELD(insn, 3, 6);
1939 save_state(dc, cpu_cond);
1940 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1942 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1944 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1945 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1946 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1947 gen_helper_raise_exception(cpu_tmp32);
1948 } else if (cond != 0) {
1949 TCGv r_cond = tcg_temp_new();
1951 #ifdef TARGET_SPARC64
1953 int cc = GET_FIELD_SP(insn, 11, 12);
1955 save_state(dc, cpu_cond);
1957 gen_cond(r_cond, 0, cond, dc);
1959 gen_cond(r_cond, 1, cond, dc);
1963 save_state(dc, cpu_cond);
1964 gen_cond(r_cond, 0, cond, dc);
1966 l1 = gen_new_label();
1967 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1969 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1971 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1973 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1974 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1975 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1976 gen_helper_raise_exception(cpu_tmp32);
1979 tcg_temp_free(r_cond);
1985 } else if (xop == 0x28) {
1986 rs1 = GET_FIELD(insn, 13, 17);
1989 #ifndef TARGET_SPARC64
1990 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1991 manual, rdy on the microSPARC
1993 case 0x0f: /* stbar in the SPARCv8 manual,
1994 rdy on the microSPARC II */
1995 case 0x10 ... 0x1f: /* implementation-dependent in the
1996 SPARCv8 manual, rdy on the
1999 gen_movl_TN_reg(rd, cpu_y);
2001 #ifdef TARGET_SPARC64
2002 case 0x2: /* V9 rdccr */
2003 gen_helper_compute_psr();
2004 gen_helper_rdccr(cpu_dst);
2005 gen_movl_TN_reg(rd, cpu_dst);
2007 case 0x3: /* V9 rdasi */
2008 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2009 gen_movl_TN_reg(rd, cpu_dst);
2011 case 0x4: /* V9 rdtick */
2015 r_tickptr = tcg_temp_new_ptr();
2016 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2017 offsetof(CPUState, tick));
2018 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2019 tcg_temp_free_ptr(r_tickptr);
2020 gen_movl_TN_reg(rd, cpu_dst);
2023 case 0x5: /* V9 rdpc */
2027 r_const = tcg_const_tl(dc->pc);
2028 gen_movl_TN_reg(rd, r_const);
2029 tcg_temp_free(r_const);
2032 case 0x6: /* V9 rdfprs */
2033 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2034 gen_movl_TN_reg(rd, cpu_dst);
2036 case 0xf: /* V9 membar */
2037 break; /* no effect */
2038 case 0x13: /* Graphics Status */
2039 if (gen_trap_ifnofpu(dc, cpu_cond))
2041 gen_movl_TN_reg(rd, cpu_gsr);
2043 case 0x16: /* Softint */
2044 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2045 gen_movl_TN_reg(rd, cpu_dst);
2047 case 0x17: /* Tick compare */
2048 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2050 case 0x18: /* System tick */
2054 r_tickptr = tcg_temp_new_ptr();
2055 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2056 offsetof(CPUState, stick));
2057 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2058 tcg_temp_free_ptr(r_tickptr);
2059 gen_movl_TN_reg(rd, cpu_dst);
2062 case 0x19: /* System tick compare */
2063 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2065 case 0x10: /* Performance Control */
2066 case 0x11: /* Performance Instrumentation Counter */
2067 case 0x12: /* Dispatch Control */
2068 case 0x14: /* Softint set, WO */
2069 case 0x15: /* Softint clear, WO */
2074 #if !defined(CONFIG_USER_ONLY)
2075 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2076 #ifndef TARGET_SPARC64
2077 if (!supervisor(dc))
2079 gen_helper_compute_psr();
2080 dc->cc_op = CC_OP_FLAGS;
2081 gen_helper_rdpsr(cpu_dst);
2083 CHECK_IU_FEATURE(dc, HYPV);
2084 if (!hypervisor(dc))
2086 rs1 = GET_FIELD(insn, 13, 17);
2089 // gen_op_rdhpstate();
2092 // gen_op_rdhtstate();
2095 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2098 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2101 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2103 case 31: // hstick_cmpr
2104 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2110 gen_movl_TN_reg(rd, cpu_dst);
2112 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2113 if (!supervisor(dc))
2115 #ifdef TARGET_SPARC64
2116 rs1 = GET_FIELD(insn, 13, 17);
2122 r_tsptr = tcg_temp_new_ptr();
2123 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2124 offsetof(CPUState, tsptr));
2125 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2126 offsetof(trap_state, tpc));
2127 tcg_temp_free_ptr(r_tsptr);
2134 r_tsptr = tcg_temp_new_ptr();
2135 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2136 offsetof(CPUState, tsptr));
2137 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2138 offsetof(trap_state, tnpc));
2139 tcg_temp_free_ptr(r_tsptr);
2146 r_tsptr = tcg_temp_new_ptr();
2147 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2148 offsetof(CPUState, tsptr));
2149 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2150 offsetof(trap_state, tstate));
2151 tcg_temp_free_ptr(r_tsptr);
2158 r_tsptr = tcg_temp_new_ptr();
2159 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2160 offsetof(CPUState, tsptr));
2161 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2162 offsetof(trap_state, tt));
2163 tcg_temp_free_ptr(r_tsptr);
2164 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2171 r_tickptr = tcg_temp_new_ptr();
2172 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2173 offsetof(CPUState, tick));
2174 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2175 gen_movl_TN_reg(rd, cpu_tmp0);
2176 tcg_temp_free_ptr(r_tickptr);
2180 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2183 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2184 offsetof(CPUSPARCState, pstate));
2185 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2188 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2189 offsetof(CPUSPARCState, tl));
2190 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2193 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2194 offsetof(CPUSPARCState, psrpil));
2195 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2198 gen_helper_rdcwp(cpu_tmp0);
2201 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2202 offsetof(CPUSPARCState, cansave));
2203 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2205 case 11: // canrestore
2206 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2207 offsetof(CPUSPARCState, canrestore));
2208 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2210 case 12: // cleanwin
2211 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2212 offsetof(CPUSPARCState, cleanwin));
2213 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2215 case 13: // otherwin
2216 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2217 offsetof(CPUSPARCState, otherwin));
2218 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2221 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2222 offsetof(CPUSPARCState, wstate));
2223 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2225 case 16: // UA2005 gl
2226 CHECK_IU_FEATURE(dc, GL);
2227 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2228 offsetof(CPUSPARCState, gl));
2229 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2231 case 26: // UA2005 strand status
2232 CHECK_IU_FEATURE(dc, HYPV);
2233 if (!hypervisor(dc))
2235 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2238 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2245 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2247 gen_movl_TN_reg(rd, cpu_tmp0);
2249 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2250 #ifdef TARGET_SPARC64
2251 save_state(dc, cpu_cond);
2252 gen_helper_flushw();
2254 if (!supervisor(dc))
2256 gen_movl_TN_reg(rd, cpu_tbr);
2260 } else if (xop == 0x34) { /* FPU Operations */
2261 if (gen_trap_ifnofpu(dc, cpu_cond))
2263 gen_op_clear_ieee_excp_and_FTT();
2264 rs1 = GET_FIELD(insn, 13, 17);
2265 rs2 = GET_FIELD(insn, 27, 31);
2266 xop = GET_FIELD(insn, 18, 26);
2268 case 0x1: /* fmovs */
2269 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2271 case 0x5: /* fnegs */
2272 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2274 case 0x9: /* fabss */
2275 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2277 case 0x29: /* fsqrts */
2278 CHECK_FPU_FEATURE(dc, FSQRT);
2279 gen_clear_float_exceptions();
2280 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2281 gen_helper_check_ieee_exceptions();
2282 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2284 case 0x2a: /* fsqrtd */
2285 CHECK_FPU_FEATURE(dc, FSQRT);
2286 gen_op_load_fpr_DT1(DFPREG(rs2));
2287 gen_clear_float_exceptions();
2288 gen_helper_fsqrtd();
2289 gen_helper_check_ieee_exceptions();
2290 gen_op_store_DT0_fpr(DFPREG(rd));
2292 case 0x2b: /* fsqrtq */
2293 CHECK_FPU_FEATURE(dc, FLOAT128);
2294 gen_op_load_fpr_QT1(QFPREG(rs2));
2295 gen_clear_float_exceptions();
2296 gen_helper_fsqrtq();
2297 gen_helper_check_ieee_exceptions();
2298 gen_op_store_QT0_fpr(QFPREG(rd));
2300 case 0x41: /* fadds */
2301 gen_clear_float_exceptions();
2302 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2303 gen_helper_check_ieee_exceptions();
2304 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2306 case 0x42: /* faddd */
2307 gen_op_load_fpr_DT0(DFPREG(rs1));
2308 gen_op_load_fpr_DT1(DFPREG(rs2));
2309 gen_clear_float_exceptions();
2311 gen_helper_check_ieee_exceptions();
2312 gen_op_store_DT0_fpr(DFPREG(rd));
2314 case 0x43: /* faddq */
2315 CHECK_FPU_FEATURE(dc, FLOAT128);
2316 gen_op_load_fpr_QT0(QFPREG(rs1));
2317 gen_op_load_fpr_QT1(QFPREG(rs2));
2318 gen_clear_float_exceptions();
2320 gen_helper_check_ieee_exceptions();
2321 gen_op_store_QT0_fpr(QFPREG(rd));
2323 case 0x45: /* fsubs */
2324 gen_clear_float_exceptions();
2325 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2326 gen_helper_check_ieee_exceptions();
2327 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2329 case 0x46: /* fsubd */
2330 gen_op_load_fpr_DT0(DFPREG(rs1));
2331 gen_op_load_fpr_DT1(DFPREG(rs2));
2332 gen_clear_float_exceptions();
2334 gen_helper_check_ieee_exceptions();
2335 gen_op_store_DT0_fpr(DFPREG(rd));
2337 case 0x47: /* fsubq */
2338 CHECK_FPU_FEATURE(dc, FLOAT128);
2339 gen_op_load_fpr_QT0(QFPREG(rs1));
2340 gen_op_load_fpr_QT1(QFPREG(rs2));
2341 gen_clear_float_exceptions();
2343 gen_helper_check_ieee_exceptions();
2344 gen_op_store_QT0_fpr(QFPREG(rd));
2346 case 0x49: /* fmuls */
2347 CHECK_FPU_FEATURE(dc, FMUL);
2348 gen_clear_float_exceptions();
2349 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2350 gen_helper_check_ieee_exceptions();
2351 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2353 case 0x4a: /* fmuld */
2354 CHECK_FPU_FEATURE(dc, FMUL);
2355 gen_op_load_fpr_DT0(DFPREG(rs1));
2356 gen_op_load_fpr_DT1(DFPREG(rs2));
2357 gen_clear_float_exceptions();
2359 gen_helper_check_ieee_exceptions();
2360 gen_op_store_DT0_fpr(DFPREG(rd));
2362 case 0x4b: /* fmulq */
2363 CHECK_FPU_FEATURE(dc, FLOAT128);
2364 CHECK_FPU_FEATURE(dc, FMUL);
2365 gen_op_load_fpr_QT0(QFPREG(rs1));
2366 gen_op_load_fpr_QT1(QFPREG(rs2));
2367 gen_clear_float_exceptions();
2369 gen_helper_check_ieee_exceptions();
2370 gen_op_store_QT0_fpr(QFPREG(rd));
2372 case 0x4d: /* fdivs */
2373 gen_clear_float_exceptions();
2374 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2375 gen_helper_check_ieee_exceptions();
2376 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2378 case 0x4e: /* fdivd */
2379 gen_op_load_fpr_DT0(DFPREG(rs1));
2380 gen_op_load_fpr_DT1(DFPREG(rs2));
2381 gen_clear_float_exceptions();
2383 gen_helper_check_ieee_exceptions();
2384 gen_op_store_DT0_fpr(DFPREG(rd));
2386 case 0x4f: /* fdivq */
2387 CHECK_FPU_FEATURE(dc, FLOAT128);
2388 gen_op_load_fpr_QT0(QFPREG(rs1));
2389 gen_op_load_fpr_QT1(QFPREG(rs2));
2390 gen_clear_float_exceptions();
2392 gen_helper_check_ieee_exceptions();
2393 gen_op_store_QT0_fpr(QFPREG(rd));
2395 case 0x69: /* fsmuld */
2396 CHECK_FPU_FEATURE(dc, FSMULD);
2397 gen_clear_float_exceptions();
2398 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2399 gen_helper_check_ieee_exceptions();
2400 gen_op_store_DT0_fpr(DFPREG(rd));
2402 case 0x6e: /* fdmulq */
2403 CHECK_FPU_FEATURE(dc, FLOAT128);
2404 gen_op_load_fpr_DT0(DFPREG(rs1));
2405 gen_op_load_fpr_DT1(DFPREG(rs2));
2406 gen_clear_float_exceptions();
2407 gen_helper_fdmulq();
2408 gen_helper_check_ieee_exceptions();
2409 gen_op_store_QT0_fpr(QFPREG(rd));
2411 case 0xc4: /* fitos */
2412 gen_clear_float_exceptions();
2413 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2414 gen_helper_check_ieee_exceptions();
2415 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2417 case 0xc6: /* fdtos */
2418 gen_op_load_fpr_DT1(DFPREG(rs2));
2419 gen_clear_float_exceptions();
2420 gen_helper_fdtos(cpu_tmp32);
2421 gen_helper_check_ieee_exceptions();
2422 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2424 case 0xc7: /* fqtos */
2425 CHECK_FPU_FEATURE(dc, FLOAT128);
2426 gen_op_load_fpr_QT1(QFPREG(rs2));
2427 gen_clear_float_exceptions();
2428 gen_helper_fqtos(cpu_tmp32);
2429 gen_helper_check_ieee_exceptions();
2430 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2432 case 0xc8: /* fitod */
2433 gen_helper_fitod(cpu_fpr[rs2]);
2434 gen_op_store_DT0_fpr(DFPREG(rd));
2436 case 0xc9: /* fstod */
2437 gen_helper_fstod(cpu_fpr[rs2]);
2438 gen_op_store_DT0_fpr(DFPREG(rd));
2440 case 0xcb: /* fqtod */
2441 CHECK_FPU_FEATURE(dc, FLOAT128);
2442 gen_op_load_fpr_QT1(QFPREG(rs2));
2443 gen_clear_float_exceptions();
2445 gen_helper_check_ieee_exceptions();
2446 gen_op_store_DT0_fpr(DFPREG(rd));
2448 case 0xcc: /* fitoq */
2449 CHECK_FPU_FEATURE(dc, FLOAT128);
2450 gen_helper_fitoq(cpu_fpr[rs2]);
2451 gen_op_store_QT0_fpr(QFPREG(rd));
2453 case 0xcd: /* fstoq */
2454 CHECK_FPU_FEATURE(dc, FLOAT128);
2455 gen_helper_fstoq(cpu_fpr[rs2]);
2456 gen_op_store_QT0_fpr(QFPREG(rd));
2458 case 0xce: /* fdtoq */
2459 CHECK_FPU_FEATURE(dc, FLOAT128);
2460 gen_op_load_fpr_DT1(DFPREG(rs2));
2462 gen_op_store_QT0_fpr(QFPREG(rd));
2464 case 0xd1: /* fstoi */
2465 gen_clear_float_exceptions();
2466 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2467 gen_helper_check_ieee_exceptions();
2468 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2470 case 0xd2: /* fdtoi */
2471 gen_op_load_fpr_DT1(DFPREG(rs2));
2472 gen_clear_float_exceptions();
2473 gen_helper_fdtoi(cpu_tmp32);
2474 gen_helper_check_ieee_exceptions();
2475 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2477 case 0xd3: /* fqtoi */
2478 CHECK_FPU_FEATURE(dc, FLOAT128);
2479 gen_op_load_fpr_QT1(QFPREG(rs2));
2480 gen_clear_float_exceptions();
2481 gen_helper_fqtoi(cpu_tmp32);
2482 gen_helper_check_ieee_exceptions();
2483 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2485 #ifdef TARGET_SPARC64
2486 case 0x2: /* V9 fmovd */
2487 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2488 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2489 cpu_fpr[DFPREG(rs2) + 1]);
2491 case 0x3: /* V9 fmovq */
2492 CHECK_FPU_FEATURE(dc, FLOAT128);
2493 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2494 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2495 cpu_fpr[QFPREG(rs2) + 1]);
2496 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2497 cpu_fpr[QFPREG(rs2) + 2]);
2498 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2499 cpu_fpr[QFPREG(rs2) + 3]);
2501 case 0x6: /* V9 fnegd */
2502 gen_op_load_fpr_DT1(DFPREG(rs2));
2504 gen_op_store_DT0_fpr(DFPREG(rd));
2506 case 0x7: /* V9 fnegq */
2507 CHECK_FPU_FEATURE(dc, FLOAT128);
2508 gen_op_load_fpr_QT1(QFPREG(rs2));
2510 gen_op_store_QT0_fpr(QFPREG(rd));
2512 case 0xa: /* V9 fabsd */
2513 gen_op_load_fpr_DT1(DFPREG(rs2));
2515 gen_op_store_DT0_fpr(DFPREG(rd));
2517 case 0xb: /* V9 fabsq */
2518 CHECK_FPU_FEATURE(dc, FLOAT128);
2519 gen_op_load_fpr_QT1(QFPREG(rs2));
2521 gen_op_store_QT0_fpr(QFPREG(rd));
2523 case 0x81: /* V9 fstox */
2524 gen_clear_float_exceptions();
2525 gen_helper_fstox(cpu_fpr[rs2]);
2526 gen_helper_check_ieee_exceptions();
2527 gen_op_store_DT0_fpr(DFPREG(rd));
2529 case 0x82: /* V9 fdtox */
2530 gen_op_load_fpr_DT1(DFPREG(rs2));
2531 gen_clear_float_exceptions();
2533 gen_helper_check_ieee_exceptions();
2534 gen_op_store_DT0_fpr(DFPREG(rd));
2536 case 0x83: /* V9 fqtox */
2537 CHECK_FPU_FEATURE(dc, FLOAT128);
2538 gen_op_load_fpr_QT1(QFPREG(rs2));
2539 gen_clear_float_exceptions();
2541 gen_helper_check_ieee_exceptions();
2542 gen_op_store_DT0_fpr(DFPREG(rd));
2544 case 0x84: /* V9 fxtos */
2545 gen_op_load_fpr_DT1(DFPREG(rs2));
2546 gen_clear_float_exceptions();
2547 gen_helper_fxtos(cpu_tmp32);
2548 gen_helper_check_ieee_exceptions();
2549 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2551 case 0x88: /* V9 fxtod */
2552 gen_op_load_fpr_DT1(DFPREG(rs2));
2553 gen_clear_float_exceptions();
2555 gen_helper_check_ieee_exceptions();
2556 gen_op_store_DT0_fpr(DFPREG(rd));
2558 case 0x8c: /* V9 fxtoq */
2559 CHECK_FPU_FEATURE(dc, FLOAT128);
2560 gen_op_load_fpr_DT1(DFPREG(rs2));
2561 gen_clear_float_exceptions();
2563 gen_helper_check_ieee_exceptions();
2564 gen_op_store_QT0_fpr(QFPREG(rd));
2570 } else if (xop == 0x35) { /* FPU Operations */
2571 #ifdef TARGET_SPARC64
2574 if (gen_trap_ifnofpu(dc, cpu_cond))
2576 gen_op_clear_ieee_excp_and_FTT();
2577 rs1 = GET_FIELD(insn, 13, 17);
2578 rs2 = GET_FIELD(insn, 27, 31);
2579 xop = GET_FIELD(insn, 18, 26);
2580 #ifdef TARGET_SPARC64
2581 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2584 l1 = gen_new_label();
2585 cond = GET_FIELD_SP(insn, 14, 17);
2586 cpu_src1 = get_src1(insn, cpu_src1);
2587 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2589 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2592 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2595 l1 = gen_new_label();
2596 cond = GET_FIELD_SP(insn, 14, 17);
2597 cpu_src1 = get_src1(insn, cpu_src1);
2598 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2600 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2601 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2604 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2607 CHECK_FPU_FEATURE(dc, FLOAT128);
2608 l1 = gen_new_label();
2609 cond = GET_FIELD_SP(insn, 14, 17);
2610 cpu_src1 = get_src1(insn, cpu_src1);
2611 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2613 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2614 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2615 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2616 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2622 #ifdef TARGET_SPARC64
2623 #define FMOVSCC(fcc) \
2628 l1 = gen_new_label(); \
2629 r_cond = tcg_temp_new(); \
2630 cond = GET_FIELD_SP(insn, 14, 17); \
2631 gen_fcond(r_cond, fcc, cond); \
2632 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2634 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2635 gen_set_label(l1); \
2636 tcg_temp_free(r_cond); \
2638 #define FMOVDCC(fcc) \
2643 l1 = gen_new_label(); \
2644 r_cond = tcg_temp_new(); \
2645 cond = GET_FIELD_SP(insn, 14, 17); \
2646 gen_fcond(r_cond, fcc, cond); \
2647 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2649 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2650 cpu_fpr[DFPREG(rs2)]); \
2651 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2652 cpu_fpr[DFPREG(rs2) + 1]); \
2653 gen_set_label(l1); \
2654 tcg_temp_free(r_cond); \
2656 #define FMOVQCC(fcc) \
2661 l1 = gen_new_label(); \
2662 r_cond = tcg_temp_new(); \
2663 cond = GET_FIELD_SP(insn, 14, 17); \
2664 gen_fcond(r_cond, fcc, cond); \
2665 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2667 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2668 cpu_fpr[QFPREG(rs2)]); \
2669 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2670 cpu_fpr[QFPREG(rs2) + 1]); \
2671 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2672 cpu_fpr[QFPREG(rs2) + 2]); \
2673 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2674 cpu_fpr[QFPREG(rs2) + 3]); \
2675 gen_set_label(l1); \
2676 tcg_temp_free(r_cond); \
2678 case 0x001: /* V9 fmovscc %fcc0 */
2681 case 0x002: /* V9 fmovdcc %fcc0 */
2684 case 0x003: /* V9 fmovqcc %fcc0 */
2685 CHECK_FPU_FEATURE(dc, FLOAT128);
2688 case 0x041: /* V9 fmovscc %fcc1 */
2691 case 0x042: /* V9 fmovdcc %fcc1 */
2694 case 0x043: /* V9 fmovqcc %fcc1 */
2695 CHECK_FPU_FEATURE(dc, FLOAT128);
2698 case 0x081: /* V9 fmovscc %fcc2 */
2701 case 0x082: /* V9 fmovdcc %fcc2 */
2704 case 0x083: /* V9 fmovqcc %fcc2 */
2705 CHECK_FPU_FEATURE(dc, FLOAT128);
2708 case 0x0c1: /* V9 fmovscc %fcc3 */
2711 case 0x0c2: /* V9 fmovdcc %fcc3 */
2714 case 0x0c3: /* V9 fmovqcc %fcc3 */
2715 CHECK_FPU_FEATURE(dc, FLOAT128);
2721 #define FMOVSCC(icc) \
2726 l1 = gen_new_label(); \
2727 r_cond = tcg_temp_new(); \
2728 cond = GET_FIELD_SP(insn, 14, 17); \
2729 gen_cond(r_cond, icc, cond, dc); \
2730 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2732 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2733 gen_set_label(l1); \
2734 tcg_temp_free(r_cond); \
2736 #define FMOVDCC(icc) \
2741 l1 = gen_new_label(); \
2742 r_cond = tcg_temp_new(); \
2743 cond = GET_FIELD_SP(insn, 14, 17); \
2744 gen_cond(r_cond, icc, cond, dc); \
2745 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2747 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2748 cpu_fpr[DFPREG(rs2)]); \
2749 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2750 cpu_fpr[DFPREG(rs2) + 1]); \
2751 gen_set_label(l1); \
2752 tcg_temp_free(r_cond); \
2754 #define FMOVQCC(icc) \
2759 l1 = gen_new_label(); \
2760 r_cond = tcg_temp_new(); \
2761 cond = GET_FIELD_SP(insn, 14, 17); \
2762 gen_cond(r_cond, icc, cond, dc); \
2763 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2766 cpu_fpr[QFPREG(rs2)]); \
2767 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2768 cpu_fpr[QFPREG(rs2) + 1]); \
2769 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2770 cpu_fpr[QFPREG(rs2) + 2]); \
2771 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2772 cpu_fpr[QFPREG(rs2) + 3]); \
2773 gen_set_label(l1); \
2774 tcg_temp_free(r_cond); \
2777 case 0x101: /* V9 fmovscc %icc */
2780 case 0x102: /* V9 fmovdcc %icc */
2782 case 0x103: /* V9 fmovqcc %icc */
2783 CHECK_FPU_FEATURE(dc, FLOAT128);
2786 case 0x181: /* V9 fmovscc %xcc */
2789 case 0x182: /* V9 fmovdcc %xcc */
2792 case 0x183: /* V9 fmovqcc %xcc */
2793 CHECK_FPU_FEATURE(dc, FLOAT128);
2800 case 0x51: /* fcmps, V9 %fcc */
2801 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2803 case 0x52: /* fcmpd, V9 %fcc */
2804 gen_op_load_fpr_DT0(DFPREG(rs1));
2805 gen_op_load_fpr_DT1(DFPREG(rs2));
2806 gen_op_fcmpd(rd & 3);
2808 case 0x53: /* fcmpq, V9 %fcc */
2809 CHECK_FPU_FEATURE(dc, FLOAT128);
2810 gen_op_load_fpr_QT0(QFPREG(rs1));
2811 gen_op_load_fpr_QT1(QFPREG(rs2));
2812 gen_op_fcmpq(rd & 3);
2814 case 0x55: /* fcmpes, V9 %fcc */
2815 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2817 case 0x56: /* fcmped, V9 %fcc */
2818 gen_op_load_fpr_DT0(DFPREG(rs1));
2819 gen_op_load_fpr_DT1(DFPREG(rs2));
2820 gen_op_fcmped(rd & 3);
2822 case 0x57: /* fcmpeq, V9 %fcc */
2823 CHECK_FPU_FEATURE(dc, FLOAT128);
2824 gen_op_load_fpr_QT0(QFPREG(rs1));
2825 gen_op_load_fpr_QT1(QFPREG(rs2));
2826 gen_op_fcmpeq(rd & 3);
2831 } else if (xop == 0x2) {
2834 rs1 = GET_FIELD(insn, 13, 17);
2836 // or %g0, x, y -> mov T0, x; mov y, T0
2837 if (IS_IMM) { /* immediate */
2840 simm = GET_FIELDs(insn, 19, 31);
2841 r_const = tcg_const_tl(simm);
2842 gen_movl_TN_reg(rd, r_const);
2843 tcg_temp_free(r_const);
2844 } else { /* register */
2845 rs2 = GET_FIELD(insn, 27, 31);
2846 gen_movl_reg_TN(rs2, cpu_dst);
2847 gen_movl_TN_reg(rd, cpu_dst);
2850 cpu_src1 = get_src1(insn, cpu_src1);
2851 if (IS_IMM) { /* immediate */
2852 simm = GET_FIELDs(insn, 19, 31);
2853 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2854 gen_movl_TN_reg(rd, cpu_dst);
2855 } else { /* register */
2856 // or x, %g0, y -> mov T1, x; mov y, T1
2857 rs2 = GET_FIELD(insn, 27, 31);
2859 gen_movl_reg_TN(rs2, cpu_src2);
2860 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2861 gen_movl_TN_reg(rd, cpu_dst);
2863 gen_movl_TN_reg(rd, cpu_src1);
2866 #ifdef TARGET_SPARC64
2867 } else if (xop == 0x25) { /* sll, V9 sllx */
2868 cpu_src1 = get_src1(insn, cpu_src1);
2869 if (IS_IMM) { /* immediate */
2870 simm = GET_FIELDs(insn, 20, 31);
2871 if (insn & (1 << 12)) {
2872 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2874 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2876 } else { /* register */
2877 rs2 = GET_FIELD(insn, 27, 31);
2878 gen_movl_reg_TN(rs2, cpu_src2);
2879 if (insn & (1 << 12)) {
2880 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2882 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2884 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2886 gen_movl_TN_reg(rd, cpu_dst);
2887 } else if (xop == 0x26) { /* srl, V9 srlx */
2888 cpu_src1 = get_src1(insn, cpu_src1);
2889 if (IS_IMM) { /* immediate */
2890 simm = GET_FIELDs(insn, 20, 31);
2891 if (insn & (1 << 12)) {
2892 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2894 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2895 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2897 } else { /* register */
2898 rs2 = GET_FIELD(insn, 27, 31);
2899 gen_movl_reg_TN(rs2, cpu_src2);
2900 if (insn & (1 << 12)) {
2901 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2902 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2904 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2905 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2906 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2909 gen_movl_TN_reg(rd, cpu_dst);
2910 } else if (xop == 0x27) { /* sra, V9 srax */
2911 cpu_src1 = get_src1(insn, cpu_src1);
2912 if (IS_IMM) { /* immediate */
2913 simm = GET_FIELDs(insn, 20, 31);
2914 if (insn & (1 << 12)) {
2915 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2917 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2918 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2919 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2921 } else { /* register */
2922 rs2 = GET_FIELD(insn, 27, 31);
2923 gen_movl_reg_TN(rs2, cpu_src2);
2924 if (insn & (1 << 12)) {
2925 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2926 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2928 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2929 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2930 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2931 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2934 gen_movl_TN_reg(rd, cpu_dst);
2936 } else if (xop < 0x36) {
2938 cpu_src1 = get_src1(insn, cpu_src1);
2939 cpu_src2 = get_src2(insn, cpu_src2);
2940 switch (xop & ~0x10) {
2943 simm = GET_FIELDs(insn, 19, 31);
2945 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2946 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2947 dc->cc_op = CC_OP_ADD;
2949 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2953 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2954 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2955 dc->cc_op = CC_OP_ADD;
2957 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2963 simm = GET_FIELDs(insn, 19, 31);
2964 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2966 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2969 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2970 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2971 dc->cc_op = CC_OP_LOGIC;
2976 simm = GET_FIELDs(insn, 19, 31);
2977 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2979 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2982 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2983 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2984 dc->cc_op = CC_OP_LOGIC;
2989 simm = GET_FIELDs(insn, 19, 31);
2990 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2992 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2995 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2996 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2997 dc->cc_op = CC_OP_LOGIC;
3002 simm = GET_FIELDs(insn, 19, 31);
3004 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3006 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3010 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3011 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3012 dc->cc_op = CC_OP_SUB;
3014 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3018 case 0x5: /* andn */
3020 simm = GET_FIELDs(insn, 19, 31);
3021 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3023 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3026 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3027 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3028 dc->cc_op = CC_OP_LOGIC;
3033 simm = GET_FIELDs(insn, 19, 31);
3034 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3036 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3039 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3040 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3041 dc->cc_op = CC_OP_LOGIC;
3044 case 0x7: /* xorn */
3046 simm = GET_FIELDs(insn, 19, 31);
3047 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3049 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3050 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3053 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3054 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3055 dc->cc_op = CC_OP_LOGIC;
3058 case 0x8: /* addx, V9 addc */
3060 simm = GET_FIELDs(insn, 19, 31);
3062 gen_helper_compute_psr();
3063 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3064 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3065 dc->cc_op = CC_OP_ADDX;
3067 gen_helper_compute_psr();
3068 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3069 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3070 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3074 gen_helper_compute_psr();
3075 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3076 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3077 dc->cc_op = CC_OP_ADDX;
3079 gen_helper_compute_psr();
3080 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3081 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3082 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3086 #ifdef TARGET_SPARC64
3087 case 0x9: /* V9 mulx */
3089 simm = GET_FIELDs(insn, 19, 31);
3090 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3092 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3096 case 0xa: /* umul */
3097 CHECK_IU_FEATURE(dc, MUL);
3098 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3100 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3101 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3102 dc->cc_op = CC_OP_LOGIC;
3105 case 0xb: /* smul */
3106 CHECK_IU_FEATURE(dc, MUL);
3107 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3109 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3110 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3111 dc->cc_op = CC_OP_LOGIC;
3114 case 0xc: /* subx, V9 subc */
3116 simm = GET_FIELDs(insn, 19, 31);
3118 gen_helper_compute_psr();
3119 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3120 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3121 dc->cc_op = CC_OP_SUBX;
3123 gen_helper_compute_psr();
3124 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3125 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3126 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3130 gen_helper_compute_psr();
3131 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3132 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3133 dc->cc_op = CC_OP_SUBX;
3135 gen_helper_compute_psr();
3136 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3137 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3138 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3142 #ifdef TARGET_SPARC64
3143 case 0xd: /* V9 udivx */
3144 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3145 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3146 gen_trap_ifdivzero_tl(cpu_cc_src2);
3147 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3150 case 0xe: /* udiv */
3151 CHECK_IU_FEATURE(dc, DIV);
3152 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3154 gen_op_div_cc(cpu_dst);
3155 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3156 dc->cc_op = CC_OP_FLAGS;
3159 case 0xf: /* sdiv */
3160 CHECK_IU_FEATURE(dc, DIV);
3161 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3163 gen_op_div_cc(cpu_dst);
3164 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3165 dc->cc_op = CC_OP_FLAGS;
3171 gen_movl_TN_reg(rd, cpu_dst);
3173 cpu_src1 = get_src1(insn, cpu_src1);
3174 cpu_src2 = get_src2(insn, cpu_src2);
3176 case 0x20: /* taddcc */
3177 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3178 gen_movl_TN_reg(rd, cpu_dst);
3179 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3180 dc->cc_op = CC_OP_TADD;
3182 case 0x21: /* tsubcc */
3183 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3184 gen_movl_TN_reg(rd, cpu_dst);
3185 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3186 dc->cc_op = CC_OP_TSUB;
3188 case 0x22: /* taddcctv */
3189 save_state(dc, cpu_cond);
3190 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3191 gen_movl_TN_reg(rd, cpu_dst);
3192 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3193 dc->cc_op = CC_OP_TADDTV;
3195 case 0x23: /* tsubcctv */
3196 save_state(dc, cpu_cond);
3197 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3198 gen_movl_TN_reg(rd, cpu_dst);
3199 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3200 dc->cc_op = CC_OP_TSUBTV;
3202 case 0x24: /* mulscc */
3203 gen_helper_compute_psr();
3204 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3205 gen_movl_TN_reg(rd, cpu_dst);
3206 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3207 dc->cc_op = CC_OP_FLAGS;
3209 #ifndef TARGET_SPARC64
3210 case 0x25: /* sll */
3211 if (IS_IMM) { /* immediate */
3212 simm = GET_FIELDs(insn, 20, 31);
3213 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3214 } else { /* register */
3215 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3216 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3218 gen_movl_TN_reg(rd, cpu_dst);
3220 case 0x26: /* srl */
3221 if (IS_IMM) { /* immediate */
3222 simm = GET_FIELDs(insn, 20, 31);
3223 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3224 } else { /* register */
3225 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3226 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3228 gen_movl_TN_reg(rd, cpu_dst);
3230 case 0x27: /* sra */
3231 if (IS_IMM) { /* immediate */
3232 simm = GET_FIELDs(insn, 20, 31);
3233 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3234 } else { /* register */
3235 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3236 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3238 gen_movl_TN_reg(rd, cpu_dst);
3245 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3246 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3248 #ifndef TARGET_SPARC64
3249 case 0x01 ... 0x0f: /* undefined in the
3253 case 0x10 ... 0x1f: /* implementation-dependent
3259 case 0x2: /* V9 wrccr */
3260 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3261 gen_helper_wrccr(cpu_dst);
3262 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3263 dc->cc_op = CC_OP_FLAGS;
3265 case 0x3: /* V9 wrasi */
3266 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3267 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3269 case 0x6: /* V9 wrfprs */
3270 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3271 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3272 save_state(dc, cpu_cond);
3277 case 0xf: /* V9 sir, nop if user */
3278 #if !defined(CONFIG_USER_ONLY)
3283 case 0x13: /* Graphics Status */
3284 if (gen_trap_ifnofpu(dc, cpu_cond))
3286 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3288 case 0x14: /* Softint set */
3289 if (!supervisor(dc))
3291 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3292 gen_helper_set_softint(cpu_tmp64);
3294 case 0x15: /* Softint clear */
3295 if (!supervisor(dc))
3297 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3298 gen_helper_clear_softint(cpu_tmp64);
3300 case 0x16: /* Softint write */
3301 if (!supervisor(dc))
3303 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3304 gen_helper_write_softint(cpu_tmp64);
3306 case 0x17: /* Tick compare */
3307 #if !defined(CONFIG_USER_ONLY)
3308 if (!supervisor(dc))
3314 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3316 r_tickptr = tcg_temp_new_ptr();
3317 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3318 offsetof(CPUState, tick));
3319 gen_helper_tick_set_limit(r_tickptr,
3321 tcg_temp_free_ptr(r_tickptr);
3324 case 0x18: /* System tick */
3325 #if !defined(CONFIG_USER_ONLY)
3326 if (!supervisor(dc))
3332 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3334 r_tickptr = tcg_temp_new_ptr();
3335 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3336 offsetof(CPUState, stick));
3337 gen_helper_tick_set_count(r_tickptr,
3339 tcg_temp_free_ptr(r_tickptr);
3342 case 0x19: /* System tick compare */
3343 #if !defined(CONFIG_USER_ONLY)
3344 if (!supervisor(dc))
3350 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3352 r_tickptr = tcg_temp_new_ptr();
3353 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3354 offsetof(CPUState, stick));
3355 gen_helper_tick_set_limit(r_tickptr,
3357 tcg_temp_free_ptr(r_tickptr);
3361 case 0x10: /* Performance Control */
3362 case 0x11: /* Performance Instrumentation
3364 case 0x12: /* Dispatch Control */
3371 #if !defined(CONFIG_USER_ONLY)
3372 case 0x31: /* wrpsr, V9 saved, restored */
3374 if (!supervisor(dc))
3376 #ifdef TARGET_SPARC64
3382 gen_helper_restored();
3384 case 2: /* UA2005 allclean */
3385 case 3: /* UA2005 otherw */
3386 case 4: /* UA2005 normalw */
3387 case 5: /* UA2005 invalw */
3393 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3394 gen_helper_wrpsr(cpu_dst);
3395 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3396 dc->cc_op = CC_OP_FLAGS;
3397 save_state(dc, cpu_cond);
3404 case 0x32: /* wrwim, V9 wrpr */
3406 if (!supervisor(dc))
3408 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3409 #ifdef TARGET_SPARC64
3415 r_tsptr = tcg_temp_new_ptr();
3416 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3417 offsetof(CPUState, tsptr));
3418 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3419 offsetof(trap_state, tpc));
3420 tcg_temp_free_ptr(r_tsptr);
3427 r_tsptr = tcg_temp_new_ptr();
3428 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3429 offsetof(CPUState, tsptr));
3430 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3431 offsetof(trap_state, tnpc));
3432 tcg_temp_free_ptr(r_tsptr);
3439 r_tsptr = tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3441 offsetof(CPUState, tsptr));
3442 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3443 offsetof(trap_state,
3445 tcg_temp_free_ptr(r_tsptr);
3452 r_tsptr = tcg_temp_new_ptr();
3453 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3454 offsetof(CPUState, tsptr));
3455 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3456 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3457 offsetof(trap_state, tt));
3458 tcg_temp_free_ptr(r_tsptr);
3465 r_tickptr = tcg_temp_new_ptr();
3466 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3467 offsetof(CPUState, tick));
3468 gen_helper_tick_set_count(r_tickptr,
3470 tcg_temp_free_ptr(r_tickptr);
3474 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3477 save_state(dc, cpu_cond);
3478 gen_helper_wrpstate(cpu_tmp0);
3484 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3485 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3486 offsetof(CPUSPARCState, tl));
3489 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3490 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3491 offsetof(CPUSPARCState,
3495 gen_helper_wrcwp(cpu_tmp0);
3498 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3499 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3500 offsetof(CPUSPARCState,
3503 case 11: // canrestore
3504 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3505 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3506 offsetof(CPUSPARCState,
3509 case 12: // cleanwin
3510 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3511 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3512 offsetof(CPUSPARCState,
3515 case 13: // otherwin
3516 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3517 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3518 offsetof(CPUSPARCState,
3522 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3523 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3524 offsetof(CPUSPARCState,
3527 case 16: // UA2005 gl
3528 CHECK_IU_FEATURE(dc, GL);
3529 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3530 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3531 offsetof(CPUSPARCState, gl));
3533 case 26: // UA2005 strand status
3534 CHECK_IU_FEATURE(dc, HYPV);
3535 if (!hypervisor(dc))
3537 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3543 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3544 if (dc->def->nwindows != 32)
3545 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3546 (1 << dc->def->nwindows) - 1);
3547 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3551 case 0x33: /* wrtbr, UA2005 wrhpr */
3553 #ifndef TARGET_SPARC64
3554 if (!supervisor(dc))
3556 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3558 CHECK_IU_FEATURE(dc, HYPV);
3559 if (!hypervisor(dc))
3561 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3564 // XXX gen_op_wrhpstate();
3565 save_state(dc, cpu_cond);
3571 // XXX gen_op_wrhtstate();
3574 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3577 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3579 case 31: // hstick_cmpr
3583 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3584 r_tickptr = tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3586 offsetof(CPUState, hstick));
3587 gen_helper_tick_set_limit(r_tickptr,
3589 tcg_temp_free_ptr(r_tickptr);
3592 case 6: // hver readonly
3600 #ifdef TARGET_SPARC64
3601 case 0x2c: /* V9 movcc */
3603 int cc = GET_FIELD_SP(insn, 11, 12);
3604 int cond = GET_FIELD_SP(insn, 14, 17);
3608 r_cond = tcg_temp_new();
3609 if (insn & (1 << 18)) {
3611 gen_cond(r_cond, 0, cond, dc);
3613 gen_cond(r_cond, 1, cond, dc);
3617 gen_fcond(r_cond, cc, cond);
3620 l1 = gen_new_label();
3622 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3623 if (IS_IMM) { /* immediate */
3626 simm = GET_FIELD_SPs(insn, 0, 10);
3627 r_const = tcg_const_tl(simm);
3628 gen_movl_TN_reg(rd, r_const);
3629 tcg_temp_free(r_const);
3631 rs2 = GET_FIELD_SP(insn, 0, 4);
3632 gen_movl_reg_TN(rs2, cpu_tmp0);
3633 gen_movl_TN_reg(rd, cpu_tmp0);
3636 tcg_temp_free(r_cond);
3639 case 0x2d: /* V9 sdivx */
3640 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3641 gen_movl_TN_reg(rd, cpu_dst);
3643 case 0x2e: /* V9 popc */
3645 cpu_src2 = get_src2(insn, cpu_src2);
3646 gen_helper_popc(cpu_dst, cpu_src2);
3647 gen_movl_TN_reg(rd, cpu_dst);
3649 case 0x2f: /* V9 movr */
3651 int cond = GET_FIELD_SP(insn, 10, 12);
3654 cpu_src1 = get_src1(insn, cpu_src1);
3656 l1 = gen_new_label();
3658 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3660 if (IS_IMM) { /* immediate */
3663 simm = GET_FIELD_SPs(insn, 0, 9);
3664 r_const = tcg_const_tl(simm);
3665 gen_movl_TN_reg(rd, r_const);
3666 tcg_temp_free(r_const);
3668 rs2 = GET_FIELD_SP(insn, 0, 4);
3669 gen_movl_reg_TN(rs2, cpu_tmp0);
3670 gen_movl_TN_reg(rd, cpu_tmp0);
3680 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3681 #ifdef TARGET_SPARC64
3682 int opf = GET_FIELD_SP(insn, 5, 13);
3683 rs1 = GET_FIELD(insn, 13, 17);
3684 rs2 = GET_FIELD(insn, 27, 31);
3685 if (gen_trap_ifnofpu(dc, cpu_cond))
3689 case 0x000: /* VIS I edge8cc */
3690 case 0x001: /* VIS II edge8n */
3691 case 0x002: /* VIS I edge8lcc */
3692 case 0x003: /* VIS II edge8ln */
3693 case 0x004: /* VIS I edge16cc */
3694 case 0x005: /* VIS II edge16n */
3695 case 0x006: /* VIS I edge16lcc */
3696 case 0x007: /* VIS II edge16ln */
3697 case 0x008: /* VIS I edge32cc */
3698 case 0x009: /* VIS II edge32n */
3699 case 0x00a: /* VIS I edge32lcc */
3700 case 0x00b: /* VIS II edge32ln */
3703 case 0x010: /* VIS I array8 */
3704 CHECK_FPU_FEATURE(dc, VIS1);
3705 cpu_src1 = get_src1(insn, cpu_src1);
3706 gen_movl_reg_TN(rs2, cpu_src2);
3707 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3708 gen_movl_TN_reg(rd, cpu_dst);
3710 case 0x012: /* VIS I array16 */
3711 CHECK_FPU_FEATURE(dc, VIS1);
3712 cpu_src1 = get_src1(insn, cpu_src1);
3713 gen_movl_reg_TN(rs2, cpu_src2);
3714 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3715 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3716 gen_movl_TN_reg(rd, cpu_dst);
3718 case 0x014: /* VIS I array32 */
3719 CHECK_FPU_FEATURE(dc, VIS1);
3720 cpu_src1 = get_src1(insn, cpu_src1);
3721 gen_movl_reg_TN(rs2, cpu_src2);
3722 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3723 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3724 gen_movl_TN_reg(rd, cpu_dst);
3726 case 0x018: /* VIS I alignaddr */
3727 CHECK_FPU_FEATURE(dc, VIS1);
3728 cpu_src1 = get_src1(insn, cpu_src1);
3729 gen_movl_reg_TN(rs2, cpu_src2);
3730 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3731 gen_movl_TN_reg(rd, cpu_dst);
3733 case 0x019: /* VIS II bmask */
3734 case 0x01a: /* VIS I alignaddrl */
3737 case 0x020: /* VIS I fcmple16 */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 gen_helper_fcmple16();
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3744 case 0x022: /* VIS I fcmpne16 */
3745 CHECK_FPU_FEATURE(dc, VIS1);
3746 gen_op_load_fpr_DT0(DFPREG(rs1));
3747 gen_op_load_fpr_DT1(DFPREG(rs2));
3748 gen_helper_fcmpne16();
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3751 case 0x024: /* VIS I fcmple32 */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 gen_helper_fcmple32();
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3758 case 0x026: /* VIS I fcmpne32 */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 gen_helper_fcmpne32();
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3765 case 0x028: /* VIS I fcmpgt16 */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 gen_helper_fcmpgt16();
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3772 case 0x02a: /* VIS I fcmpeq16 */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_op_load_fpr_DT0(DFPREG(rs1));
3775 gen_op_load_fpr_DT1(DFPREG(rs2));
3776 gen_helper_fcmpeq16();
3777 gen_op_store_DT0_fpr(DFPREG(rd));
3779 case 0x02c: /* VIS I fcmpgt32 */
3780 CHECK_FPU_FEATURE(dc, VIS1);
3781 gen_op_load_fpr_DT0(DFPREG(rs1));
3782 gen_op_load_fpr_DT1(DFPREG(rs2));
3783 gen_helper_fcmpgt32();
3784 gen_op_store_DT0_fpr(DFPREG(rd));
3786 case 0x02e: /* VIS I fcmpeq32 */
3787 CHECK_FPU_FEATURE(dc, VIS1);
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 gen_helper_fcmpeq32();
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3793 case 0x031: /* VIS I fmul8x16 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 gen_helper_fmul8x16();
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3800 case 0x033: /* VIS I fmul8x16au */
3801 CHECK_FPU_FEATURE(dc, VIS1);
3802 gen_op_load_fpr_DT0(DFPREG(rs1));
3803 gen_op_load_fpr_DT1(DFPREG(rs2));
3804 gen_helper_fmul8x16au();
3805 gen_op_store_DT0_fpr(DFPREG(rd));
3807 case 0x035: /* VIS I fmul8x16al */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 gen_op_load_fpr_DT0(DFPREG(rs1));
3810 gen_op_load_fpr_DT1(DFPREG(rs2));
3811 gen_helper_fmul8x16al();
3812 gen_op_store_DT0_fpr(DFPREG(rd));
3814 case 0x036: /* VIS I fmul8sux16 */
3815 CHECK_FPU_FEATURE(dc, VIS1);
3816 gen_op_load_fpr_DT0(DFPREG(rs1));
3817 gen_op_load_fpr_DT1(DFPREG(rs2));
3818 gen_helper_fmul8sux16();
3819 gen_op_store_DT0_fpr(DFPREG(rd));
3821 case 0x037: /* VIS I fmul8ulx16 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 gen_helper_fmul8ulx16();
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3828 case 0x038: /* VIS I fmuld8sux16 */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_DT0(DFPREG(rs1));
3831 gen_op_load_fpr_DT1(DFPREG(rs2));
3832 gen_helper_fmuld8sux16();
3833 gen_op_store_DT0_fpr(DFPREG(rd));
3835 case 0x039: /* VIS I fmuld8ulx16 */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 gen_helper_fmuld8ulx16();
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3842 case 0x03a: /* VIS I fpack32 */
3843 case 0x03b: /* VIS I fpack16 */
3844 case 0x03d: /* VIS I fpackfix */
3845 case 0x03e: /* VIS I pdist */
3848 case 0x048: /* VIS I faligndata */
3849 CHECK_FPU_FEATURE(dc, VIS1);
3850 gen_op_load_fpr_DT0(DFPREG(rs1));
3851 gen_op_load_fpr_DT1(DFPREG(rs2));
3852 gen_helper_faligndata();
3853 gen_op_store_DT0_fpr(DFPREG(rd));
3855 case 0x04b: /* VIS I fpmerge */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 gen_op_load_fpr_DT0(DFPREG(rs1));
3858 gen_op_load_fpr_DT1(DFPREG(rs2));
3859 gen_helper_fpmerge();
3860 gen_op_store_DT0_fpr(DFPREG(rd));
3862 case 0x04c: /* VIS II bshuffle */
3865 case 0x04d: /* VIS I fexpand */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_DT0(DFPREG(rs1));
3868 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_helper_fexpand();
3870 gen_op_store_DT0_fpr(DFPREG(rd));
3872 case 0x050: /* VIS I fpadd16 */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 gen_helper_fpadd16();
3877 gen_op_store_DT0_fpr(DFPREG(rd));
3879 case 0x051: /* VIS I fpadd16s */
3880 CHECK_FPU_FEATURE(dc, VIS1);
3881 gen_helper_fpadd16s(cpu_fpr[rd],
3882 cpu_fpr[rs1], cpu_fpr[rs2]);
3884 case 0x052: /* VIS I fpadd32 */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 gen_op_load_fpr_DT0(DFPREG(rs1));
3887 gen_op_load_fpr_DT1(DFPREG(rs2));
3888 gen_helper_fpadd32();
3889 gen_op_store_DT0_fpr(DFPREG(rd));
3891 case 0x053: /* VIS I fpadd32s */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 gen_helper_fpadd32s(cpu_fpr[rd],
3894 cpu_fpr[rs1], cpu_fpr[rs2]);
3896 case 0x054: /* VIS I fpsub16 */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_DT0(DFPREG(rs1));
3899 gen_op_load_fpr_DT1(DFPREG(rs2));
3900 gen_helper_fpsub16();
3901 gen_op_store_DT0_fpr(DFPREG(rd));
3903 case 0x055: /* VIS I fpsub16s */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_helper_fpsub16s(cpu_fpr[rd],
3906 cpu_fpr[rs1], cpu_fpr[rs2]);
3908 case 0x056: /* VIS I fpsub32 */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 gen_op_load_fpr_DT0(DFPREG(rs1));
3911 gen_op_load_fpr_DT1(DFPREG(rs2));
3912 gen_helper_fpsub32();
3913 gen_op_store_DT0_fpr(DFPREG(rd));
3915 case 0x057: /* VIS I fpsub32s */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 gen_helper_fpsub32s(cpu_fpr[rd],
3918 cpu_fpr[rs1], cpu_fpr[rs2]);
3920 case 0x060: /* VIS I fzero */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3923 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3925 case 0x061: /* VIS I fzeros */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3929 case 0x062: /* VIS I fnor */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3932 cpu_fpr[DFPREG(rs2)]);
3933 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3934 cpu_fpr[DFPREG(rs2) + 1]);
3936 case 0x063: /* VIS I fnors */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3940 case 0x064: /* VIS I fandnot2 */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3943 cpu_fpr[DFPREG(rs2)]);
3944 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3945 cpu_fpr[DFPREG(rs1) + 1],
3946 cpu_fpr[DFPREG(rs2) + 1]);
3948 case 0x065: /* VIS I fandnot2s */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3952 case 0x066: /* VIS I fnot2 */
3953 CHECK_FPU_FEATURE(dc, VIS1);
3954 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3955 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3956 cpu_fpr[DFPREG(rs2) + 1]);
3958 case 0x067: /* VIS I fnot2s */
3959 CHECK_FPU_FEATURE(dc, VIS1);
3960 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3962 case 0x068: /* VIS I fandnot1 */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3965 cpu_fpr[DFPREG(rs1)]);
3966 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3967 cpu_fpr[DFPREG(rs2) + 1],
3968 cpu_fpr[DFPREG(rs1) + 1]);
3970 case 0x069: /* VIS I fandnot1s */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3974 case 0x06a: /* VIS I fnot1 */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3977 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3978 cpu_fpr[DFPREG(rs1) + 1]);
3980 case 0x06b: /* VIS I fnot1s */
3981 CHECK_FPU_FEATURE(dc, VIS1);
3982 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3984 case 0x06c: /* VIS I fxor */
3985 CHECK_FPU_FEATURE(dc, VIS1);
3986 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3987 cpu_fpr[DFPREG(rs2)]);
3988 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3989 cpu_fpr[DFPREG(rs1) + 1],
3990 cpu_fpr[DFPREG(rs2) + 1]);
3992 case 0x06d: /* VIS I fxors */
3993 CHECK_FPU_FEATURE(dc, VIS1);
3994 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3996 case 0x06e: /* VIS I fnand */
3997 CHECK_FPU_FEATURE(dc, VIS1);
3998 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3999 cpu_fpr[DFPREG(rs2)]);
4000 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4001 cpu_fpr[DFPREG(rs2) + 1]);
4003 case 0x06f: /* VIS I fnands */
4004 CHECK_FPU_FEATURE(dc, VIS1);
4005 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4007 case 0x070: /* VIS I fand */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4010 cpu_fpr[DFPREG(rs2)]);
4011 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4012 cpu_fpr[DFPREG(rs1) + 1],
4013 cpu_fpr[DFPREG(rs2) + 1]);
4015 case 0x071: /* VIS I fands */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4019 case 0x072: /* VIS I fxnor */
4020 CHECK_FPU_FEATURE(dc, VIS1);
4021 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4022 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4023 cpu_fpr[DFPREG(rs1)]);
4024 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4025 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4026 cpu_fpr[DFPREG(rs1) + 1]);
4028 case 0x073: /* VIS I fxnors */
4029 CHECK_FPU_FEATURE(dc, VIS1);
4030 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4031 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4033 case 0x074: /* VIS I fsrc1 */
4034 CHECK_FPU_FEATURE(dc, VIS1);
4035 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4036 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4037 cpu_fpr[DFPREG(rs1) + 1]);
4039 case 0x075: /* VIS I fsrc1s */
4040 CHECK_FPU_FEATURE(dc, VIS1);
4041 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4043 case 0x076: /* VIS I fornot2 */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4046 cpu_fpr[DFPREG(rs2)]);
4047 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4048 cpu_fpr[DFPREG(rs1) + 1],
4049 cpu_fpr[DFPREG(rs2) + 1]);
4051 case 0x077: /* VIS I fornot2s */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4055 case 0x078: /* VIS I fsrc2 */
4056 CHECK_FPU_FEATURE(dc, VIS1);
4057 gen_op_load_fpr_DT0(DFPREG(rs2));
4058 gen_op_store_DT0_fpr(DFPREG(rd));
4060 case 0x079: /* VIS I fsrc2s */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4064 case 0x07a: /* VIS I fornot1 */
4065 CHECK_FPU_FEATURE(dc, VIS1);
4066 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4067 cpu_fpr[DFPREG(rs1)]);
4068 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4069 cpu_fpr[DFPREG(rs2) + 1],
4070 cpu_fpr[DFPREG(rs1) + 1]);
4072 case 0x07b: /* VIS I fornot1s */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4076 case 0x07c: /* VIS I for */
4077 CHECK_FPU_FEATURE(dc, VIS1);
4078 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4079 cpu_fpr[DFPREG(rs2)]);
4080 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4081 cpu_fpr[DFPREG(rs1) + 1],
4082 cpu_fpr[DFPREG(rs2) + 1]);
4084 case 0x07d: /* VIS I fors */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4088 case 0x07e: /* VIS I fone */
4089 CHECK_FPU_FEATURE(dc, VIS1);
4090 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4091 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4093 case 0x07f: /* VIS I fones */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4097 case 0x080: /* VIS I shutdown */
4098 case 0x081: /* VIS II siam */
/* xop == 0x37: V8 CPop2 / V9 impdep2 (not implemented here), then the
   V9 "return" instruction: compute the target address (rs1 + simm13 or
   rs1 + rs2), pop a register window via the restore helper, check
   4-byte alignment of the target, and make npc dynamic so the
   translation loop ends the block. */
4107 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4108 #ifdef TARGET_SPARC64
4113 #ifdef TARGET_SPARC64
4114 } else if (xop == 0x39) { /* V9 return */
/* State must be in sync: restore and check_align can raise traps. */
4117 save_state(dc, cpu_cond);
4118 cpu_src1 = get_src1(insn, cpu_src1);
4119 if (IS_IMM) { /* immediate */
4120 simm = GET_FIELDs(insn, 19, 31);
4121 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4122 } else { /* register */
4123 rs2 = GET_FIELD(insn, 27, 31);
4125 gen_movl_reg_TN(rs2, cpu_src2);
4126 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
/* rs2 == %g0: the sum degenerates to rs1 alone. */
4128 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4130 gen_helper_restore();
4131 gen_mov_pc_npc(dc, cpu_cond);
/* Trap if the target is not 4-byte aligned (mask 3). */
4132 r_const = tcg_const_i32(3);
4133 gen_helper_check_align(cpu_dst, r_const);
4134 tcg_temp_free_i32(r_const);
4135 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4136 dc->npc = DYNAMIC_PC;
/* Common address computation for the remaining xop cases (jmpl, rett,
   flush, save, restore, done/retry): cpu_dst = rs1 + simm13 or
   rs1 + rs2 (or just rs1 when rs2 is %g0). */
4140 cpu_src1 = get_src1(insn, cpu_src1);
4141 if (IS_IMM) { /* immediate */
4142 simm = GET_FIELDs(insn, 19, 31);
4143 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4144 } else { /* register */
4145 rs2 = GET_FIELD(insn, 27, 31);
4147 gen_movl_reg_TN(rs2, cpu_src2);
4148 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4150 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4153 case 0x38: /* jmpl */
/* Write the address of the jmpl itself into rd (the link register),
   then jump to the computed target after a 4-byte alignment check. */
4158 r_pc = tcg_const_tl(dc->pc);
4159 gen_movl_TN_reg(rd, r_pc);
4160 tcg_temp_free(r_pc);
4161 gen_mov_pc_npc(dc, cpu_cond);
4162 r_const = tcg_const_i32(3);
4163 gen_helper_check_align(cpu_dst, r_const);
4164 tcg_temp_free_i32(r_const);
4165 tcg_gen_mov_tl(cpu_npc, cpu_dst);
/* Target is only known at run time: force end of translation block. */
4166 dc->npc = DYNAMIC_PC;
/* Remaining branch/window xop cases: SPARC32 rett (supervisor only),
   flush, save/restore (register window rotation), and the V9
   privileged done/retry pair.  Each control-transfer case marks
   pc/npc dynamic so the translator stops chaining statically. */
4169 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4170 case 0x39: /* rett, V9 return */
4174 if (!supervisor(dc))
4176 gen_mov_pc_npc(dc, cpu_cond);
4177 r_const = tcg_const_i32(3);
4178 gen_helper_check_align(cpu_dst, r_const);
4179 tcg_temp_free_i32(r_const);
4180 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4181 dc->npc = DYNAMIC_PC;
4186 case 0x3b: /* flush */
/* flush is a nop unless the CPU model implements it. */
4187 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4189 gen_helper_flush(cpu_dst);
4191 case 0x3c: /* save */
4192 save_state(dc, cpu_cond);
4194 gen_movl_TN_reg(rd, cpu_dst);
4196 case 0x3d: /* restore */
4197 save_state(dc, cpu_cond);
4198 gen_helper_restore();
4199 gen_movl_TN_reg(rd, cpu_dst);
4201 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4202 case 0x3e: /* V9 done/retry */
4206 if (!supervisor(dc))
/* done/retry reload pc/npc from the trap state registers, so both
   become dynamic for the translation loop. */
4208 dc->npc = DYNAMIC_PC;
4209 dc->pc = DYNAMIC_PC;
4213 if (!supervisor(dc))
4215 dc->npc = DYNAMIC_PC;
4216 dc->pc = DYNAMIC_PC;
/* Format 3, op == 3: load/store instructions.  First compute the
   effective address into cpu_addr.  casa/casxa take the address from
   rs1 alone (rs2 holds the compare value); everything else uses
   rs1 + simm13 or rs1 + rs2.  The xop range test below selects the
   integer-load group handled by the first inner switch. */
4232 case 3: /* load/store instructions */
4234 unsigned int xop = GET_FIELD(insn, 7, 12);
4236 cpu_src1 = get_src1(insn, cpu_src1);
4237 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4238 rs2 = GET_FIELD(insn, 27, 31);
4239 gen_movl_reg_TN(rs2, cpu_src2);
4240 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4241 } else if (IS_IMM) { /* immediate */
4242 simm = GET_FIELDs(insn, 19, 31);
4243 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4244 } else { /* register */
4245 rs2 = GET_FIELD(insn, 27, 31);
4247 gen_movl_reg_TN(rs2, cpu_src2);
4248 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4250 tcg_gen_mov_tl(cpu_addr, cpu_src1);
/* Integer loads (and ldstub/swap): xop subsets that produce a value
   in cpu_val, written back to rd after the switch. */
4252 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4253 (xop > 0x17 && xop <= 0x1d ) ||
4254 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
/* Integer load group.  gen_address_mask() truncates the address to
   32 bits when PSTATE.AM is set on SPARC64; the alternate-space (asi)
   variants are privileged on SPARC32 and call save_state() first
   because the asi helpers may fault. */
4256 case 0x0: /* ld, V9 lduw, load unsigned word */
4257 gen_address_mask(dc, cpu_addr);
4258 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4260 case 0x1: /* ldub, load unsigned byte */
4261 gen_address_mask(dc, cpu_addr);
4262 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4264 case 0x2: /* lduh, load unsigned halfword */
4265 gen_address_mask(dc, cpu_addr);
4266 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4268 case 0x3: /* ldd, load double word */
4274 save_state(dc, cpu_cond);
/* ldd requires 8-byte alignment (mask 7). */
4275 r_const = tcg_const_i32(7);
4276 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4277 tcg_temp_free_i32(r_const);
4278 gen_address_mask(dc, cpu_addr);
/* Load 64 bits, split into two 32-bit halves: low word to rd+1,
   high word to rd.  The andi masks clear the upper 32 bits when
   target_ulong is 64-bit. */
4279 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4280 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4281 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4282 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4283 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4284 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4285 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4288 case 0x9: /* ldsb, load signed byte */
4289 gen_address_mask(dc, cpu_addr);
4290 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4292 case 0xa: /* ldsh, load signed halfword */
4293 gen_address_mask(dc, cpu_addr);
4294 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4296 case 0xd: /* ldstub -- XXX: should be atomically */
4300 gen_address_mask(dc, cpu_addr);
/* NOTE(review): ldstub architecturally zero-extends the loaded byte;
   this uses a sign-extending load -- confirm whether ld8u is meant. */
4301 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4302 r_const = tcg_const_tl(0xff);
4303 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4304 tcg_temp_free(r_const);
4307 case 0x0f: /* swap, swap register with memory. Also
4309 CHECK_IU_FEATURE(dc, SWAP);
/* Non-atomic load+store sequence standing in for the atomic swap. */
4310 gen_movl_reg_TN(rd, cpu_val);
4311 gen_address_mask(dc, cpu_addr);
4312 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4313 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4314 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4316 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4317 case 0x10: /* lda, V9 lduwa, load word alternate */
4318 #ifndef TARGET_SPARC64
4321 if (!supervisor(dc))
4324 save_state(dc, cpu_cond);
4325 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4327 case 0x11: /* lduba, load unsigned byte alternate */
4328 #ifndef TARGET_SPARC64
4331 if (!supervisor(dc))
4334 save_state(dc, cpu_cond);
4335 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4337 case 0x12: /* lduha, load unsigned halfword alternate */
4338 #ifndef TARGET_SPARC64
4341 if (!supervisor(dc))
4344 save_state(dc, cpu_cond);
4345 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4347 case 0x13: /* ldda, load double word alternate */
4348 #ifndef TARGET_SPARC64
4351 if (!supervisor(dc))
4356 save_state(dc, cpu_cond);
4357 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4359 case 0x19: /* ldsba, load signed byte alternate */
4360 #ifndef TARGET_SPARC64
4363 if (!supervisor(dc))
4366 save_state(dc, cpu_cond);
4367 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4369 case 0x1a: /* ldsha, load signed halfword alternate */
4370 #ifndef TARGET_SPARC64
4373 if (!supervisor(dc))
4376 save_state(dc, cpu_cond);
4377 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4379 case 0x1d: /* ldstuba -- XXX: should be atomically */
4380 #ifndef TARGET_SPARC64
4383 if (!supervisor(dc))
4386 save_state(dc, cpu_cond);
4387 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4389 case 0x1f: /* swapa, swap reg with alt. memory. Also
4391 CHECK_IU_FEATURE(dc, SWAP);
4392 #ifndef TARGET_SPARC64
4395 if (!supervisor(dc))
4398 save_state(dc, cpu_cond);
4399 gen_movl_reg_TN(rd, cpu_val);
4400 gen_swap_asi(cpu_val, cpu_addr, insn);
4403 #ifndef TARGET_SPARC64
4404 case 0x30: /* ldc */
4405 case 0x31: /* ldcsr */
4406 case 0x33: /* lddc */
4410 #ifdef TARGET_SPARC64
4411 case 0x08: /* V9 ldsw */
4412 gen_address_mask(dc, cpu_addr);
4413 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4415 case 0x0b: /* V9 ldx */
4416 gen_address_mask(dc, cpu_addr);
4417 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4419 case 0x18: /* V9 ldswa */
4420 save_state(dc, cpu_cond);
4421 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4423 case 0x1b: /* V9 ldxa */
4424 save_state(dc, cpu_cond);
4425 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4427 case 0x2d: /* V9 prefetch, no effect */
4429 case 0x30: /* V9 ldfa */
4430 save_state(dc, cpu_cond);
4431 gen_ldf_asi(cpu_addr, insn, 4, rd);
4433 case 0x33: /* V9 lddfa */
4434 save_state(dc, cpu_cond);
4435 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4437 case 0x3d: /* V9 prefetcha, no effect */
4439 case 0x32: /* V9 ldqfa */
4440 CHECK_FPU_FEATURE(dc, FLOAT128);
4441 save_state(dc, cpu_cond);
4442 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
/* Common tail: write the loaded value back to rd. */
4448 gen_movl_TN_reg(rd, cpu_val);
4449 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* FP load group (xop 0x20-0x23): ldf, ldfsr/ldxfsr, ldqf, lddf.
   All trap first if the FPU is disabled; quad/double loads go through
   helpers so alignment and register-pair handling stay in one place. */
4452 } else if (xop >= 0x20 && xop < 0x24) {
4453 if (gen_trap_ifnofpu(dc, cpu_cond))
4455 save_state(dc, cpu_cond);
4457 case 0x20: /* ldf, load fpreg */
4458 gen_address_mask(dc, cpu_addr);
4459 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4460 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4462 case 0x21: /* ldfsr, V9 ldxfsr */
4463 #ifdef TARGET_SPARC64
4464 gen_address_mask(dc, cpu_addr);
/* V9: rd selects 64-bit ldxfsr vs 32-bit ldfsr (elided branch). */
4466 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4467 gen_helper_ldxfsr(cpu_tmp64);
4471 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4472 gen_helper_ldfsr(cpu_tmp32);
4476 case 0x22: /* ldqf, load quad fpreg */
4480 CHECK_FPU_FEATURE(dc, FLOAT128);
4481 r_const = tcg_const_i32(dc->mem_idx);
4482 gen_helper_ldqf(cpu_addr, r_const);
4483 tcg_temp_free_i32(r_const);
4484 gen_op_store_QT0_fpr(QFPREG(rd));
4487 case 0x23: /* lddf, load double fpreg */
4491 r_const = tcg_const_i32(dc->mem_idx);
4492 gen_helper_lddf(cpu_addr, r_const);
4493 tcg_temp_free_i32(r_const);
4494 gen_op_store_DT0_fpr(DFPREG(rd));
/* Integer store group: st/stb/sth/std plus the alternate-space (asi)
   and V9 64-bit variants.  The value to store is fetched from rd into
   cpu_val up front; asi stores are privileged on SPARC32. */
4500 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4501 xop == 0xe || xop == 0x1e) {
4502 gen_movl_reg_TN(rd, cpu_val);
4504 case 0x4: /* st, store word */
4505 gen_address_mask(dc, cpu_addr);
4506 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4508 case 0x5: /* stb, store byte */
4509 gen_address_mask(dc, cpu_addr);
4510 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4512 case 0x6: /* sth, store halfword */
4513 gen_address_mask(dc, cpu_addr);
4514 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4516 case 0x7: /* std, store double word */
4522 save_state(dc, cpu_cond);
4523 gen_address_mask(dc, cpu_addr);
/* std requires 8-byte alignment (mask 7). */
4524 r_const = tcg_const_i32(7);
4525 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4526 tcg_temp_free_i32(r_const);
/* Concatenate rd (high word) and rd+1 (low word) into one 64-bit
   store. */
4527 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4528 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4529 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4532 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4533 case 0x14: /* sta, V9 stwa, store word alternate */
4534 #ifndef TARGET_SPARC64
4537 if (!supervisor(dc))
4540 save_state(dc, cpu_cond);
4541 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4543 case 0x15: /* stba, store byte alternate */
4544 #ifndef TARGET_SPARC64
4547 if (!supervisor(dc))
4550 save_state(dc, cpu_cond);
4551 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4553 case 0x16: /* stha, store halfword alternate */
4554 #ifndef TARGET_SPARC64
4557 if (!supervisor(dc))
4560 save_state(dc, cpu_cond);
4561 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4563 case 0x17: /* stda, store double word alternate */
4564 #ifndef TARGET_SPARC64
4567 if (!supervisor(dc))
4573 save_state(dc, cpu_cond);
4574 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4578 #ifdef TARGET_SPARC64
4579 case 0x0e: /* V9 stx */
4580 gen_address_mask(dc, cpu_addr);
4581 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4583 case 0x1e: /* V9 stxa */
4584 save_state(dc, cpu_cond);
4585 gen_st_asi(cpu_val, cpu_addr, insn, 8);
/* FP store group (xop 0x24-0x27): stf, stfsr/stxfsr, stqf (V9) or
   stdfq (SPARC32, privileged), stdf.  Quad/double stores go through
   helpers; the FSR store reads the value straight from env. */
4591 } else if (xop > 0x23 && xop < 0x28) {
4592 if (gen_trap_ifnofpu(dc, cpu_cond))
4594 save_state(dc, cpu_cond);
4596 case 0x24: /* stf, store fpreg */
4597 gen_address_mask(dc, cpu_addr);
4598 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4599 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4601 case 0x25: /* stfsr, V9 stxfsr */
4602 #ifdef TARGET_SPARC64
4603 gen_address_mask(dc, cpu_addr);
4604 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
/* rd selects stxfsr (64-bit) vs stfsr (low 32 bits) on V9. */
4606 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4608 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4610 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4611 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4615 #ifdef TARGET_SPARC64
4616 /* V9 stqf, store quad fpreg */
4620 CHECK_FPU_FEATURE(dc, FLOAT128);
4621 gen_op_load_fpr_QT0(QFPREG(rd));
4622 r_const = tcg_const_i32(dc->mem_idx);
4623 gen_helper_stqf(cpu_addr, r_const);
4624 tcg_temp_free_i32(r_const);
4627 #else /* !TARGET_SPARC64 */
4628 /* stdfq, store floating point queue */
4629 #if defined(CONFIG_USER_ONLY)
4632 if (!supervisor(dc))
4634 if (gen_trap_ifnofpu(dc, cpu_cond))
4639 case 0x27: /* stdf, store double fpreg */
4643 gen_op_load_fpr_DT0(DFPREG(rd));
4644 r_const = tcg_const_i32(dc->mem_idx);
4645 gen_helper_stdf(cpu_addr, r_const);
4646 tcg_temp_free_i32(r_const);
/* Alternate-space FP stores and compare-and-swap (xop 0x34-0x3e):
   V9 stfa/stqfa/stdfa plus casa/casxa.  SPARC32 falls through to the
   legacy coprocessor store opcodes, which are unimplemented. */
4652 } else if (xop > 0x33 && xop < 0x3f) {
4653 save_state(dc, cpu_cond);
4655 #ifdef TARGET_SPARC64
4656 case 0x34: /* V9 stfa */
4657 gen_stf_asi(cpu_addr, insn, 4, rd);
4659 case 0x36: /* V9 stqfa */
4663 CHECK_FPU_FEATURE(dc, FLOAT128);
/* Quad store requires 8-byte alignment (mask 7). */
4664 r_const = tcg_const_i32(7);
4665 gen_helper_check_align(cpu_addr, r_const);
4666 tcg_temp_free_i32(r_const);
4667 gen_op_load_fpr_QT0(QFPREG(rd));
4668 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4671 case 0x37: /* V9 stdfa */
4672 gen_op_load_fpr_DT0(DFPREG(rd));
4673 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4675 case 0x3c: /* V9 casa */
/* cas compares rs2 with memory; the old value lands back in rd. */
4676 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4677 gen_movl_TN_reg(rd, cpu_val);
4679 case 0x3e: /* V9 casxa */
4680 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4681 gen_movl_TN_reg(rd, cpu_val);
4684 case 0x34: /* stc */
4685 case 0x35: /* stcsr */
4686 case 0x36: /* stdcq */
4687 case 0x37: /* stdc */
/* End of the decode switch: advance pc/npc for ordinary (non-branch)
   instructions, then the shared exception tails that the decoder jumps
   to when an instruction is illegal or unimplemented.  save_state()
   syncs pc/npc/cc before raising, since the helper longjmps out. */
4698 /* default case for non jump instructions */
4699 if (dc->npc == DYNAMIC_PC) {
4700 dc->pc = DYNAMIC_PC;
4702 } else if (dc->npc == JUMP_PC) {
4703 /* we can do a static jump */
4704 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4708 dc->npc = dc->npc + 4;
/* illegal_insn: raise TT_ILL_INSN. */
4716 save_state(dc, cpu_cond);
4717 r_const = tcg_const_i32(TT_ILL_INSN);
4718 gen_helper_raise_exception(r_const);
4719 tcg_temp_free_i32(r_const);
/* unimp_flush: raise TT_UNIMP_FLUSH. */
4727 save_state(dc, cpu_cond);
4728 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4729 gen_helper_raise_exception(r_const);
4730 tcg_temp_free_i32(r_const);
/* More shared exception tails: privileged-instruction trap (system
   emulation only), FPU-disabled / unimplemented-FPop exceptions, and
   the SPARC32 FP sequence error. */
4734 #if !defined(CONFIG_USER_ONLY)
4739 save_state(dc, cpu_cond);
4740 r_const = tcg_const_i32(TT_PRIV_INSN);
4741 gen_helper_raise_exception(r_const);
4742 tcg_temp_free_i32(r_const);
/* nfpu_insn / unimplemented FPop: signalled through the FSR ftt
   field rather than a plain trap number. */
4748 save_state(dc, cpu_cond);
4749 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4752 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4754 save_state(dc, cpu_cond);
4755 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4759 #ifndef TARGET_SPARC64
/* ncp_insn: SPARC32 coprocessor opcodes are not implemented; raise a
   coprocessor-disabled trap (TT_NCP_INSN). */
4764 save_state(dc, cpu_cond);
4765 r_const = tcg_const_i32(TT_NCP_INSN);
4766 gen_helper_raise_exception(r_const);
/* r_const is a TCGv_i32 (created by tcg_const_i32 above), so it must
   be released with the i32-typed free, matching every other exception
   tail in this function; the plain tcg_temp_free() here freed it with
   the wrong width. */
4767 tcg_temp_free_i32(r_const);
/* Core translation loop: disassemble guest instructions starting at
   tb->pc into TCG ops until the block must end (dynamic pc, page
   boundary, op-buffer/instruction-count limits, breakpoint, or single
   step).  'spc' selects search-pc mode, which records per-op pc/npc
   so gen_pc_load() can restore CPU state for a faulting host pc. */
4774 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4775 int spc, CPUSPARCState *env)
4777 target_ulong pc_start, last_pc;
4778 uint16_t *gen_opc_end;
4779 DisasContext dc1, *dc = &dc1;
4785 memset(dc, 0, sizeof(DisasContext));
/* npc travels in cs_base; condition codes start out unknown. */
4790 dc->npc = (target_ulong) tb->cs_base;
4791 dc->cc_op = CC_OP_DYNAMIC;
4792 dc->mem_idx = cpu_mmu_index(env);
4794 if ((dc->def->features & CPU_FEATURE_FLOAT))
4795 dc->fpu_enabled = cpu_fpu_enabled(env);
4797 dc->fpu_enabled = 0;
4798 #ifdef TARGET_SPARC64
4799 dc->address_mask_32bit = env->pstate & PS_AM;
4801 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Scratch temps shared by disas_sparc_insn; dst/val/addr are
   tcg "local" temps so they survive across branches. */
4803 cpu_tmp0 = tcg_temp_new();
4804 cpu_tmp32 = tcg_temp_new_i32();
4805 cpu_tmp64 = tcg_temp_new_i64();
4807 cpu_dst = tcg_temp_local_new();
4810 cpu_val = tcg_temp_local_new();
4811 cpu_addr = tcg_temp_local_new();
4814 max_insns = tb->cflags & CF_COUNT_MASK;
4816 max_insns = CF_COUNT_MASK;
/* Per-instruction loop. */
4819 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4820 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4821 if (bp->pc == dc->pc) {
4822 if (dc->pc != pc_start)
4823 save_state(dc, cpu_cond);
/* search-pc mode: record pc/npc/icount for each generated op. */
4832 qemu_log("Search PC...\n");
4833 j = gen_opc_ptr - gen_opc_buf;
4837 gen_opc_instr_start[lj++] = 0;
4838 gen_opc_pc[lj] = dc->pc;
4839 gen_opc_npc[lj] = dc->npc;
4840 gen_opc_instr_start[lj] = 1;
4841 gen_opc_icount[lj] = num_insns;
4844 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4847 disas_sparc_insn(dc);
4852 /* if the next PC is different, we abort now */
4853 if (dc->pc != (last_pc + 4))
4855 /* if we reach a page boundary, we stop generation so that the
4856 PC of a TT_TFAULT exception is always in the right page */
4857 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4859 /* if single step mode, we generate only one instruction and
4860 generate an exception */
4861 if (env->singlestep_enabled || singlestep) {
4862 tcg_gen_movi_tl(cpu_pc, dc->pc);
4866 } while ((gen_opc_ptr < gen_opc_end) &&
4867 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4868 num_insns < max_insns);
/* Release temps in reverse allocation order. */
4871 tcg_temp_free(cpu_addr);
4872 tcg_temp_free(cpu_val);
4873 tcg_temp_free(cpu_dst);
4874 tcg_temp_free_i64(cpu_tmp64);
4875 tcg_temp_free_i32(cpu_tmp32);
4876 tcg_temp_free(cpu_tmp0);
4877 if (tb->cflags & CF_LAST_IO)
4880 if (dc->pc != DYNAMIC_PC &&
4881 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4882 /* static PC and NPC: we can use direct chaining */
4883 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4885 if (dc->pc != DYNAMIC_PC)
4886 tcg_gen_movi_tl(cpu_pc, dc->pc);
4887 save_npc(dc, cpu_cond);
4891 gen_icount_end(tb, num_insns);
4892 *gen_opc_ptr = INDEX_op_end;
4894 j = gen_opc_ptr - gen_opc_buf;
4897 gen_opc_instr_start[lj++] = 0;
/* Remember the two possible branch targets for gen_pc_load(). */
4901 gen_opc_jump_pc[0] = dc->jump_pc[0];
4902 gen_opc_jump_pc[1] = dc->jump_pc[1];
4904 tb->size = last_pc + 4 - pc_start;
4905 tb->icount = num_insns;
4908 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4909 qemu_log("--------------\n");
4910 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4911 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
/* Public entry point: translate a TB normally (spc == 0, no per-op
   pc bookkeeping). */
4917 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4919 gen_intermediate_code_internal(tb, 0, env);
/* Public entry point used when restoring CPU state after a fault:
   re-translate with spc == 1 so per-op pc/npc tables are filled in. */
4922 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4924 gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialisation: create the TCG global variables
   that mirror CPUState fields (pc, npc, condition codes, %g registers,
   FP registers, and the SPARC64-only control registers).  g0 is never
   allocated a global because it always reads as zero. */
4927 void gen_intermediate_code_init(CPUSPARCState *env)
4931 static const char * const gregnames[8] = {
4932 NULL, // g0 not used
4941 static const char * const fregnames[64] = {
4942 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4943 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4944 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4945 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4946 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4947 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4948 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4949 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4952 /* init various static tables */
/* env lives in a fixed host register (TCG_AREG0); every other global
   is a memory-backed TCG value at its offset within CPUState. */
4956 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4957 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4958 offsetof(CPUState, regwptr),
4960 #ifdef TARGET_SPARC64
4961 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4963 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4965 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4967 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4969 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4970 offsetof(CPUState, tick_cmpr),
4972 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4973 offsetof(CPUState, stick_cmpr),
4975 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4976 offsetof(CPUState, hstick_cmpr),
4978 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4980 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4982 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4984 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4985 offsetof(CPUState, ssr), "ssr");
4986 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4987 offsetof(CPUState, version), "ver");
4988 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4989 offsetof(CPUState, softint),
4992 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4995 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4997 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4999 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5000 offsetof(CPUState, cc_src2),
5002 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5004 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5006 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5008 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5010 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5012 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5014 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5015 #ifndef CONFIG_USER_ONLY
5016 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
/* %g1..%g7 get globals; index 0 (%g0) is intentionally skipped. */
5019 for (i = 1; i < 8; i++)
5020 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5021 offsetof(CPUState, gregs[i]),
5023 for (i = 0; i < TARGET_FPREGS; i++)
5024 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5025 offsetof(CPUState, fpr[i]),
5028 /* register helpers */
5030 #define GEN_HELPER 2
5035 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5036 unsigned long searched_pc, int pc_pos, void *puc)
5039 env->pc = gen_opc_pc[pc_pos];
5040 npc = gen_opc_npc[pc_pos];
5042 /* dynamic NPC: already stored */
5043 } else if (npc == 2) {
5044 target_ulong t2 = (target_ulong)(unsigned long)puc;
5045 /* jump PC: use T2 and the jump targets of the translation */
5047 env->npc = gen_opc_jump_pc[0];
5049 env->npc = gen_opc_jump_pc[1];